From 26443410022536a1c13775570f7bc9477780c40e Mon Sep 17 00:00:00 2001 From: William Wen Date: Tue, 14 Nov 2023 17:31:19 -0800 Subject: [PATCH 1/5] Support multi-line error locations in traceback (attempt 2) --- Lib/test/test_doctest.py | 3 + Lib/test/test_exceptions.py | 3 +- Lib/test/test_repl.py | 3 +- Lib/test/test_sys.py | 6 +- Lib/test/test_traceback.py | 433 ++++++++++++++++++++++++++--- Lib/test/test_warnings/__init__.py | 4 +- Lib/traceback.py | 338 +++++++++++++++++----- 7 files changed, 675 insertions(+), 115 deletions(-) diff --git a/Lib/test/test_doctest.py b/Lib/test/test_doctest.py index cb4e2157bb228b..ce30b49f0ba595 100644 --- a/Lib/test/test_doctest.py +++ b/Lib/test/test_doctest.py @@ -2905,6 +2905,9 @@ def test_unicode(): """ Traceback (most recent call last): File ... exec(compile(example.source, filename, "single", + ~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + compileflags, True), test.globs) + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "", line 1, in raise Exception('clé') Exception: clé diff --git a/Lib/test/test_exceptions.py b/Lib/test/test_exceptions.py index 8ccf08703e5389..c57488e44aecc6 100644 --- a/Lib/test/test_exceptions.py +++ b/Lib/test/test_exceptions.py @@ -2080,6 +2080,7 @@ def test_multiline_not_highlighted(self): """, [ ' 1 < 2 and', + ' 3 > 4', 'AssertionError', ], ), @@ -2087,7 +2088,7 @@ def test_multiline_not_highlighted(self): for source, expected in cases: with self.subTest(source): result = self.write_source(source) - self.assertEqual(result[-2:], expected) + self.assertEqual(result[-len(expected):], expected) class SyntaxErrorTests(unittest.TestCase): diff --git a/Lib/test/test_repl.py b/Lib/test/test_repl.py index 7533376e015e73..a28d1595f44533 100644 --- a/Lib/test/test_repl.py +++ b/Lib/test/test_repl.py @@ -161,10 +161,11 @@ def foo(x): output = kill_python(p) self.assertEqual(p.returncode, 0) - traceback_lines = output.splitlines()[-7:-1] + traceback_lines = output.splitlines()[-8:-1] expected_lines = [ ' File "", line 1, in ', ' foo(0)', + ' ~~~^^^', ' File "", line 2, in foo', ' 1 / x', ' ~~^~~', diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py index ae73f5a7de7421..83ea8413386a77 100644 --- a/Lib/test/test_sys.py +++ b/Lib/test/test_sys.py @@ -1115,8 +1115,10 @@ def check(tracebacklimit, expected): b'Traceback (most recent call last):', b' File "", line 8, in ', b' f2()', + b' ~~^^', b' File "", line 6, in f2', b' f1()', + b' ~~^^', b' File "", line 4, in f1', b' 1 / 0', b' ~~^~~', @@ -1124,8 +1126,8 @@ def check(tracebacklimit, expected): ] check(10, traceback) check(3, traceback) - check(2, traceback[:1] + traceback[3:]) - check(1, traceback[:1] + traceback[5:]) + check(2, traceback[:1] + traceback[4:]) + check(1, traceback[:1] + traceback[7:]) check(0, [traceback[-1]]) check(-1, [traceback[-1]]) check(1<<1000, traceback) diff --git a/Lib/test/test_traceback.py b/Lib/test/test_traceback.py index b43dca6f640b9a..144e66a485bc46 100644 --- a/Lib/test/test_traceback.py +++ b/Lib/test/test_traceback.py @@ -578,6 +578,7 @@ def f(): 'Traceback (most recent call last):\n' f' File "{__file__}", line {self.callable_line}, in get_exception\n' ' callable()\n' + ' ~~~~~~~~^^\n' f' File "{__file__}", line {lineno_f+1}, in f\n' ' if True: raise ValueError("basic caret tests")\n' ' ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n' @@ -596,6 +597,7 @@ def f_with_unicode(): 'Traceback (most recent call last):\n' f' File "{__file__}", line {self.callable_line}, in get_exception\n' ' callable()\n' + ' ~~~~~~~~^^\n' f' File "{__file__}", line 
{lineno_f+1}, in f_with_unicode\n' ' if True: raise ValueError("Ĥellö Wörld")\n' ' ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n' @@ -613,6 +615,7 @@ def foo(a: THIS_DOES_NOT_EXIST ) -> int: 'Traceback (most recent call last):\n' f' File "{__file__}", line {self.callable_line}, in get_exception\n' ' callable()\n' + ' ~~~~~~~~^^\n' f' File "{__file__}", line {lineno_f+1}, in f_with_type\n' ' def foo(a: THIS_DOES_NOT_EXIST ) -> int:\n' ' ^^^^^^^^^^^^^^^^^^^\n' @@ -633,9 +636,14 @@ def f_with_multiline(): 'Traceback (most recent call last):\n' f' File "{__file__}", line {self.callable_line}, in get_exception\n' ' callable()\n' + ' ~~~~~~~~^^\n' f' File "{__file__}", line {lineno_f+1}, in f_with_multiline\n' ' if True: raise ValueError(\n' - ' ^^^^^^^^^^^^^^^^^' + ' ^^^^^^^^^^^^^^^^^\n' + ' "error over multiple lines"\n' + ' ^^^^^^^^^^^^^^^^^^^^^^^^^^^\n' + ' )\n' + ' ^' ) result_lines = self.get_exception(f_with_multiline) self.assertEqual(result_lines, expected_f.splitlines()) @@ -664,9 +672,10 @@ def f_with_multiline(): 'Traceback (most recent call last):\n' f' File "{__file__}", line {self.callable_line}, in get_exception\n' ' callable()\n' + ' ~~~~~~~~^^\n' f' File "{__file__}", line {lineno_f+2}, in f_with_multiline\n' ' return compile(code, "?", "exec")\n' - ' ^^^^^^^^^^^^^^^^^^^^^^^^^^\n' + ' ~~~~~~~^^^^^^^^^^^^^^^^^^^\n' ' File "?", line 7\n' ' foo(a, z\n' ' ^' @@ -689,9 +698,12 @@ def f_with_multiline(): 'Traceback (most recent call last):\n' f' File "{__file__}", line {self.callable_line}, in get_exception\n' ' callable()\n' + ' ~~~~~~~~^^\n' f' File "{__file__}", line {lineno_f+2}, in f_with_multiline\n' ' 2 + 1 /\n' - ' ^^^' + ' ~~^\n' + ' 0\n' + ' ~' ) result_lines = self.get_exception(f_with_multiline) self.assertEqual(result_lines, expected_f.splitlines()) @@ -706,6 +718,7 @@ def f_with_binary_operator(): 'Traceback (most recent call last):\n' f' File "{__file__}", line {self.callable_line}, in get_exception\n' ' callable()\n' + ' ~~~~~~~~^^\n' f' File "{__file__}", line {lineno_f+2}, in f_with_binary_operator\n' ' return 10 + divisor / 0 + 30\n' ' ~~~~~~~~^~~\n' @@ -723,6 +736,7 @@ def f_with_binary_operator(): 'Traceback (most recent call last):\n' f' File "{__file__}", line {self.callable_line}, in get_exception\n' ' callable()\n' + ' ~~~~~~~~^^\n' f' File "{__file__}", line {lineno_f+2}, in f_with_binary_operator\n' ' return 10 + áóí / 0 + 30\n' ' ~~~~^~~\n' @@ -740,6 +754,7 @@ def f_with_binary_operator(): 'Traceback (most recent call last):\n' f' File "{__file__}", line {self.callable_line}, in get_exception\n' ' callable()\n' + ' ~~~~~~~~^^\n' f' File "{__file__}", line {lineno_f+2}, in f_with_binary_operator\n' ' return 10 + divisor // 0 + 30\n' ' ~~~~~~~~^^~~\n' @@ -751,16 +766,102 @@ def test_caret_for_binary_operators_with_spaces_and_parenthesis(self): def f_with_binary_operator(): a = 1 b = "" - return ( a ) + b + return ( a ) +b lineno_f = f_with_binary_operator.__code__.co_firstlineno expected_error = ( 'Traceback (most recent call last):\n' f' File "{__file__}", line {self.callable_line}, in get_exception\n' ' callable()\n' + ' ~~~~~~~~^^\n' f' File "{__file__}", line {lineno_f+3}, in f_with_binary_operator\n' - ' return ( a ) + b\n' - ' ~~~~~~~~~~^~~\n' + ' return ( a ) +b\n' + ' ~~~~~~~~~~^~\n' + ) + result_lines = self.get_exception(f_with_binary_operator) + self.assertEqual(result_lines, expected_error.splitlines()) + + def test_caret_for_binary_operators_multiline(self): + def f_with_binary_operator(): + b = 1 + c = "" + a = b \ + +\ + c # test + return a + + lineno_f = 
f_with_binary_operator.__code__.co_firstlineno + expected_error = ( + 'Traceback (most recent call last):\n' + f' File "{__file__}", line {self.callable_line}, in get_exception\n' + ' callable()\n' + ' ~~~~~~~~^^\n' + f' File "{__file__}", line {lineno_f+3}, in f_with_binary_operator\n' + ' a = b \\\n' + ' ~~~~~~\n' + ' +\\\n' + ' ^~\n' + ' c # test\n' + ' ~\n' + ) + result_lines = self.get_exception(f_with_binary_operator) + self.assertEqual(result_lines, expected_error.splitlines()) + + def test_caret_for_binary_operators_multiline_two_char(self): + def f_with_binary_operator(): + b = 1 + c = "" + a = ( + (b # test + + ) \ + # + + << (c # test + \ + ) # test + ) + return a + + lineno_f = f_with_binary_operator.__code__.co_firstlineno + expected_error = ( + 'Traceback (most recent call last):\n' + f' File "{__file__}", line {self.callable_line}, in get_exception\n' + ' callable()\n' + ' ~~~~~~~~^^\n' + f' File "{__file__}", line {lineno_f+4}, in f_with_binary_operator\n' + ' (b # test +\n' + ' ~~~~~~~~~~~~\n' + ' ) \\\n' + ' ~~~~\n' + ' # +\n' + ' ~~~\n' + ' << (c # test\n' + ' ^^~~~~~~~~~~~\n' + ' \\\n' + ' ~\n' + ' ) # test\n' + ' ~\n' + ) + result_lines = self.get_exception(f_with_binary_operator) + self.assertEqual(result_lines, expected_error.splitlines()) + + def test_caret_for_binary_operators_multiline_with_unicode(self): + def f_with_binary_operator(): + b = 1 + a = ("ááá" + + "áá") + b + return a + + lineno_f = f_with_binary_operator.__code__.co_firstlineno + expected_error = ( + 'Traceback (most recent call last):\n' + f' File "{__file__}", line {self.callable_line}, in get_exception\n' + ' callable()\n' + ' ~~~~~~~~^^\n' + f' File "{__file__}", line {lineno_f+2}, in f_with_binary_operator\n' + ' a = ("ááá" +\n' + ' ~~~~~~~~\n' + ' "áá") + b\n' + ' ~~~~~~^~~\n' ) result_lines = self.get_exception(f_with_binary_operator) self.assertEqual(result_lines, expected_error.splitlines()) @@ -775,6 +876,7 @@ def f_with_subscript(): 'Traceback (most recent call last):\n' f' File "{__file__}", line {self.callable_line}, in get_exception\n' ' callable()\n' + ' ~~~~~~~~^^\n' f' File "{__file__}", line {lineno_f+2}, in f_with_subscript\n' " return some_dict['x']['y']['z']\n" ' ~~~~~~~~~~~~~~~~~~~^^^^^\n' @@ -792,6 +894,7 @@ def f_with_subscript(): 'Traceback (most recent call last):\n' f' File "{__file__}", line {self.callable_line}, in get_exception\n' ' callable()\n' + ' ~~~~~~~~^^\n' f' File "{__file__}", line {lineno_f+2}, in f_with_subscript\n' " return some_dict['ó']['á']['í']['beta']\n" ' ~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^\n' @@ -810,6 +913,7 @@ def f_with_binary_operator(): 'Traceback (most recent call last):\n' f' File "{__file__}", line {self.callable_line}, in get_exception\n' ' callable()\n' + ' ~~~~~~~~^^\n' f' File "{__file__}", line {lineno_f+3}, in f_with_binary_operator\n' ' return b [ a ] + c\n' ' ~~~~~~^^^^^^^^^\n' @@ -817,6 +921,226 @@ def f_with_binary_operator(): result_lines = self.get_exception(f_with_binary_operator) self.assertEqual(result_lines, expected_error.splitlines()) + def test_caret_for_subscript_multiline(self): + def f_with_subscript(): + bbbbb = {} + ccc = 1 + ddd = 2 + b = bbbbb \ + [ ccc # test + + + ddd \ + + ] # test + return b + + lineno_f = f_with_subscript.__code__.co_firstlineno + expected_error = ( + 'Traceback (most recent call last):\n' + f' File "{__file__}", line {self.callable_line}, in get_exception\n' + ' callable()\n' + ' ~~~~~~~~^^\n' + f' File "{__file__}", line {lineno_f+4}, in f_with_subscript\n' + ' b = bbbbb \\\n' + ' ~~~~~~~\n' + ' [ 
ccc # test\n' + ' ^^^^^^^^^^^^^\n' + ' \n' + ' \n' + ' + ddd \\\n' + ' ^^^^^^^^\n' + ' \n' + ' \n' + ' ] # test\n' + ' ^\n' + ) + result_lines = self.get_exception(f_with_subscript) + self.assertEqual(result_lines, expected_error.splitlines()) + + def test_caret_for_call(self): + def f_with_call(): + def f1(a): + def f2(b): + raise RuntimeError("fail") + return f2 + return f1("x")("y") + + lineno_f = f_with_call.__code__.co_firstlineno + expected_error = ( + 'Traceback (most recent call last):\n' + f' File "{__file__}", line {self.callable_line}, in get_exception\n' + ' callable()\n' + ' ~~~~~~~~^^\n' + f' File "{__file__}", line {lineno_f+5}, in f_with_call\n' + ' return f1("x")("y")\n' + ' ~~~~~~~^^^^^\n' + f' File "{__file__}", line {lineno_f+3}, in f2\n' + ' raise RuntimeError("fail")\n' + ) + result_lines = self.get_exception(f_with_call) + self.assertEqual(result_lines, expected_error.splitlines()) + + def test_caret_for_call_unicode(self): + def f_with_call(): + def f1(a): + def f2(b): + raise RuntimeError("fail") + return f2 + return f1("ó")("á") + + lineno_f = f_with_call.__code__.co_firstlineno + expected_error = ( + 'Traceback (most recent call last):\n' + f' File "{__file__}", line {self.callable_line}, in get_exception\n' + ' callable()\n' + ' ~~~~~~~~^^\n' + f' File "{__file__}", line {lineno_f+5}, in f_with_call\n' + ' return f1("ó")("á")\n' + ' ~~~~~~~^^^^^\n' + f' File "{__file__}", line {lineno_f+3}, in f2\n' + ' raise RuntimeError("fail")\n' + ) + result_lines = self.get_exception(f_with_call) + self.assertEqual(result_lines, expected_error.splitlines()) + + def test_caret_for_call_with_spaces_and_parenthesis(self): + def f_with_binary_operator(): + def f(a): + raise RuntimeError("fail") + return f ( "x" ) + 2 + + lineno_f = f_with_binary_operator.__code__.co_firstlineno + expected_error = ( + 'Traceback (most recent call last):\n' + f' File "{__file__}", line {self.callable_line}, in get_exception\n' + ' callable()\n' + ' ~~~~~~~~^^\n' + f' File "{__file__}", line {lineno_f+3}, in f_with_binary_operator\n' + ' return f ( "x" ) + 2\n' + ' ~~~~~~^^^^^^^^^^^\n' + f' File "{__file__}", line {lineno_f+2}, in f\n' + ' raise RuntimeError("fail")\n' + ) + result_lines = self.get_exception(f_with_binary_operator) + self.assertEqual(result_lines, expected_error.splitlines()) + + def test_caret_for_call_multiline(self): + def f_with_call(): + class C: + def y(self, a): + def f(b): + raise RuntimeError("fail") + return f + def g(x): + return C() + a = (g(1).y)( + 2 + )(3)(4) + return a + + lineno_f = f_with_call.__code__.co_firstlineno + expected_error = ( + 'Traceback (most recent call last):\n' + f' File "{__file__}", line {self.callable_line}, in get_exception\n' + ' callable()\n' + ' ~~~~~~~~^^\n' + f' File "{__file__}", line {lineno_f+8}, in f_with_call\n' + ' a = (g(1).y)(\n' + ' ~~~~~~~~~\n' + ' 2\n' + ' ~\n' + ' )(3)(4)\n' + ' ~^^^\n' + f' File "{__file__}", line {lineno_f+4}, in f\n' + ' raise RuntimeError("fail")\n' + ) + result_lines = self.get_exception(f_with_call) + self.assertEqual(result_lines, expected_error.splitlines()) + + def test_many_lines(self): + def f(): + x = 1 + if True: x += ( + "a" + + "a" + ) # test + + lineno_f = f.__code__.co_firstlineno + expected_error = ( + 'Traceback (most recent call last):\n' + f' File "{__file__}", line {self.callable_line}, in get_exception\n' + ' callable()\n' + ' ~~~~~~~~^^\n' + f' File "{__file__}", line {lineno_f+2}, in f\n' + ' if True: x += (\n' + ' ^^^^^^\n' + ' ...<2 lines>...\n' + ' ) # test\n' + ' ^\n' + ) + 
result_lines = self.get_exception(f) + self.assertEqual(result_lines, expected_error.splitlines()) + + def test_many_lines_no_caret(self): + def f(): + x = 1 + x += ( + "a" + + "a" + ) + + lineno_f = f.__code__.co_firstlineno + expected_error = ( + 'Traceback (most recent call last):\n' + f' File "{__file__}", line {self.callable_line}, in get_exception\n' + ' callable()\n' + ' ~~~~~~~~^^\n' + f' File "{__file__}", line {lineno_f+2}, in f\n' + ' x += (\n' + ' ...<2 lines>...\n' + ' )\n' + ) + result_lines = self.get_exception(f) + self.assertEqual(result_lines, expected_error.splitlines()) + + def test_many_lines_binary_op(self): + def f_with_binary_operator(): + b = 1 + c = "a" + a = ( + b + + b + ) + ( + c + + c + + c + ) + return a + + lineno_f = f_with_binary_operator.__code__.co_firstlineno + expected_error = ( + 'Traceback (most recent call last):\n' + f' File "{__file__}", line {self.callable_line}, in get_exception\n' + ' callable()\n' + ' ~~~~~~~~^^\n' + f' File "{__file__}", line {lineno_f+3}, in f_with_binary_operator\n' + ' a = (\n' + ' ~\n' + ' b +\n' + ' ~~~\n' + ' b\n' + ' ~\n' + ' ) + (\n' + ' ~~^~~\n' + ' c +\n' + ' ~~~\n' + ' ...<2 lines>...\n' + ' )\n' + ' ~\n' + ) + result_lines = self.get_exception(f_with_binary_operator) + self.assertEqual(result_lines, expected_error.splitlines()) + def test_traceback_specialization_with_syntax_error(self): bytecode = compile("1 / 0 / 1 / 2\n", TESTFN, "exec") @@ -833,6 +1157,7 @@ def test_traceback_specialization_with_syntax_error(self): 'Traceback (most recent call last):\n' f' File "{__file__}", line {self.callable_line}, in get_exception\n' ' callable()\n' + ' ~~~~~~~~^^\n' f' File "{TESTFN}", line {lineno_f}, in \n' " 1 $ 0 / 1 / 2\n" ' ^^^^^\n' @@ -855,6 +1180,7 @@ def test_traceback_very_long_line(self): 'Traceback (most recent call last):\n' f' File "{__file__}", line {self.callable_line}, in get_exception\n' ' callable()\n' + ' ~~~~~~~~^^\n' f' File "{TESTFN}", line {lineno_f}, in \n' f' {source}\n' f' {" "*len("if True: ") + "^"*256}\n' @@ -872,6 +1198,7 @@ def f_with_subscript(): 'Traceback (most recent call last):\n' f' File "{__file__}", line {self.callable_line}, in get_exception\n' ' callable()\n' + ' ~~~~~~~~^^\n' f' File "{__file__}", line {lineno_f+2}, in f_with_subscript\n' " some_dict['x']['y']['z']\n" ' ~~~~~~~~~~~~~~~~~~~^^^^^\n' @@ -891,6 +1218,7 @@ def exc(): f' + Exception Group Traceback (most recent call last):\n' f' | File "{__file__}", line {self.callable_line}, in get_exception\n' f' | callable()\n' + f' | ~~~~~~~~^^\n' f' | File "{__file__}", line {exc.__code__.co_firstlineno + 1}, in exc\n' f' | if True: raise ExceptionGroup("eg", [ValueError(1), TypeError(2)])\n' f' | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n' @@ -956,6 +1284,7 @@ def g(): pass 'Traceback (most recent call last):\n' f' File "{__file__}", line {self.callable_line}, in get_exception\n' ' callable()\n' + ' ~~~~~~~~^^\n' f' File "{__file__}", line {lineno_applydescs + 1}, in applydecs\n' ' @dec_error\n' ' ^^^^^^^^^\n' @@ -974,6 +1303,7 @@ class A: pass 'Traceback (most recent call last):\n' f' File "{__file__}", line {self.callable_line}, in get_exception\n' ' callable()\n' + ' ~~~~~~~~^^\n' f' File "{__file__}", line {lineno_applydescs_class + 1}, in applydecs_class\n' ' @dec_error\n' ' ^^^^^^^^^\n' @@ -992,6 +1322,7 @@ def f(): "Traceback (most recent call last):", f" File \"{__file__}\", line {self.callable_line}, in get_exception", " callable()", + " ~~~~~~~~^^", f" File \"{__file__}\", line 
{f.__code__.co_firstlineno + 2}, in f", " .method", " ^^^^^^", @@ -1008,6 +1339,7 @@ def f(): "Traceback (most recent call last):", f" File \"{__file__}\", line {self.callable_line}, in get_exception", " callable()", + " ~~~~~~~~^^", f" File \"{__file__}\", line {f.__code__.co_firstlineno + 2}, in f", " method", ] @@ -1023,6 +1355,7 @@ def f(): "Traceback (most recent call last):", f" File \"{__file__}\", line {self.callable_line}, in get_exception", " callable()", + " ~~~~~~~~^^", f" File \"{__file__}\", line {f.__code__.co_firstlineno + 2}, in f", " . method", " ^^^^^^", @@ -1038,6 +1371,7 @@ def f(): "Traceback (most recent call last):", f" File \"{__file__}\", line {self.callable_line}, in get_exception", " callable()", + " ~~~~~~~~^^", f" File \"{__file__}\", line {f.__code__.co_firstlineno + 1}, in f", " width", ] @@ -1054,6 +1388,7 @@ def f(): "Traceback (most recent call last):", f" File \"{__file__}\", line {self.callable_line}, in get_exception", " callable()", + " ~~~~~~~~^^", f" File \"{__file__}\", line {f.__code__.co_firstlineno + 2}, in f", " raise ValueError(width)", ] @@ -1072,9 +1407,12 @@ def f(): "Traceback (most recent call last):", f" File \"{__file__}\", line {self.callable_line}, in get_exception", " callable()", + " ~~~~~~~~^^", f" File \"{__file__}\", line {f.__code__.co_firstlineno + 4}, in f", f" print(1, www(", - f" ^^^^^^^", + f" ~~~~~~^", + f" th))", + f" ^^^^^", ] self.assertEqual(actual, expected) @@ -1089,6 +1427,7 @@ def f(): f"Traceback (most recent call last):", f" File \"{__file__}\", line {self.callable_line}, in get_exception", f" callable()", + f" ~~~~~~~~^^", f" File \"{__file__}\", line {f.__code__.co_firstlineno + 3}, in f", f" return 说明说明 / şçöğıĤellö", f" ~~~~~~~~~^~~~~~~~~~~~", @@ -1105,6 +1444,7 @@ def f(): f"Traceback (most recent call last):", f" File \"{__file__}\", line {self.callable_line}, in get_exception", f" callable()", + f" ~~~~~~~~^^", f" File \"{__file__}\", line {f.__code__.co_firstlineno + 1}, in f", f' return "✨🐍" + func_说明说明("📗🚛",', f" ^^^^^^^^^^^^^", @@ -1127,6 +1467,7 @@ def f(): f"Traceback (most recent call last):", f" File \"{__file__}\", line {self.callable_line}, in get_exception", f" callable()", + f" ~~~~~~~~^^", f" File \"{__file__}\", line {f.__code__.co_firstlineno + 8}, in f", f' return my_dct["✨🚛✨"]["说明"]["🐍"]["说明"]["🐍🐍"]', f" ~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^", @@ -1141,6 +1482,7 @@ def f(): expected = ['Traceback (most recent call last):', f' File "{__file__}", line {self.callable_line}, in get_exception', ' callable()', + ' ~~~~~~~~^^', f' File "{__file__}", line {f.__code__.co_firstlineno + 1}, in f', ' raise MemoryError()'] self.assertEqual(actual, expected) @@ -1187,6 +1529,14 @@ class TracebackFormatMixin: def some_exception(self): raise KeyError('blah') + def _filter_debug_ranges(self, expected): + return [line for line in expected if not set(line.strip()) <= set("^~")] + + def _maybe_filter_debug_ranges(self, expected): + if not self.DEBUG_RANGES: + return self._filter_debug_ranges(expected) + return expected + @cpython_only def check_traceback_format(self, cleanup_func=None): from _testcapi import traceback_print @@ -1199,6 +1549,11 @@ def check_traceback_format(self, cleanup_func=None): cleanup_func(tb.tb_next) traceback_fmt = 'Traceback (most recent call last):\n' + \ ''.join(traceback.format_tb(tb)) + # clear caret lines from traceback_fmt since internal API does + # not emit them + traceback_fmt = "\n".join( + self._filter_debug_ranges(traceback_fmt.splitlines()) + ) + "\n" file_ = StringIO() 
traceback_print(tb, file_) python_fmt = file_.getvalue() @@ -1291,12 +1646,16 @@ def f(): 'Traceback (most recent call last):\n' f' File "{__file__}", line {lineno_f+5}, in _check_recursive_traceback_display\n' ' f()\n' + ' ~^^\n' f' File "{__file__}", line {lineno_f+1}, in f\n' ' f()\n' + ' ~^^\n' f' File "{__file__}", line {lineno_f+1}, in f\n' ' f()\n' + ' ~^^\n' f' File "{__file__}", line {lineno_f+1}, in f\n' ' f()\n' + ' ~^^\n' # XXX: The following line changes depending on whether the tests # are run through the interactive interpreter or with -m # It also varies depending on the platform (stack size) @@ -1305,7 +1664,7 @@ def f(): 'RecursionError: maximum recursion depth exceeded\n' ) - expected = result_f.splitlines() + expected = self._maybe_filter_debug_ranges(result_f.splitlines()) actual = stderr_f.getvalue().splitlines() # Check the output text matches expectations @@ -1337,13 +1696,13 @@ def g(count=10): result_g = ( f' File "{__file__}", line {lineno_g+2}, in g\n' ' return g(count-1)\n' - ' ^^^^^^^^^^\n' + ' ~^^^^^^^^^\n' f' File "{__file__}", line {lineno_g+2}, in g\n' ' return g(count-1)\n' - ' ^^^^^^^^^^\n' + ' ~^^^^^^^^^\n' f' File "{__file__}", line {lineno_g+2}, in g\n' ' return g(count-1)\n' - ' ^^^^^^^^^^\n' + ' ~^^^^^^^^^\n' ' [Previous line repeated 7 more times]\n' f' File "{__file__}", line {lineno_g+3}, in g\n' ' raise ValueError\n' @@ -1353,11 +1712,10 @@ def g(count=10): 'Traceback (most recent call last):\n' f' File "{__file__}", line {lineno_g+7}, in _check_recursive_traceback_display\n' ' g()\n' + ' ~^^\n' ) - expected = (tb_line + result_g).splitlines() + expected = self._maybe_filter_debug_ranges((tb_line + result_g).splitlines()) actual = stderr_g.getvalue().splitlines() - if not self.DEBUG_RANGES: - expected = [line for line in expected if not set(line.strip()) == {"^"}] self.assertEqual(actual, expected) # Check 2 different repetitive sections @@ -1379,23 +1737,23 @@ def h(count=10): 'Traceback (most recent call last):\n' f' File "{__file__}", line {lineno_h+7}, in _check_recursive_traceback_display\n' ' h()\n' + ' ~^^\n' f' File "{__file__}", line {lineno_h+2}, in h\n' ' return h(count-1)\n' - ' ^^^^^^^^^^\n' + ' ~^^^^^^^^^\n' f' File "{__file__}", line {lineno_h+2}, in h\n' ' return h(count-1)\n' - ' ^^^^^^^^^^\n' + ' ~^^^^^^^^^\n' f' File "{__file__}", line {lineno_h+2}, in h\n' ' return h(count-1)\n' - ' ^^^^^^^^^^\n' + ' ~^^^^^^^^^\n' ' [Previous line repeated 7 more times]\n' f' File "{__file__}", line {lineno_h+3}, in h\n' ' g()\n' + ' ~^^\n' ) - expected = (result_h + result_g).splitlines() + expected = self._maybe_filter_debug_ranges((result_h + result_g).splitlines()) actual = stderr_h.getvalue().splitlines() - if not self.DEBUG_RANGES: - expected = [line for line in expected if not set(line.strip()) == {"^"}] self.assertEqual(actual, expected) # Check the boundary conditions. First, test just below the cutoff. 
@@ -1409,26 +1767,25 @@ def h(count=10): result_g = ( f' File "{__file__}", line {lineno_g+2}, in g\n' ' return g(count-1)\n' - ' ^^^^^^^^^^\n' + ' ~^^^^^^^^^\n' f' File "{__file__}", line {lineno_g+2}, in g\n' ' return g(count-1)\n' - ' ^^^^^^^^^^\n' + ' ~^^^^^^^^^\n' f' File "{__file__}", line {lineno_g+2}, in g\n' ' return g(count-1)\n' - ' ^^^^^^^^^^\n' + ' ~^^^^^^^^^\n' f' File "{__file__}", line {lineno_g+3}, in g\n' ' raise ValueError\n' 'ValueError\n' ) tb_line = ( 'Traceback (most recent call last):\n' - f' File "{__file__}", line {lineno_g+81}, in _check_recursive_traceback_display\n' + f' File "{__file__}", line {lineno_g+80}, in _check_recursive_traceback_display\n' ' g(traceback._RECURSIVE_CUTOFF)\n' + ' ~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n' ) - expected = (tb_line + result_g).splitlines() + expected = self._maybe_filter_debug_ranges((tb_line + result_g).splitlines()) actual = stderr_g.getvalue().splitlines() - if not self.DEBUG_RANGES: - expected = [line for line in expected if not set(line.strip()) == {"^"}] self.assertEqual(actual, expected) # Second, test just above the cutoff. @@ -1442,13 +1799,13 @@ def h(count=10): result_g = ( f' File "{__file__}", line {lineno_g+2}, in g\n' ' return g(count-1)\n' - ' ^^^^^^^^^^\n' + ' ~^^^^^^^^^\n' f' File "{__file__}", line {lineno_g+2}, in g\n' ' return g(count-1)\n' - ' ^^^^^^^^^^\n' + ' ~^^^^^^^^^\n' f' File "{__file__}", line {lineno_g+2}, in g\n' ' return g(count-1)\n' - ' ^^^^^^^^^^\n' + ' ~^^^^^^^^^\n' ' [Previous line repeated 1 more time]\n' f' File "{__file__}", line {lineno_g+3}, in g\n' ' raise ValueError\n' @@ -1456,13 +1813,12 @@ def h(count=10): ) tb_line = ( 'Traceback (most recent call last):\n' - f' File "{__file__}", line {lineno_g+114}, in _check_recursive_traceback_display\n' + f' File "{__file__}", line {lineno_g+112}, in _check_recursive_traceback_display\n' ' g(traceback._RECURSIVE_CUTOFF + 1)\n' + ' ~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n' ) - expected = (tb_line + result_g).splitlines() + expected = self._maybe_filter_debug_ranges((tb_line + result_g).splitlines()) actual = stderr_g.getvalue().splitlines() - if not self.DEBUG_RANGES: - expected = [line for line in expected if not set(line.strip()) == {"^"}] self.assertEqual(actual, expected) @requires_debug_ranges() @@ -1942,6 +2298,7 @@ def exc(): f' + Exception Group Traceback (most recent call last):\n' f' | File "{__file__}", line {self.callable_line}, in get_exception\n' f' | exception_or_callable()\n' + f' | ~~~~~~~~~~~~~~~~~~~~~^^\n' f' | File "{__file__}", line {exc.__code__.co_firstlineno + 1}, in exc\n' f' | raise ExceptionGroup("eg", [ValueError(1), TypeError(2)])\n' f' | ExceptionGroup: eg (2 sub-exceptions)\n' @@ -1977,6 +2334,7 @@ def exc(): f' + Exception Group Traceback (most recent call last):\n' f' | File "{__file__}", line {self.callable_line}, in get_exception\n' f' | exception_or_callable()\n' + f' | ~~~~~~~~~~~~~~~~~~~~~^^\n' f' | File "{__file__}", line {exc.__code__.co_firstlineno + 5}, in exc\n' f' | raise EG("eg2", [ValueError(3), TypeError(4)]) from e\n' f' | ExceptionGroup: eg2 (2 sub-exceptions)\n' @@ -2028,6 +2386,7 @@ def exc(): f'Traceback (most recent call last):\n' f' File "{__file__}", line {self.callable_line}, in get_exception\n' f' exception_or_callable()\n' + f' ~~~~~~~~~~~~~~~~~~~~~^^\n' f' File "{__file__}", line {exc.__code__.co_firstlineno + 8}, in exc\n' f' raise ImportError(5)\n' f'ImportError: 5\n') @@ -2074,6 +2433,7 @@ def exc(): f' + Exception Group Traceback (most recent call last):\n' f' | File "{__file__}", line 
{self.callable_line}, in get_exception\n' f' | exception_or_callable()\n' + f' | ~~~~~~~~~~~~~~~~~~~~~^^\n' f' | File "{__file__}", line {exc.__code__.co_firstlineno + 11}, in exc\n' f' | raise EG("top", [VE(5)])\n' f' | ExceptionGroup: top (1 sub-exception)\n' @@ -2233,6 +2593,7 @@ def exc(): expected = (f' + Exception Group Traceback (most recent call last):\n' f' | File "{__file__}", line {self.callable_line}, in get_exception\n' f' | exception_or_callable()\n' + f' | ~~~~~~~~~~~~~~~~~~~~~^^\n' f' | File "{__file__}", line {exc.__code__.co_firstlineno + 9}, in exc\n' f' | raise ExceptionGroup("nested", excs)\n' f' | ExceptionGroup: nested (2 sub-exceptions)\n' @@ -2284,6 +2645,7 @@ def exc(): expected = (f' + Exception Group Traceback (most recent call last):\n' f' | File "{__file__}", line {self.callable_line}, in get_exception\n' f' | exception_or_callable()\n' + f' | ~~~~~~~~~~~~~~~~~~~~~^^\n' f' | File "{__file__}", line {exc.__code__.co_firstlineno + 10}, in exc\n' f' | raise ExceptionGroup("nested", excs)\n' f' | ExceptionGroup: nested (2 sub-exceptions)\n' @@ -3108,6 +3470,7 @@ def test_exception_group_format(self): f' | Traceback (most recent call last):', f' | File "{__file__}", line {lno_g+9}, in _get_exception_group', f' | f()', + f' | ~^^', f' | File "{__file__}", line {lno_f+1}, in f', f' | 1/0', f' | ~^~', @@ -3116,6 +3479,7 @@ def test_exception_group_format(self): f' | Traceback (most recent call last):', f' | File "{__file__}", line {lno_g+13}, in _get_exception_group', f' | g(42)', + f' | ~^^^^', f' | File "{__file__}", line {lno_g+1}, in g', f' | raise ValueError(v)', f' | ValueError: 42', @@ -3124,6 +3488,7 @@ def test_exception_group_format(self): f' | Traceback (most recent call last):', f' | File "{__file__}", line {lno_g+20}, in _get_exception_group', f' | g(24)', + f' | ~^^^^', f' | File "{__file__}", line {lno_g+1}, in g', f' | raise ValueError(v)', f' | ValueError: 24', diff --git a/Lib/test/test_warnings/__init__.py b/Lib/test/test_warnings/__init__.py index 2c523230e7e97f..9076dc6135bb79 100644 --- a/Lib/test/test_warnings/__init__.py +++ b/Lib/test/test_warnings/__init__.py @@ -1235,8 +1235,8 @@ def test_conflicting_envvar_and_command_line(self): b" File \"\", line 1, in ", b' import sys, warnings; sys.stdout.write(str(sys.warnoptions)); warnings.w' b"arn('Message', DeprecationWarning)", - b' ^^^^^^^^^^' - b'^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^', + b' ~~~~~~~~~~' + b'~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^', b"DeprecationWarning: Message"]) def test_default_filter_configuration(self): diff --git a/Lib/traceback.py b/Lib/traceback.py index b25a7291f6be51..a86fc2f149ea74 100644 --- a/Lib/traceback.py +++ b/Lib/traceback.py @@ -273,7 +273,7 @@ class FrameSummary: """ __slots__ = ('filename', 'lineno', 'end_lineno', 'colno', 'end_colno', - 'name', '_line', 'locals') + 'name', '_line', '_line_dedented', 'locals') def __init__(self, filename, lineno, name, *, lookup_line=True, locals=None, line=None, @@ -289,8 +289,12 @@ def __init__(self, filename, lineno, name, *, lookup_line=True, """ self.filename = filename self.lineno = lineno + self.end_lineno = lineno if end_lineno is None else end_lineno + self.colno = colno + self.end_colno = end_colno self.name = name self._line = line + self._line_dedented = None if lookup_line: self.line self.locals = {k: _safe_string(v, 'local', func=repr) @@ -328,13 +332,27 @@ def _original_line(self): self.line return self._line + @property + def _dedented_lines(self): + # Returns _original_line, but dedented + self.line + if 
self._line_dedented is None: + if self._line is not None: + self._line_dedented = textwrap.dedent(self._line).rstrip() + return self._line_dedented + @property def line(self): if self._line is None: if self.lineno is None: return None - self._line = linecache.getline(self.filename, self.lineno) - return self._line.strip() + end_lineno = self.lineno if self.end_lineno is None else self.end_lineno + self._line = "" + for lineno in range(self.lineno, end_lineno + 1): + # treat errors and empty lines as the same + self._line += linecache.getline(self.filename, lineno).rstrip() + "\n" + # return only the first line + return self._line.partition("\n")[0].strip() def walk_stack(f): @@ -486,56 +504,138 @@ def format_frame_summary(self, frame_summary): filename = "" row.append(' File "{}", line {}, in {}\n'.format( filename, frame_summary.lineno, frame_summary.name)) - if frame_summary.line: - stripped_line = frame_summary.line.strip() - row.append(' {}\n'.format(stripped_line)) - - line = frame_summary._original_line - orig_line_len = len(line) - frame_line_len = len(frame_summary.line.lstrip()) - stripped_characters = orig_line_len - frame_line_len + if frame_summary._dedented_lines: if ( - frame_summary.colno is not None - and frame_summary.end_colno is not None + frame_summary.colno is None or + frame_summary.end_colno is None ): - start_offset = _byte_offset_to_character_offset( - line, frame_summary.colno) - end_offset = _byte_offset_to_character_offset( - line, frame_summary.end_colno) - code_segment = line[start_offset:end_offset] - - anchors = None - if frame_summary.lineno == frame_summary.end_lineno: - with suppress(Exception): - anchors = _extract_caret_anchors_from_line_segment(code_segment) - else: - # Don't count the newline since the anchors only need to - # go up until the last character of the line. - end_offset = len(line.rstrip()) - - # show indicators if primary char doesn't span the frame line - if end_offset - start_offset < len(stripped_line) or ( - anchors and anchors.right_start_offset - anchors.left_end_offset > 0): - # When showing this on a terminal, some of the non-ASCII characters - # might be rendered as double-width characters, so we need to take - # that into account when calculating the length of the line. 
- dp_start_offset = _display_width(line, start_offset) + 1 - dp_end_offset = _display_width(line, end_offset) + 1 - - row.append(' ') - row.append(' ' * (dp_start_offset - stripped_characters)) - - if anchors: - dp_left_end_offset = _display_width(code_segment, anchors.left_end_offset) - dp_right_start_offset = _display_width(code_segment, anchors.right_start_offset) - row.append(anchors.primary_char * dp_left_end_offset) - row.append(anchors.secondary_char * (dp_right_start_offset - dp_left_end_offset)) - row.append(anchors.primary_char * (dp_end_offset - dp_start_offset - dp_right_start_offset)) - else: - row.append('^' * (dp_end_offset - dp_start_offset)) + # only output first line if column information is missing + row.append(textwrap.indent(frame_summary.line, ' ') + "\n") + else: + # get first and last line + all_lines_original = frame_summary._original_line.splitlines() + first_line = all_lines_original[0] + last_line = all_lines_original[frame_summary.end_lineno - frame_summary.lineno] + + # character index of the start/end of the instruction + start_offset = _byte_offset_to_character_offset(first_line, frame_summary.colno) + end_offset = _byte_offset_to_character_offset(last_line, frame_summary.end_colno) + + all_lines = frame_summary._dedented_lines.splitlines()[ + :frame_summary.end_lineno - frame_summary.lineno + 1 + ] - row.append('\n') + # adjust start/end offset based on dedent + dedent_characters = len(first_line) - len(all_lines[0]) + start_offset -= dedent_characters + end_offset -= dedent_characters + start_offset = max(0, start_offset) + end_offset = max(0, end_offset) + + # When showing this on a terminal, some of the non-ASCII characters + # might be rendered as double-width characters, so we need to take + # that into account when calculating the length of the line. 
+ dp_start_offset = _display_width(all_lines[0], offset=start_offset) + dp_end_offset = _display_width(all_lines[-1], offset=end_offset) + + # get exact code segment corresponding to the instruction + segment = "\n".join(all_lines) + segment = segment[start_offset:len(segment) - (len(all_lines[-1]) - end_offset)] + + # attempt to parse for anchors + anchors: Optional[_Anchors] = None + try: + anchors = _extract_caret_anchors_from_line_segment(segment) + except Exception: + pass + + # only use carets if there are anchors or the carets do not span all lines + show_carets = False + if anchors or all_lines[0][:start_offset].lstrip() or all_lines[-1][end_offset:].rstrip(): + show_carets = True + + result = [] + + # only display first line, last line, and lines around anchor start/end + significant_lines = {0, len(all_lines) - 1} + + anchors_left_end_offset = 0 + anchors_right_start_offset = 0 + primary_char = "^" + secondary_char = "^" + if anchors: + anchors_left_end_offset = anchors.left_end_offset + anchors_right_start_offset = anchors.right_start_offset + # computed anchor positions do not take start_offset into account, + # so account for it here + if anchors.left_end_lineno == 0: + anchors_left_end_offset += start_offset + if anchors.right_start_lineno == 0: + anchors_right_start_offset += start_offset + + # account for display width + anchors_left_end_offset = _display_width( + all_lines[anchors.left_end_lineno], offset=anchors_left_end_offset + ) + anchors_right_start_offset = _display_width( + all_lines[anchors.right_start_lineno], offset=anchors_right_start_offset + ) + + primary_char = anchors.primary_char + secondary_char = anchors.secondary_char + significant_lines.update( + range(anchors.left_end_lineno - 1, anchors.left_end_lineno + 2) + ) + significant_lines.update( + range(anchors.right_start_lineno - 1, anchors.right_start_lineno + 2) + ) + # remove bad line numbers + significant_lines.discard(-1) + significant_lines.discard(len(all_lines)) + + # output all_lines[lineno] along with carets + def output_line(lineno): + result.append(all_lines[lineno] + "\n") + if not show_carets: + return + num_spaces = len(all_lines[lineno]) - len(all_lines[lineno].lstrip()) + carets = [] + num_carets = dp_end_offset if lineno == len(all_lines) - 1 else _display_width(all_lines[lineno]) + # compute caret character for each position + for col in range(num_carets): + if col < num_spaces or (lineno == 0 and col < dp_start_offset): + # before first non-ws char of the line, or before start of instruction + carets.append(' ') + elif anchors and ( + lineno > anchors.left_end_lineno or + (lineno == anchors.left_end_lineno and col >= anchors_left_end_offset) + ) and ( + lineno < anchors.right_start_lineno or + (lineno == anchors.right_start_lineno and col < anchors_right_start_offset) + ): + # within anchors + carets.append(secondary_char) + else: + carets.append(primary_char) + result.append("".join(carets) + "\n") + + # display significant lines + sig_lines_list = sorted(significant_lines) + for i, lineno in enumerate(sig_lines_list): + if i: + linediff = lineno - sig_lines_list[i - 1] + if linediff == 2: + # 1 line in between - just output it + output_line(lineno - 1) + elif linediff > 2: + # > 1 line in between - abbreviate + result.append(f"...<{linediff - 1} lines>...\n") + output_line(lineno) + + row.append( + textwrap.indent(textwrap.dedent("".join(result)), ' ', lambda line: True) + ) if frame_summary.locals: for name, value in sorted(frame_summary.locals.items()): row.append(' {name} = 
{value}\n'.format(name=name, value=value)) @@ -598,7 +698,9 @@ def _byte_offset_to_character_offset(str, offset): _Anchors = collections.namedtuple( "_Anchors", [ + "left_end_lineno", "left_end_offset", + "right_start_lineno", "right_start_offset", "primary_char", "secondary_char", @@ -607,68 +709,154 @@ def _byte_offset_to_character_offset(str, offset): ) def _extract_caret_anchors_from_line_segment(segment): + """ + Given source code `segment` corresponding to a FrameSummary, determine: + - for binary ops, the location of the binary op + - for indexing and function calls, the location of the brackets. + `segment` is expected to be a valid Python expression. + """ import ast try: - tree = ast.parse(segment) + # Without parentheses, `segment` is parsed as a statement. + # Binary ops, subscripts, and calls are expressions, so + # we can wrap them with parentheses to parse them as + # (possibly multi-line) expressions. + tree = ast.parse("(\n" + segment + "\n)") except SyntaxError: return None if len(tree.body) != 1: return None - normalize = lambda offset: _byte_offset_to_character_offset(segment, offset) + lines = segment.splitlines() + + # get character index given byte offset + def normalize(lineno, offset): + return _byte_offset_to_character_offset(lines[lineno], offset) + + # Gets the next valid character index in `lines`, if + # the current location is not valid. Handles empty lines. + def next_valid_char(lineno, col): + while lineno < len(lines) and col >= len(lines[lineno]): + col = 0 + lineno += 1 + assert lineno < len(lines) and col < len(lines[lineno]) + return lineno, col + + # Get the next valid character index in `lines`. + def increment(lineno, col): + col += 1 + lineno, col = next_valid_char(lineno, col) + return lineno, col + + # Get the next valid character at least on the next line + def nextline(lineno, col): + col = 0 + lineno += 1 + lineno, col = next_valid_char(lineno, col) + return lineno, col + + # Get the next valid non-"\#" character that satisfies the `stop` predicate + def increment_until(lineno, col, stop): + while True: + ch = lines[lineno][col] + if ch in "\\#": + lineno, col = nextline(lineno, col) + elif not stop(ch): + lineno, col = increment(lineno, col) + else: + break + return lineno, col + + # Get the lineno/col position of the end of `expr`. If `force_valid` is True, + # forces the position to be a valid character (e.g. 
if the position is beyond the + # end of the line, move to the next line) + def setup_positions(expr, force_valid=True): + # -2 since end_lineno is 1-indexed and because we added an extra + # bracket + newline to `segment` when calling ast.parse + lineno = expr.end_lineno - 2 + col = normalize(lineno, expr.end_col_offset) + return next_valid_char(lineno, col) if force_valid else (lineno, col) + statement = tree.body[0] match statement: case ast.Expr(expr): match expr: case ast.BinOp(): - operator_start = normalize(expr.left.end_col_offset) - operator_end = normalize(expr.right.col_offset) - operator_str = segment[operator_start:operator_end] - operator_offset = len(operator_str) - len(operator_str.lstrip()) + # ast gives these locations for BinOp subexpressions + # ( left_expr ) + ( right_expr ) + # left^^^^^ right^^^^^ + lineno, col = setup_positions(expr.left) - left_anchor = expr.left.end_col_offset + operator_offset - right_anchor = left_anchor + 1 + # First operator character is the first non-space/')' character + lineno, col = increment_until(lineno, col, lambda x: not x.isspace() and x != ')') + + # binary op is 1 or 2 characters long, on the same line, + # before the right subexpression + right_col = col + 1 if ( - operator_offset + 1 < len(operator_str) - and not operator_str[operator_offset + 1].isspace() + right_col < len(lines[lineno]) + and ( + # operator char should not be in the right subexpression + expr.right.lineno - 2 > lineno or + right_col < normalize(expr.right.lineno - 2, expr.right.col_offset) + ) + and not (ch := lines[lineno][right_col]).isspace() + and ch not in "\\#" ): - right_anchor += 1 + right_col += 1 - while left_anchor < len(segment) and ((ch := segment[left_anchor]).isspace() or ch in ")#"): - left_anchor += 1 - right_anchor += 1 - return _Anchors(normalize(left_anchor), normalize(right_anchor)) + # right_col can be invalid since it is exclusive + return _Anchors(lineno, col, lineno, right_col) case ast.Subscript(): - left_anchor = normalize(expr.value.end_col_offset) - right_anchor = normalize(expr.slice.end_col_offset + 1) - while left_anchor < len(segment) and ((ch := segment[left_anchor]).isspace() or ch != "["): - left_anchor += 1 - while right_anchor < len(segment) and ((ch := segment[right_anchor]).isspace() or ch != "]"): - right_anchor += 1 - if right_anchor < len(segment): - right_anchor += 1 - return _Anchors(left_anchor, right_anchor) + # ast gives these locations for value and slice subexpressions + # ( value_expr ) [ slice_expr ] + # value^^^^^ slice^^^^^ + # subscript^^^^^^^^^^^^^^^^^^^^ + + # find left bracket + left_lineno, left_col = setup_positions(expr.value) + left_lineno, left_col = increment_until(left_lineno, left_col, lambda x: x == '[') + # find right bracket (final character of expression) + right_lineno, right_col = setup_positions(expr, force_valid=False) + return _Anchors(left_lineno, left_col, right_lineno, right_col) + case ast.Call(): + # ast gives these locations for function call expressions + # ( func_expr ) (args, kwargs) + # func^^^^^ + # call^^^^^^^^^^^^^^^^^^^^^^^^ + + # find left bracket + left_lineno, left_col = setup_positions(expr.func) + left_lineno, left_col = increment_until(left_lineno, left_col, lambda x: x == '(') + # find right bracket (final character of expression) + right_lineno, right_col = setup_positions(expr, force_valid=False) + return _Anchors(left_lineno, left_col, right_lineno, right_col) return None _WIDE_CHAR_SPECIFIERS = "WF" -def _display_width(line, offset): +def _display_width(line, 
offset=None): """Calculate the extra amount of width space the given source code segment might take if it were to be displayed on a fixed width output device. Supports wide unicode characters and emojis.""" # Fast track for ASCII-only strings if line.isascii(): + if offset is None: + return len(line) return offset import unicodedata + if offset is not None: + line = line[:offset] + return sum( 2 if unicodedata.east_asian_width(char) in _WIDE_CHAR_SPECIFIERS else 1 - for char in line[:offset] + for char in line ) From c1f8d0b9d76b887a654b290c5cf1d4171b509559 Mon Sep 17 00:00:00 2001 From: "blurb-it[bot]" <43283697+blurb-it[bot]@users.noreply.github.com> Date: Wed, 15 Nov 2023 01:36:05 +0000 Subject: [PATCH 2/5] =?UTF-8?q?=F0=9F=93=9C=F0=9F=A4=96=20Added=20by=20blu?= =?UTF-8?q?rb=5Fit.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../next/Library/2023-11-15-01-36-04.gh-issue-106922.qslOVH.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 Misc/NEWS.d/next/Library/2023-11-15-01-36-04.gh-issue-106922.qslOVH.rst diff --git a/Misc/NEWS.d/next/Library/2023-11-15-01-36-04.gh-issue-106922.qslOVH.rst b/Misc/NEWS.d/next/Library/2023-11-15-01-36-04.gh-issue-106922.qslOVH.rst new file mode 100644 index 00000000000000..a6dae826c45598 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-11-15-01-36-04.gh-issue-106922.qslOVH.rst @@ -0,0 +1 @@ +Display multiple lines with `traceback` when errors span multiple lines. From 315284422f2120e77509167bf2274c4f8d5cd87d Mon Sep 17 00:00:00 2001 From: William Wen Date: Fri, 17 Nov 2023 11:32:04 -0800 Subject: [PATCH 3/5] fix lint and doc test failures --- Doc/library/traceback.rst | 13 +++++++++---- .../2023-11-15-01-36-04.gh-issue-106922.qslOVH.rst | 2 +- 2 files changed, 10 insertions(+), 5 deletions(-) diff --git a/Doc/library/traceback.rst b/Doc/library/traceback.rst index 408da7fc5f0645..8ca124058a3e99 100644 --- a/Doc/library/traceback.rst +++ b/Doc/library/traceback.rst @@ -515,27 +515,32 @@ The output for the example would look similar to this: *** print_tb: File "", line 10, in lumberjack() + ~~~~~~~~~~^^ *** print_exception: Traceback (most recent call last): File "", line 10, in lumberjack() + ~~~~~~~~~~^^ File "", line 4, in lumberjack bright_side_of_life() + ~~~~~~~~~~~~~~~~~~~^^ IndexError: tuple index out of range *** print_exc: Traceback (most recent call last): File "", line 10, in lumberjack() + ~~~~~~~~~~^^ File "", line 4, in lumberjack bright_side_of_life() + ~~~~~~~~~~~~~~~~~~~^^ IndexError: tuple index out of range *** format_exc, first and last line: Traceback (most recent call last): IndexError: tuple index out of range *** format_exception: ['Traceback (most recent call last):\n', - ' File "", line 10, in \n lumberjack()\n', - ' File "", line 4, in lumberjack\n bright_side_of_life()\n', + ' File "", line 10, in \n lumberjack()\n ~~~~~~~~~~^^\n', + ' File "", line 4, in lumberjack\n bright_side_of_life()\n ~~~~~~~~~~~~~~~~~~~^^\n', ' File "", line 7, in bright_side_of_life\n return tuple()[0]\n ~~~~~~~^^^\n', 'IndexError: tuple index out of range\n'] *** extract_tb: @@ -543,8 +548,8 @@ The output for the example would look similar to this: , line 4 in lumberjack>, , line 7 in bright_side_of_life>] *** format_tb: - [' File "", line 10, in \n lumberjack()\n', - ' File "", line 4, in lumberjack\n bright_side_of_life()\n', + [' File "", line 10, in \n lumberjack()\n ~~~~~~~~~~^^\n', + ' File "", line 4, in lumberjack\n bright_side_of_life()\n ~~~~~~~~~~~~~~~~~~~^^\n', ' File "", line 7, in 
bright_side_of_life\n return tuple()[0]\n ~~~~~~~^^^\n'] *** tb_lineno: 10 diff --git a/Misc/NEWS.d/next/Library/2023-11-15-01-36-04.gh-issue-106922.qslOVH.rst b/Misc/NEWS.d/next/Library/2023-11-15-01-36-04.gh-issue-106922.qslOVH.rst index a6dae826c45598..b68e75ab87cd0b 100644 --- a/Misc/NEWS.d/next/Library/2023-11-15-01-36-04.gh-issue-106922.qslOVH.rst +++ b/Misc/NEWS.d/next/Library/2023-11-15-01-36-04.gh-issue-106922.qslOVH.rst @@ -1 +1 @@ -Display multiple lines with `traceback` when errors span multiple lines. +Display multiple lines with ``traceback`` when errors span multiple lines. From 425c87452a5020a4aba5cba48cb372b9083da6cf Mon Sep 17 00:00:00 2001 From: William Wen Date: Tue, 21 Nov 2023 14:02:31 -0800 Subject: [PATCH 4/5] address pr comments --- Lib/test/test_traceback.py | 4 +- Lib/traceback.py | 99 +++++++++++++++++++------------------- 2 files changed, 51 insertions(+), 52 deletions(-) diff --git a/Lib/test/test_traceback.py b/Lib/test/test_traceback.py index 144e66a485bc46..defcaf4b7ff259 100644 --- a/Lib/test/test_traceback.py +++ b/Lib/test/test_traceback.py @@ -797,7 +797,7 @@ def f_with_binary_operator(): ' callable()\n' ' ~~~~~~~~^^\n' f' File "{__file__}", line {lineno_f+3}, in f_with_binary_operator\n' - ' a = b \\\n' + ' a = b \\\n' ' ~~~~~~\n' ' +\\\n' ' ^~\n' @@ -2914,7 +2914,7 @@ def test_basics(self): def test_lazy_lines(self): linecache.clearcache() f = traceback.FrameSummary("f", 1, "dummy", lookup_line=False) - self.assertEqual(None, f._line) + self.assertEqual(None, f._lines) linecache.lazycache("f", globals()) self.assertEqual( '"""Test cases for traceback module"""', diff --git a/Lib/traceback.py b/Lib/traceback.py index a86fc2f149ea74..c946b3f538a267 100644 --- a/Lib/traceback.py +++ b/Lib/traceback.py @@ -273,7 +273,7 @@ class FrameSummary: """ __slots__ = ('filename', 'lineno', 'end_lineno', 'colno', 'end_colno', - 'name', '_line', '_line_dedented', 'locals') + 'name', '_lines', '_lines_dedented', 'locals') def __init__(self, filename, lineno, name, *, lookup_line=True, locals=None, line=None, @@ -293,15 +293,12 @@ def __init__(self, filename, lineno, name, *, lookup_line=True, self.colno = colno self.end_colno = end_colno self.name = name - self._line = line - self._line_dedented = None + self._lines = line + self._lines_dedented = None if lookup_line: self.line self.locals = {k: _safe_string(v, 'local', func=repr) for k, v in locals.items()} if locals else None - self.end_lineno = end_lineno - self.colno = colno - self.end_colno = end_colno def __eq__(self, other): if isinstance(other, FrameSummary): @@ -326,33 +323,39 @@ def __repr__(self): def __len__(self): return 4 + def _set_lines(self): + if ( + self._lines is None + and self.lineno is not None + and self.end_lineno is not None + ): + lines = [] + for lineno in range(self.lineno, self.end_lineno + 1): + # treat errors (empty string) and empty lines (newline) as the same + lines.append(linecache.getline(self.filename, lineno).rstrip()) + self._lines = "\n".join(lines) + "\n" + @property - def _original_line(self): + def _original_lines(self): # Returns the line as-is from the source, without modifying whitespace. 
- self.line - return self._line + self._set_lines() + return self._lines @property def _dedented_lines(self): - # Returns _original_line, but dedented - self.line - if self._line_dedented is None: - if self._line is not None: - self._line_dedented = textwrap.dedent(self._line).rstrip() - return self._line_dedented + # Returns _original_lines, but dedented (and rstripped) + self._set_lines() + if self._lines_dedented is None and self._lines is not None: + self._lines_dedented = textwrap.dedent(self._lines).rstrip() + return self._lines_dedented @property def line(self): - if self._line is None: - if self.lineno is None: - return None - end_lineno = self.lineno if self.end_lineno is None else self.end_lineno - self._line = "" - for lineno in range(self.lineno, end_lineno + 1): - # treat errors and empty lines as the same - self._line += linecache.getline(self.filename, lineno).rstrip() + "\n" - # return only the first line - return self._line.partition("\n")[0].strip() + self._set_lines() + if self._lines is None: + return None + # return only the first line, stripped + return self._lines.partition("\n")[0].strip() def walk_stack(f): @@ -513,7 +516,7 @@ def format_frame_summary(self, frame_summary): row.append(textwrap.indent(frame_summary.line, ' ') + "\n") else: # get first and last line - all_lines_original = frame_summary._original_line.splitlines() + all_lines_original = frame_summary._original_lines.splitlines() first_line = all_lines_original[0] last_line = all_lines_original[frame_summary.end_lineno - frame_summary.lineno] @@ -527,10 +530,8 @@ def format_frame_summary(self, frame_summary): # adjust start/end offset based on dedent dedent_characters = len(first_line) - len(all_lines[0]) - start_offset -= dedent_characters - end_offset -= dedent_characters - start_offset = max(0, start_offset) - end_offset = max(0, end_offset) + start_offset = max(0, start_offset - dedent_characters) + end_offset = max(0, end_offset - dedent_characters) # When showing this on a terminal, some of the non-ASCII characters # might be rendered as double-width characters, so we need to take @@ -543,11 +544,9 @@ def format_frame_summary(self, frame_summary): segment = segment[start_offset:len(segment) - (len(all_lines[-1]) - end_offset)] # attempt to parse for anchors - anchors: Optional[_Anchors] = None - try: + anchors = None + with suppress(Exception): anchors = _extract_caret_anchors_from_line_segment(segment) - except Exception: - pass # only use carets if there are anchors or the carets do not span all lines show_carets = False @@ -594,8 +593,8 @@ def format_frame_summary(self, frame_summary): significant_lines.discard(-1) significant_lines.discard(len(all_lines)) - # output all_lines[lineno] along with carets def output_line(lineno): + """output all_lines[lineno] along with carets""" result.append(all_lines[lineno] + "\n") if not show_carets: return @@ -731,34 +730,35 @@ def _extract_caret_anchors_from_line_segment(segment): lines = segment.splitlines() - # get character index given byte offset def normalize(lineno, offset): + """Get character index given byte offset""" return _byte_offset_to_character_offset(lines[lineno], offset) - # Gets the next valid character index in `lines`, if - # the current location is not valid. Handles empty lines. def next_valid_char(lineno, col): + """Gets the next valid character index in `lines`, if + the current location is not valid. Handles empty lines. 
+ """ while lineno < len(lines) and col >= len(lines[lineno]): col = 0 lineno += 1 assert lineno < len(lines) and col < len(lines[lineno]) return lineno, col - # Get the next valid character index in `lines`. def increment(lineno, col): + """Get the next valid character index in `lines`.""" col += 1 lineno, col = next_valid_char(lineno, col) return lineno, col - # Get the next valid character at least on the next line def nextline(lineno, col): + """Get the next valid character at least on the next line""" col = 0 lineno += 1 lineno, col = next_valid_char(lineno, col) return lineno, col - # Get the next valid non-"\#" character that satisfies the `stop` predicate def increment_until(lineno, col, stop): + """Get the next valid non-"\\#" character that satisfies the `stop` predicate""" while True: ch = lines[lineno][col] if ch in "\\#": @@ -769,10 +769,11 @@ def increment_until(lineno, col, stop): break return lineno, col - # Get the lineno/col position of the end of `expr`. If `force_valid` is True, - # forces the position to be a valid character (e.g. if the position is beyond the - # end of the line, move to the next line) def setup_positions(expr, force_valid=True): + """Get the lineno/col position of the end of `expr`. If `force_valid` is True, + forces the position to be a valid character (e.g. if the position is beyond the + end of the line, move to the next line) + """ # -2 since end_lineno is 1-indexed and because we added an extra # bracket + newline to `segment` when calling ast.parse lineno = expr.end_lineno - 2 @@ -843,20 +844,18 @@ def _display_width(line, offset=None): code segment might take if it were to be displayed on a fixed width output device. Supports wide unicode characters and emojis.""" + if offset is None: + offset = len(line) + # Fast track for ASCII-only strings if line.isascii(): - if offset is None: - return len(line) return offset import unicodedata - if offset is not None: - line = line[:offset] - return sum( 2 if unicodedata.east_asian_width(char) in _WIDE_CHAR_SPECIFIERS else 1 - for char in line + for char in line[:offset] ) From 99094bbd2ef2873a93023e985caebca1d6307a90 Mon Sep 17 00:00:00 2001 From: William Wen Date: Fri, 1 Dec 2023 11:24:09 -0800 Subject: [PATCH 5/5] add more comments, address some nits --- Lib/traceback.py | 25 +++++++++++++++++++++---- 1 file changed, 21 insertions(+), 4 deletions(-) diff --git a/Lib/traceback.py b/Lib/traceback.py index 3ee3b614de2619..a0485a7023d07d 100644 --- a/Lib/traceback.py +++ b/Lib/traceback.py @@ -344,10 +344,10 @@ def _original_lines(self): @property def _dedented_lines(self): - # Returns _original_lines, but dedented (and rstripped) + # Returns _original_lines, but dedented self._set_lines() if self._lines_dedented is None and self._lines is not None: - self._lines_dedented = textwrap.dedent(self._lines).rstrip() + self._lines_dedented = textwrap.dedent(self._lines) return self._lines_dedented @property @@ -508,7 +508,7 @@ def format_frame_summary(self, frame_summary): filename = "" row.append(' File "{}", line {}, in {}\n'.format( filename, frame_summary.lineno, frame_summary.name)) - if frame_summary._dedented_lines: + if frame_summary._dedented_lines and frame_summary._dedented_lines.strip(): if ( frame_summary.colno is None or frame_summary.end_colno is None @@ -519,6 +519,7 @@ def format_frame_summary(self, frame_summary): # get first and last line all_lines_original = frame_summary._original_lines.splitlines() first_line = all_lines_original[0] + # assume all_lines_original has enough lines (since we 
constructed it) last_line = all_lines_original[frame_summary.end_lineno - frame_summary.lineno] # character index of the start/end of the instruction @@ -722,7 +723,23 @@ def _extract_caret_anchors_from_line_segment(segment): # Binary ops, subscripts, and calls are expressions, so # we can wrap them with parentheses to parse them as # (possibly multi-line) expressions. - tree = ast.parse("(\n" + segment + "\n)") + # e.g. if we try to highlight the addition in + # x = ( + # a + + # b + # ) + # then we would ast.parse + # a + + # b + # which is not a valid statement because of the newline. + # Adding brackets makes it a valid expression. + # ( + # a + + # b + # ) + # Line locations will be different than the original, + # which is taken into account later on. + tree = ast.parse(f"(\n{segment}\n)") except SyntaxError: return None
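
Illustrative demo (not part of the patch series): a minimal sketch of the behaviour these changes add, modelled on the new test_many_lines_binary_op test above. The file name demo.py is hypothetical, and the exact anchor spacing depends on the source indentation; without an interpreter built with this patch, only the first line of the failing instruction is underlined.

# demo.py -- hypothetical example; run under a CPython build with this patch applied.
import traceback

def f():
    b, c = 1, "a"
    return (
        b +
        b
    ) + (
        c +
        c +
        c
    )

try:
    f()
except TypeError:
    traceback.print_exc()

# With the patch applied, the traceback is expected to show every source line of
# the failing instruction: the offending "+" between the two parenthesised groups
# (the ") + (" line) is anchored with "^", the rest of the expression with "~",
# and runs of lines far from the anchors are abbreviated as "...<N lines>...".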
