diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs new file mode 100644 index 00000000000..70971c53b5a --- /dev/null +++ b/.git-blame-ignore-revs @@ -0,0 +1,8 @@ +# When making commits that are strictly formatting/style changes, add the +# commit hash here, so git blame can ignore the change. +# See docs for more details: +# https://git-scm.com/docs/git-config#Documentation/git-config.txt-blameignoreRevsFile + +# Example entries: +# # initial black-format +# # rename something internal diff --git a/.gitignore b/.gitignore index e2f217b672b..1fc0e22a320 100644 --- a/.gitignore +++ b/.gitignore @@ -7,7 +7,6 @@ docs/source/api/generated docs/source/config/options docs/source/config/shortcuts/*.csv docs/source/interactive/magics-generated.txt -docs/source/config/shortcuts/*.csv docs/gh-pages jupyter_notebook/notebook/static/mathjax jupyter_notebook/static/style/*.map @@ -24,3 +23,8 @@ __pycache__ .cache .coverage *.swp +.vscode +.pytest_cache +.python-version +venv*/ +.idea/ diff --git a/.mailmap b/.mailmap index 4123a50f6d5..8d4757e6865 100644 --- a/.mailmap +++ b/.mailmap @@ -1,4 +1,5 @@ A. J. Holyoake ajholyoake +Alok Singh Alok Singh <8325708+alok@users.noreply.github.com> Aaron Culich Aaron Culich Aron Ahmadia ahmadia Benjamin Ragan-Kelley @@ -82,6 +83,10 @@ Julia Evans Julia Evans Kester Tong KesterTong Kyle Kelley Kyle Kelley Kyle Kelley rgbkrk +kd2718 +Kory Donati kory donati +Kory Donati Kory Donati +Kory Donati koryd Laurent Dufréchou Laurent Dufréchou Laurent Dufréchou laurent dufrechou <> @@ -89,6 +94,7 @@ Laurent Dufréchou laurent.dufrechou <> Laurent Dufréchou Laurent Dufrechou <> Laurent Dufréchou laurent.dufrechou@gmail.com <> Laurent Dufréchou ldufrechou +Luciana da Costa Marques luciana Lorena Pantano Lorena Luis Pedro Coelho Luis Pedro Coelho Marc Molla marcmolla @@ -97,6 +103,7 @@ Matthias Bussonnier Matthias BUSSONNIER Bussonnier Matthias Matthias Bussonnier Matthias BUSSONNIER Matthias Bussonnier Matthias Bussonnier +Matthias Bussonnier Matthias Bussonnier Michael Droettboom Michael Droettboom Nicholas Bollweg Nicholas Bollweg (Nick) Nicolas Rougier diff --git a/.meeseeksdev.yml b/.meeseeksdev.yml new file mode 100644 index 00000000000..b52022dde07 --- /dev/null +++ b/.meeseeksdev.yml @@ -0,0 +1,22 @@ +users: + LucianaMarques: + can: + - tag +special: + everyone: + can: + - say + - tag + - untag + - close + config: + tag: + only: + - good first issue + - async/await + - backported + - help wanted + - documentation + - notebook + - tab-completion + - windows diff --git a/.travis.yml b/.travis.yml index 724a99b2705..00c5e3f6bbc 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,54 +1,118 @@ # http://travis-ci.org/#!/ipython/ipython language: python +os: linux + +addons: + apt: + packages: + - graphviz + python: - - "nightly" - 3.6 - - 3.5 - - 3.4 - - 3.3 + sudo: false + env: global: - PATH=$TRAVIS_BUILD_DIR/pandoc:$PATH + group: edge + before_install: - - 'if [[ $GROUP != js* ]]; then COVERAGE=""; fi' + - | + # install Python on macOS + if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then + env | sort + if ! 
which python$TRAVIS_PYTHON_VERSION; then + HOMEBREW_NO_AUTO_UPDATE=1 brew tap minrk/homebrew-python-frameworks + HOMEBREW_NO_AUTO_UPDATE=1 brew cask install python-framework-${TRAVIS_PYTHON_VERSION/./} + fi + python3 -m pip install virtualenv + python3 -m virtualenv -p $(which python$TRAVIS_PYTHON_VERSION) ~/travis-env + source ~/travis-env/bin/activate + fi + - python --version + install: - - pip install setuptools pip --upgrade - - pip install -e file://$PWD#egg=ipython[test] --upgrade - - pip install codecov --upgrade - - sudo apt-get install graphviz + - pip install pip --upgrade + - pip install setuptools --upgrade + - pip install -e file://$PWD#egg=ipython[test] --upgrade + - pip install trio curio --upgrade --upgrade-strategy eager + - pip install pytest 'matplotlib !=3.2.0' mypy + - pip install codecov check-manifest --upgrade + script: - - cd /tmp && iptest --coverage xml && cd - - # On the latest Python only, make sure that the docs build. - - | - if [[ "$TRAVIS_PYTHON_VERSION" == "3.6" ]]; then - pip install -r docs/requirements.txt - make -C docs/ html SPHINXOPTS="-W" - fi + - check-manifest + - | + if [[ "$TRAVIS_PYTHON_VERSION" == "nightly" ]]; then + # on nightly fake parso known the grammar + cp /home/travis/virtualenv/python3.9-dev/lib/python3.9/site-packages/parso/python/grammar38.txt /home/travis/virtualenv/python3.9-dev/lib/python3.9/site-packages/parso/python/grammar39.txt + fi + - cd /tmp && iptest --coverage xml && cd - + - pytest IPython + - mypy --ignore-missing-imports -m IPython.terminal.ptutils + # On the latest Python (on Linux) only, make sure that the docs build. + - | + if [[ "$TRAVIS_PYTHON_VERSION" == "3.7" ]] && [[ "$TRAVIS_OS_NAME" == "linux" ]]; then + pip install -r docs/requirements.txt + python tools/fixup_whats_new_pr.py + make -C docs/ html SPHINXOPTS="-W" + fi + after_success: - - cp /tmp/ipy_coverage.xml ./ - - cp /tmp/.coverage ./ - - codecov + - cp /tmp/ipy_coverage.xml ./ + - cp /tmp/.coverage ./ + - codecov matrix: - allow_failures: - - python: nightly + include: + - arch: amd64 + python: "3.7" + dist: xenial + sudo: true + - arch: amd64 + python: "3.8-dev" + dist: xenial + sudo: true + - arch: amd64 + python: "3.7-dev" + dist: xenial + sudo: true + - arch: amd64 + python: "nightly" + dist: xenial + sudo: true + - arch: arm64 + python: "nightly" + dist: bionic + env: ARM64=True + sudo: true + - os: osx + language: generic + python: 3.6 + env: TRAVIS_PYTHON_VERSION=3.6 + - os: osx + language: generic + python: 3.7 + env: TRAVIS_PYTHON_VERSION=3.7 + allow_failures: + - python: nightly before_deploy: - - rm -rf dist/ - - python setup.py sdist - - python setup.py bdist_wheel + - rm -rf dist/ + - python setup.py sdist + - python setup.py bdist_wheel deploy: - provider: releases - api_key: - secure: Y/Ae9tYs5aoBU8bDjN2YrwGG6tCbezj/h3Lcmtx8HQavSbBgXnhnZVRb2snOKD7auqnqjfT/7QMm4ZyKvaOEgyggGktKqEKYHC8KOZ7yp8I5/UMDtk6j9TnXpSqqBxPiud4MDV76SfRYEQiaDoG4tGGvSfPJ9KcNjKrNvSyyxns= - file: dist/* - file_glob: true - skip_cleanup: true - on: - repo: ipython/ipython - all_branches: true # Backports are released from e.g. 5.x branch - tags: true - python: 3.6 # Any version should work, but we only need one + provider: releases + api_key: + secure: Y/Ae9tYs5aoBU8bDjN2YrwGG6tCbezj/h3Lcmtx8HQavSbBgXnhnZVRb2snOKD7auqnqjfT/7QMm4ZyKvaOEgyggGktKqEKYHC8KOZ7yp8I5/UMDtk6j9TnXpSqqBxPiud4MDV76SfRYEQiaDoG4tGGvSfPJ9KcNjKrNvSyyxns= + file: dist/* + file_glob: true + skip_cleanup: true + on: + repo: ipython/ipython + all_branches: true # Backports are released from e.g. 
5.x branch
+      tags: true
+      python: 3.6 # Any version should work, but we only need one
+      condition: $TRAVIS_OS_NAME = "linux"
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 752486042b3..3aecb233319 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -1,3 +1,32 @@
+## Triaging Issues
+
+On the IPython repository, we strive to trust users and give them responsibility.
+By using one of our bots, any user can close issues or add/remove
+labels by mentioning the bot and asking it to do things on their behalf.
+
+To close an issue (or PR), even if you did not create it, use the following:
+
+> @meeseeksdev close
+
+This command can be in the middle of another comment, but must start on its
+own line.
+
+To add labels to an issue, ask the bot to `tag` with a comma-separated list of
+tags to add:
+
+> @meeseeksdev tag windows, documentation
+
+Only tags that already exist can be added. So far, the list is limited to:
+`async/await`, `backported`, `help wanted`, `documentation`, `notebook`,
+`tab-completion`, `windows`
+
+To remove a label, use the `untag` command:
+
+> @meeseeksdev untag windows, documentation
+
+We'll be adding additional capabilities for the bot and will share them here
+when they are ready to be used.
+
 ## Opening an Issue
 
 When opening a new Issue, please take the following steps:
@@ -6,13 +35,13 @@ When opening a new Issue, please take the following steps:
    Keyword searches for your error messages are most helpful.
 2. If possible, try updating to master and reproducing your issue,
    because we may have already fixed it.
-3. Try to include a minimal reproducible test case
+3. Try to include a minimal reproducible test case.
 4. Include relevant system information.  Start with the output of:
 
        python -c "import IPython; print(IPython.sys_info())"
 
-   And include any relevant package versions, depending on the issue,
-   such as matplotlib, numpy, Qt, Qt bindings (PyQt/PySide), tornado, web browser, etc.
+   And include any relevant package versions, depending on the issue, such as
+   matplotlib, numpy, Qt, Qt bindings (PyQt/PySide), tornado, web browser, etc.
 
 ## Pull Requests
 
@@ -26,8 +55,8 @@ Some guidelines on contributing to IPython:
   The worst case is that the PR is closed.
 * Pull Requests should generally be made against master
 * Pull Requests should be tested, if feasible:
-    - bugfixes should include regression tests
-    - new behavior should at least get minimal exercise
+    - bugfixes should include regression tests.
+    - new behavior should at least get minimal exercise.
 * New features and backwards-incompatible changes should be documented by adding
   a new file to the [pr](docs/source/whatsnew/pr) directory, see [the README.md
   there](docs/source/whatsnew/pr/README.md) for details.
@@ -43,3 +72,24 @@ particularly for PRs that affect `IPython.parallel` or Windows.
 
 For more detailed information, see our [GitHub Workflow](https://github.com/ipython/ipython/wiki/Dev:-GitHub-workflow).
 
+## Running Tests
+
+All the tests can be run with
+```shell
+iptest
+```
+
+All the tests for a single module (for example **test_alias**) can be run by using the fully qualified path to the module.
+```shell
+iptest IPython.core.tests.test_alias
+```
+
+Only a single test (for example **test_alias_lifecycle**) within a single file can be run by adding the specific test after a `:` at the end:
+```shell
+iptest IPython.core.tests.test_alias:test_alias_lifecycle
+```
+
+For convenience, the full path to a file can often be used instead of the module path on unix systems.
For example we can run all the tests by using +```shell +iptest IPython/core/tests/test_alias.py +``` diff --git a/COPYING.rst b/COPYING.rst index 59674acdc8d..e5c79ef38f0 100644 --- a/COPYING.rst +++ b/COPYING.rst @@ -3,39 +3,8 @@ ============================= IPython is licensed under the terms of the Modified BSD License (also known as -New or Revised or 3-Clause BSD), as follows: +New or Revised or 3-Clause BSD). See the LICENSE file. -- Copyright (c) 2008-2014, IPython Development Team -- Copyright (c) 2001-2007, Fernando Perez -- Copyright (c) 2001, Janko Hauser -- Copyright (c) 2001, Nathaniel Gray - -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -Redistributions of source code must retain the above copyright notice, this -list of conditions and the following disclaimer. - -Redistributions in binary form must reproduce the above copyright notice, this -list of conditions and the following disclaimer in the documentation and/or -other materials provided with the distribution. - -Neither the name of the IPython Development Team nor the names of its -contributors may be used to endorse or promote products derived from this -software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. About the IPython Development Team ---------------------------------- @@ -45,9 +14,7 @@ Fernando Perez began IPython in 2001 based on code from Janko Hauser the project lead. The IPython Development Team is the set of all contributors to the IPython -project. This includes all of the IPython subprojects. A full list with -details is kept in the documentation directory, in the file -``about/credits.txt``. +project. This includes all of the IPython subprojects. The core team that coordinates development on GitHub can be found here: https://github.com/ipython/. diff --git a/IPython/__init__.py b/IPython/__init__.py index d3a5d63550e..c17ec76a602 100644 --- a/IPython/__init__.py +++ b/IPython/__init__.py @@ -2,7 +2,7 @@ """ IPython: tools for interactive and parallel computing in Python. -http://ipython.org +https://ipython.org """ #----------------------------------------------------------------------------- # Copyright (c) 2008-2011, IPython Development Team. @@ -27,12 +27,13 @@ #----------------------------------------------------------------------------- # Don't forget to also update setup.py when this changes! -if sys.version_info < (3,3): +if sys.version_info < (3, 6): raise ImportError( """ -IPython 6.0+ does not support Python 2.6, 2.7, 3.0, 3.1, or 3.2. +IPython 7.10+ supports Python 3.6 and above. When using Python 2.7, please install IPython 5.x LTS Long Term Support version. 
-Beginning with IPython 6.0, Python 3.3 and above is required. +Python 3.3 and 3.4 were supported up to IPython 6.x. +Python 3.5 was supported with IPython 7.0 to 7.9. See IPython `README.rst` file for more information: @@ -64,6 +65,10 @@ __license__ = release.license __version__ = release.version version_info = release.version_info +# list of CVEs that should have been patched in this release. +# this is informational and should not be relied upon. +__patched_cves__ = {"CVE-2022-21699"} + def embed_kernel(module=None, local_ns=None, **kwargs): """Embed and start an IPython kernel in a given scope. @@ -75,7 +80,7 @@ def embed_kernel(module=None, local_ns=None, **kwargs): Parameters ---------- - module : ModuleType, optional + module : types.ModuleType, optional The module to load into IPython globals (default: caller) local_ns : dict, optional The namespace to load into IPython user namespace (default: caller) diff --git a/IPython/config.py b/IPython/config.py index cf2bacafad1..964f46f10ac 100644 --- a/IPython/config.py +++ b/IPython/config.py @@ -7,7 +7,7 @@ import sys from warnings import warn -from IPython.utils.shimmodule import ShimModule, ShimWarning +from .utils.shimmodule import ShimModule, ShimWarning warn("The `IPython.config` package has been deprecated since IPython 4.0. " "You should import from traitlets.config instead.", ShimWarning) diff --git a/IPython/conftest.py b/IPython/conftest.py new file mode 100644 index 00000000000..8b2af8c020a --- /dev/null +++ b/IPython/conftest.py @@ -0,0 +1,69 @@ +import types +import sys +import builtins +import os +import pytest +import pathlib +import shutil + +from .testing import tools + + +def get_ipython(): + from .terminal.interactiveshell import TerminalInteractiveShell + if TerminalInteractiveShell._instance: + return TerminalInteractiveShell.instance() + + config = tools.default_config() + config.TerminalInteractiveShell.simple_prompt = True + + # Create and initialize our test-friendly IPython instance. + shell = TerminalInteractiveShell.instance(config=config) + return shell + + +@pytest.fixture(scope='session', autouse=True) +def work_path(): + path = pathlib.Path("./tmp-ipython-pytest-profiledir") + os.environ["IPYTHONDIR"] = str(path.absolute()) + if path.exists(): + raise ValueError('IPython dir temporary path already exists ! Did previous test run exit successfully ?') + path.mkdir() + yield + shutil.rmtree(str(path.resolve())) + + +def nopage(strng, start=0, screen_lines=0, pager_cmd=None): + if isinstance(strng, dict): + strng = strng.get("text/plain", "") + print(strng) + + +def xsys(self, cmd): + """Replace the default system call with a capturing one for doctest. + """ + # We use getoutput, but we need to strip it because pexpect captures + # the trailing newline differently from commands.getoutput + print(self.getoutput(cmd, split=False, depth=1).rstrip(), end="", file=sys.stdout) + sys.stdout.flush() + + +# for things to work correctly we would need this as a session fixture; +# unfortunately this will fail on some test that get executed as _collection_ +# time (before the fixture run), in particular parametrized test that contain +# yields. so for now execute at import time. 
+#@pytest.fixture(autouse=True, scope='session')
+def inject():
+
+    builtins.get_ipython = get_ipython
+    builtins._ip = get_ipython()
+    builtins.ip = get_ipython()
+    builtins.ip.system = types.MethodType(xsys, ip)
+    builtins.ip.builtin_trap.activate()
+    from .core import page
+
+    page.pager_page = nopage
+    # yield
+
+
+inject()
diff --git a/IPython/consoleapp.py b/IPython/consoleapp.py
index 14903bdc74c..c2bbe1888f5 100644
--- a/IPython/consoleapp.py
+++ b/IPython/consoleapp.py
@@ -6,7 +6,7 @@
 
 from warnings import warn
 
-warn("The `IPython.consoleapp` package has been deprecated. "
-     "You should import from jupyter_client.consoleapp instead.")
+warn("The `IPython.consoleapp` package has been deprecated since IPython 4.0. "
+     "You should import from jupyter_client.consoleapp instead.", stacklevel=2)
 
 from jupyter_client.consoleapp import *
diff --git a/IPython/core/alias.py b/IPython/core/alias.py
index ee377d5ccf3..2ad990231a0 100644
--- a/IPython/core/alias.py
+++ b/IPython/core/alias.py
@@ -25,7 +25,7 @@
 import sys
 
 from traitlets.config.configurable import Configurable
-from IPython.core.error import UsageError
+from .error import UsageError
 
 from traitlets import List, Instance
 from logging import error
@@ -204,6 +204,8 @@ def __init__(self, shell=None, **kwargs):
     def init_aliases(self):
         # Load default & user aliases
         for name, cmd in self.default_aliases + self.user_aliases:
+            if cmd.startswith('ls ') and self.shell.colors == 'NoColor':
+                cmd = cmd.replace(' --color', '')
             self.soft_define_alias(name, cmd)
 
     @property
diff --git a/IPython/core/application.py b/IPython/core/application.py
index fea2b50f561..4f679df18e3 100644
--- a/IPython/core/application.py
+++ b/IPython/core/application.py
@@ -133,7 +133,7 @@ def _config_file_name_changed(self, change):
     config_file_paths = List(Unicode())
     @default('config_file_paths')
     def _config_file_paths_default(self):
-        return [os.getcwd()]
+        return []
 
     extra_config_file = Unicode(
         help="""Path to an extra config file to load.
@@ -293,7 +293,7 @@ def load_config_file(self, suppress_errors=IPYTHON_SUPPRESS_CONFIG_ERRORS):
         printed on screen. For testing, the suppress_errors option is set
         to False, so errors will make tests fail.
 
-        `supress_errors` default value is to be `None` in which case the
+        `suppress_errors` default value is to be `None` in which case the
         behavior default to the one of `traitlets.Application`.
 
         The default value can be set :
diff --git a/IPython/core/async_helpers.py b/IPython/core/async_helpers.py
new file mode 100644
index 00000000000..fb4cc193250
--- /dev/null
+++ b/IPython/core/async_helpers.py
@@ -0,0 +1,173 @@
+"""
+Async helper functions that are invalid syntax on Python 3.5 and below.
+
+This code is best effort, and may have edge cases not behaving as expected. In
+particular it contains a number of heuristics to detect whether code is
+effectively async and needs to run in an event loop or not.
+
+Some constructs (like top-level `return`, or `yield`) are taken care of
+explicitly to actually raise a SyntaxError and stay as close as possible to
+Python semantics.
+""" + + +import ast +import sys +import inspect +from textwrap import dedent, indent + + +class _AsyncIORunner: + + def __call__(self, coro): + """ + Handler for asyncio autoawait + """ + import asyncio + + return asyncio.get_event_loop().run_until_complete(coro) + + def __str__(self): + return 'asyncio' + +_asyncio_runner = _AsyncIORunner() + + +def _curio_runner(coroutine): + """ + handler for curio autoawait + """ + import curio + + return curio.run(coroutine) + + +def _trio_runner(async_fn): + import trio + + async def loc(coro): + """ + We need the dummy no-op async def to protect from + trio's internal. See https://github.com/python-trio/trio/issues/89 + """ + return await coro + + return trio.run(loc, async_fn) + + +def _pseudo_sync_runner(coro): + """ + A runner that does not really allow async execution, and just advance the coroutine. + + See discussion in https://github.com/python-trio/trio/issues/608, + + Credit to Nathaniel Smith + + """ + try: + coro.send(None) + except StopIteration as exc: + return exc.value + else: + # TODO: do not raise but return an execution result with the right info. + raise RuntimeError( + "{coro_name!r} needs a real async loop".format(coro_name=coro.__name__) + ) + + +def _asyncify(code: str) -> str: + """wrap code in async def definition. + + And setup a bit of context to run it later. + """ + res = dedent( + """ + async def __wrapper__(): + try: + {usercode} + finally: + locals() + """ + ).format(usercode=indent(code, " " * 8)) + return res + + +class _AsyncSyntaxErrorVisitor(ast.NodeVisitor): + """ + Find syntax errors that would be an error in an async repl, but because + the implementation involves wrapping the repl in an async function, it + is erroneously allowed (e.g. yield or return at the top level) + """ + def __init__(self): + if sys.version_info >= (3,8): + raise ValueError('DEPRECATED in Python 3.8+') + self.depth = 0 + super().__init__() + + def generic_visit(self, node): + func_types = (ast.FunctionDef, ast.AsyncFunctionDef) + invalid_types_by_depth = { + 0: (ast.Return, ast.Yield, ast.YieldFrom), + 1: (ast.Nonlocal,) + } + + should_traverse = self.depth < max(invalid_types_by_depth.keys()) + if isinstance(node, func_types) and should_traverse: + self.depth += 1 + super().generic_visit(node) + self.depth -= 1 + elif isinstance(node, invalid_types_by_depth[self.depth]): + raise SyntaxError() + else: + super().generic_visit(node) + + +def _async_parse_cell(cell: str) -> ast.AST: + """ + This is a compatibility shim for pre-3.7 when async outside of a function + is a syntax error at the parse stage. + + It will return an abstract syntax tree parsed as if async and await outside + of a function were not a syntax error. + """ + if sys.version_info < (3, 7): + # Prior to 3.7 you need to asyncify before parse + wrapped_parse_tree = ast.parse(_asyncify(cell)) + return wrapped_parse_tree.body[0].body[0] + else: + return ast.parse(cell) + + +def _should_be_async(cell: str) -> bool: + """Detect if a block of code need to be wrapped in an `async def` + + Attempt to parse the block of code, it it compile we're fine. + Otherwise we wrap if and try to compile. + + If it works, assume it should be async. Otherwise Return False. + + Not handled yet: If the block of code has a return statement as the top + level, it will be seen as async. This is a know limitation. 
+ """ + if sys.version_info > (3, 8): + try: + code = compile(cell, "<>", "exec", flags=getattr(ast,'PyCF_ALLOW_TOP_LEVEL_AWAIT', 0x0)) + return inspect.CO_COROUTINE & code.co_flags == inspect.CO_COROUTINE + except (SyntaxError, MemoryError): + return False + try: + # we can't limit ourself to ast.parse, as it __accepts__ to parse on + # 3.7+, but just does not _compile_ + code = compile(cell, "<>", "exec") + except (SyntaxError, MemoryError): + try: + parse_tree = _async_parse_cell(cell) + + # Raise a SyntaxError if there are top-level return or yields + v = _AsyncSyntaxErrorVisitor() + v.visit(parse_tree) + + except (SyntaxError, MemoryError): + return False + return True + return False diff --git a/IPython/core/compilerop.py b/IPython/core/compilerop.py index 3dc083c90a7..c4771af7303 100644 --- a/IPython/core/compilerop.py +++ b/IPython/core/compilerop.py @@ -35,12 +35,13 @@ import linecache import operator import time +from contextlib import contextmanager #----------------------------------------------------------------------------- # Constants #----------------------------------------------------------------------------- -# Roughtly equal to PyCF_MASK | PyCF_MASK_OBSOLETE as defined in pythonrun.h, +# Roughly equal to PyCF_MASK | PyCF_MASK_OBSOLETE as defined in pythonrun.h, # this is used as a bitmask to extract future-related code flags. PyCF_MASK = functools.reduce(operator.or_, (getattr(__future__, fname).compiler_flag @@ -52,7 +53,7 @@ def code_name(code, number=0): """ Compute a (probably) unique name for code for caching. - + This now expects code to be unicode. """ hash_digest = hashlib.sha1(code.encode("utf-8")).hexdigest() @@ -71,7 +72,7 @@ class CachingCompiler(codeop.Compile): def __init__(self): codeop.Compile.__init__(self) - + # This is ugly, but it must be done this way to allow multiple # simultaneous ipython instances to coexist. Since Python itself # directly accesses the data structures in the linecache module, and @@ -90,10 +91,11 @@ def __init__(self): # stdlib that call it outside our control go through our codepath # (otherwise we'd lose our tracebacks). linecache.checkcache = check_linecache_ipython - + + def ast_parse(self, source, filename='', symbol='exec'): """Parse code to an AST with the current compiler flags active. - + Arguments are exactly the same as ast.parse (in the standard library), and are passed to the built-in compile function.""" return compile(source, filename, symbol, self.flags | PyCF_ONLY_AST, 1) @@ -109,10 +111,10 @@ def compiler_flags(self): """Flags currently active in the compilation process. """ return self.flags - + def cache(self, code, number=0): """Make a name for a block of code, and cache the code. - + Parameters ---------- code : str @@ -120,7 +122,7 @@ def cache(self, code, number=0): number : int A number which forms part of the code's name. Used for the execution counter. - + Returns ------- The name of the cached code (as a string). Pass this as the filename @@ -133,6 +135,21 @@ def cache(self, code, number=0): linecache._ipython_cache[name] = entry return name + @contextmanager + def extra_flags(self, flags): + ## bits that we'll set to 1 + turn_on_bits = ~self.flags & flags + + + self.flags = self.flags | flags + try: + yield + finally: + # turn off only the bits we turned on so that something like + # __future__ that set flags stays. + self.flags &= ~turn_on_bits + + def check_linecache_ipython(*args): """Call linecache.checkcache() safely protecting our cached values. 
""" diff --git a/IPython/core/completer.py b/IPython/core/completer.py index cee7833f6f3..bc114f0f66b 100644 --- a/IPython/core/completer.py +++ b/IPython/core/completer.py @@ -67,9 +67,9 @@ Starting with IPython 6.0, this module can make use of the Jedi library to generate completions both using static analysis of the code, and dynamically -inspecting multiple namespaces. The APIs attached to this new mechanism is -unstable and will raise unless use in an :any:`provisionalcompleter` context -manager. +inspecting multiple namespaces. Jedi is an autocompletion and static analysis +for Python. The APIs attached to this new mechanism is unstable and will +raise unless use in an :any:`provisionalcompleter` context manager. You will find that the following are experimental: @@ -84,7 +84,7 @@ We welcome any feedback on these new API, and we also encourage you to try this module in debug mode (start IPython with ``--Completer.debug=True``) in order -to have extra logging information is :any:`jedi` is crashing, or if current +to have extra logging information if :any:`jedi` is crashing, or if current IPython completer pending deprecations are returning results not yet handled by :any:`jedi` @@ -126,12 +126,12 @@ from contextlib import contextmanager from importlib import import_module -from typing import Iterator, List, Tuple, Iterable, Union +from typing import Iterator, List, Tuple, Iterable from types import SimpleNamespace from traitlets.config.configurable import Configurable from IPython.core.error import TryNext -from IPython.core.inputsplitter import ESC_MAGIC +from IPython.core.inputtransformer2 import ESC_MAGIC from IPython.core.latex_symbols import latex_symbols, reverse_latex_symbol from IPython.core.oinspect import InspectColors from IPython.utils import generics @@ -185,11 +185,11 @@ def provisionalcompleter(action='ignore'): """ - This contest manager has to be used in any place where unstable completer + This context manager has to be used in any place where unstable completer behavior and API may be called. >>> with provisionalcompleter(): - ... completer.do_experimetal_things() # works + ... completer.do_experimental_things() # works >>> completer.do_experimental_things() # raises. @@ -198,12 +198,11 @@ def provisionalcompleter(action='ignore'): By using this context manager you agree that the API in use may change without warning, and that you won't complain if they do so. - You also understand that if the API is not to you liking you should report - a bug to explain your use case upstream and improve the API and will loose - credibility if you complain after the API is make stable. + You also understand that, if the API is not to your liking, you should report + a bug to explain your use case upstream. - We'll be happy to get your feedback , feature request and improvement on - any of the unstable APIs ! + We'll be happy to get your feedback, feature requests, and improvements on + any of the unstable APIs! """ with warnings.catch_warnings(): warnings.filterwarnings(action, category=ProvisionalCompleterWarning) @@ -350,7 +349,7 @@ class Completion: Completion object used and return by IPython completers. .. warning:: Unstable - + This function is unstable, API may change without warning. It will also raise unless use in proper context manager. @@ -578,7 +577,7 @@ class Completer(Configurable): use_jedi = Bool(default_value=JEDI_INSTALLED, help="Experimental: Use Jedi to generate autocompletions. 
" - "Default to True if jedi is installed").tag(config=True) + "Default to True if jedi is installed.").tag(config=True) jedi_compute_type_timeout = Int(default_value=400, help="""Experimental: restrict time (in milliseconds) during which Jedi can compute types. @@ -591,7 +590,7 @@ class Completer(Configurable): 'information for experimental jedi integration.')\ .tag(config=True) - backslash_combining_completions = Bool(True, + backslash_combining_completions = Bool(True, help="Enable unicode completions, e.g. \\alpha . " "Includes completion of latex commands, unicode names, and expanding " "unicode characters back to latex commands.").tag(config=True) @@ -627,6 +626,8 @@ def __init__(self, namespace=None, global_namespace=None, **kwargs): else: self.global_namespace = global_namespace + self.custom_matchers = [] + super(Completer, self).__init__(**kwargs) def complete(self, text, state): @@ -693,7 +694,7 @@ def attr_matches(self, text): # Another option, seems to work great. Catches things like ''. m = re.match(r"(\S+(\.\w+)*)\.(\w*)$", text) - + if m: expr, attr = m.group(1, 3) elif self.greedy: @@ -703,7 +704,7 @@ def attr_matches(self, text): expr, attr = m2.group(1,2) else: return [] - + try: obj = eval(expr, self.namespace) except: @@ -738,7 +739,7 @@ def get__all__entries(obj): words = getattr(obj, '__all__') except: return [] - + return [w for w in words if isinstance(w, str)] @@ -887,14 +888,14 @@ def _safe_isinstance(obj, module, class_name): def back_unicode_name_matches(text): u"""Match unicode characters back to unicode name - + This does ``☃`` -> ``\\snowman`` Note that snowman is not a valid python3 combining character but will be expanded. Though it will not recombine back to the snowman character by the completion machinery. This will not either back-complete standard sequences like \\n, \\b ... - + Used on Python 3 only. """ if len(text)<2: @@ -917,7 +918,7 @@ def back_unicode_name_matches(text): def back_latex_name_matches(text:str): """Match latex characters back to unicode name - + This does ``\\ℵ`` -> ``\\aleph`` Used on Python 3 only. @@ -991,7 +992,9 @@ def _make_signature(completion)-> str: class IPCompleter(Completer): """Extension of the completer class with IPython-specific features""" - + + _names = None + @observe('greedy') def _greedy_changed(self, change): """update the splitter and readline delims when greedy is changed""" @@ -999,36 +1002,39 @@ def _greedy_changed(self, change): self.splitter.delims = GREEDY_DELIMS else: self.splitter.delims = DELIMS - + + dict_keys_only = Bool(False, + help="""Whether to show dict key matches only""") + merge_completions = Bool(True, help="""Whether to merge completion results into a single list - + If False, only the completion results from the first non-empty completer will be returned. """ ).tag(config=True) omit__names = Enum((0,1,2), default_value=2, help="""Instruct the completer to omit private method names - + Specifically, when completing on ``object.``. - + When 2 [default]: all names that start with '_' will be excluded. - + When 1: all 'magic' names (``__foo__``) will be excluded. - + When 0: nothing will be excluded. """ ).tag(config=True) limit_to__all__ = Bool(False, help=""" DEPRECATED as of version 5.0. - + Instruct the completer to use __all__ for the completion - + Specifically, when completing on ``object.``. - + When True: only those names in obj.__all__ will be included. 
- + When False [default]: the __all__ attribute is ignored """, ).tag(config=True) @@ -1061,7 +1067,7 @@ def __init__(self, shell=None, namespace=None, global_namespace=None, secondary optional dict for completions, to handle cases (such as IPython embedded inside functions) where both Python scopes are visible. - + use_readline : bool, optional DEPRECATED, ignored since IPython 6.0, will have no effects """ @@ -1113,14 +1119,19 @@ def __init__(self, shell=None, namespace=None, global_namespace=None, @property def matchers(self): """All active matcher routines for completion""" + if self.dict_keys_only: + return [self.dict_key_matches] + if self.use_jedi: return [ + *self.custom_matchers, self.file_matches, self.magic_matches, self.dict_key_matches, ] else: return [ + *self.custom_matchers, self.python_matches, self.file_matches, self.magic_matches, @@ -1128,10 +1139,15 @@ def matchers(self): self.dict_key_matches, ] - def all_completions(self, text): + def all_completions(self, text) -> List[str]: """ - Wrapper around the complete method for the benefit of emacs. + Wrapper around the completion methods for the benefit of emacs. """ + prefix = text.rpartition('.')[0] + with provisionalcompleter(): + return ['.'.join([prefix, c.text]) if prefix and self.use_jedi else c.text + for c in self.completions(text, len(text))] + return self.complete(text)[1] def _clean_glob(self, text): @@ -1359,18 +1375,18 @@ def _jedi_matches(self, cursor_column:int, cursor_line:int, text:str): try_jedi = True try: - # should we check the type of the node is Error ? + # find the first token in the current tree -- if it is a ' or " then we are in a string + completing_string = False try: - # jedi < 0.11 - from jedi.parser.tree import ErrorLeaf - except ImportError: - # jedi >= 0.11 - from parso.tree import ErrorLeaf + first_child = next(c for c in interpreter._get_module().tree_node.children if hasattr(c, 'value')) + except StopIteration: + pass + else: + # note the value may be ', ", or it may also be ''' or """, or + # in some cases, """what/you/typed..., but all of these are + # strings. + completing_string = len(first_child.value) > 0 and first_child.value[0] in {"'", '"'} - next_to_last_tree = interpreter._get_module().tree_node.children[-2] - completing_string = False - if isinstance(next_to_last_tree, ErrorLeaf): - completing_string = next_to_last_tree.value.lstrip()[0] in {'"', "'"} # if we are in a string jedi is likely not the right candidate for # now. Skip it. 
try_jedi = not completing_string @@ -1532,24 +1548,19 @@ def python_func_kw_matches(self,text): usedNamedArgs.add(token) - # lookup the candidate callable matches either using global_matches - # or attr_matches for dotted names - if len(ids) == 1: - callableMatches = self.global_matches(ids[0]) - else: - callableMatches = self.attr_matches('.'.join(ids[::-1])) argMatches = [] - for callableMatch in callableMatches: - try: - namedArgs = self._default_arguments(eval(callableMatch, - self.namespace)) - except: - continue + try: + callableObj = '.'.join(ids[::-1]) + namedArgs = self._default_arguments(eval(callableObj, + self.namespace)) # Remove used named arguments from the list, no need to show twice for namedArg in set(namedArgs) - usedNamedArgs: if namedArg.startswith(text): argMatches.append(u"%s=" %namedArg) + except: + pass + return argMatches def dict_key_matches(self, text): @@ -1592,7 +1603,7 @@ def get_keys(obj): $ ''' regexps = self.__dict_key_regexps = { - False: re.compile(dict_key_re_fmt % ''' + False: re.compile(dict_key_re_fmt % r''' # identifiers separated by . (?!\d)\w+ (?:\.(?!\d)\w+)* @@ -1621,7 +1632,7 @@ def get_keys(obj): closing_quote, token_offset, matches = match_dict_keys(keys, prefix, self.splitter.delims) if not matches: return matches - + # get the cursor position of # - the text being completed # - the start of the key text @@ -1632,13 +1643,13 @@ def get_keys(obj): completion_start = key_start + token_offset else: key_start = completion_start = match.end() - + # grab the leading prefix, to make sure all completions start with `text` if text_start > key_start: leading = '' else: leading = text[text_start:completion_start] - + # the index of the `[` character bracket_idx = match.end(1) @@ -1657,18 +1668,18 @@ def get_keys(obj): # brackets were opened inside text, maybe close them if not continuation.startswith(']'): suf += ']' - + return [leading + k + suf for k in matches] def unicode_name_matches(self, text): u"""Match Latex-like syntax for unicode characters base on the name of the character. - + This does ``\\GREEK SMALL LETTER ETA`` -> ``η`` Works only on valid python 3 identifier, or on combining characters that will combine to form a valid identifier. - + Used on Python 3 only. """ slashpos = text.rfind('\\') @@ -1686,10 +1697,8 @@ def unicode_name_matches(self, text): def latex_matches(self, text): u"""Match Latex syntax for unicode characters. - - This does both ``\\alp`` -> ``\\alpha`` and ``\\alpha`` -> ``α`` - Used on Python 3 only. + This does both ``\\alp`` -> ``\\alpha`` and ``\\alpha`` -> ``α`` """ slashpos = text.rfind('\\') if slashpos > -1: @@ -1702,7 +1711,8 @@ def latex_matches(self, text): # If a user has partially typed a latex symbol, give them # a full list of options \al -> [\aleph, \alpha] matches = [k for k in latex_symbols if k.startswith(s)] - return s, matches + if matches: + return s, matches return u'', [] def dispatch_custom_completer(self, text): @@ -1758,13 +1768,13 @@ def completions(self, text: str, offset: int)->Iterator[Completion]: Returns an iterator over the possible completions .. warning:: Unstable - + This function is unstable, API may change without warning. It will also raise unless use in proper context manager. Parameters ---------- - + text:str Full text of the current input, multi line string. offset:int @@ -1793,7 +1803,7 @@ def completions(self, text: str, offset: int)->Iterator[Completion]: and usual IPython completion. .. note:: - + Completions are not completely deduplicated yet. 
If identical completions are coming from different sources this function does not ensure that each completion object will only be present once. @@ -1972,8 +1982,8 @@ def _complete(self, *, cursor_line, cursor_pos, line_buffer=None, text=None, # if text is either None or an empty string, rely on the line buffer if (not line_buffer) and full_text: line_buffer = full_text.split('\n')[cursor_line] - if not text: - text = self.splitter.split_line(line_buffer, cursor_pos) + if not text: # issue #11508: check line_buffer before calling split_line + text = self.splitter.split_line(line_buffer, cursor_pos) if line_buffer else '' if self.backslash_combining_completions: # allow deactivation of these on windows. @@ -1983,12 +1993,13 @@ def _complete(self, *, cursor_line, cursor_pos, line_buffer=None, text=None, return latex_text, latex_matches, ['latex_matches']*len(latex_matches), () name_text = '' name_matches = [] - for meth in (self.unicode_name_matches, back_latex_name_matches, back_unicode_name_matches): + # need to add self.fwd_unicode_match() function here when done + for meth in (self.unicode_name_matches, back_latex_name_matches, back_unicode_name_matches, self.fwd_unicode_match): name_text, name_matches = meth(base_text) if name_text: return name_text, name_matches[:MATCHES_LIMIT], \ [meth.__qualname__]*min(len(name_matches), MATCHES_LIMIT), () - + # If no line buffer is given, assume the input text is all there was if line_buffer is None: @@ -2006,7 +2017,7 @@ def _complete(self, *, cursor_line, cursor_pos, line_buffer=None, text=None, # Start with a clean slate of completions matches = [] - custom_res = self.dispatch_custom_completer(text) + # FIXME: we should extend our api to return a dict with completions for # different types of objects. The rlcomplete() method could then # simply collapse the dict into a list for readline, but we'd have @@ -2017,29 +2028,24 @@ def _complete(self, *, cursor_line, cursor_pos, line_buffer=None, text=None, full_text = line_buffer completions = self._jedi_matches( cursor_pos, cursor_line, full_text) - if custom_res is not None: - # did custom completers produce something? - matches = [(m, 'custom') for m in custom_res] + + if self.merge_completions: + matches = [] + for matcher in self.matchers: + try: + matches.extend([(m, matcher.__qualname__) + for m in matcher(text)]) + except: + # Show the ugly traceback if the matcher causes an + # exception, but do NOT crash the kernel! + sys.excepthook(*sys.exc_info()) else: - # Extend the list of completions with the results of each - # matcher, so we return results to the user from all - # namespaces. - if self.merge_completions: - matches = [] - for matcher in self.matchers: - try: - matches.extend([(m, matcher.__qualname__) - for m in matcher(text)]) - except: - # Show the ugly traceback if the matcher causes an - # exception, but do NOT crash the kernel! 
- sys.excepthook(*sys.exc_info()) - else: - for matcher in self.matchers: - matches = [(m, matcher.__qualname__) - for m in matcher(text)] - if matches: - break + for matcher in self.matchers: + matches = [(m, matcher.__qualname__) + for m in matcher(text)] + if matches: + break + seen = set() filtered_matches = set() for m in matches: @@ -2048,13 +2054,39 @@ def _complete(self, *, cursor_line, cursor_pos, line_buffer=None, text=None, filtered_matches.add(m) seen.add(t) - _filtered_matches = sorted( - set(filtered_matches), key=lambda x: completions_sorting_key(x[0]))\ - [:MATCHES_LIMIT] + _filtered_matches = sorted(filtered_matches, key=lambda x: completions_sorting_key(x[0])) + custom_res = [(m, 'custom') for m in self.dispatch_custom_completer(text) or []] + + _filtered_matches = custom_res or _filtered_matches + + _filtered_matches = _filtered_matches[:MATCHES_LIMIT] _matches = [m[0] for m in _filtered_matches] origins = [m[1] for m in _filtered_matches] self.matches = _matches return text, _matches, origins, completions + + def fwd_unicode_match(self, text:str) -> Tuple[str, list]: + if self._names is None: + self._names = [] + for c in range(0,0x10FFFF + 1): + try: + self._names.append(unicodedata.name(chr(c))) + except ValueError: + pass + + slashpos = text.rfind('\\') + # if text starts with slash + if slashpos > -1: + s = text[slashpos+1:] + candidates = [x for x in self._names if x.startswith(s)] + if candidates: + return s, candidates + else: + return '', () + + # if text does not start with slash + else: + return u'', () diff --git a/IPython/core/completerlib.py b/IPython/core/completerlib.py index 3c66d73be45..7860cb67dcb 100644 --- a/IPython/core/completerlib.py +++ b/IPython/core/completerlib.py @@ -30,9 +30,9 @@ from zipimport import zipimporter # Our own imports -from IPython.core.completer import expand_user, compress_user -from IPython.core.error import TryNext -from IPython.utils._process_common import arg_split +from .completer import expand_user, compress_user +from .error import TryNext +from ..utils._process_common import arg_split # FIXME: this should be pulled in with the right call via the component system from IPython import get_ipython @@ -52,7 +52,7 @@ TIMEOUT_GIVEUP = 20 # Regular expression for the python import statement -import_re = re.compile(r'(?P[a-zA-Z_][a-zA-Z0-9_]*?)' +import_re = re.compile(r'(?P[^\W\d]\w*?)' r'(?P[/\\]__init__)?' r'(?P%s)$' % r'|'.join(re.escape(s) for s in _suffixes)) @@ -165,7 +165,7 @@ def try_import(mod: str, only_modules=False) -> List[str]: except: return [] - m_is_init = hasattr(m, '__file__') and '__init__' in m.__file__ + m_is_init = '__init__' in (getattr(m, '__file__', '') or '') completions = [] if (not hasattr(m, '__file__')) or (not only_modules) or m_is_init: @@ -185,7 +185,7 @@ def try_import(mod: str, only_modules=False) -> List[str]: #----------------------------------------------------------------------------- def quick_completer(cmd, completions): - """ Easily create a trivial completer for a command. + r""" Easily create a trivial completer for a command. Takes either a list of completions, or all completions in string (that will be split on whitespace). 
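The completer changes above also add a `custom_matchers` list that third-party code can append callables to; each callable receives the text being completed and returns a list of candidate strings, and its results are merged with the built-in matchers. As a rough illustration only (not part of this diff; it assumes a running IPython session where `get_ipython()` is available, and the matcher name and candidates are made up):

```python
# Hypothetical use of the custom_matchers hook introduced in this diff.
def flag_matcher(text):
    """Toy matcher: offer fixed candidates when the user types 'fla...'."""
    if text.startswith("fla"):
        return ["flag_one", "flag_two"]
    return []

ip = get_ipython()                        # the running InteractiveShell
ip.Completer.custom_matchers.append(flag_matcher)
```
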
diff --git a/IPython/core/crashhandler.py b/IPython/core/crashhandler.py index f3abc1c6fe0..1e0b429d09a 100644 --- a/IPython/core/crashhandler.py +++ b/IPython/core/crashhandler.py @@ -29,6 +29,8 @@ from IPython.utils.sysinfo import sys_info from IPython.utils.py3compat import input +from IPython.core.release import __version__ as version + #----------------------------------------------------------------------------- # Code #----------------------------------------------------------------------------- @@ -68,7 +70,7 @@ """ _lite_message_template = """ -If you suspect this is an IPython bug, please report it at: +If you suspect this is an IPython {version} bug, please report it at: https://github.com/ipython/ipython/issues or send an email to the mailing list at {email} @@ -179,13 +181,14 @@ def __call__(self, etype, evalue, etb): print('Could not create crash report on disk.', file=sys.stderr) return - # Inform user on stderr of what happened - print('\n'+'*'*70+'\n', file=sys.stderr) - print(self.message_template.format(**self.info), file=sys.stderr) + with report: + # Inform user on stderr of what happened + print('\n'+'*'*70+'\n', file=sys.stderr) + print(self.message_template.format(**self.info), file=sys.stderr) + + # Construct report on disk + report.write(self.make_report(traceback)) - # Construct report on disk - report.write(self.make_report(traceback)) - report.close() input("Hit to quit (your terminal may close):") def make_report(self,traceback): @@ -221,5 +224,5 @@ def crash_handler_lite(etype, evalue, tb): else: # we are not in a shell, show generic config config = "c." - print(_lite_message_template.format(email=author_email, config=config), file=sys.stderr) + print(_lite_message_template.format(email=author_email, config=config, version=version), file=sys.stderr) diff --git a/IPython/core/debugger.py b/IPython/core/debugger.py index 0e0d40111c6..a330baa450e 100644 --- a/IPython/core/debugger.py +++ b/IPython/core/debugger.py @@ -153,10 +153,7 @@ def __init__(self, colors=None): # at least raise that limit to 80 chars, which should be enough for # most interactive uses. try: - try: - from reprlib import aRepr # Py 3 - except ImportError: - from repr import aRepr # Py 2 + from reprlib import aRepr aRepr.maxstring = 80 except: # This is only a user-facing convenience, so any error we encounter @@ -176,7 +173,7 @@ def __call__(self): self.debugger.set_trace(sys._getframe().f_back) -RGX_EXTRA_INDENT = re.compile('(?<=\n)\s+') +RGX_EXTRA_INDENT = re.compile(r'(?<=\n)\s+') def strip_indentation(multiline_string): @@ -195,22 +192,6 @@ def wrapper(*args, **kw): return wrapper -def _file_lines(fname): - """Return the contents of a named file as a list of lines. - - This function never raises an IOError exception: if the file can't be - read, it simply returns an empty list.""" - - try: - outfile = open(fname) - except IOError: - return [] - else: - out = outfile.readlines() - outfile.close() - return out - - class Pdb(OldPdb): """Modified Pdb class, does not load readline. @@ -220,7 +201,19 @@ class Pdb(OldPdb): """ def __init__(self, color_scheme=None, completekey=None, - stdin=None, stdout=None, context=5): + stdin=None, stdout=None, context=5, **kwargs): + """Create a new IPython debugger. + + :param color_scheme: Deprecated, do not use. + :param completekey: Passed to pdb.Pdb. + :param stdin: Passed to pdb.Pdb. + :param stdout: Passed to pdb.Pdb. + :param context: Number of lines of source code context to show when + displaying stacktrace information. 
+ :param kwargs: Passed to pdb.Pdb. + The possibilities are python version dependent, see the python + docs for more info. + """ # Parent constructor: try: @@ -230,7 +223,8 @@ def __init__(self, color_scheme=None, completekey=None, except (TypeError, ValueError): raise ValueError("Context must be a positive integer") - OldPdb.__init__(self, completekey, stdin, stdout) + # `kwargs` ensures full compatibility with stdlib's `pdb.Pdb`. + OldPdb.__init__(self, completekey, stdin, stdout, **kwargs) # IPython changes... self.shell = get_ipython() @@ -286,26 +280,31 @@ def __init__(self, color_scheme=None, completekey=None, # Set the prompt - the default prompt is '(Pdb)' self.prompt = prompt + self.skip_hidden = True def set_colors(self, scheme): """Shorthand access to the color table scheme selector method.""" self.color_scheme_table.set_active_scheme(scheme) self.parser.style = scheme + + def hidden_frames(self, stack): + """ + Given an index in the stack return wether it should be skipped. + + This is used in up/down and where to skip frames. + """ + ip_hide = [s[0].f_locals.get("__tracebackhide__", False) for s in stack] + ip_start = [i for i, s in enumerate(ip_hide) if s == "__ipython_bottom__"] + if ip_start: + ip_hide = [h if i > ip_start[0] else True for (i, h) in enumerate(ip_hide)] + return ip_hide + def interaction(self, frame, traceback): try: OldPdb.interaction(self, frame, traceback) except KeyboardInterrupt: - sys.stdout.write('\n' + self.shell.get_exception_only()) - - def new_do_up(self, arg): - OldPdb.do_up(self, arg) - do_u = do_up = decorate_fn_with_doc(new_do_up, OldPdb.do_up) - - def new_do_down(self, arg): - OldPdb.do_down(self, arg) - - do_d = do_down = decorate_fn_with_doc(new_do_down, OldPdb.do_down) + self.stdout.write("\n" + self.shell.get_exception_only()) def new_do_frame(self, arg): OldPdb.do_frame(self, arg) @@ -326,6 +325,8 @@ def new_do_restart(self, arg): return self.do_quit(arg) def print_stack_trace(self, context=None): + Colors = self.color_scheme_table.active_colors + ColorsNormal = Colors.Normal if context is None: context = self.context try: @@ -335,12 +336,25 @@ def print_stack_trace(self, context=None): except (TypeError, ValueError): raise ValueError("Context must be a positive integer") try: - for frame_lineno in self.stack: + skipped = 0 + for hidden, frame_lineno in zip(self.hidden_frames(self.stack), self.stack): + if hidden and self.skip_hidden: + skipped += 1 + continue + if skipped: + print( + f"{Colors.excName} [... skipping {skipped} hidden frame(s)]{ColorsNormal}\n" + ) + skipped = 0 self.print_stack_entry(frame_lineno, context=context) + if skipped: + print( + f"{Colors.excName} [... 
skipping {skipped} hidden frame(s)]{ColorsNormal}\n" + ) except KeyboardInterrupt: pass - def print_stack_entry(self,frame_lineno, prompt_prefix='\n-> ', + def print_stack_entry(self, frame_lineno, prompt_prefix='\n-> ', context=None): if context is None: context = self.context @@ -350,7 +364,7 @@ def print_stack_entry(self,frame_lineno, prompt_prefix='\n-> ', raise ValueError("Context must be a positive integer") except (TypeError, ValueError): raise ValueError("Context must be a positive integer") - print(self.format_stack_entry(frame_lineno, '', context)) + print(self.format_stack_entry(frame_lineno, '', context), file=self.stdout) # vds: >> frame, lineno = frame_lineno @@ -364,9 +378,9 @@ def format_stack_entry(self, frame_lineno, lprefix=': ', context=None): try: context=int(context) if context <= 0: - print("Context must be a positive integer") + print("Context must be a positive integer", file=self.stdout) except (TypeError, ValueError): - print("Context must be a positive integer") + print("Context must be a positive integer", file=self.stdout) try: import reprlib # Py 3 except ImportError: @@ -488,11 +502,21 @@ def print_list_lines(self, filename, first, last): src.append(line) self.lineno = lineno - print(''.join(src)) + print(''.join(src), file=self.stdout) except KeyboardInterrupt: pass + def do_skip_hidden(self, arg): + """ + Change whether or not we should skip frames with the + __tracebackhide__ attribute. + """ + if arg.strip().lower() in ("true", "yes"): + self.skip_hidden = True + elif arg.strip().lower() in ("false", "no"): + self.skip_hidden = False + def do_list(self, arg): """Print lines of code from the current stack frame """ @@ -511,7 +535,7 @@ def do_list(self, arg): else: first = max(1, int(x) - 5) except: - print('*** Error in argument:', repr(arg)) + print('*** Error in argument:', repr(arg), file=self.stdout) return elif self.lineno is None: first = max(1, self.curframe.f_lineno - 5) @@ -628,13 +652,148 @@ def do_where(self, arg): Take a number as argument as an (optional) number of context line to print""" if arg: - context = int(arg) + try: + context = int(arg) + except ValueError as err: + self.error(err) + return self.print_stack_trace(context) else: self.print_stack_trace() do_w = do_where + def stop_here(self, frame): + hidden = False + if self.skip_hidden: + hidden = frame.f_locals.get("__tracebackhide__", False) + if hidden: + Colors = self.color_scheme_table.active_colors + ColorsNormal = Colors.Normal + print(f"{Colors.excName} [... skipped 1 hidden frame]{ColorsNormal}\n") + + return super().stop_here(frame) + + def do_up(self, arg): + """u(p) [count] + Move the current frame count (default one) levels up in the + stack trace (to an older frame). + + Will skip hidden frames. + """ + ## modified version of upstream that skips + # frames with __tracebackide__ + if self.curindex == 0: + self.error("Oldest frame") + return + try: + count = int(arg or 1) + except ValueError: + self.error("Invalid frame count (%s)" % arg) + return + skipped = 0 + if count < 0: + _newframe = 0 + else: + _newindex = self.curindex + counter = 0 + hidden_frames = self.hidden_frames(self.stack) + for i in range(self.curindex - 1, -1, -1): + frame = self.stack[i][0] + if hidden_frames[i] and self.skip_hidden: + skipped += 1 + continue + counter += 1 + if counter >= count: + break + else: + # if no break occured. 
+ self.error("all frames above hidden") + return + + Colors = self.color_scheme_table.active_colors + ColorsNormal = Colors.Normal + _newframe = i + self._select_frame(_newframe) + if skipped: + print( + f"{Colors.excName} [... skipped {skipped} hidden frame(s)]{ColorsNormal}\n" + ) + + def do_down(self, arg): + """d(own) [count] + Move the current frame count (default one) levels down in the + stack trace (to a newer frame). + + Will skip hidden frames. + """ + if self.curindex + 1 == len(self.stack): + self.error("Newest frame") + return + try: + count = int(arg or 1) + except ValueError: + self.error("Invalid frame count (%s)" % arg) + return + if count < 0: + _newframe = len(self.stack) - 1 + else: + _newindex = self.curindex + counter = 0 + skipped = 0 + hidden_frames = self.hidden_frames(self.stack) + for i in range(self.curindex + 1, len(self.stack)): + frame = self.stack[i][0] + if hidden_frames[i] and self.skip_hidden: + skipped += 1 + continue + counter += 1 + if counter >= count: + break + else: + self.error("all frames bellow hidden") + return + + Colors = self.color_scheme_table.active_colors + ColorsNormal = Colors.Normal + if skipped: + print( + f"{Colors.excName} [... skipped {skipped} hidden frame(s)]{ColorsNormal}\n" + ) + _newframe = i + + self._select_frame(_newframe) + + do_d = do_down + do_u = do_up + +class InterruptiblePdb(Pdb): + """Version of debugger where KeyboardInterrupt exits the debugger altogether.""" + + def cmdloop(self): + """Wrap cmdloop() such that KeyboardInterrupt stops the debugger.""" + try: + return OldPdb.cmdloop(self) + except KeyboardInterrupt: + self.stop_here = lambda frame: False + self.do_quit("") + sys.settrace(None) + self.quitting = False + raise + + def _cmdloop(self): + while True: + try: + # keyboard interrupts allow for an easy way to cancel + # the current command, so allow them during interactive input + self.allow_kbdint = True + self.cmdloop() + self.allow_kbdint = False + break + except KeyboardInterrupt: + self.message('--KeyboardInterrupt--') + raise + def set_trace(frame=None): """ diff --git a/IPython/core/display.py b/IPython/core/display.py index 1b98dbb3a37..424414a662f 100644 --- a/IPython/core/display.py +++ b/IPython/core/display.py @@ -13,6 +13,8 @@ import sys import warnings from copy import deepcopy +from os.path import splitext +from pathlib import Path, PurePath from IPython.utils.py3compat import cast_unicode from IPython.testing.skipdoctest import skip_doctest @@ -238,16 +240,22 @@ def display(*objs, include=None, exclude=None, metadata=None, transient=None, di want to use. Here is a list of the names of the special methods and the values they must return: - - `_repr_html_`: return raw HTML as a string - - `_repr_json_`: return a JSONable dict - - `_repr_jpeg_`: return raw JPEG data - - `_repr_png_`: return raw PNG data - - `_repr_svg_`: return raw SVG data as a string - - `_repr_latex_`: return LaTeX commands in a string surrounded by "$". + - `_repr_html_`: return raw HTML as a string, or a tuple (see below). + - `_repr_json_`: return a JSONable dict, or a tuple (see below). + - `_repr_jpeg_`: return raw JPEG data, or a tuple (see below). + - `_repr_png_`: return raw PNG data, or a tuple (see below). + - `_repr_svg_`: return raw SVG data as a string, or a tuple (see below). + - `_repr_latex_`: return LaTeX commands in a string surrounded by "$", + or a tuple (see below). - `_repr_mimebundle_`: return a full mimebundle containing the mapping from all mimetypes to data. 
Use this for any mime-type not listed above. + The above functions may also return the object's metadata alonside the + data. If the metadata is available, the functions will return a tuple + containing the data and metadata, in that order. If there is no metadata + available, then the functions will return the data only. + When you are directly writing your own classes, you can adapt them for display in IPython by following the above approach. But in practice, you often need to work with existing classes that you can't easily modify. @@ -288,6 +296,13 @@ def display(*objs, include=None, exclude=None, metadata=None, transient=None, di if transient: kwargs['transient'] = transient + if not objs and display_id: + # if given no objects, but still a request for a display_id, + # we assume the user wants to insert an empty output that + # can be updated later + objs = [{}] + raw = True + if not raw: format = InteractiveShell.instance().display_formatter.format @@ -587,6 +602,9 @@ def __init__(self, data=None, url=None, filename=None, metadata=None): metadata : dict Dict of metadata associated to be the object when displayed """ + if isinstance(data, (Path, PurePath)): + data = str(data) + if data is not None and isinstance(data, str): if data.startswith('http') and url is None: url = data @@ -597,9 +615,12 @@ def __init__(self, data=None, url=None, filename=None, metadata=None): filename = data data = None - self.data = data self.url = url self.filename = filename + # because of @data.setter methods in + # subclasses ensure url and filename are set + # before assigning to self.data + self.data = data if metadata is not None: self.metadata = metadata @@ -634,23 +655,36 @@ def reload(self): with open(self.filename, self._read_flags) as f: self.data = f.read() elif self.url is not None: - try: - # Deferred import - from urllib.request import urlopen - response = urlopen(self.url) - self.data = response.read() - # extract encoding from header, if there is one: - encoding = None + # Deferred import + from urllib.request import urlopen + response = urlopen(self.url) + data = response.read() + # extract encoding from header, if there is one: + encoding = None + if 'content-type' in response.headers: for sub in response.headers['content-type'].split(';'): sub = sub.strip() if sub.startswith('charset'): encoding = sub.split('=')[-1].strip() break - # decode data, if an encoding was specified - if encoding: - self.data = self.data.decode(encoding, 'replace') - except: - self.data = None + if 'content-encoding' in response.headers: + # TODO: do deflate? + if 'gzip' in response.headers['content-encoding']: + import gzip + from io import BytesIO + with gzip.open(BytesIO(data), 'rt', encoding=encoding) as fp: + encoding = None + data = fp.read() + + # decode data, if an encoding was specified + # We only touch self.data once since + # subclasses such as SVG have @data.setter methods + # that transform self.data into ... well svg. + if encoding: + self.data = data.decode(encoding, 'replace') + else: + self.data = data + class TextDisplayObject(DisplayObject): """Validate that display data is text""" @@ -666,8 +700,25 @@ def _repr_pretty_(self, pp, cycle): class HTML(TextDisplayObject): + def __init__(self, data=None, url=None, filename=None, metadata=None): + def warn(): + if not data: + return False + + # + # Avoid calling lower() on the entire data, because it could be a + # long string and we're only interested in its beginning and end. 
+ # + prefix = data[:10].lower() + suffix = data[-10:].lower() + return prefix.startswith("
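Earlier in this diff, the `display()` docstring is updated to note that the `_repr_*_` methods may return either bare data or a `(data, metadata)` tuple. A minimal sketch of a class using that convention follows; the class name and metadata keys are illustrative assumptions, not part of the diff:

```python
# Hypothetical rich object whose PNG repr also carries display metadata.
class Thumbnail:
    def __init__(self, png_bytes):
        self.png_bytes = png_bytes

    def _repr_png_(self):
        # Return (data, metadata) instead of bare data; the formatter
        # unpacks the pair and forwards the metadata to the frontend.
        return self.png_bytes, {"width": 120, "height": 80}
```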