diff --git a/CHANGELOG.md b/CHANGELOG.md index dfefc690195c..89394a75e278 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,49 @@ ## Next release +## Mypy 1.13 + +We’ve just uploaded mypy 1.13 to the Python Package Index ([PyPI](https://pypi.org/project/mypy/)). +Mypy is a static type checker for Python. You can install it as follows: + + python3 -m pip install -U mypy + +You can read the full documentation for this release on [Read the Docs](http://mypy.readthedocs.io). + +Note that unlike typical releases, Mypy 1.13 does not have any changes to type checking semantics +from 1.12.1. + +### Improved performance + +Mypy 1.13 contains several performance improvements. Users can expect mypy to be 5-20% faster. +In environments with long search paths (such as environments using many editable installs), mypy +can be significantly faster, e.g. 2.2x faster in the use case targeted by these improvements. + +Mypy 1.13 allows use of the `orjson` library for handling the cache instead of the stdlib `json`, +for improved performance. You can ensure the presence of `orjson` using the `faster-cache` extra: + + python3 -m pip install -U mypy[faster-cache] + +Mypy may depend on `orjson` by default in the future. + +These improvements were contributed by Shantanu. + +List of changes: +* Significantly speed up file handling error paths (Shantanu, PR [17920](https://github.com/python/mypy/pull/17920)) +* Use fast path in modulefinder more often (Shantanu, PR [17950](https://github.com/python/mypy/pull/17950)) +* Let mypyc optimise os.path.join (Shantanu, PR [17949](https://github.com/python/mypy/pull/17949)) +* Make is_sub_path faster (Shantanu, PR [17962](https://github.com/python/mypy/pull/17962)) +* Speed up stubs suggestions (Shantanu, PR [17965](https://github.com/python/mypy/pull/17965)) +* Use sha1 for hashing (Shantanu, PR [17953](https://github.com/python/mypy/pull/17953)) +* Use orjson instead of json, when available (Shantanu, PR [17955](https://github.com/python/mypy/pull/17955)) +* Add faster-cache extra, test in CI (Shantanu, PR [17978](https://github.com/python/mypy/pull/17978)) + +### Acknowledgements +Thanks to all mypy contributors who contributed to this release: + +- Shantanu Jain +- Jukka Lehtosalo + ## Mypy 1.12 We’ve just uploaded mypy 1.12 to the Python Package Index ([PyPI](https://pypi.org/project/mypy/)). Mypy is a static type @@ -261,6 +304,12 @@ This feature was contributed by Ivan Levkivskyi (PR [17457](https://github.com/p Please see [git log](https://github.com/python/typeshed/commits/main?after=91a58b07cdd807b1d965e04ba85af2adab8bf924+0&branch=main&path=stdlib) for full list of standard library typeshed stub changes. 
+### Mypy 1.12.1 + * Fix crash when showing partially analyzed type in error message (Ivan Levkivskyi, PR [17961](https://github.com/python/mypy/pull/17961)) + * Fix iteration over union (when self type is involved) (Shantanu, PR [17976](https://github.com/python/mypy/pull/17976)) + * Fix type object with type var default in union context (Jukka Lehtosalo, PR [17991](https://github.com/python/mypy/pull/17991)) + * Revert change to `os.path` stubs affecting use of `os.PathLike[Any]` (Shantanu, PR [17995](https://github.com/python/mypy/pull/17995)) + ### Acknowledgements Thanks to all mypy contributors who contributed to this release: diff --git a/misc/apply-cache-diff.py b/misc/apply-cache-diff.py index 29c55247de92..8ede9766bd06 100644 --- a/misc/apply-cache-diff.py +++ b/misc/apply-cache-diff.py @@ -8,13 +8,13 @@ from __future__ import annotations import argparse -import json import os import sys sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) from mypy.metastore import FilesystemMetadataStore, MetadataStore, SqliteMetadataStore +from mypy.util import json_dumps, json_loads def make_cache(input_dir: str, sqlite: bool) -> MetadataStore: @@ -26,10 +26,10 @@ def make_cache(input_dir: str, sqlite: bool) -> MetadataStore: def apply_diff(cache_dir: str, diff_file: str, sqlite: bool = False) -> None: cache = make_cache(cache_dir, sqlite) - with open(diff_file) as f: - diff = json.load(f) + with open(diff_file, "rb") as f: + diff = json_loads(f.read()) - old_deps = json.loads(cache.read("@deps.meta.json")) + old_deps = json_loads(cache.read("@deps.meta.json")) for file, data in diff.items(): if data is None: @@ -37,10 +37,10 @@ def apply_diff(cache_dir: str, diff_file: str, sqlite: bool = False) -> None: else: cache.write(file, data) if file.endswith(".meta.json") and "@deps" not in file: - meta = json.loads(data) + meta = json_loads(data) old_deps["snapshot"][meta["id"]] = meta["hash"] - cache.write("@deps.meta.json", json.dumps(old_deps)) + cache.write("@deps.meta.json", json_dumps(old_deps)) cache.commit() diff --git a/misc/diff-cache.py b/misc/diff-cache.py index 15d3e5a83983..8441caf81304 100644 --- a/misc/diff-cache.py +++ b/misc/diff-cache.py @@ -8,7 +8,6 @@ from __future__ import annotations import argparse -import json import os import sys from collections import defaultdict @@ -17,6 +16,7 @@ sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) from mypy.metastore import FilesystemMetadataStore, MetadataStore, SqliteMetadataStore +from mypy.util import json_dumps, json_loads def make_cache(input_dir: str, sqlite: bool) -> MetadataStore: @@ -33,7 +33,7 @@ def merge_deps(all: dict[str, set[str]], new: dict[str, set[str]]) -> None: def load(cache: MetadataStore, s: str) -> Any: data = cache.read(s) - obj = json.loads(data) + obj = json_loads(data) if s.endswith(".meta.json"): # For meta files, zero out the mtimes and sort the # dependencies to avoid spurious conflicts @@ -73,7 +73,7 @@ def main() -> None: type_misses: dict[str, int] = defaultdict(int) type_hits: dict[str, int] = defaultdict(int) - updates: dict[str, str | None] = {} + updates: dict[str, bytes | None] = {} deps1: dict[str, set[str]] = {} deps2: dict[str, set[str]] = {} @@ -96,7 +96,7 @@ def main() -> None: # so we can produce a much smaller direct diff of them. if ".deps." 
not in s: if obj2 is not None: - updates[s] = json.dumps(obj2) + updates[s] = json_dumps(obj2) else: updates[s] = None elif obj2: @@ -122,7 +122,7 @@ def main() -> None: merge_deps(new_deps, root_deps) new_deps_json = {k: list(v) for k, v in new_deps.items() if v} - updates["@root.deps.json"] = json.dumps(new_deps_json) + updates["@root.deps.json"] = json_dumps(new_deps_json) # Drop updates to deps.meta.json for size reasons. The diff # applier will manually fix it up. @@ -136,8 +136,8 @@ def main() -> None: print("hits", type_hits) print("misses", type_misses) - with open(args.output, "w") as f: - json.dump(updates, f) + with open(args.output, "wb") as f: + f.write(json_dumps(updates)) if __name__ == "__main__": diff --git a/mypy/build.py b/mypy/build.py index 733f0685792e..ac6471d2383f 100644 --- a/mypy/build.py +++ b/mypy/build.py @@ -59,7 +59,7 @@ get_mypy_comments, hash_digest, is_stub_package_file, - is_sub_path, + is_sub_path_normabs, is_typeshed_file, module_prefix, read_py_file, @@ -92,9 +92,10 @@ from mypy.plugins.default import DefaultPlugin from mypy.renaming import LimitedVariableRenameVisitor, VariableRenameVisitor from mypy.stats import dump_type_stats -from mypy.stubinfo import legacy_bundled_packages, non_bundled_packages, stub_distribution_name +from mypy.stubinfo import is_module_from_legacy_bundled_package, stub_distribution_name from mypy.types import Type from mypy.typestate import reset_global_state, type_state +from mypy.util import json_dumps, json_loads from mypy.version import __version__ # Switch to True to produce debug output related to fine-grained incremental @@ -664,7 +665,7 @@ def __init__( for module in CORE_BUILTIN_MODULES: if options.use_builtins_fixtures: continue - path = self.find_module_cache.find_module(module) + path = self.find_module_cache.find_module(module, fast_path=True) if not isinstance(path, str): raise CompileError( [f"Failed to find builtin module {module}, perhaps typeshed is broken?"] @@ -736,8 +737,8 @@ def maybe_swap_for_shadow_path(self, path: str) -> str: shadow_file = self.shadow_equivalence_map.get(path) return shadow_file if shadow_file else path - def get_stat(self, path: str) -> os.stat_result: - return self.fscache.stat(self.maybe_swap_for_shadow_path(path)) + def get_stat(self, path: str) -> os.stat_result | None: + return self.fscache.stat_or_none(self.maybe_swap_for_shadow_path(path)) def getmtime(self, path: str) -> int: """Return a file's mtime; but 0 in bazel mode. @@ -858,7 +859,7 @@ def load_fine_grained_deps(self, id: str) -> dict[str, set[str]]: t0 = time.time() if id in self.fg_deps_meta: # TODO: Assert deps file wasn't changed. 
- deps = json.loads(self.metastore.read(self.fg_deps_meta[id]["path"])) + deps = json_loads(self.metastore.read(self.fg_deps_meta[id]["path"])) else: deps = {} val = {k: set(v) for k, v in deps.items()} @@ -911,8 +912,8 @@ def stats_summary(self) -> Mapping[str, object]: return self.stats -def deps_to_json(x: dict[str, set[str]]) -> str: - return json.dumps({k: list(v) for k, v in x.items()}, separators=(",", ":")) +def deps_to_json(x: dict[str, set[str]]) -> bytes: + return json_dumps({k: list(v) for k, v in x.items()}) # File for storing metadata about all the fine-grained dependency caches @@ -980,7 +981,7 @@ def write_deps_cache( meta = {"snapshot": meta_snapshot, "deps_meta": fg_deps_meta} - if not metastore.write(DEPS_META_FILE, json.dumps(meta, separators=(",", ":"))): + if not metastore.write(DEPS_META_FILE, json_dumps(meta)): manager.log(f"Error writing fine-grained deps meta JSON file {DEPS_META_FILE}") error = True @@ -1048,7 +1049,7 @@ def generate_deps_for_cache(manager: BuildManager, graph: Graph) -> dict[str, di def write_plugins_snapshot(manager: BuildManager) -> None: """Write snapshot of versions and hashes of currently active plugins.""" - snapshot = json.dumps(manager.plugins_snapshot, separators=(",", ":")) + snapshot = json_dumps(manager.plugins_snapshot) if not manager.metastore.write(PLUGIN_SNAPSHOT_FILE, snapshot): manager.errors.set_file(_cache_dir_prefix(manager.options), None, manager.options) manager.errors.report(0, 0, "Error writing plugins snapshot", blocker=True) @@ -1079,8 +1080,8 @@ def read_quickstart_file( # just ignore it. raw_quickstart: dict[str, Any] = {} try: - with open(options.quickstart_file) as f: - raw_quickstart = json.load(f) + with open(options.quickstart_file, "rb") as f: + raw_quickstart = json_loads(f.read()) quickstart = {} for file, (x, y, z) in raw_quickstart.items(): @@ -1148,10 +1149,10 @@ def _load_json_file( manager.add_stats(metastore_read_time=time.time() - t0) # Only bother to compute the log message if we are logging it, since it could be big if manager.verbosity() >= 2: - manager.trace(log_success + data.rstrip()) + manager.trace(log_success + data.rstrip().decode()) try: t1 = time.time() - result = json.loads(data) + result = json_loads(data) manager.add_stats(data_json_load_time=time.time() - t1) except json.JSONDecodeError: manager.errors.set_file(file, None, manager.options) @@ -1343,8 +1344,8 @@ def find_cache_meta(id: str, path: str, manager: BuildManager) -> CacheMeta | No # So that plugins can return data with tuples in it without # things silently always invalidating modules, we round-trip # the config data. This isn't beautiful. 
- plugin_data = json.loads( - json.dumps(manager.plugin.report_config_data(ReportConfigContext(id, path, is_check=True))) + plugin_data = json_loads( + json_dumps(manager.plugin.report_config_data(ReportConfigContext(id, path, is_check=True))) ) if m.plugin_data != plugin_data: manager.log(f"Metadata abandoned for {id}: plugin configuration differs") @@ -1394,9 +1395,9 @@ def validate_meta( if bazel: # Normalize path under bazel to make sure it isn't absolute path = normpath(path, manager.options) - try: - st = manager.get_stat(path) - except OSError: + + st = manager.get_stat(path) + if st is None: return None if not stat.S_ISDIR(st.st_mode) and not stat.S_ISREG(st.st_mode): manager.log(f"Metadata abandoned for {id}: file or directory {path} does not exist") @@ -1478,10 +1479,7 @@ def validate_meta( "ignore_all": meta.ignore_all, "plugin_data": meta.plugin_data, } - if manager.options.debug_cache: - meta_str = json.dumps(meta_dict, indent=2, sort_keys=True) - else: - meta_str = json.dumps(meta_dict, separators=(",", ":")) + meta_bytes = json_dumps(meta_dict, manager.options.debug_cache) meta_json, _, _ = get_cache_names(id, path, manager.options) manager.log( "Updating mtime for {}: file {}, meta {}, mtime {}".format( @@ -1489,7 +1487,7 @@ def validate_meta( ) ) t1 = time.time() - manager.metastore.write(meta_json, meta_str) # Ignore errors, just an optimization. + manager.metastore.write(meta_json, meta_bytes) # Ignore errors, just an optimization. manager.add_stats(validate_update_time=time.time() - t1, validate_munging_time=t1 - t0) return meta @@ -1507,13 +1505,6 @@ def compute_hash(text: str) -> str: return hash_digest(text.encode("utf-8")) -def json_dumps(obj: Any, debug_cache: bool) -> str: - if debug_cache: - return json.dumps(obj, indent=2, sort_keys=True) - else: - return json.dumps(obj, sort_keys=True, separators=(",", ":")) - - def write_cache( id: str, path: str, @@ -1566,16 +1557,15 @@ def write_cache( # Serialize data and analyze interface data = tree.serialize() - data_str = json_dumps(data, manager.options.debug_cache) - interface_hash = compute_hash(data_str) + data_bytes = json_dumps(data, manager.options.debug_cache) + interface_hash = hash_digest(data_bytes) plugin_data = manager.plugin.report_config_data(ReportConfigContext(id, path, is_check=False)) # Obtain and set up metadata - try: - st = manager.get_stat(path) - except OSError as err: - manager.log(f"Cannot get stat for {path}: {err}") + st = manager.get_stat(path) + if st is None: + manager.log(f"Cannot get stat for {path}") # Remove apparently-invalid cache files. # (This is purely an optimization.) for filename in [data_json, meta_json]: @@ -1592,7 +1582,7 @@ def write_cache( manager.trace(f"Interface for {id} is unchanged") else: manager.trace(f"Interface for {id} has changed") - if not metastore.write(data_json, data_str): + if not metastore.write(data_json, data_bytes): # Most likely the error is the replace() call # (see https://github.com/python/mypy/issues/3215). manager.log(f"Error writing data JSON file {data_json}") @@ -2668,17 +2658,13 @@ def find_module_and_diagnose( ignore_missing_imports = options.ignore_missing_imports - id_components = id.split(".") # Don't honor a global (not per-module) ignore_missing_imports # setting for modules that used to have bundled stubs, as # otherwise updating mypy can silently result in new false # negatives. (Unless there are stubs but they are incomplete.) 
global_ignore_missing_imports = manager.options.ignore_missing_imports if ( - any( - ".".join(id_components[:i]) in legacy_bundled_packages - for i in range(len(id_components), 0, -1) - ) + is_module_from_legacy_bundled_package(id) and global_ignore_missing_imports and not options.ignore_missing_imports_per_module and result is ModuleNotFoundReason.APPROVED_STUBS_NOT_INSTALLED @@ -2726,7 +2712,9 @@ def exist_added_packages(suppressed: list[str], manager: BuildManager, options: def find_module_simple(id: str, manager: BuildManager) -> str | None: """Find a filesystem path for module `id` or `None` if not found.""" - x = find_module_with_reason(id, manager) + t0 = time.time() + x = manager.find_module_cache.find_module(id, fast_path=True) + manager.add_stats(find_module_time=time.time() - t0, find_module_calls=1) if isinstance(x, ModuleNotFoundReason): return None return x @@ -2735,7 +2723,7 @@ def find_module_simple(id: str, manager: BuildManager) -> str | None: def find_module_with_reason(id: str, manager: BuildManager) -> ModuleSearchResult: """Find a filesystem path for module `id` or the reason it can't be found.""" t0 = time.time() - x = manager.find_module_cache.find_module(id) + x = manager.find_module_cache.find_module(id, fast_path=False) manager.add_stats(find_module_time=time.time() - t0, find_module_calls=1) return x @@ -2797,18 +2785,15 @@ def module_not_found( code = codes.IMPORT errors.report(line, 0, msg.format(module=target), code=code) - components = target.split(".") - for i in range(len(components), 0, -1): - module = ".".join(components[:i]) - if module in legacy_bundled_packages or module in non_bundled_packages: - break - + dist = stub_distribution_name(target) for note in notes: if "{stub_dist}" in note: - note = note.format(stub_dist=stub_distribution_name(module)) + assert dist is not None + note = note.format(stub_dist=dist) errors.report(line, 0, note, severity="note", only_once=True, code=code) if reason is ModuleNotFoundReason.APPROVED_STUBS_NOT_INSTALLED: - manager.missing_stub_packages.add(stub_distribution_name(module)) + assert dist is not None + manager.missing_stub_packages.add(dist) errors.set_import_context(save_import_context) @@ -3543,10 +3528,9 @@ def is_silent_import_module(manager: BuildManager, path: str) -> bool: if manager.options.no_silence_site_packages: return False # Silence errors in site-package dirs and typeshed - return any( - is_sub_path(path, dir) - for dir in manager.search_paths.package_path + manager.search_paths.typeshed_path - ) + if any(is_sub_path_normabs(path, dir) for dir in manager.search_paths.package_path): + return True + return any(is_sub_path_normabs(path, dir) for dir in manager.search_paths.typeshed_path) def write_undocumented_ref_info( @@ -3567,4 +3551,4 @@ def write_undocumented_ref_info( assert not ref_info_file.startswith(".") deps_json = get_undocumented_ref_info_json(state.tree, type_map) - metastore.write(ref_info_file, json.dumps(deps_json, separators=(",", ":"))) + metastore.write(ref_info_file, json_dumps(deps_json)) diff --git a/mypy/fscache.py b/mypy/fscache.py index 15679ad03e85..8251f4bd9488 100644 --- a/mypy/fscache.py +++ b/mypy/fscache.py @@ -51,8 +51,8 @@ def set_package_root(self, package_root: list[str]) -> None: def flush(self) -> None: """Start another transaction and empty all caches.""" - self.stat_cache: dict[str, os.stat_result] = {} - self.stat_error_cache: dict[str, OSError] = {} + self.stat_or_none_cache: dict[str, os.stat_result | None] = {} + self.listdir_cache: dict[str, list[str]] = {} 
self.listdir_error_cache: dict[str, OSError] = {} self.isfile_case_cache: dict[str, bool] = {} @@ -62,24 +62,21 @@ def flush(self) -> None: self.hash_cache: dict[str, str] = {} self.fake_package_cache: set[str] = set() - def stat(self, path: str) -> os.stat_result: - if path in self.stat_cache: - return self.stat_cache[path] - if path in self.stat_error_cache: - raise copy_os_error(self.stat_error_cache[path]) + def stat_or_none(self, path: str) -> os.stat_result | None: + if path in self.stat_or_none_cache: + return self.stat_or_none_cache[path] + + st = None try: st = os.stat(path) - except OSError as err: + except OSError: if self.init_under_package_root(path): try: - return self._fake_init(path) + st = self._fake_init(path) except OSError: pass - # Take a copy to get rid of associated traceback and frame objects. - # Just assigning to __traceback__ doesn't free them. - self.stat_error_cache[path] = copy_os_error(err) - raise err - self.stat_cache[path] = st + + self.stat_or_none_cache[path] = st return st def init_under_package_root(self, path: str) -> bool: @@ -112,9 +109,9 @@ def init_under_package_root(self, path: str) -> bool: if not os.path.basename(dirname).isidentifier(): # Can't put an __init__.py in a place that's not an identifier return False - try: - st = self.stat(dirname) - except OSError: + + st = self.stat_or_none(dirname) + if st is None: return False else: if not stat.S_ISDIR(st.st_mode): @@ -145,7 +142,7 @@ def _fake_init(self, path: str) -> os.stat_result: assert basename == "__init__.py", path assert not os.path.exists(path), path # Not cached! dirname = os.path.normpath(dirname) - st = self.stat(dirname) # May raise OSError + st = os.stat(dirname) # May raise OSError # Get stat result as a list so we can modify it. seq: list[float] = list(st) seq[stat.ST_MODE] = stat.S_IFREG | 0o444 @@ -153,7 +150,6 @@ def _fake_init(self, path: str) -> os.stat_result: seq[stat.ST_NLINK] = 1 seq[stat.ST_SIZE] = 0 st = os.stat_result(seq) - self.stat_cache[path] = st # Make listdir() and read() also pretend this file exists. self.fake_package_cache.add(dirname) return st @@ -181,9 +177,8 @@ def listdir(self, path: str) -> list[str]: return results def isfile(self, path: str) -> bool: - try: - st = self.stat(path) - except OSError: + st = self.stat_or_none(path) + if st is None: return False return stat.S_ISREG(st.st_mode) @@ -248,18 +243,14 @@ def exists_case(self, path: str, prefix: str) -> bool: return res def isdir(self, path: str) -> bool: - try: - st = self.stat(path) - except OSError: + st = self.stat_or_none(path) + if st is None: return False return stat.S_ISDIR(st.st_mode) def exists(self, path: str) -> bool: - try: - self.stat(path) - except FileNotFoundError: - return False - return True + st = self.stat_or_none(path) + return st is not None def read(self, path: str) -> bytes: if path in self.read_cache: @@ -269,7 +260,7 @@ def read(self, path: str) -> bytes: # Need to stat first so that the contents of file are from no # earlier instant than the mtime reported by self.stat(). 
- self.stat(path) + self.stat_or_none(path) dirname, basename = os.path.split(path) dirname = os.path.normpath(dirname) @@ -294,8 +285,10 @@ def hash_digest(self, path: str) -> str: return self.hash_cache[path] def samefile(self, f1: str, f2: str) -> bool: - s1 = self.stat(f1) - s2 = self.stat(f2) + s1 = self.stat_or_none(f1) + s2 = self.stat_or_none(f2) + if s1 is None or s2 is None: + return False return os.path.samestat(s1, s2) diff --git a/mypy/fswatcher.py b/mypy/fswatcher.py index a574a36a0cc5..97a62ca9f9f7 100644 --- a/mypy/fswatcher.py +++ b/mypy/fswatcher.py @@ -2,6 +2,7 @@ from __future__ import annotations +import os from typing import AbstractSet, Iterable, NamedTuple from mypy.fscache import FileSystemCache @@ -56,8 +57,7 @@ def remove_watched_paths(self, paths: Iterable[str]) -> None: del self._file_data[path] self._paths -= set(paths) - def _update(self, path: str) -> None: - st = self.fs.stat(path) + def _update(self, path: str, st: os.stat_result) -> None: hash_digest = self.fs.hash_digest(path) self._file_data[path] = FileData(st.st_mtime, st.st_size, hash_digest) @@ -65,9 +65,8 @@ def _find_changed(self, paths: Iterable[str]) -> AbstractSet[str]: changed = set() for path in paths: old = self._file_data[path] - try: - st = self.fs.stat(path) - except FileNotFoundError: + st = self.fs.stat_or_none(path) + if st is None: if old is not None: # File was deleted. changed.add(path) @@ -76,13 +75,13 @@ def _find_changed(self, paths: Iterable[str]) -> AbstractSet[str]: if old is None: # File is new. changed.add(path) - self._update(path) + self._update(path, st) # Round mtimes down, to match the mtimes we write to meta files elif st.st_size != old.st_size or int(st.st_mtime) != int(old.st_mtime): # Only look for changes if size or mtime has changed as an # optimization, since calculating hash is expensive. new_hash = self.fs.hash_digest(path) - self._update(path) + self._update(path, st) if st.st_size != old.st_size or new_hash != old.hash: # Changed file. changed.add(path) diff --git a/mypy/metastore.py b/mypy/metastore.py index 4caa7d7f0534..c9139045e3b1 100644 --- a/mypy/metastore.py +++ b/mypy/metastore.py @@ -33,14 +33,14 @@ def getmtime(self, name: str) -> float: """ @abstractmethod - def read(self, name: str) -> str: + def read(self, name: str) -> bytes: """Read the contents of a metadata entry. Raises FileNotFound if the entry does not exist. """ @abstractmethod - def write(self, name: str, data: str, mtime: float | None = None) -> bool: + def write(self, name: str, data: bytes, mtime: float | None = None) -> bool: """Write a metadata entry. If mtime is specified, set it as the mtime of the entry. Otherwise, @@ -86,16 +86,16 @@ def getmtime(self, name: str) -> float: return int(os.path.getmtime(os.path.join(self.cache_dir_prefix, name))) - def read(self, name: str) -> str: + def read(self, name: str) -> bytes: assert os.path.normpath(name) != os.path.abspath(name), "Don't use absolute paths!" if not self.cache_dir_prefix: raise FileNotFoundError() - with open(os.path.join(self.cache_dir_prefix, name)) as f: + with open(os.path.join(self.cache_dir_prefix, name), "rb") as f: return f.read() - def write(self, name: str, data: str, mtime: float | None = None) -> bool: + def write(self, name: str, data: bytes, mtime: float | None = None) -> bool: assert os.path.normpath(name) != os.path.abspath(name), "Don't use absolute paths!" if not self.cache_dir_prefix: @@ -105,7 +105,7 @@ def write(self, name: str, data: str, mtime: float | None = None) -> bool: tmp_filename = path + "." 
+ random_string() try: os.makedirs(os.path.dirname(path), exist_ok=True) - with open(tmp_filename, "w") as f: + with open(tmp_filename, "wb") as f: f.write(data) os.replace(tmp_filename, path) if mtime is not None: @@ -135,15 +135,13 @@ def list_all(self) -> Iterable[str]: SCHEMA = """ -CREATE TABLE IF NOT EXISTS files ( +CREATE TABLE IF NOT EXISTS files2 ( path TEXT UNIQUE NOT NULL, mtime REAL, - data TEXT + data BLOB ); -CREATE INDEX IF NOT EXISTS path_idx on files(path); +CREATE INDEX IF NOT EXISTS path_idx on files2(path); """ -# No migrations yet -MIGRATIONS: list[str] = [] def connect_db(db_file: str) -> sqlite3.Connection: @@ -151,11 +149,6 @@ def connect_db(db_file: str) -> sqlite3.Connection: db = sqlite3.dbapi2.connect(db_file) db.executescript(SCHEMA) - for migr in MIGRATIONS: - try: - db.executescript(migr) - except sqlite3.OperationalError: - pass return db @@ -176,7 +169,7 @@ def _query(self, name: str, field: str) -> Any: if not self.db: raise FileNotFoundError() - cur = self.db.execute(f"SELECT {field} FROM files WHERE path = ?", (name,)) + cur = self.db.execute(f"SELECT {field} FROM files2 WHERE path = ?", (name,)) results = cur.fetchall() if not results: raise FileNotFoundError() @@ -188,12 +181,12 @@ def getmtime(self, name: str) -> float: assert isinstance(mtime, float) return mtime - def read(self, name: str) -> str: + def read(self, name: str) -> bytes: data = self._query(name, "data") - assert isinstance(data, str) + assert isinstance(data, bytes) return data - def write(self, name: str, data: str, mtime: float | None = None) -> bool: + def write(self, name: str, data: bytes, mtime: float | None = None) -> bool: import sqlite3 if not self.db: @@ -202,7 +195,7 @@ def write(self, name: str, data: str, mtime: float | None = None) -> bool: if mtime is None: mtime = time.time() self.db.execute( - "INSERT OR REPLACE INTO files(path, mtime, data) VALUES(?, ?, ?)", + "INSERT OR REPLACE INTO files2(path, mtime, data) VALUES(?, ?, ?)", (name, mtime, data), ) except sqlite3.OperationalError: @@ -213,7 +206,7 @@ def remove(self, name: str) -> None: if not self.db: raise FileNotFoundError() - self.db.execute("DELETE FROM files WHERE path = ?", (name,)) + self.db.execute("DELETE FROM files2 WHERE path = ?", (name,)) def commit(self) -> None: if self.db: @@ -221,5 +214,5 @@ def commit(self) -> None: def list_all(self) -> Iterable[str]: if self.db: - for row in self.db.execute("SELECT path FROM files"): + for row in self.db.execute("SELECT path FROM files2"): yield row[0] diff --git a/mypy/modulefinder.py b/mypy/modulefinder.py index 452cfef20f4c..49c39a9ce91c 100644 --- a/mypy/modulefinder.py +++ b/mypy/modulefinder.py @@ -22,6 +22,7 @@ from mypy.nodes import MypyFile from mypy.options import Options from mypy.stubinfo import approved_stub_package_exists +from mypy.util import os_path_join # Paths to be searched in find_module(). 
@@ -205,7 +206,7 @@ def find_module_via_source_set(self, id: str) -> ModuleSearchResult | None: d = os.path.dirname(p) for _ in range(id.count(".")): if not any( - self.fscache.isfile(os.path.join(d, "__init__" + x)) for x in PYTHON_EXTENSIONS + self.fscache.isfile(os_path_join(d, "__init__" + x)) for x in PYTHON_EXTENSIONS ): return None d = os.path.dirname(d) @@ -249,7 +250,7 @@ def find_lib_path_dirs(self, id: str, lib_path: tuple[str, ...]) -> PackageDirs: dirs = [] for pathitem in self.get_toplevel_possibilities(lib_path, components[0]): # e.g., '/usr/lib/python3.4/foo/bar' - dir = os.path.normpath(os.path.join(pathitem, dir_chain)) + dir = os.path.normpath(os_path_join(pathitem, dir_chain)) if self.fscache.isdir(dir): dirs.append((dir, True)) return dirs @@ -320,8 +321,8 @@ def _find_module_non_stub_helper( plausible_match = False dir_path = pkg_dir for index, component in enumerate(components): - dir_path = os.path.join(dir_path, component) - if self.fscache.isfile(os.path.join(dir_path, "py.typed")): + dir_path = os_path_join(dir_path, component) + if self.fscache.isfile(os_path_join(dir_path, "py.typed")): return os.path.join(pkg_dir, *components[:-1]), index == 0 elif not plausible_match and ( self.fscache.isdir(dir_path) or self.fscache.isfile(dir_path + ".py") @@ -330,9 +331,8 @@ def _find_module_non_stub_helper( # If this is not a directory then we can't traverse further into it if not self.fscache.isdir(dir_path): break - for i in range(len(components), 0, -1): - if approved_stub_package_exists(".".join(components[:i])): - return ModuleNotFoundReason.APPROVED_STUBS_NOT_INSTALLED + if approved_stub_package_exists(".".join(components)): + return ModuleNotFoundReason.APPROVED_STUBS_NOT_INSTALLED if plausible_match: return ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS else: @@ -418,9 +418,9 @@ def _find_module(self, id: str, use_typeshed: bool) -> ModuleSearchResult: # Third-party stub/typed packages for pkg_dir in self.search_paths.package_path: stub_name = components[0] + "-stubs" - stub_dir = os.path.join(pkg_dir, stub_name) + stub_dir = os_path_join(pkg_dir, stub_name) if fscache.isdir(stub_dir): - stub_typed_file = os.path.join(stub_dir, "py.typed") + stub_typed_file = os_path_join(stub_dir, "py.typed") stub_components = [stub_name] + components[1:] path = os.path.join(pkg_dir, *stub_components[:-1]) if fscache.isdir(path): @@ -430,7 +430,7 @@ def _find_module(self, id: str, use_typeshed: bool) -> ModuleSearchResult: # Partial here means that mypy should look at the runtime # package if installed. 
if fscache.read(stub_typed_file).decode().strip() == "partial": - runtime_path = os.path.join(pkg_dir, dir_chain) + runtime_path = os_path_join(pkg_dir, dir_chain) third_party_inline_dirs.append((runtime_path, True)) # if the package is partial, we don't verify the module, as # the partial stub package may not have a __init__.pyi @@ -556,7 +556,7 @@ def _find_module(self, id: str, use_typeshed: bool) -> ModuleSearchResult: return ModuleNotFoundReason.NOT_FOUND def find_modules_recursive(self, module: str) -> list[BuildSource]: - module_path = self.find_module(module) + module_path = self.find_module(module, fast_path=True) if isinstance(module_path, ModuleNotFoundReason): return [] sources = [BuildSource(module_path, module, None)] @@ -580,7 +580,7 @@ def find_modules_recursive(self, module: str) -> list[BuildSource]: # Skip certain names altogether if name in ("__pycache__", "site-packages", "node_modules") or name.startswith("."): continue - subpath = os.path.join(package_path, name) + subpath = os_path_join(package_path, name) if self.options and matches_exclude( subpath, self.options.exclude, self.fscache, self.options.verbosity >= 2 @@ -590,8 +590,8 @@ def find_modules_recursive(self, module: str) -> list[BuildSource]: if self.fscache.isdir(subpath): # Only recurse into packages if (self.options and self.options.namespace_packages) or ( - self.fscache.isfile(os.path.join(subpath, "__init__.py")) - or self.fscache.isfile(os.path.join(subpath, "__init__.pyi")) + self.fscache.isfile(os_path_join(subpath, "__init__.py")) + or self.fscache.isfile(os_path_join(subpath, "__init__.pyi")) ): seen.add(name) sources.extend(self.find_modules_recursive(module + "." + name)) @@ -636,7 +636,7 @@ def verify_module(fscache: FileSystemCache, id: str, path: str, prefix: str) -> for i in range(id.count(".")): path = os.path.dirname(path) if not any( - fscache.isfile_case(os.path.join(path, f"__init__{extension}"), prefix) + fscache.isfile_case(os_path_join(path, f"__init__{extension}"), prefix) for extension in PYTHON_EXTENSIONS ): return False @@ -651,7 +651,7 @@ def highest_init_level(fscache: FileSystemCache, id: str, path: str, prefix: str for i in range(id.count(".")): path = os.path.dirname(path) if any( - fscache.isfile_case(os.path.join(path, f"__init__{extension}"), prefix) + fscache.isfile_case(os_path_join(path, f"__init__{extension}"), prefix) for extension in PYTHON_EXTENSIONS ): level = i + 1 @@ -668,10 +668,13 @@ def mypy_path() -> list[str]: def default_lib_path( data_dir: str, pyversion: tuple[int, int], custom_typeshed_dir: str | None ) -> list[str]: - """Return default standard library search paths.""" + """Return default standard library search paths. Guaranteed to be normalised.""" + + data_dir = os.path.abspath(data_dir) path: list[str] = [] if custom_typeshed_dir: + custom_typeshed_dir = os.path.abspath(custom_typeshed_dir) typeshed_dir = os.path.join(custom_typeshed_dir, "stdlib") mypy_extensions_dir = os.path.join(custom_typeshed_dir, "stubs", "mypy-extensions") versions_file = os.path.join(typeshed_dir, "VERSIONS") @@ -711,7 +714,7 @@ def default_lib_path( @functools.lru_cache(maxsize=None) def get_search_dirs(python_executable: str | None) -> tuple[list[str], list[str]]: - """Find package directories for given python. + """Find package directories for given python. Guaranteed to return absolute paths. This runs a subprocess call, which generates a list of the directories in sys.path. To avoid repeatedly calling a subprocess (which can be slow!) 
we @@ -773,6 +776,7 @@ def compute_search_paths( root_dir = os.getenv("MYPY_TEST_PREFIX", None) if not root_dir: root_dir = os.path.dirname(os.path.dirname(__file__)) + root_dir = os.path.abspath(root_dir) lib_path.appendleft(os.path.join(root_dir, "test-data", "unit", "lib-stub")) # alt_lib_path is used by some tests to bypass the normal lib_path mechanics. # If we don't have one, grab directories of source files. @@ -829,6 +833,7 @@ def compute_search_paths( return SearchPaths( python_path=tuple(reversed(python_path)), mypy_path=tuple(mypypath), + # package_path and typeshed_path must be normalised and absolute via os.path.abspath package_path=tuple(sys_path + site_packages), typeshed_path=tuple(lib_path), ) @@ -842,11 +847,11 @@ def load_stdlib_py_versions(custom_typeshed_dir: str | None) -> StdlibVersions: None means there is no maximum version. """ - typeshed_dir = custom_typeshed_dir or os.path.join(os.path.dirname(__file__), "typeshed") - stdlib_dir = os.path.join(typeshed_dir, "stdlib") + typeshed_dir = custom_typeshed_dir or os_path_join(os.path.dirname(__file__), "typeshed") + stdlib_dir = os_path_join(typeshed_dir, "stdlib") result = {} - versions_path = os.path.join(stdlib_dir, "VERSIONS") + versions_path = os_path_join(stdlib_dir, "VERSIONS") assert os.path.isfile(versions_path), (custom_typeshed_dir, versions_path, __file__) with open(versions_path) as f: for line in f: diff --git a/mypy/stubinfo.py b/mypy/stubinfo.py index 9d8dfbe43f37..0ec64b037fee 100644 --- a/mypy/stubinfo.py +++ b/mypy/stubinfo.py @@ -1,22 +1,53 @@ from __future__ import annotations -def is_legacy_bundled_package(prefix: str) -> bool: - return prefix in legacy_bundled_packages +def is_module_from_legacy_bundled_package(module: str) -> bool: + top_level = module.split(".", 1)[0] + return top_level in legacy_bundled_packages -def approved_stub_package_exists(prefix: str) -> bool: - return is_legacy_bundled_package(prefix) or prefix in non_bundled_packages +def approved_stub_package_exists(module: str) -> bool: + top_level = module.split(".", 1)[0] + if top_level in legacy_bundled_packages: + return True + if top_level in non_bundled_packages_flat: + return True + if top_level in non_bundled_packages_namespace: + namespace = non_bundled_packages_namespace[top_level] + components = module.split(".") + for i in range(len(components), 0, -1): + module = ".".join(components[:i]) + if module in namespace: + return True + return False -def stub_distribution_name(prefix: str) -> str: - return legacy_bundled_packages.get(prefix) or non_bundled_packages[prefix] +def stub_distribution_name(module: str) -> str | None: + top_level = module.split(".", 1)[0] + + dist = legacy_bundled_packages.get(top_level) + if dist: + return dist + dist = non_bundled_packages_flat.get(top_level) + if dist: + return dist + + if top_level in non_bundled_packages_namespace: + namespace = non_bundled_packages_namespace[top_level] + components = module.split(".") + for i in range(len(components), 0, -1): + module = ".".join(components[:i]) + dist = namespace.get(module) + if dist: + return dist + + return None # Stubs for these third-party packages used to be shipped with mypy. # # Map package name to PyPI stub distribution name. 
-legacy_bundled_packages = { +legacy_bundled_packages: dict[str, str] = { "aiofiles": "types-aiofiles", "bleach": "types-bleach", "boto": "types-boto", @@ -32,7 +63,6 @@ def stub_distribution_name(prefix: str) -> str: "docutils": "types-docutils", "first": "types-first", "gflags": "types-python-gflags", - "google.protobuf": "types-protobuf", "markdown": "types-Markdown", "mock": "types-mock", "OpenSSL": "types-pyOpenSSL", @@ -66,20 +96,17 @@ def stub_distribution_name(prefix: str) -> str: # include packages that have a release that includes PEP 561 type # information. # -# Package name can have one or two components ('a' or 'a.b'). -# # Note that these packages are omitted for now: # pika: typeshed's stubs are on PyPI as types-pika-ts. # types-pika already exists on PyPI, and is more complete in many ways, # but is a non-typeshed stubs package. -non_bundled_packages = { +non_bundled_packages_flat: dict[str, str] = { "MySQLdb": "types-mysqlclient", "PIL": "types-Pillow", "PyInstaller": "types-pyinstaller", "Xlib": "types-python-xlib", "aws_xray_sdk": "types-aws-xray-sdk", "babel": "types-babel", - "backports.ssl_match_hostname": "types-backports.ssl_match_hostname", "braintree": "types-braintree", "bs4": "types-beautifulsoup4", "bugbear": "types-flake8-bugbear", @@ -107,7 +134,6 @@ def stub_distribution_name(prefix: str) -> str: "flask_migrate": "types-Flask-Migrate", "fpdf": "types-fpdf2", "gdb": "types-gdb", - "google.cloud.ndb": "types-google-cloud-ndb", "hdbcli": "types-hdbcli", "html5lib": "types-html5lib", "httplib2": "types-httplib2", @@ -123,7 +149,6 @@ def stub_distribution_name(prefix: str) -> str: "oauthlib": "types-oauthlib", "openpyxl": "types-openpyxl", "opentracing": "types-opentracing", - "paho.mqtt": "types-paho-mqtt", "parsimonious": "types-parsimonious", "passlib": "types-passlib", "passpy": "types-passpy", @@ -171,3 +196,10 @@ def stub_distribution_name(prefix: str) -> str: "pandas": "pandas-stubs", # https://github.com/pandas-dev/pandas-stubs "lxml": "lxml-stubs", # https://github.com/lxml/lxml-stubs } + + +non_bundled_packages_namespace: dict[str, dict[str, str]] = { + "backports": {"backports.ssl_match_hostname": "types-backports.ssl_match_hostname"}, + "google": {"google.cloud.ndb": "types-google-cloud-ndb", "google.protobuf": "types-protobuf"}, + "paho": {"paho.mqtt": "types-paho-mqtt"}, +} diff --git a/mypy/test/teststubinfo.py b/mypy/test/teststubinfo.py index eccee90244f3..10ce408e7023 100644 --- a/mypy/test/teststubinfo.py +++ b/mypy/test/teststubinfo.py @@ -2,11 +2,45 @@ import unittest -from mypy.stubinfo import is_legacy_bundled_package +from mypy.stubinfo import ( + approved_stub_package_exists, + is_module_from_legacy_bundled_package, + legacy_bundled_packages, + non_bundled_packages_flat, + stub_distribution_name, +) class TestStubInfo(unittest.TestCase): def test_is_legacy_bundled_packages(self) -> None: - assert not is_legacy_bundled_package("foobar_asdf") - assert is_legacy_bundled_package("pycurl") - assert is_legacy_bundled_package("dataclasses") + assert not is_module_from_legacy_bundled_package("foobar_asdf") + assert not is_module_from_legacy_bundled_package("PIL") + assert is_module_from_legacy_bundled_package("pycurl") + assert is_module_from_legacy_bundled_package("dataclasses") + + def test_approved_stub_package_exists(self) -> None: + assert not approved_stub_package_exists("foobar_asdf") + assert approved_stub_package_exists("pycurl") + assert approved_stub_package_exists("babel") + assert approved_stub_package_exists("google.cloud.ndb") + 
assert approved_stub_package_exists("google.cloud.ndb.submodule") + assert not approved_stub_package_exists("google.cloud.unknown") + assert approved_stub_package_exists("google.protobuf") + assert approved_stub_package_exists("google.protobuf.submodule") + assert not approved_stub_package_exists("google") + + def test_stub_distribution_name(self) -> None: + assert stub_distribution_name("foobar_asdf") is None + assert stub_distribution_name("pycurl") == "types-pycurl" + assert stub_distribution_name("babel") == "types-babel" + assert stub_distribution_name("google.cloud.ndb") == "types-google-cloud-ndb" + assert stub_distribution_name("google.cloud.ndb.submodule") == "types-google-cloud-ndb" + assert stub_distribution_name("google.cloud.unknown") is None + assert stub_distribution_name("google.protobuf") == "types-protobuf" + assert stub_distribution_name("google.protobuf.submodule") == "types-protobuf" + assert stub_distribution_name("google") is None + + def test_period_in_top_level(self) -> None: + for packages in (non_bundled_packages_flat, legacy_bundled_packages): + for top_level_module in packages: + assert "." not in top_level_module diff --git a/mypy/util.py b/mypy/util.py index 4b1b918b92e6..289740bdf1ef 100644 --- a/mypy/util.py +++ b/mypy/util.py @@ -4,16 +4,22 @@ import hashlib import io +import json import os -import pathlib import re import shutil import sys import time from importlib import resources as importlib_resources -from typing import IO, Callable, Container, Final, Iterable, Sequence, Sized, TypeVar +from typing import IO, Any, Callable, Container, Final, Iterable, Sequence, Sized, TypeVar from typing_extensions import Literal +orjson: Any +try: + import orjson # type: ignore[import-not-found, no-redef, unused-ignore] +except ImportError: + orjson = None + try: import curses @@ -412,9 +418,43 @@ def replace_object_state( pass -def is_sub_path(path1: str, path2: str) -> bool: - """Given two paths, return if path1 is a sub-path of path2.""" - return pathlib.Path(path2) in pathlib.Path(path1).parents +def is_sub_path_normabs(path: str, dir: str) -> bool: + """Given two paths, return if path is a sub-path of dir. + + Moral equivalent of: Path(dir) in Path(path).parents + + Similar to the pathlib version: + - Treats paths case-sensitively + - Does not fully handle unnormalised paths (e.g. paths with "..") + - Does not handle a mix of absolute and relative paths + Unlike the pathlib version: + - Fast + - On Windows, assumes input has been slash normalised + - Handles even fewer unnormalised paths (e.g. paths with "." and "//") + + As a result, callers should ensure that inputs have had os.path.abspath called on them + (note that os.path.abspath will normalise) + """ + if not dir.endswith(os.sep): + dir += os.sep + return path.startswith(dir) + + +if sys.platform == "linux" or sys.platform == "darwin": + + def os_path_join(path: str, b: str) -> str: + # Based off of os.path.join, but simplified to str-only, 2 args and mypyc can compile it. + if b.startswith("/") or not path: + return b + elif path.endswith("/"): + return path + b + else: + return path + "/" + b + +else: + + def os_path_join(a: str, p: str) -> str: + return os.path.join(a, p) def hard_exit(status: int = 0) -> None: @@ -535,9 +575,7 @@ def hash_digest(data: bytes) -> str: accidental collision, but we don't really care about any of the cryptographic properties. """ - # Once we drop Python 3.5 support, we should consider using - # blake2b, which is faster. 
- return hashlib.sha256(data).hexdigest() + return hashlib.sha1(data).hexdigest() def parse_gray_color(cup: bytes) -> str: @@ -874,3 +912,25 @@ def quote_docstring(docstr: str) -> str: return f"''{docstr_repr}''" else: return f'""{docstr_repr}""' + + +def json_dumps(obj: object, debug: bool = False) -> bytes: + if orjson is not None: + if debug: + return orjson.dumps(obj, option=orjson.OPT_INDENT_2 | orjson.OPT_SORT_KEYS) # type: ignore[no-any-return] + else: + # TODO: If we don't sort keys here, testIncrementalInternalScramble fails + # We should document exactly what is going on there + return orjson.dumps(obj, option=orjson.OPT_SORT_KEYS) # type: ignore[no-any-return] + + if debug: + return json.dumps(obj, indent=2, sort_keys=True).encode("utf-8") + else: + # See above for sort_keys comment + return json.dumps(obj, sort_keys=True, separators=(",", ":")).encode("utf-8") + + +def json_loads(data: bytes) -> Any: + if orjson is not None: + return orjson.loads(data) + return json.loads(data) diff --git a/mypy/version.py b/mypy/version.py index b7f966633d20..772b8f684200 100644 --- a/mypy/version.py +++ b/mypy/version.py @@ -8,7 +8,7 @@ # - Release versions have the form "1.2.3". # - Dev versions have the form "1.2.3+dev" (PLUS sign to conform to PEP 440). # - Before 1.0 we had the form "0.NNN". -__version__ = "1.12.1" +__version__ = "1.13.0" base_version = __version__ mypy_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) diff --git a/mypyc/codegen/emitmodule.py b/mypyc/codegen/emitmodule.py index 1d8708912de5..b5e0a37f0cca 100644 --- a/mypyc/codegen/emitmodule.py +++ b/mypyc/codegen/emitmodule.py @@ -24,7 +24,7 @@ from mypy.nodes import MypyFile from mypy.options import Options from mypy.plugin import Plugin, ReportConfigContext -from mypy.util import hash_digest +from mypy.util import hash_digest, json_dumps from mypyc.codegen.cstring import c_string_initializer from mypyc.codegen.emit import Emitter, EmitterContext, HeaderDeclaration, c_array_initializer from mypyc.codegen.emitclass import generate_class, generate_class_type_decl @@ -154,7 +154,7 @@ def report_config_data(self, ctx: ReportConfigContext) -> tuple[str | None, list ir_data = json.loads(ir_json) # Check that the IR cache matches the metadata cache - if compute_hash(meta_json) != ir_data["meta_hash"]: + if hash_digest(meta_json) != ir_data["meta_hash"]: return None # Check that all of the source files are present and as @@ -369,11 +369,11 @@ def write_cache( newpath = get_state_ir_cache_name(st) ir_data = { "ir": module.serialize(), - "meta_hash": compute_hash(meta_data), + "meta_hash": hash_digest(meta_data), "src_hashes": hashes[group_map[id]], } - result.manager.metastore.write(newpath, json.dumps(ir_data, separators=(",", ":"))) + result.manager.metastore.write(newpath, json_dumps(ir_data)) result.manager.metastore.commit() diff --git a/setup.py b/setup.py index a50afde4ce6b..71124916a3e4 100644 --- a/setup.py +++ b/setup.py @@ -232,6 +232,7 @@ def run(self): "python2": "", "reports": "lxml", "install-types": "pip", + "faster-cache": "orjson", }, python_requires=">=3.8", include_package_data=True, diff --git a/tox.ini b/tox.ini index c2abd05d7b6c..9d962456d034 100644 --- a/tox.ini +++ b/tox.ini @@ -19,15 +19,18 @@ passenv = PROGRAMDATA PROGRAMFILES(X86) PYTEST_ADDOPTS -deps = -rtest-requirements.txt +deps = + -r test-requirements.txt + # This is a bit of a hack, but ensures the faster-cache path is tested in CI + orjson;python_version=='3.12' commands = python -m pytest {posargs} [testenv:dev] description = 
generate a DEV environment, that has all project libraries usedevelop = True deps = - -rtest-requirements.txt - -rdocs/requirements-docs.txt + -r test-requirements.txt + -r docs/requirements-docs.txt commands = python -m pip list --format=columns python -c 'import sys; print(sys.executable)' @@ -37,7 +40,7 @@ commands = description = invoke sphinx-build to build the HTML docs passenv = VERIFY_MYPY_ERROR_CODES -deps = -rdocs/requirements-docs.txt +deps = -r docs/requirements-docs.txt commands = sphinx-build -n -d "{toxworkdir}/docs_doctree" docs/source "{toxworkdir}/docs_out" --color -W -bhtml {posargs} python -c 'import pathlib; print("documentation available under file://\{0\}".format(pathlib.Path(r"{toxworkdir}") / "docs_out" / "index.html"))'