diff --git a/news/6311.bugfix.rst b/news/6311.bugfix.rst
new file mode 100644
index 0000000000..c06eabb023
--- /dev/null
+++ b/news/6311.bugfix.rst
@@ -0,0 +1,10 @@
+Significantly improved performance through several optimizations:
+
+* Cached parsed Pipfile content, guarded by a file lock and invalidated properly
+* Sped up dependency resolution by reducing unnecessary subprocess calls
+* Improved handling of reverse dependencies in the update process
+* Added a file locking mechanism to prevent concurrent Pipfile modifications
+* Reduced redundant file operations and system calls
+* Added developer utilities for profiling performance bottlenecks
+
+Also fixes a bug where markers passed to the CLI install command were not propagated to the Pipfile.
diff --git a/pipenv/environment.py b/pipenv/environment.py
index 58fbf1506d..620df888db 100644
--- a/pipenv/environment.py
+++ b/pipenv/environment.py
@@ -78,7 +78,7 @@ def __init__(
         self.prefix = Path(prefix if prefix else sys.prefix)
         self._base_paths = {}
         if self.is_venv:
-            self._base_paths = self.get_paths()
+            self._base_paths = self.get_paths
         self.sys_paths = get_paths()

     def safe_import(self, name: str) -> ModuleType:
@@ -180,7 +180,7 @@ def base_paths(self) -> dict[str, str]:
             paths = self._base_paths.copy()
         else:
             try:
-                paths = self.get_paths()
+                paths = self.get_paths
             except Exception:
                 paths = get_paths(
                     self.install_scheme,
@@ -257,12 +257,6 @@ def python(self) -> str:

     @cached_property
     def sys_path(self) -> list[str]:
-        """
-        The system path inside the environment
-
-        :return: The :data:`sys.path` from the environment
-        :rtype: list
-        """
         import json

         current_executable = Path(sys.executable).as_posix()
@@ -328,6 +322,7 @@ def build_command(
         py_command = py_command % lines_as_str
         return py_command

+    @cached_property
    def get_paths(self) -> dict[str, str] | None:
         """
         Get the paths for the environment by running a subcommand
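The environment.py hunks above turn `get_paths` into a `functools.cached_property`, which is why the call sites switch from `self.get_paths()` to plain attribute access: the subprocess that computes the scheme paths now runs at most once per `Environment` instance. A minimal sketch of that pattern, with illustrative names rather than pipenv's actual class:

```python
# Sketch of the cached_property pattern applied above (names are
# illustrative, not pipenv's implementation).
import json
import subprocess
import sys
from functools import cached_property


class Env:
    def __init__(self, python: str = sys.executable) -> None:
        self.python = python

    @cached_property
    def paths(self) -> dict:
        # The subprocess runs on first attribute access only; the result
        # is cached in the instance's __dict__ for later accesses.
        out = subprocess.run(
            [self.python, "-c",
             "import json, sysconfig; print(json.dumps(sysconfig.get_paths()))"],
            capture_output=True, text=True, check=True,
        ).stdout
        return json.loads(out)


env = Env()
first = env.paths   # spawns the interpreter once
second = env.paths  # served from cache, no subprocess
assert first is second
```

Because the property replaces the method, callers must drop the call parentheses, which is exactly the change in the first two hunks.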
diff --git a/pipenv/project.py b/pipenv/project.py
index 45b387c827..b17b60cceb 100644
--- a/pipenv/project.py
+++ b/pipenv/project.py
@@ -8,6 +8,7 @@
 import os
 import re
 import sys
+import time
 import urllib.parse
 from json.decoder import JSONDecodeError
 from pathlib import Path
@@ -149,6 +150,9 @@ def __init__(self, python_version=None, chdir=True):
         self._download_location = None
         self._proper_names_db_path = None
         self._pipfile_location = None
+        self._parsed_pipfile = None
+        self._parsed_pipfile_atime = None
+        self._parsed_pipfile_mtime = None
         self._pipfile_newlines = DEFAULT_NEWLINES
         self._lockfile_newlines = DEFAULT_NEWLINES
         self._requirements_location = None
@@ -663,11 +667,68 @@ def requirements_location(self) -> str | None:
             self._requirements_location = loc
         return self._requirements_location

+    def _acquire_file_lock(self, file_obj):
+        """Acquire lock on an existing file object"""
+        if sys.platform == "win32":
+            import msvcrt
+
+            # Try to lock for a maximum of 10 seconds
+            start_time = time.time()
+            while (time.time() - start_time) < 10:
+                try:
+                    msvcrt.locking(file_obj.fileno(), msvcrt.LK_NBLCK, 1)
+                    return True
+                except OSError:  # noqa: PERF203
+                    time.sleep(0.1)
+            return False
+        else:
+            import fcntl
+
+            try:
+                # Use non-blocking to prevent deadlocks
+                fcntl.flock(file_obj.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
+                return True
+            except OSError:
+                return False
+
+    def _release_file_lock(self, file_obj):
+        """Release lock on an existing file object"""
+        if sys.platform == "win32":
+            import msvcrt
+
+            try:
+                msvcrt.locking(file_obj.fileno(), msvcrt.LK_UNLCK, 1)
+            except OSError:
+                pass
+        else:
+            import fcntl
+
+            try:
+                fcntl.flock(file_obj.fileno(), fcntl.LOCK_UN)
+            except OSError:
+                pass
+
     @property
     def parsed_pipfile(self) -> tomlkit.toml_document.TOMLDocument | TPipfile:
-        """Parse Pipfile into a TOMLFile"""
-        contents = self.read_pipfile()
-        return self._parse_pipfile(contents)
+        """Parse Pipfile into a TOMLFile with file locking"""
+        # Only lock the actual file we're reading
+        with open(self.pipfile_location, "r+" if sys.platform == "win32" else "r") as f:
+            # Try to get lock, but don't wait forever
+            if not self._acquire_file_lock(f):
+                # If we can't get the lock, just read without lock
+                contents = f.read()
+                self._pipfile_newlines = preferred_newlines(f)
+                self._parsed_pipfile = self._parse_pipfile(contents)
+                return self._parsed_pipfile
+
+            try:
+                contents = f.read()
+                self._pipfile_newlines = preferred_newlines(f)
+                self._parsed_pipfile = self._parse_pipfile(contents)
+            finally:
+                self._release_file_lock(f)
+
+        return self._parsed_pipfile

     def read_pipfile(self) -> str:
         # Open the pipfile, read it into memory.
@@ -689,27 +750,6 @@ def _parse_pipfile(
         # Fallback to toml parser, for large files.
         return toml.loads(contents)

-    def _read_pyproject(self) -> None:
-        pyproject = self.path_to("pyproject.toml")
-        if os.path.exists(pyproject):
-            self._pyproject = toml.load(pyproject)
-            build_system = self._pyproject.get("build-system", None)
-            if not os.path.exists(self.path_to("setup.py")):
-                if not build_system or not build_system.get("requires"):
-                    build_system = {
-                        "requires": ["setuptools>=40.8.0", "wheel"],
-                        "build-backend": get_default_pyproject_backend(),
-                    }
-            self._build_system = build_system
-
-    @property
-    def build_requires(self) -> list[str]:
-        return self._build_system.get("requires", ["setuptools>=40.8.0", "wheel"])
-
-    @property
-    def build_backend(self) -> str:
-        return self._build_system.get("build-backend", get_default_pyproject_backend())
-
     @property
     def settings(self) -> tomlkit.items.Table | dict[str, str | bool]:
         """A dictionary of the settings added to the Pipfile."""
@@ -795,25 +835,6 @@ def get_editable_packages(self, category):
         }
         return packages

-    def _get_vcs_packages(self, dev=False):
-        from pipenv.utils.requirementslib import is_vcs
-
-        section = "dev-packages" if dev else "packages"
-        packages = {
-            k: v
-            for k, v in self.parsed_pipfile.get(section, {}).items()
-            if is_vcs(v) or is_vcs(k)
-        }
-        return packages or {}
-
-    @property
-    def all_packages(self):
-        """Returns a list of all packages."""
-        packages = {}
-        for category in self.get_package_categories():
-            packages.update(self.parsed_pipfile.get(category, {}))
-        return packages
-
     @property
     def packages(self):
         """Returns a list of packages."""
@@ -824,16 +845,6 @@ def dev_packages(self):
         """Returns a list of dev-packages."""
         return self.get_pipfile_section("dev-packages")

-    @property
-    def pipfile_is_empty(self):
-        if not self.pipfile_exists:
-            return True
-
-        if not self.read_pipfile():
-            return True
-
-        return False
-
     def create_pipfile(self, python=None):
         """Creates the Pipfile, filled with juicy defaults."""
         # Inherit the pip's index configuration of install command.
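The constructor hunk above seeds `_parsed_pipfile`, `_parsed_pipfile_atime`, and `_parsed_pipfile_mtime` cache slots, and `parsed_pipfile` stores its parse result, but the invalidation check itself is not visible in these hunks. A hedged sketch of the mtime-keyed invalidation those fields suggest; this is an assumption about the intended design, not code from the PR:

```python
# Assumed design: reparse the Pipfile only when its mtime has changed
# since the cached parse. Names here are illustrative.
import os


class PipfileCache:
    def __init__(self, path: str) -> None:
        self.path = path
        self._parsed = None
        self._mtime = None

    def get(self, parse):
        mtime = os.stat(self.path).st_mtime
        # Cache miss: first access, or the file changed on disk.
        if self._parsed is None or mtime != self._mtime:
            with open(self.path) as f:
                self._parsed = parse(f.read())
            self._mtime = mtime
        return self._parsed
```

With such a helper, `cache.get(tomlkit.parse)` would hit the TOML parser once and then serve the cached document until the Pipfile is rewritten.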
@@ -848,12 +859,15 @@ def create_pipfile(self, python=None):
             verify_ssl = index.startswith("https")
             sources.append({"url": index, "verify_ssl": verify_ssl, "name": source_name})

-        data = {
-            "source": sources,
-            # Default packages.
-            "packages": {},
-            "dev-packages": {},
-        }
+        if self.pipfile_exists:
+            data = self.parsed_pipfile
+        else:
+            data = {
+                "source": sources,
+                # Default packages.
+                "packages": {},
+                "dev-packages": {},
+            }

         # Default requires.
         required_python = python
         if not python:
@@ -864,7 +878,7 @@ def create_pipfile(self, python=None):
         version = python_version(required_python) or self.s.PIPENV_DEFAULT_PYTHON_VERSION
         if version:
             data["requires"] = {"python_version": ".".join(version.split(".")[:2])}
-        if python and version and len(version.split(".")) > 2:
+        if required_python and version and len(version.split(".")) > 2:
             data["requires"].update({"python_full_version": version})
         self.write_toml(data)

@@ -941,9 +955,10 @@ def get_lockfile_meta(self):
         }

     def write_toml(self, data, path=None):
-        """Writes the given data structure out as TOML."""
+        """Writes the given data structure out as TOML with file locking."""
         if path is None:
             path = self.pipfile_location
+
         data = convert_toml_outline_tables(data, self)
         try:
             formatted_data = tomlkit.dumps(data).rstrip()
@@ -951,7 +966,6 @@ def write_toml(self, data, path=None):
             document = tomlkit.document()
             for category in self.get_package_categories():
                 document[category] = tomlkit.table()
-                # Convert things to inline tables — fancy :)
                 for package in data.get(category, {}):
                     if hasattr(data[category][package], "keys"):
                         table = tomlkit.inline_table()
@@ -967,9 +981,21 @@ def write_toml(self, data, path=None):
             newlines = self._pipfile_newlines
         else:
             newlines = DEFAULT_NEWLINES
-        formatted_data = cleanup_toml(formatted_data)
-        with open(path, "w", newline=newlines) as f:
-            f.write(formatted_data)
+
+        file_data = cleanup_toml(formatted_data)
+
+        with open(path, "r+" if os.path.exists(path) else "w+", newline=newlines) as f:
+            if not self._acquire_file_lock(f):
+                # If we can't get the lock, write anyway - better than hanging
+                f.write(file_data)
+                return
+
+            try:
+                f.seek(0)
+                f.truncate()
+                f.write(file_data)
+            finally:
+                self._release_file_lock(f)

     def write_lockfile(self, content):
         """Write out the lockfile."""
@@ -983,7 +1009,7 @@ def write_lockfile(self, content):
             f.write("\n")

     def pipfile_sources(self, expand_vars=True):
-        if self.pipfile_is_empty or "source" not in self.parsed_pipfile:
+        if not self.pipfile_exists or "source" not in self.parsed_pipfile:
             sources = [self.default_source]
             if os.environ.get("PIPENV_PYPI_MIRROR"):
                 sources[0]["url"] = os.environ["PIPENV_PYPI_MIRROR"]
@@ -1163,6 +1189,7 @@ def generate_package_pipfile_entry(
         vcs_specifier = determine_vcs_specifier(package)
         name = self.get_package_name_in_pipfile(req_name, category=category)
         normalized_name = normalize_name(req_name)
+        markers = pip_line.split(";")[-1].strip() if ";" in pip_line else ""

         extras = package.extras
         specifier = "*"
@@ -1173,6 +1200,8 @@ def generate_package_pipfile_entry(
         entry = {}
         if extras:
             entry["extras"] = list(extras)
+        if markers:
+            entry["markers"] = str(markers)
         if path_specifier:
             entry["file"] = unquote(str(path_specifier))
         if pip_line.startswith("-e"):
@@ -1315,10 +1344,6 @@ def add_index_to_pipfile(self, index, verify_ssl=True):
         self.write_toml(p)
         return source["name"]

-    def recase_pipfile(self):
-        if self.ensure_proper_casing():
-            self.write_toml(self.parsed_pipfile)
-
     def load_lockfile(self, expand_env_vars=True):
         lockfile_modified = False
         with open(self.lockfile_location, encoding="utf-8") as lock:
@@ -1448,7 +1473,7 @@ def _which(self, command, location=None, allow_global=False):
         else:
             location = os.environ.get("VIRTUAL_ENV", None)
         if not (location and os.path.exists(location)) and not allow_global:
-            raise RuntimeError("location not created nor specified")
+            return None
         version_str = "python{}".format(".".join([str(v) for v in sys.version_info[:2]]))
         is_python = command in ("python", os.path.basename(sys.executable), version_str)
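The `generate_package_pipfile_entry` hunks above implement the marker-propagation fix from the news entry: the PEP 508 marker is split off the pip requirement line and recorded under the entry's `markers` key. A small illustrative sketch of that split; the helper name is hypothetical, but the behavior mirrors the added lines:

```python
# Hypothetical helper mirroring the marker split added above.
def split_markers(pip_line: str) -> tuple[str, str]:
    # 'requests==2.31.0 ; python_version < "3.12"' ->
    # ('requests==2.31.0', 'python_version < "3.12"')
    if ";" not in pip_line:
        return pip_line.strip(), ""
    req, _, marker = pip_line.partition(";")
    return req.strip(), marker.strip()


req, marker = split_markers('requests==2.31.0 ; python_version < "3.12"')
entry = {"version": "==2.31.0"}
if marker:
    entry["markers"] = marker
# entry -> {'version': '==2.31.0', 'markers': 'python_version < "3.12"'}
```

This sketch uses `partition`, which splits on the first `;`, while the hunk takes the last segment of `split(";")`; the two agree whenever the line contains a single requirement/marker separator, which is the PEP 508 case.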
diff --git a/pipenv/resolver.py b/pipenv/resolver.py
index 0236bd195e..e1977e53c1 100644
--- a/pipenv/resolver.py
+++ b/pipenv/resolver.py
@@ -135,7 +135,6 @@ class Entry:
     entry_dict: Dict[str, Any]
     project: Any  # Could be more specific with a Project type
     resolver: Any  # Could be more specific with a Resolver type
-    reverse_deps: Optional[Dict[str, Any]] = None
     category: Optional[str] = None

     def __post_init__(self):
@@ -319,9 +318,6 @@ def process_resolver_results(
     if not results:
         return []

-    # Get reverse dependencies for the project
-    reverse_deps = project.environment.reverse_dependencies()
-
     processed_results = []
     for result in results:
         # Create Entry instance with our new dataclass
@@ -330,7 +326,6 @@ def process_resolver_results(
             entry_dict=result,
             project=project,
             resolver=resolver,
-            reverse_deps=reverse_deps,
             category=category,
         )
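The routines/update.py diff that follows builds the reverse-dependency map in-process instead of shelling out to pipdeptree. A minimal sketch of that API pattern, assuming an upstream pipdeptree 2.x layout (`_discovery` and `_models` modules); the vendored copy's signatures may differ:

```python
# Sketch of pipdeptree's in-process API, assumed from upstream 2.x.
from pipdeptree._discovery import get_installed_distributions
from pipdeptree._models import PackageDAG

pkgs = get_installed_distributions()  # importlib.metadata distributions
dag = PackageDAG.from_pkgs(pkgs)      # maps each package to its requirements
rdag = dag.reverse()                  # note: returns a new, reversed DAG

for req, dependents in rdag.items():
    print(req.project_name, "<-", [d.project_name for d in dependents])
```

Since `reverse()` returns a new DAG rather than mutating in place, the hunk below assigns its result back to `dep_tree`.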
diff --git a/pipenv/routines/update.py b/pipenv/routines/update.py
index 50cfcd71cf..35b0fd5fdc 100644
--- a/pipenv/routines/update.py
+++ b/pipenv/routines/update.py
@@ -2,10 +2,8 @@
 import os
 import sys
 from collections import defaultdict
-from pathlib import Path
 from typing import Dict, Set, Tuple

-from pipenv.exceptions import JSONParseError, PipenvCmdError
 from pipenv.patched.pip._vendor.packaging.specifiers import SpecifierSet
 from pipenv.patched.pip._vendor.packaging.version import InvalidVersion, Version
 from pipenv.routines.outdated import do_outdated
@@ -17,11 +15,11 @@
     get_lockfile_section_using_pipfile_category,
     get_pipfile_category_using_lockfile_section,
 )
-from pipenv.utils.processes import run_command
 from pipenv.utils.project import ensure_project
 from pipenv.utils.requirements import add_index_to_pipfile
 from pipenv.utils.resolver import venv_resolve_deps
-from pipenv.vendor import pipdeptree
+from pipenv.vendor.pipdeptree._discovery import get_installed_distributions
+from pipenv.vendor.pipdeptree._models import PackageDAG


 def do_update(
@@ -106,44 +104,26 @@ def do_update(


 def get_reverse_dependencies(project) -> Dict[str, Set[Tuple[str, str]]]:
-    """Get reverse dependencies using pipdeptree."""
-    pipdeptree_path = Path(pipdeptree.__file__).parent
-    python_path = project.python()
-    cmd_args = [python_path, str(pipdeptree_path), "-l", "--reverse", "--json-tree"]
-
-    c = run_command(cmd_args, is_verbose=project.s.is_verbose())
-    if c.returncode != 0:
-        raise PipenvCmdError(c.err, c.out, c.returncode)
-    try:
-        dep_tree = json.loads(c.stdout.strip())
-    except json.JSONDecodeError:
-        raise JSONParseError(c.stdout, c.stderr)
-
-    # Build reverse dependency map: package -> set of (dependent_package, required_version)
-    reverse_deps = defaultdict(set)
+    """Get reverse dependencies without running pipdeptree as a subprocess."""

-    def process_tree_node(n, parents=None):
-        if parents is None:
-            parents = []
+    # Use the project's specified Python interpreter
+    python_interpreter = project.python()

-        package_name = n["package_name"]
-        required_version = n.get("required_version", "Any")
+    # Get installed packages for the specified interpreter
+    pkgs = get_installed_distributions(interpreter=python_interpreter)

-        # Add the current node to its parents' reverse dependencies
-        for parent in parents:
-            reverse_deps[parent].add((package_name, required_version))
+    # Create a package dependency tree (DAG)
+    dep_tree = PackageDAG.from_pkgs(pkgs)
+    dep_tree = dep_tree.reverse()  # reverse() returns a new DAG

-        # Process dependencies recursively, keeping track of parent path
-        for dep in n.get("dependencies", []):
-            process_tree_node(dep, parents + [package_name])
+    # Initialize reverse dependency map
+    reverse_deps = defaultdict(set)

-    # Start processing the tree from the root nodes
-    for node in dep_tree:
-        try:
-            process_tree_node(node)
-        except Exception as e:  # noqa: PERF203
-            err.print(
-                f"[red bold]Warning[/red bold]: Unable to analyze dependencies: {str(e)}"
-            )
+    # Populate the reverse dependency map
+    for package, dependents in dep_tree.items():
+        for dep in dependents:
+            reverse_deps[dep.project_name].add(
+                (package.project_name, getattr(package, "installed_version", "Any"))
+            )

     return reverse_deps
@@ -290,8 +270,13 @@ def upgrade(
     # Early conflict detection
     conflicts_found = False
     for package in package_args:
-        if "==" in package:
-            name, version = package.split("==")
+        package_parts = [package]
+        if ";" in package:
+            package_parts = package.split(";")
+            # Not using markers here for now
+            # markers = ";".join(package_parts[1:]) if len(package_parts) > 1 else None
+        if "==" in package_parts[0]:
+            name, version = package_parts[0].split("==")
             conflicts = check_version_conflicts(name, version, reverse_deps, lockfile)
             if conflicts:
                 conflicts_found = True
diff --git a/pipenv/utils/developers.py b/pipenv/utils/developers.py
new file mode 100644
index 0000000000..be27862560
--- /dev/null
+++ b/pipenv/utils/developers.py
@@ -0,0 +1,61 @@
+import cProfile
+import functools
+import os
+import pstats
+from datetime import datetime
+from pstats import SortKey
+
+
+def profile_method(output_dir="profiles"):
+    """
+    Decorator to profile pipenv method execution with focus on file reads.
+    """
+
+    def decorator(func):
+        @functools.wraps(func)
+        def wrapper(*args, **kwargs):
+            os.makedirs(output_dir, exist_ok=True)
+            timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
+            profile_name = f"{func.__name__}_{timestamp}"
+            profile_path = os.path.join(output_dir, f"{profile_name}.prof")
+
+            profiler = cProfile.Profile()
+            profiler.enable()
+
+            try:
+                result = func(*args, **kwargs)
+                return result
+            finally:
+                profiler.disable()
+
+                # Save and analyze stats
+                stats = pstats.Stats(profiler)
+                stats.sort_stats(SortKey.CUMULATIVE)
+                stats.dump_stats(profile_path)
+                print(f"\nProfile saved to: {profile_path}")
+
+                # Analyze file reads specifically
+                print("\nAnalyzing file read operations:")
+                print("-" * 50)
+
+                # Get all entries involving file read operations
+                read_stats = stats.stats
+                for (file, line, name), (_, _, tt, _, callers) in read_stats.items():
+                    if "read" in str(name):
+                        # Print the call stack for this read operation
+                        print(f"\nFile read at: {file}:{line}")
+                        print(f"Function: {name}")
+                        print(f"Time: {tt:.6f}s")
+                        print("Called by:")
+                        for caller in callers:
+                            caller_file, caller_line, caller_name = caller
+                            print(f"  {caller_name} in {caller_file}:{caller_line}")
+                        print("-" * 30)
+
+                # Print overall stats
+                print("\nTop 20 overall calls:")
+                stats.print_stats(20)
+
+        return wrapper
+
+    return decorator
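The new `pipenv/utils/developers.py` module is a development aid rather than runtime code. A hypothetical usage sketch; the wrapped function and its argument are illustrative:

```python
# Hypothetical usage of the profiling helper added above.
from pipenv.utils.developers import profile_method


@profile_method(output_dir="profiles")
def lock_project(project):
    ...  # slow code path under investigation


# Each call writes profiles/lock_project_<timestamp>.prof and prints the
# file-read hotspots; the .prof file can also be inspected later with
# pstats or a viewer such as snakeviz.
```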
diff --git a/pipenv/utils/pipfile.py b/pipenv/utils/pipfile.py
index 78e43c55ce..fa3bef3d43 100644
--- a/pipenv/utils/pipfile.py
+++ b/pipenv/utils/pipfile.py
@@ -68,13 +68,12 @@ def ensure_pipfile(
 ):
     """Creates a Pipfile for the project, if it doesn't exist."""

-    # Assert Pipfile exists.
     python = (
         project._which("python")
         if not (project.s.USING_DEFAULT_PYTHON or system)
         else None
     )
-    if project.pipfile_is_empty:
+    if not project.pipfile_exists:
         # Show an error message and exit if system is passed and no pipfile exists
         if system and not project.s.PIPENV_VIRTUALENV:
             raise exceptions.PipenvOptionsError(
@@ -82,6 +81,9 @@ def ensure_pipfile(
                 "--system is intended to be used for pre-existing Pipfile "
                 "installation, not installation of specific packages. Aborting.",
             )
+        err.print("Creating a Pipfile for this project...", style="bold")
+        # Create the pipfile if it doesn't exist.
+        project.create_pipfile(python=python)
     # If there's a requirements file, but no Pipfile...
     if project.requirements_exists and not skip_requirements:
         requirements_dir_path = os.path.dirname(project.requirements_location)
@@ -89,8 +91,6 @@ def ensure_pipfile(
             f"[bold]requirements.txt[/bold] found in [bold yellow]{requirements_dir_path}"
             "[/bold yellow] instead of [bold]Pipfile[/bold]! Converting..."
         )
-        # Create a Pipfile...
-        project.create_pipfile(python=python)
         with console.status(
             "Importing requirements...", spinner=project.s.PIPENV_SPINNER
         ) as st:
@@ -110,10 +110,6 @@ def ensure_pipfile(
             'We recommend updating your [bold]Pipfile[/bold] to specify the [bold]"*"'
             "[/bold] version, instead."
         )
-    else:
-        err.print("Creating a Pipfile for this project...", style="bold")
-        # Create the pipfile if it doesn't exist.
-        project.create_pipfile(python=python)
     # Validate the Pipfile's contents.
     if validate and project.virtualenv_exists and not project.s.PIPENV_SKIP_VALIDATION:
         # Ensure that Pipfile is using proper casing.
diff --git a/pipenv/utils/project.py b/pipenv/utils/project.py
index b6e58119fd..f80741affd 100644
--- a/pipenv/utils/project.py
+++ b/pipenv/utils/project.py
@@ -44,6 +44,14 @@ def ensure_project(
             raise exceptions.PipfileNotFound
     # Skip virtualenv creation when --system was used.
     if not system_or_exists:
+        # Ensure the Pipfile exists.
+        ensure_pipfile(
+            project,
+            validate=validate,
+            skip_requirements=skip_requirements,
+            system=system,
+            pipfile_categories=pipfile_categories,
+        )
         ensure_virtualenv(
             project,
             python=python,
diff --git a/pipenv/utils/requirements.py b/pipenv/utils/requirements.py
index e786180011..df1726a65f 100644
--- a/pipenv/utils/requirements.py
+++ b/pipenv/utils/requirements.py
@@ -99,8 +99,6 @@ def import_requirements(project, r=None, dev=False, categories=None):
                 project.add_package_to_pipfile(package, package_string, dev=dev)
         else:
             package_string = str(package.req)
-            if package.markers:
-                package_string += f" ; {package.markers}"
             if categories:
                 for category in categories:
                     project.add_package_to_pipfile(
@@ -112,7 +110,6 @@ def import_requirements(project, r=None, dev=False, categories=None):
     trusted_hosts = sorted(set(trusted_hosts))
     for index in indexes:
         add_index_to_pipfile(project, index, trusted_hosts)
-    project.recase_pipfile()


 def add_index_to_pipfile(project, index, trusted_hosts=None):
diff --git a/pipenv/utils/resolver.py b/pipenv/utils/resolver.py
index 7de6a7f779..ad0d5e7c22 100644
--- a/pipenv/utils/resolver.py
+++ b/pipenv/utils/resolver.py
@@ -1,4 +1,3 @@
-import contextlib
 import json
 import os
 import subprocess
@@ -420,48 +419,47 @@ def constraints(self):

         return constraints_list

-    @contextlib.contextmanager
-    def get_resolver(self, clear=False):
+    def resolve(self):
         from pipenv.patched.pip._internal.utils.temp_dir import TempDirectory

         with global_tempdir_manager(), get_build_tracker() as build_tracker, TempDirectory(
             globally_managed=True
-        ) as directory:
-            pip_options = self.pip_options
-            finder = self.finder()
-            wheel_cache = WheelCache(pip_options.cache_dir)
-            preparer = self.pip_command.make_requirement_preparer(
-                temp_build_dir=directory,
-                options=pip_options,
-                build_tracker=build_tracker,
-                session=self.session,
-                finder=finder,
-                use_user_site=False,
-            )
-            resolver = self.pip_command.make_resolver(
-                preparer=preparer,
-                finder=finder,
-                options=pip_options,
-                wheel_cache=wheel_cache,
-                use_user_site=False,
-                ignore_installed=True,
-                ignore_requires_python=pip_options.ignore_requires_python,
-                force_reinstall=pip_options.force_reinstall,
-                upgrade_strategy="to-satisfy-only",
-                use_pep517=pip_options.use_pep517,
-            )
-            yield resolver
-
-    def resolve(self):
-        with temp_environ(), self.get_resolver() as resolver:
+        ) as temp_dir:
             try:
+                finder = self.finder()
+                wheel_cache = WheelCache(self.pip_options.cache_dir)
+
+                preparer = self.pip_command.make_requirement_preparer(
+                    temp_build_dir=temp_dir,
+                    options=self.pip_options,
+                    build_tracker=build_tracker,
+                    session=self.session,
+                    finder=finder,
+                    use_user_site=False,
+                )
+
+                resolver = self.pip_command.make_resolver(
+                    preparer=preparer,
+                    finder=finder,
+                    options=self.pip_options,
+                    wheel_cache=wheel_cache,
+                    use_user_site=False,
+                    ignore_installed=True,
+                    ignore_requires_python=self.pip_options.ignore_requires_python,
+                    force_reinstall=self.pip_options.force_reinstall,
+                    upgrade_strategy="to-satisfy-only",
+                    use_pep517=self.pip_options.use_pep517,
+                )
+
                 results = resolver.resolve(self.constraints, check_supported_wheels=False)
-            except InstallationError as e:
-                raise ResolutionFailure(message=str(e))
-            else:
                 self.results = set(results.all_requirements)
                 self.resolved_tree.update(self.results)
-            return self.resolved_tree
+                return set(results.all_requirements)
+
+            except InstallationError as e:
+                raise ResolutionFailure(message=str(e))
+            finally:
+                build_tracker.cleanup()

     def _get_pipfile_markers(self, pipfile_entry):
         sys_platform = pipfile_entry.get("sys_platform")
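The hunk above collapses a one-shot `@contextlib.contextmanager` helper (`get_resolver`) into the only method that used it, removing a generator frame and making cleanup explicit. A generic before/after sketch of that refactor, with illustrative names only, not pipenv's classes:

```python
# Generic shape of the contextmanager-inlining refactor above.
import contextlib


class Tracker:
    def work(self) -> str:
        return "resolved"

    def cleanup(self) -> None:
        print("cleaned up")


@contextlib.contextmanager
def make_tracker():
    tracker = Tracker()
    try:
        yield tracker
    finally:
        tracker.cleanup()


def resolve_before() -> str:
    # One-shot context manager: an extra generator frame per call.
    with make_tracker() as tracker:
        return tracker.work()


def resolve_after() -> str:
    # Same behavior with setup inlined and cleanup made explicit.
    tracker = Tracker()
    try:
        return tracker.work()
    finally:
        tracker.cleanup()
```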
@@ -720,34 +718,25 @@ def actually_resolve_deps(
     pipfile_category,
     req_dir,
 ):
-    with warnings.catch_warnings(record=True) as warning_list:
-        resolver = Resolver.create(
-            deps,
-            project,
-            index_lookup,
-            markers_lookup,
-            sources,
-            req_dir,
-            clear,
-            pre,
-            pipfile_category,
-        )
-        resolver.resolve()
-        hashes = resolver.resolve_hashes
-        resolver.resolve_constraints()
-        results = resolver.clean_results()
-        for warning in warning_list:
-            _show_warning(
-                warning.message,
-                warning.category,
-                warning.filename,
-                warning.lineno,
-                warning.line,
-            )
+    resolver = Resolver.create(
+        deps,
+        project,
+        index_lookup,
+        markers_lookup,
+        sources,
+        req_dir,
+        clear,
+        pre,
+        pipfile_category,
+    )
+    resolver.resolve()
+    hashes = resolver.resolve_hashes
+    resolver.resolve_constraints()
+    results = resolver.clean_results()
     return (results, hashes, resolver)


-def resolve(cmd, st, project):
+def resolve(cmd, project):
     from pipenv.cmdparse import Script

     c = subprocess_run(Script.parse(cmd).cmd_args, block=False, env=os.environ.copy())
@@ -758,13 +747,13 @@ def resolve(cmd, project):
             continue
         errors += line
         if is_verbose:
-            st.console.print(line.rstrip())
+            console.print(line.rstrip())
     c.wait()
     returncode = c.poll()
     out = c.stdout.read()
     if returncode != 0:
-        st.console.print(environments.PIPENV_SPINNER_FAIL_TEXT.format("Locking Failed!"))
+        console.print(environments.PIPENV_SPINNER_FAIL_TEXT.format("Locking Failed!"))
         err.print(out.strip())
         if not is_verbose:
             err.print(err)
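`resolve()` launches the child resolver without blocking and echoes its stderr line by line while it runs. A stdlib-only sketch of that streaming pattern; pipenv's `subprocess_run(..., block=False)` is its own wrapper, assumed here to behave like `subprocess.Popen`:

```python
# Stdlib sketch of the streaming pattern resolve() uses.
import subprocess
import sys


def stream(cmd: list[str]) -> int:
    proc = subprocess.Popen(
        cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True
    )
    # Echo diagnostics line by line while the child is still running.
    for line in proc.stderr:
        print(line.rstrip(), file=sys.stderr)
    proc.wait()
    return proc.returncode


if __name__ == "__main__":
    code = stream([sys.executable, "-c", "import sys; print('working...', file=sys.stderr)"])
    print("exit:", code)
```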
@@ -840,96 +829,88 @@ def venv_resolve_deps(
         os.environ.pop("PIPENV_SITE_DIR", None)
     if extra_pip_args:
         os.environ["PIPENV_EXTRA_PIP_ARGS"] = json.dumps(extra_pip_args)
-    with console.status(
-        f"Locking {pipfile_category}...", spinner=project.s.PIPENV_SPINNER
-    ) as st:
-        # This conversion is somewhat slow on local and file-type requirements since
-        # we now download those requirements / make temporary folders to perform
-        # dependency resolution on them, so we are including this step inside the
-        # spinner context manager for the UX improvement
-        st.console.print("Building requirements...")
-        deps = convert_deps_to_pip(
-            deps, project.pipfile_sources(), include_index=True
-        )
-        # Useful for debugging and hitting breakpoints in the resolver
-        if project.s.PIPENV_RESOLVER_PARENT_PYTHON:
-            try:
-                results = resolver.resolve_packages(
-                    pre,
-                    clear,
-                    project.s.is_verbose(),
-                    system=allow_global,
-                    write=False,
-                    requirements_dir=req_dir,
-                    packages=deps,
-                    pipfile_category=pipfile_category,
-                    constraints=deps,
-                )
-                if results:
-                    st.console.print(
-                        environments.PIPENV_SPINNER_OK_TEXT.format("Success!")
-                    )
-            except Exception:
-                st.console.print(
-                    environments.PIPENV_SPINNER_FAIL_TEXT.format("Locking Failed!")
-                )
-                raise  # maybe sys.exit(1) here?
-        else:  # Default/Production behavior is to use project python's resolver
-            cmd = [
-                which("python", allow_global=allow_global),
-                Path(resolver.__file__.rstrip("co")).as_posix(),
-            ]
-            if pre:
-                cmd.append("--pre")
-            if clear:
-                cmd.append("--clear")
-            if allow_global:
-                cmd.append("--system")
-            if pipfile_category:
-                cmd.append("--category")
-                cmd.append(pipfile_category)
-            if project.s.is_verbose():
-                cmd.append("--verbose")
-            target_file = tempfile.NamedTemporaryFile(
-                prefix="resolver", suffix=".json", delete=False
-            )
-            target_file.close()
-            cmd.extend(["--write", make_posix(target_file.name)])
-
-            with tempfile.NamedTemporaryFile(
-                mode="w+", prefix="pipenv", suffix="constraints.txt", delete=False
-            ) as constraints_file:
-                for dep_name, pip_line in deps.items():
-                    constraints_file.write(f"{dep_name}, {pip_line}\n")
-                cmd.append("--constraints-file")
-                cmd.append(constraints_file.name)
-            st.console.print("Resolving dependencies...")
-            c = resolve(cmd, st, project=project)
-            if c.returncode == 0:
-                try:
-                    with open(target_file.name) as fh:
-                        results = json.load(fh)
-                except (IndexError, json.JSONDecodeError):
-                    err.print(c.stdout.strip())
-                    err.print(c.stderr.strip())
-                    if os.path.exists(target_file.name):
-                        os.unlink(target_file.name)
-                    raise RuntimeError("There was a problem with locking.")
-                if os.path.exists(target_file.name):
-                    os.unlink(target_file.name)
-                st.console.print(
-                    environments.PIPENV_SPINNER_OK_TEXT.format("Success!")
-                )
-                if not project.s.is_verbose() and c.stderr.strip():
-                    err.print(
-                        f"Warning: {c.stderr.strip()}", overflow="ignore", crop=False
-                    )
-            else:
-                st.console.print(
-                    environments.PIPENV_SPINNER_FAIL_TEXT.format("Locking Failed!")
-                )
-                err.print(f"Output: {c.stdout.strip()}")
-                err.print(f"Error: {c.stderr.strip()}")
+
+    # This conversion is somewhat slow on local and file-type requirements since
+    # we now download those requirements / make temporary folders to perform
+    # dependency resolution on them, so we are including this step inside the
+    # spinner context manager for the UX improvement
+    console.print("Building requirements...")
+    deps = convert_deps_to_pip(deps, project.pipfile_sources(), include_index=True)
+    # Useful for debugging and hitting breakpoints in the resolver
+    if project.s.PIPENV_RESOLVER_PARENT_PYTHON:
+        try:
+            results = resolver.resolve_packages(
+                pre,
+                clear,
+                project.s.is_verbose(),
+                system=allow_global,
+                write=False,
+                requirements_dir=req_dir,
+                packages=deps,
+                pipfile_category=pipfile_category,
+                constraints=deps,
+            )
+            if results:
+                console.print(environments.PIPENV_SPINNER_OK_TEXT.format("Success!"))
+        except Exception:
+            console.print(
+                environments.PIPENV_SPINNER_FAIL_TEXT.format("Locking Failed!")
+            )
+            raise  # maybe sys.exit(1) here?
+    else:  # Default/Production behavior is to use project python's resolver
+        cmd = [
+            which("python", allow_global=allow_global),
+            Path(resolver.__file__.rstrip("co")).as_posix(),
+        ]
+        if pre:
+            cmd.append("--pre")
+        if clear:
+            cmd.append("--clear")
+        if allow_global:
+            cmd.append("--system")
+        if pipfile_category:
+            cmd.append("--category")
+            cmd.append(pipfile_category)
+        if project.s.is_verbose():
+            cmd.append("--verbose")
+        target_file = tempfile.NamedTemporaryFile(
+            prefix="resolver", suffix=".json", delete=False
+        )
+        target_file.close()
+        cmd.extend(["--write", make_posix(target_file.name)])
+
+        with tempfile.NamedTemporaryFile(
+            mode="w+", prefix="pipenv", suffix="constraints.txt", delete=False
+        ) as constraints_file:
+            for dep_name, pip_line in deps.items():
+                constraints_file.write(f"{dep_name}, {pip_line}\n")
+            cmd.append("--constraints-file")
+            cmd.append(constraints_file.name)
+        console.print("Resolving dependencies...")
+        c = resolve(cmd, project=project)
+        if c.returncode == 0:
+            try:
+                with open(target_file.name) as fh:
+                    results = json.load(fh)
+            except (IndexError, json.JSONDecodeError):
+                err.print(c.stdout.strip())
+                err.print(c.stderr.strip())
+                if os.path.exists(target_file.name):
+                    os.unlink(target_file.name)
+                raise RuntimeError("There was a problem with locking.")
+            if os.path.exists(target_file.name):
+                os.unlink(target_file.name)
+            console.print(environments.PIPENV_SPINNER_OK_TEXT.format("Success!"))
+            if not project.s.is_verbose() and c.stderr.strip():
+                err.print(
+                    f"Warning: {c.stderr.strip()}", overflow="ignore", crop=False
+                )
+        else:
+            console.print(
+                environments.PIPENV_SPINNER_FAIL_TEXT.format("Locking Failed!")
+            )
+            err.print(f"Output: {c.stdout.strip()}")
+            err.print(f"Error: {c.stderr.strip()}")
     if lockfile_category not in lockfile:
         lockfile[lockfile_category] = {}
     return prepare_lockfile(
diff --git a/pipenv/utils/shell.py b/pipenv/utils/shell.py
index da4811a546..11f05649c1 100644
--- a/pipenv/utils/shell.py
+++ b/pipenv/utils/shell.py
@@ -67,14 +67,17 @@ def looks_like_dir(path):
     return any(sep in path for sep in seps)


+@lru_cache(maxsize=100)
 def load_path(python):
     import json
     from pathlib import Path

     python = Path(python).as_posix()
+
     c = subprocess_run([python, "-c", "import json, sys; print(json.dumps(sys.path))"])
     if c.returncode == 0:
-        return json.loads(c.stdout.strip())
+        paths = json.loads(c.stdout.strip())
+        return paths
     else:
         return []
diff --git a/pipenv/utils/toml.py b/pipenv/utils/toml.py
index b97d32d3bd..1cc32f07b8 100644
--- a/pipenv/utils/toml.py
+++ b/pipenv/utils/toml.py
@@ -74,7 +74,11 @@ def convert_toml_table(section):
         return result

     is_tomlkit_parsed = isinstance(parsed, Container)
-    for section in project.get_package_categories():
+    if project.pipfile_exists:
+        package_categories = project.get_package_categories()
+    else:
+        package_categories = ["packages", "dev-packages"]
+    for section in package_categories:
         table_data = parsed.get(section, {})
         if table_data is None:
             continue
diff --git a/tests/integration/test_install_basic.py b/tests/integration/test_install_basic.py
index 37961ae712..97c8b0cade 100644
--- a/tests/integration/test_install_basic.py
+++ b/tests/integration/test_install_basic.py
@@ -402,7 +402,8 @@ def test_install_creates_pipfile(pipenv_instance_pypi):
         assert c.returncode == 0
         assert os.path.isfile(p.pipfile_path)
         python_version = str(sys.version_info.major) + "." + str(sys.version_info.minor)
-        assert p.pipfile["requires"] == {"python_version": python_version}
+        python_full_version = str(sys.version_info.major) + "." + str(sys.version_info.minor) + "." + str(sys.version_info.micro)
+        assert p.pipfile["requires"] == {"python_version": python_version, "python_full_version": python_full_version}


 @pytest.mark.basic
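The shell.py hunk above memoizes `load_path` per interpreter path with `functools.lru_cache`, so repeated queries for the same interpreter skip the subprocess entirely. A self-contained sketch of the same idea; the hunk does not show the `lru_cache` import, which is assumed to already exist in shell.py:

```python
# Self-contained version of the memoized interpreter-path lookup.
import json
import subprocess
from functools import lru_cache


@lru_cache(maxsize=100)
def load_path(python: str) -> tuple[str, ...]:
    # One subprocess per distinct interpreter path; later calls hit the cache.
    c = subprocess.run(
        [python, "-c", "import json, sys; print(json.dumps(sys.path))"],
        capture_output=True, text=True,
    )
    if c.returncode == 0:
        # Return a tuple: lru_cache shares the result across callers, and a
        # mutable list mutated by one caller would be seen by the next.
        return tuple(json.loads(c.stdout.strip()))
    return ()
```

One caveat of the pipenv version is that it caches and returns a shared list, so a caller that mutates the result would poison the cache; returning an immutable tuple, as sketched here, avoids that.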