diff --git a/Lib/__np__/common.py b/Lib/__np__/common.py
index 3216bf5bbe6865..720536d7c6922b 100644
--- a/Lib/__np__/common.py
+++ b/Lib/__np__/common.py
@@ -220,6 +220,28 @@ def download_extract(url, destination):
     extract_archive(downloaded_file, destination)
 
 
+def run(*args, **kwargs):
+    import subprocess
+
+    stdin = kwargs.pop("stdin", None)
+    quiet = kwargs.pop("quiet", False)
+    assert not kwargs
+
+    env = os.environ.copy()
+    # Don't use the pip path customization here. Just replicate our current path.
+    env["PYTHONPATH"] = os.pathsep.join([x for x in sys.path if not x.endswith(os.path.sep + "site")])
+
+    p = subprocess.Popen(
+        args,
+        universal_newlines=True,
+        stdin=stdin,
+        env=env,
+    )
+
+    p.wait()
+    if p.returncode != 0:
+        raise subprocess.CalledProcessError(p.returncode, args)
+
 def run_with_output(*args, **kwargs):
     import subprocess
 
@@ -314,7 +336,7 @@ def find_build_tool_exe(tool_name, exe):
 
 
 def run_build_tool_exe(tool_name, exe, *args, **kwargs):
-    return run_with_output(find_build_tool_exe(tool_name, exe), *args, **kwargs)
+    run(find_build_tool_exe(tool_name, exe), *args, **kwargs)
 
 
 def apply_patch(patch_file, directory):
diff --git a/Lib/__np__/metabuild.py b/Lib/__np__/metabuild.py
index d2091fb5829657..d94692aa3338b0 100644
--- a/Lib/__np__/metabuild.py
+++ b/Lib/__np__/metabuild.py
@@ -27,7 +27,7 @@ def build_wheel(
         with open(os.path.join("..", "script.json")) as f:
             metadata = json.load(f)
 
-        __np__.packaging.build_package(metadata['name'], metadata['version'], metadata['script_metadata'], wheel_directory)
+        return __np__.packaging.build_package(metadata['name'], metadata['version'], metadata['script_metadata'], wheel_directory)
 
 
 managed_build = ManagedBackend()
diff --git a/Lib/ensurepip/__init__.py b/Lib/ensurepip/__init__.py
index 926bb7f6c0b857..e0d1a92a9bc546 100644
--- a/Lib/ensurepip/__init__.py
+++ b/Lib/ensurepip/__init__.py
@@ -11,7 +11,7 @@
 __all__ = ["version", "bootstrap", "_PROJECTS", "_get_packages"]
 _PACKAGE_NAMES = ('setuptools', 'packaging', 'pip', 'wheel')
 _SETUPTOOLS_VERSION = "75.2.0.post20241019"
-_PACKAGING_VERSION = "24.2.dev0"
+_PACKAGING_VERSION = "24.2"
 _PIP_VERSION = "23.3.2"
 _WHEEL_VERSION = "0.42.0"
 _PROJECTS = [
diff --git a/Lib/ensurepip/_bundled/packaging-24.2.dev0-py3-none-any.whl b/Lib/ensurepip/_bundled/packaging-24.2-py3-none-any.whl
similarity index 57%
rename from Lib/ensurepip/_bundled/packaging-24.2.dev0-py3-none-any.whl
rename to Lib/ensurepip/_bundled/packaging-24.2-py3-none-any.whl
index 4521b5554e06b1..414d19024837a5 100644
Binary files a/Lib/ensurepip/_bundled/packaging-24.2.dev0-py3-none-any.whl and b/Lib/ensurepip/_bundled/packaging-24.2-py3-none-any.whl differ
diff --git a/Lib/pip.py b/Lib/pip.py
index e500f09f063856..8aed4ae398538e 100644
--- a/Lib/pip.py
+++ b/Lib/pip.py
@@ -68,7 +68,7 @@ def our_load_pyproject_toml(use_pep517, pyproject_toml, setup_py, req_name):
     has_setup = os.path.isfile(setup_py)
 
     # We will be taking over the build process.
-    if os.path.isfile(os.path.join(os.path.dirname(os.path.dirname(pyproject_toml)), "script.json")):
+    if not req_name.startswith("file://") and os.path.isfile(os.path.join(os.path.dirname(os.path.dirname(pyproject_toml)), "script.json")):
         with open(os.path.join(os.path.dirname(os.path.dirname(pyproject_toml)), "script.json"), 'r') as f:
             data = json.load(f)
             requires = []
@@ -82,7 +82,7 @@ def our_load_pyproject_toml(use_pep517, pyproject_toml, setup_py, req_name):
             return None
 
     return pip._internal.pyproject.BuildSystemDetails(
         [x for x in result.requires if re.split(r'[><=]', x, 1)[0] not in builtin_packages],
-        result.backend, result.check, [os.path.dirname(__file__), real_pip_dir] + result.backend_path)
+        result.backend, result.check, sys.path + result.backend_path)
 
 
@@ -165,7 +165,7 @@ def get_runnable_pip() -> str:
 def our_get_requirements(self, args, options, finder, session):
     reqs = orig_get_requirements(self, args, options, finder, session)
     # This should prevent accidentally updating the pinned bundled packages.
-    return [x for x in reqs if x.req.name not in builtin_packages]
+    return [x for x in reqs if x.req is None or x.req.name not in builtin_packages]
 
 pip._internal.cli.req_command.RequirementCommand.get_requirements = our_get_requirements
 
@@ -198,7 +198,10 @@ def install(
     orig_prepare_distribution = pip._internal.resolution.resolvelib.candidates.LinkCandidate._prepare_distribution
 
     def _prepare_distribution(self):
-        build_script = __np__.packaging.find_build_script_for_package(self.name, self.version.public)
+        try:
+            build_script = __np__.packaging.find_build_script_for_package(self.name, self.version.public)
+        except Exception:
+            build_script = None
 
         if build_script is not None:
             with open(os.path.join(self._factory.preparer.build_dir, 'script.json'), 'w') as f:
diff --git a/Lib/rebuildpython.py b/Lib/rebuildpython.py
index ff18a9d51c1264..3ab67092e5bc88 100644
--- a/Lib/rebuildpython.py
+++ b/Lib/rebuildpython.py
@@ -90,6 +90,21 @@ def get_lib_hash():
     return hashlib.sha256(hash_string.encode('ascii')).hexdigest()
 
 
+
+def is_lib_valid(path):
+    if os.path.isfile(path):
+        if __np__.getToolsInstallDir() in path:
+            # Disqualify libs from inside build tools.
+            return False
+
+        with open(path, "rb") as f:
+            if f.read(7) == b"!<arch>":
+                return True
+        return False
+    else:
+        return True
+
+
 def run_rebuild():
     try:
         with open(os.path.join(interpreter_prefix, "link.json"), 'r') as f:
@@ -398,7 +413,7 @@ def run_rebuild():
         if final_path not in final_lib_list:
             final_lib_list.append(final_path)
 
-    link_libs = final_lib_list
+    link_libs = [x for x in final_lib_list if is_lib_valid(x)]
 
     compiler.compile(
         ["python.c"], output_dir=build_dir, include_dirs=include_dirs, macros=macros