PK`ZZZw9�wmicropip/__init__.pyfrom .package_manager import PackageManager try: from ._version import __version__ except ImportError: pass _package_manager_singleton = PackageManager() install = _package_manager_singleton.install set_index_urls = _package_manager_singleton.set_index_urls list = _package_manager_singleton.list freeze = _package_manager_singleton.freeze add_mock_package = _package_manager_singleton.add_mock_package list_mock_packages = _package_manager_singleton.list_mock_packages remove_mock_package = _package_manager_singleton.remove_mock_package uninstall = _package_manager_singleton.uninstall __all__ = [ "install", "list", "freeze", "add_mock_package", "list_mock_packages", "remove_mock_package", "uninstall", "set_index_urls", "__version__", ] PK`ZZZ�t��micropip/_mock_package.pyimport importlib import importlib.abc import importlib.metadata import importlib.util import shutil import site import sys from collections.abc import Callable from pathlib import Path from textwrap import dedent MOCK_INSTALL_NAME_MEMORY = "micropip in-memory mock package" MOCK_INSTALL_NAME_PERSISTENT = "micropip mock package" class MockDistribution(importlib.metadata.Distribution): def __init__(self, file_dict, modules): self.file_dict = file_dict self.modules = modules def read_text(self, filename): """Attempt to load metadata file given by the name. :param filename: The name of the file in the distribution info. :return: The text if found, otherwise None. """ if filename in self.file_dict: return self.file_dict[filename] else: return None def locate_file(self, path): """ Given a path to a file in this distribution, return a path to it. 
""" return None _mock_modules: dict[str, str | Callable] = {} _mock_distributions: dict[str, MockDistribution] = {} class _MockModuleFinder(importlib.abc.MetaPathFinder, importlib.abc.Loader): def __init__(self): pass def find_distributions(self, context): if context.name in _mock_distributions: return [_mock_distributions[context.name]] elif context.name is None: return _mock_distributions.values() else: return [] def find_module(self, fullname, path=None): spec = self.find_spec(fullname, path) if spec is None: return None return spec def create_module(self, spec): if spec.name in _mock_modules: from types import ModuleType module = ModuleType(spec.name) module.__path__ = "/micropip_mocks/" + module.__name__.replace(".", "/") return module def exec_module(self, module): init_object = _mock_modules[module.__name__] if isinstance(init_object, str): # run module init code in the module exec(dedent(init_object), module.__dict__) elif callable(init_object): # run module init function init_object(module) def find_spec(self, fullname, path=None, target=None): if fullname not in _mock_modules.keys(): return None spec = importlib.util.spec_from_loader(fullname, self) return spec _finder = _MockModuleFinder() def _add_in_memory_distribution(name, metafiles, modules): if _finder not in sys.meta_path: sys.meta_path = [_finder] + sys.meta_path _mock_distributions[name] = MockDistribution(metafiles, modules) for name, obj in modules.items(): _add_mock_module(name, obj) def _add_mock_module(name, obj): _mock_modules[name] = obj def _remove_in_memory_distribution(name): if name in _mock_distributions: for module in _mock_distributions[name].modules.keys(): if module in sys.modules: del sys.modules[module] del _mock_modules[module] del _mock_distributions[name] def add_mock_package( name: str, version: str, *, modules: dict[str, str | None] | None = None, persistent: bool = False, ) -> None: if modules is None: # make a single mock module with this name modules = {name: ""} # make 
the metadata METADATA = f"""Metadata-Version: 1.1 Name: {name} Version: {version} Summary: {name} mock package generated by micropip Author-email: {name}@micro.pip.non-working-fake-host """ for module_name in modules.keys(): METADATA += f"Provides: {module_name}\n" if persistent: # make empty mock modules with the requested names in user site packages site_packages = Path(site.getsitepackages()[0]) # should exist already, but just in case site_packages.mkdir(parents=True, exist_ok=True) dist_dir = site_packages / f"{name}-{version}.dist-info" dist_dir.mkdir(parents=True, exist_ok=False) metadata_file = dist_dir / "METADATA" record_file = dist_dir / "RECORD" installer_file = dist_dir / "INSTALLER" file_list = [metadata_file, installer_file] metadata_file.write_text(METADATA) installer_file.write_text(MOCK_INSTALL_NAME_PERSISTENT) for module_name, content in modules.items(): if not content: content = "" content = dedent(content) path_parts = module_name.split(".") dir_path = Path(site_packages, *path_parts) dir_path.mkdir(exist_ok=True, parents=True) init_file = dir_path / "__init__.py" file_list.append(init_file) init_file.write_text(content) with open(record_file, "w") as f: for file in file_list: f.write(f"{file},,{file.stat().st_size}\n") f.write(f"{record_file},,\n") else: # make memory mocks of files INSTALLER = MOCK_INSTALL_NAME_MEMORY metafiles = {"METADATA": METADATA, "INSTALLER": INSTALLER} _add_in_memory_distribution(name, metafiles, modules) importlib.invalidate_caches() def list_mock_packages() -> list[str]: mock_packages = [ dist.name for dist in importlib.metadata.distributions() if dist.read_text("INSTALLER") in (MOCK_INSTALL_NAME_PERSISTENT, MOCK_INSTALL_NAME_MEMORY) ] return mock_packages def remove_mock_package(name: str) -> None: d = importlib.metadata.distribution(name) installer = d.read_text("INSTALLER") if installer == MOCK_INSTALL_NAME_MEMORY: _remove_in_memory_distribution(name) return elif installer is None or installer != 
MOCK_INSTALL_NAME_PERSISTENT: raise ValueError( f"Package {name} doesn't seem to be a micropip mock. \n" "Are you sure it was installed with micropip?" ) # a real mock package - kill it # remove all files folders: set[Path] = set() if d.files is not None: for file in d.files: p = Path(file.locate()) p.unlink() folders.add(p.parent) # delete all folders except site_packages # (that check is just to avoid killing # undesirable things in case of weird micropip errors) site_packages = Path(site.getsitepackages()[0]) for f in folders: if f != site_packages: shutil.rmtree(f) PK`ZZZI׸���micropip/_utils.pyimport functools import json from importlib.metadata import Distribution from pathlib import Path from sysconfig import get_config_var, get_platform from packaging.requirements import Requirement from packaging.tags import Tag from packaging.tags import sys_tags as sys_tags_orig from packaging.utils import BuildTag, InvalidWheelFilename, canonicalize_name from packaging.utils import parse_wheel_filename as parse_wheel_filename_orig from packaging.version import InvalidVersion, Version from ._compat import REPODATA_PACKAGES def get_dist_info(dist: Distribution) -> Path: """ Get the .dist-info directory of a distribution. """ return dist._path # type: ignore[attr-defined] def get_root(dist: Distribution) -> Path: """ Get the root directory where a package is installed. This is normally the site-packages directory. """ return get_dist_info(dist).parent def get_files_in_distribution(dist: Distribution) -> set[Path]: """ Get a list of files in a distribution, using the metadata. Parameters ---------- dist Distribution to get files from. Returns ------- A list of files in the distribution. """ root = get_root(dist) dist_info = get_dist_info(dist) files_to_remove = set() pkg_files = dist.files or [] metadata_files = dist_info.glob("*") for file in pkg_files: abspath = (root / file).resolve() files_to_remove.add(abspath) # Also add all files in the .dist-info directory. 
# Since micropip adds some extra files there, we need to remove them too. files_to_remove.update(metadata_files) return files_to_remove @functools.cache def sys_tags() -> tuple[Tag, ...]: new_tags = [] abi_version = get_config_var("PYODIDE_ABI_VERSION") pyodide_platform_tag = f"pyodide_{abi_version}_wasm32" for tag in sys_tags_orig(): if "emscripten" in tag.platform: new_tags.append(Tag(tag.interpreter, tag.abi, pyodide_platform_tag)) new_tags.append(tag) return tuple(new_tags) @functools.cache def parse_wheel_filename( filename: str, ) -> tuple[str, Version, BuildTag, frozenset[Tag]]: return parse_wheel_filename_orig(filename) # TODO: Move these helper functions back to WheelInfo def parse_version(filename: str) -> Version: return parse_wheel_filename(filename)[1] def parse_tags(filename: str) -> frozenset[Tag]: return parse_wheel_filename(filename)[3] def best_compatible_tag_index(tags: frozenset[Tag]) -> int | None: """Get the index of the first tag in ``packaging.tags.sys_tags()`` that a wheel has. Since ``packaging.tags.sys_tags()`` is sorted from most specific ("best") to most general ("worst") compatibility, this index douples as a priority rank: given two compatible wheels, the one whose best index is closer to zero should be installed. Parameters ---------- tags The tags to check. Returns ------- The index, or ``None`` if this wheel has no compatible tags. """ for index, tag in enumerate(sys_tags()): if tag in tags: return index return None def is_package_compatible(filename: str) -> bool: """ Check if a package is compatible with the current platform. Parameters ---------- filename Filename of the package to check. 
""" if not filename.endswith(".whl"): return False if filename.endswith("py3-none-any.whl"): return True try: tags = parse_tags(filename) except (InvalidVersion, InvalidWheelFilename): return False return best_compatible_tag_index(tags) is not None def check_compatible(filename: str) -> None: """ Check if a package is compatible with the current platform. If not, raise an exception with a error message that explains why. """ compatible = is_package_compatible(filename) if compatible: return # Not compatible, now we need to figure out why. try: tags = parse_tags(filename) except InvalidWheelFilename: raise ValueError(f"Wheel filename is invalid: {filename!r}") from None except InvalidVersion: raise ValueError(f"Wheel version is invalid: {filename!r}") from None tag: Tag = next(iter(tags)) if "emscripten" not in tag.platform: raise ValueError( f"Wheel platform '{tag.platform}' is not compatible with " f"Pyodide's platform '{get_platform()}'" ) def platform_to_version(platform: str) -> str: return ( platform.replace("-", "_") .removeprefix("emscripten_") .removesuffix("_wasm32") .replace("_", ".") ) wheel_emscripten_version = platform_to_version(tag.platform) pyodide_emscripten_version = platform_to_version(get_platform()) if wheel_emscripten_version != pyodide_emscripten_version: raise ValueError( f"Wheel was built with Emscripten v{wheel_emscripten_version} but " f"Pyodide was built with Emscripten v{pyodide_emscripten_version}" ) abi_incompatible = True from sys import version_info version = f"{version_info.major}{version_info.minor}" abis = ["abi3", f"cp{version}"] for tag in tags: if tag.abi in abis: abi_incompatible = False break if abi_incompatible: abis_string = ",".join({tag.abi for tag in tags}) raise ValueError( f"Wheel abi '{abis_string}' is not supported. Supported abis are 'abi3' and 'cp{version}'." 
) raise ValueError(f"Wheel interpreter version '{tag.interpreter}' is not supported.") def fix_package_dependencies( package_name: str, *, extras: list[str | None] | None = None ) -> None: """Check and fix the list of dependencies for this package If you have manually installed a package and dependencies from wheels, the dependencies will not be correctly setup in the package list or the pyodide lockfile generated by freezing. This method checks if the dependencies are correctly set in the package list and will add missing dependencies. Parameters ---------- package_name (string): The name of the package to check. extras (list): List of extras for this package. """ if package_name in REPODATA_PACKAGES: # don't check things that are in original repository return dist = Distribution.from_name(package_name) package_requires = dist.requires if package_requires is None: # no dependencies - we're good to go return url = dist.read_text("PYODIDE_URL") # If it wasn't installed with micropip / pyodide, then we # can't do anything with it. 
if url is None: return # Get current list of pyodide requirements requires = dist.read_text("PYODIDE_REQUIRES") if requires: depends = json.loads(requires) else: depends = [] if extras is None: extras = [None] else: extras = extras + [None] for r in package_requires: req = Requirement(r) req_extras = req.extras req_marker = req.marker req_name = canonicalize_name(req.name) needs_requirement = False if req_marker is not None: for e in extras: if req_marker.evaluate(None if e is None else {"extra": e}): needs_requirement = True break else: needs_requirement = True if needs_requirement: fix_package_dependencies(req_name, extras=list(req_extras)) if req_name not in depends: depends.append(req_name) # write updated depends to PYODIDE_DEPENDS (get_dist_info(dist) / "PYODIDE_REQUIRES").write_text( json.dumps(sorted(x for x in depends)) ) PK`ZZZ�����micropip/_version.py# file generated by setuptools_scm # don't change, don't track in version control TYPE_CHECKING = False if TYPE_CHECKING: from typing import Tuple, Union VERSION_TUPLE = Tuple[Union[int, str], ...] 
else: VERSION_TUPLE = object version: str __version__: str __version_tuple__: VERSION_TUPLE version_tuple: VERSION_TUPLE __version__ = version = '0.8.0' __version_tuple__ = version_tuple = (0, 8, 0) PK`ZZZ��:��micropip/constants.pyFAQ_URLS = { "cant_find_wheel": "https://pyodide.org/en/stable/usage/faq.html#why-can-t-micropip-find-a-pure-python-wheel-for-a-package" } PK`ZZZӃ��micropip/freeze.pyimport importlib.metadata import itertools import json from collections.abc import Iterator from copy import deepcopy from typing import Any from packaging.utils import canonicalize_name from ._utils import fix_package_dependencies def freeze_lockfile( lockfile_packages: dict[str, dict[str, Any]], lockfile_info: dict[str, str] ) -> str: return json.dumps(freeze_data(lockfile_packages, lockfile_info)) def freeze_data( lockfile_packages: dict[str, dict[str, Any]], lockfile_info: dict[str, str] ) -> dict[str, Any]: pyodide_packages = deepcopy(lockfile_packages) pip_packages = load_pip_packages() package_items = itertools.chain(pyodide_packages.items(), pip_packages) # Sort packages = dict(sorted(package_items)) return { "info": lockfile_info, "packages": packages, } def load_pip_packages() -> Iterator[tuple[str, dict[str, Any]]]: return map( package_item, filter(is_valid, map(load_pip_package, importlib.metadata.distributions())), ) def package_item(entry: dict[str, Any]) -> tuple[str, dict[str, Any]]: return canonicalize_name(entry["name"]), entry def is_valid(entry: dict[str, Any]) -> bool: return entry["file_name"] is not None def load_pip_package(dist: importlib.metadata.Distribution) -> dict[str, Any]: name = dist.name version = dist.version url = dist.read_text("PYODIDE_URL") sha256 = dist.read_text("PYODIDE_SHA256") imports = (dist.read_text("top_level.txt") or "").split() requires = dist.read_text("PYODIDE_REQUIRES") if not requires: fix_package_dependencies(name) requires = dist.read_text("PYODIDE_REQUIRES") depends = json.loads(requires or "[]") return dict( name=name, 
version=version, file_name=url, install_dir="site", sha256=sha256, imports=imports, depends=depends, ) PK`ZZZ�҄U U micropip/install.pyimport asyncio import importlib from collections.abc import Coroutine from pathlib import Path from typing import Any from packaging.markers import default_environment from ._compat import loadPackage, to_js from .constants import FAQ_URLS from .logging import setup_logging from .transaction import Transaction async def install( requirements: str | list[str], index_urls: list[str] | str, keep_going: bool = False, deps: bool = True, credentials: str | None = None, pre: bool = False, *, verbose: bool | int | None = None, ) -> None: with setup_logging().ctx_level(verbose) as logger: ctx = default_environment() if isinstance(requirements, str): requirements = [requirements] fetch_kwargs = dict() if credentials: fetch_kwargs["credentials"] = credentials # Note: getsitepackages is not available in a virtual environment... # See https://github.com/pypa/virtualenv/issues/228 (issue is closed but # problem is not fixed) from site import getsitepackages wheel_base = Path(getsitepackages()[0]) transaction = Transaction( ctx=ctx, # type: ignore[arg-type] ctx_extras=[], keep_going=keep_going, deps=deps, pre=pre, fetch_kwargs=fetch_kwargs, verbose=verbose, index_urls=index_urls, ) await transaction.gather_requirements(requirements) if transaction.failed: failed_requirements = ", ".join([f"'{req}'" for req in transaction.failed]) raise ValueError( f"Can't find a pure Python 3 wheel for: {failed_requirements}\n" f"See: {FAQ_URLS['cant_find_wheel']}\n" ) package_names = [pkg.name for pkg in transaction.pyodide_packages] + [ pkg.name for pkg in transaction.wheels ] logger.debug( "Installing packages %r and wheels %r ", transaction.pyodide_packages, [w.filename for w in transaction.wheels], ) if package_names: logger.info("Installing collected packages: %s", ", ".join(package_names)) wheel_promises: list[Coroutine[Any, Any, None] | asyncio.Task[Any]] = 
[] # Install built-in packages pyodide_packages = transaction.pyodide_packages if len(pyodide_packages): # Note: branch never happens in out-of-browser testing because in # that case REPODATA_PACKAGES is empty. wheel_promises.append( asyncio.ensure_future( loadPackage(to_js([name for [name, _, _] in pyodide_packages])) ) ) # Now install PyPI packages for wheel in transaction.wheels: # detect whether the wheel metadata is from PyPI or from custom location # wheel metadata from PyPI has SHA256 checksum digest. wheel_promises.append(wheel.install(wheel_base)) await asyncio.gather(*wheel_promises) packages = [ f"{pkg.name}-{pkg.version}" for pkg in transaction.pyodide_packages ] + [f"{pkg.name}-{pkg.version}" for pkg in transaction.wheels] if packages: logger.info("Successfully installed %s", ", ".join(packages)) importlib.invalidate_caches() PK`ZZZ��Aɿ�micropip/list.pyimport importlib.metadata from typing import Any from ._compat import loadedPackages from .package import PackageDict, PackageMetadata def list_installed_packages( lockfile_packages: dict[str, dict[str, Any]] ) -> PackageDict: # Add packages that are loaded through pyodide.loadPackage packages = PackageDict() for dist in importlib.metadata.distributions(): name = dist.name version = dist.version source = dist.read_text("PYODIDE_SOURCE") if source is None: # source is None if PYODIDE_SOURCE does not exist. In this case the # wheel was installed manually, not via `pyodide.loadPackage` or # `micropip`. 
continue packages[name] = PackageMetadata( name=name, version=version, source=source, ) for name, pkg_source in loadedPackages.to_py().items(): if name in packages: continue if name in lockfile_packages: version = lockfile_packages[name]["version"] source_ = "pyodide" if pkg_source != "default channel": # Pyodide package loaded from a custom URL source_ = pkg_source else: # TODO: calculate version from wheel metadata version = "unknown" source_ = pkg_source packages[name] = PackageMetadata(name=name, version=version, source=source_) return packages PK`ZZZ�qmicropip/logging.pyimport logging import sys from collections.abc import Generator from contextlib import contextmanager from typing import Any _logger: logging.Logger | None = None _indentation: int = 0 @contextmanager def indent_log(num: int = 2) -> Generator[None, None, None]: """ A context manager which will cause the log output to be indented for any log messages emitted inside it. """ global _indentation _indentation += num try: yield finally: _indentation -= num # borrowed from pip._internal.utils.logging class IndentingFormatter(logging.Formatter): default_time_format = "%Y-%m-%dT%H:%M:%S" def __init__( self, *args: Any, add_timestamp: bool = False, **kwargs: Any, ) -> None: """ A logging.Formatter that obeys the indent_log() context manager. :param add_timestamp: A bool indicating output lines should be prefixed with their record's timestamp. """ self.add_timestamp = add_timestamp super().__init__(*args, **kwargs) def get_message_start(self, formatted: str, levelno: int) -> str: """ Return the start of the formatted log message (not counting the prefix to add to each line). """ if levelno < logging.WARNING: return "" if levelno < logging.ERROR: return "WARNING: " return "ERROR: " def format(self, record: logging.LogRecord) -> str: """ Calls the standard formatter, but will indent all of the log message lines by our current indentation level. 
""" global _indentation formatted = super().format(record) message_start = self.get_message_start(formatted, record.levelno) formatted = message_start + formatted prefix = "" if self.add_timestamp: prefix = f"{self.formatTime(record)} " prefix += " " * _indentation formatted = "".join([prefix + line for line in formatted.splitlines(True)]) return formatted def _set_formatter_once() -> None: global _logger if _logger is not None: return _logger = logging.getLogger("micropip") ch = logging.StreamHandler(sys.stdout) ch.setLevel(logging.NOTSET) ch.setFormatter(IndentingFormatter()) _logger.addHandler(ch) class LoggerWrapper: # need a default value because of __getattr__/__setattr__ logger: logging.Logger = None # type: ignore[assignment] def __init__(self, logger: logging.Logger): # Bypassing __setattr__ by setting attributes directly in __dict__ self.__dict__["logger"] = logger def __getattr__(self, attr): return getattr(self.logger, attr) def __setattr__(self, attr, value): return setattr(self.logger, attr, value) @contextmanager def ctx_level(self, verbosity: int | bool | None = None): cur_level = self.logger.level if verbosity is not None: if verbosity > 2: raise ValueError( "verbosity should be in 0,1,2, False, True, if you are " "directly setting level using logging.LEVEL, please " "directly call `setLevel` on the logger." 
) elif verbosity >= 2: level_number = logging.DEBUG elif verbosity == 1: # True == 1 level_number = logging.INFO else: level_number = logging.WARNING self.logger.setLevel(level_number) try: yield self.logger finally: self.logger.setLevel(cur_level) def setup_logging() -> LoggerWrapper: _set_formatter_once() assert _logger return LoggerWrapper(_logger) # TODO: expose this somehow def set_log_level(verbosity: int | bool): if verbosity >= 2: level_number = logging.DEBUG elif verbosity == 1: # True == 1 level_number = logging.INFO else: level_number = logging.WARNING assert _logger _logger.setLevel(level_number) PK`ZZZ١�99micropip/metadata.py""" This is a stripped down version of pip._vendor.pkg_resources.DistInfoDistribution """ import re import zipfile from collections.abc import Iterable from pathlib import Path from packaging.requirements import Requirement from packaging.utils import canonicalize_name def safe_name(name): """Convert an arbitrary string to a standard distribution name Any runs of non-alphanumeric/. characters are replaced with a single '-'. """ return re.sub("[^A-Za-z0-9.]+", "-", name) def safe_extra(extra): """Convert an arbitrary string to a standard 'extra' name Any runs of non-alphanumeric characters are replaced with a single '_', and the result is always lowercased. """ return re.sub("[^A-Za-z0-9.-]+", "_", extra).lower() # Vendored from pip class UnsupportedWheel(Exception): """Unsupported wheel.""" def wheel_dist_info_dir(source: zipfile.ZipFile, name: str) -> str: """Returns the name of the contained .dist-info directory. Raises UnsupportedWheel if not found, >1 found, or it doesn't match the provided name. 
""" # Zip file path separators must be / subdirs = {p.split("/", 1)[0] for p in source.namelist()} info_dirs = [s for s in subdirs if s.endswith(".dist-info")] if not info_dirs: raise UnsupportedWheel(f".dist-info directory not found in wheel {name!r}") if len(info_dirs) > 1: raise UnsupportedWheel( "multiple .dist-info directories found in wheel {!r}: {}".format( name, ", ".join(info_dirs) ) ) info_dir = info_dirs[0] info_dir_name = canonicalize_name(info_dir) canonical_name = canonicalize_name(name) if not info_dir_name.startswith(canonical_name): raise UnsupportedWheel( f".dist-info directory {info_dir!r} does not start with {canonical_name!r}" ) return info_dir class Metadata: """ Represents a metadata file in a wheel """ PKG_INFO = "METADATA" REQUIRES_DIST = "Requires-Dist:" PROVIDES_EXTRA = "Provides-Extra:" def __init__(self, metadata: Path | zipfile.Path | bytes): self.metadata: list[str] = [] if isinstance(metadata, Path | zipfile.Path): self.metadata = metadata.read_text(encoding="utf-8").splitlines() elif isinstance(metadata, bytes): self.metadata = metadata.decode("utf-8").splitlines() self.deps = self._compute_dependencies() def _parse_requirement(self, line: str) -> Requirement: line = line[len(self.REQUIRES_DIST) :] if " #" in line: line = line[: line.find(" #")] return Requirement(line.strip()) def _compute_dependencies(self) -> dict[str | None, frozenset[Requirement]]: """ Compute the dependencies of the metadata file """ deps: dict[str | None, frozenset[Requirement]] = {} reqs: list[Requirement] = [] extras: list[str] = [] def reqs_for_extra(extra: str | None) -> Iterable[Requirement]: environment = {"extra": extra} if extra else None for req in reqs: if not req.marker or req.marker.evaluate(environment): yield req for line in self.metadata: if line.startswith(self.REQUIRES_DIST): reqs.append(self._parse_requirement(line)) elif line.startswith(self.PROVIDES_EXTRA): extras.append(line[len(self.PROVIDES_EXTRA) :].strip()) deps[None] = 
frozenset(reqs_for_extra(None)) for extra in extras: deps[safe_extra(extra)] = frozenset(reqs_for_extra(extra)) - deps[None] return deps def requires(self, extras: Iterable[str] = ()) -> list[Requirement]: """List of Requirements needed for this distro if `extras` are used""" deps: list[Requirement] = [] deps.extend(self.deps.get(None, ())) for ext in extras: try: deps.extend(self.deps[safe_extra(ext)]) except KeyError: raise KeyError(f"Unknown extra {ext!r}") from None return deps PK`ZZZ$����micropip/package.pyfrom collections import UserDict from collections.abc import Iterable from dataclasses import astuple, dataclass from typing import Any from packaging.utils import canonicalize_name __all__ = ["PackageDict"] def _format_table(headers: list[str], table: Iterable[Iterable[Any]]) -> str: """ Returns a minimal formatted table >>> print(_format_table(["Header1", "Header2"], [["val1", "val2"], ["val3", "val4"]])) Header1 | Header2 ------- | ------- val1 | val2 val3 | val4 """ def format_row(values, widths, filler=""): row = " | ".join( f"{x:{filler}<{w}}" for x, w in zip(values, widths, strict=True) ) return row.rstrip() col_width = [max(len(x) for x in col) for col in zip(headers, *table, strict=True)] rows = [] rows.append(format_row(headers, col_width)) rows.append(format_row([""] * len(col_width), col_width, filler="-")) for line in table: rows.append(format_row(line, col_width)) return "\n".join(rows) @dataclass class PackageMetadata: name: str version: str = "" source: str = "" def __iter__(self): return iter(astuple(self)) @staticmethod def keys(): return PackageMetadata.__dataclass_fields__.keys() class PackageDict(UserDict[str, PackageMetadata]): """ A dictionary that holds list of metadata on packages. This class is used in micropip to keep the list of installed packages. 
""" def __repr__(self) -> str: return self._tabularize() def __getitem__(self, key): normalized_key = canonicalize_name(key) return super().__getitem__(normalized_key) def __setitem__(self, key, val): normalized_key = canonicalize_name(key) return super().__setitem__(normalized_key, val) def __contains__(self, key: str) -> bool: # type: ignore[override] normalized_key = canonicalize_name(key) return super().__contains__(normalized_key) def _tabularize(self) -> str: headers = [key.capitalize() for key in PackageMetadata.keys()] table = list(self.values()) return _format_table(headers, table) PK`ZZZ ����,�,micropip/package_index.pyimport json import logging import string import sys from collections import defaultdict from collections.abc import Callable, Generator from dataclasses import dataclass from functools import partial from typing import Any from urllib.parse import urlparse, urlunparse from packaging.utils import InvalidWheelFilename from packaging.version import InvalidVersion, Version from ._compat import HttpStatusError, fetch_string_and_headers from ._utils import is_package_compatible, parse_version from .externals.mousebender.simple import from_project_details_html from .types import DistributionMetadata from .wheelinfo import WheelInfo PYPI = "PYPI" PYPI_URL = "https://pypi.org/simple" DEFAULT_INDEX_URLS = [PYPI_URL] _formatter = string.Formatter() logger = logging.getLogger("micropip") @dataclass class ProjectInfo: """ This class stores common metadata that can be obtained from different APIs (JSON, Simple) provided by PyPI. Responses received from PyPI or other package indexes that support the same APIs must be converted to this class before being processed by micropip. """ name: str # Name of the package # List of releases available for the package, sorted in ascending order by version. # For each version, list of wheels compatible with the current platform are stored. # If no such wheel is available, the list is empty. 
releases: dict[Version, Generator[WheelInfo, None, None]] @staticmethod def from_json_api(data: str | bytes | dict[str, Any]) -> "ProjectInfo": """ Parse JSON API response https://warehouse.pypa.io/api-reference/json.html """ data_dict = json.loads(data) if isinstance(data, str | bytes) else data name: str = data_dict.get("info", {}).get("name", "UNKNOWN") releases_raw: dict[str, list[Any]] = data_dict["releases"] # Filter out non PEP 440 compliant versions releases: dict[Version, list[Any]] = {} for version_str, fileinfo in releases_raw.items(): version, ok = _is_valid_pep440_version(version_str) if not ok or not version: continue # Skip empty releases if not fileinfo: continue releases[version] = fileinfo return ProjectInfo._compatible_only(name, releases) @staticmethod def from_simple_json_api(data: str | bytes | dict[str, Any]) -> "ProjectInfo": """ Parse Simple JSON API response https://peps.python.org/pep-0691/ """ data_dict = json.loads(data) if isinstance(data, str | bytes) else data name, releases = ProjectInfo._parse_pep691_response( data_dict, index_base_url="" ) return ProjectInfo._compatible_only(name, releases) @staticmethod def from_simple_html_api( data: str, pkgname: str, index_base_url: str ) -> "ProjectInfo": """ Parse Simple HTML API response https://peps.python.org/pep-0503 """ project_detail = from_project_details_html(data, pkgname) name, releases = ProjectInfo._parse_pep691_response(project_detail, index_base_url) # type: ignore[arg-type] return ProjectInfo._compatible_only(name, releases) @staticmethod def _parse_pep691_response( resp: dict[str, Any], index_base_url: str ) -> tuple[str, dict[Version, list[Any]]]: name = resp["name"] # List of versions (PEP 700), this key is not critical to find packages # but it is required to ensure that the same class instance is returned # from JSON and Simple JSON APIs. # Note that Simple HTML API does not have this key. 
versions = resp.get("versions", []) # Group files by version releases: dict[Version, list[Any]] = defaultdict(list) for version_str in versions: version, ok = _is_valid_pep440_version(version_str) if not ok or not version: continue releases[version] = [] for file in resp["files"]: filename = file["filename"] if not _fast_check_incompatibility(filename): # parsing a wheel filename is expensive, so we do a quick check first continue try: version = parse_version(filename) except (InvalidVersion, InvalidWheelFilename): continue if file["url"].startswith("/"): file["url"] = index_base_url + file["url"] releases[version].append(file) return name, releases @classmethod def _compatible_wheels( cls, files: list[dict[str, Any]], version: Version, name: str ) -> Generator[WheelInfo, None, None]: for file in files: filename = file["filename"] # Checking compatibility takes a bit of time, # so we use a generator to avoid doing it for all files. compatible = is_package_compatible(filename) if not compatible: continue # JSON API has a "digests" key, while Simple API has a "hashes" key. hashes = file["digests"] if "digests" in file else file["hashes"] sha256 = hashes.get("sha256") # Check if the metadata file is available (PEP 658 / PEP-714) core_metadata: DistributionMetadata = file.get("core-metadata") or file.get( "data-dist-info-metadata" ) # Size of the file in bytes, if available (PEP 700) # This key is not available in the Simple API HTML response, so this field may be None size = file.get("size") yield WheelInfo.from_package_index( name=name, filename=filename, url=file["url"], version=version, sha256=sha256, size=size, core_metadata=core_metadata, ) @classmethod def _compatible_only( cls, name: str, releases: dict[Version, list[dict[str, Any]]] ) -> "ProjectInfo": """ Return a generator of wheels compatible with the current platform. Checking compatibility takes a bit of time, so we use a generator to avoid doing it if not needed. 
        """
        releases_compatible = {
            version: cls._compatible_wheels(files, version, name=name)
            for version, files in releases.items()
        }

        # Unfortunately, the JSON API seems to compare versions as strings...
        # For example, pytest 3.10.0 is considered newer than 3.2.0.
        # So we need to sort the releases by version again here.
        releases_compatible = dict(sorted(releases_compatible.items()))

        return cls(
            name=name,
            releases=releases_compatible,
        )


def _is_valid_pep440_version(version_str: str) -> tuple[Version | None, bool]:
    """
    Check if the given string is a valid PEP 440 version.
    Since parsing a version is expensive, we return the parsed version as well,
    so that it can be reused if needed.

    Returns
    -------
    A ``(version, ok)`` pair: the parsed ``Version`` and ``True`` when
    ``version_str`` is PEP 440 compliant, otherwise ``(None, False)``.
    """
    try:
        version = Version(version_str)
        return version, True
    except InvalidVersion:
        return None, False


def _fast_check_incompatibility(filename: str) -> bool:
    """
    Quickly check whether a wheel *may* be compatible with the current platform.

    Returns ``False`` when the file is definitely incompatible (not a wheel at
    all, or clearly built for a different platform), and ``True`` when it might
    be compatible. It can be used to quickly filter out incompatible packages
    before running heavy checks.

    Note that this function may return ``True`` for some packages that are
    actually incompatible. So it should only be used as a quick pre-filter.
    """
    # Anything that is not a wheel is filtered out immediately.
    if not filename.endswith(".whl"):
        return False

    # wasm32 wheels are candidates when running under Emscripten (Pyodide).
    if filename.endswith("wasm32.whl") and sys.platform == "emscripten":
        return True

    # Platform-specific wheels for other platforms cannot match; pure-Python
    # ("-none-any") wheels always remain candidates.
    if sys.platform not in filename and not filename.endswith("-none-any.whl"):
        return False

    return True


def _contain_placeholder(url: str, placeholder: str = "package_name") -> bool:
    # True when the URL template contains a `{placeholder}` format field.
    fields = [parsed[1] for parsed in _formatter.parse(url)]
    return placeholder in fields


def _select_parser(
    content_type: str, pkgname: str, index_base_url: str
) -> Callable[[str], ProjectInfo]:
    """
    Select the function to parse the response based on the content type.
""" match content_type: case "application/vnd.pypi.simple.v1+json": return ProjectInfo.from_simple_json_api case "application/json": return ProjectInfo.from_json_api case ( "application/vnd.pypi.simple.v1+html" | "text/html" | "text/html; charset=utf-8" ): return partial( ProjectInfo.from_simple_html_api, pkgname=pkgname, index_base_url=index_base_url, ) case _: raise ValueError(f"Unsupported content type: {content_type}") async def query_package( name: str, index_urls: list[str] | str, fetch_kwargs: dict[str, Any] | None = None, ) -> ProjectInfo: """ Query for a package from package indexes. Parameters ---------- name Name of the package to search for. index_urls A list of URLs or a single URL to use as the package index. If a list of URLs is provided, it will be tried in order until it finds a package. If no package is found, an error will be raised. fetch_kwargs Keyword arguments to pass to the fetch function. """ _fetch_kwargs = fetch_kwargs.copy() if fetch_kwargs else {} if "headers" not in _fetch_kwargs: _fetch_kwargs["headers"] = {} # If not specified, prefer Simple JSON API over Simple HTML API or JSON API _fetch_kwargs["headers"].setdefault( "accept", "application/vnd.pypi.simple.v1+json, */*;q=0.01" ) if isinstance(index_urls, str): index_urls = [index_urls] index_urls = [PYPI_URL if url == PYPI else url for url in index_urls] for url in index_urls: logger.debug("Looping through index urls: %r", url) if _contain_placeholder(url): url = url.format(package_name=name) logger.debug("Formatting url with package name : %r", url) else: url = f"{url}/{name}/" logger.debug("Url has no placeholder, appending package name : %r", url) try: metadata, headers = await fetch_string_and_headers(url, _fetch_kwargs) except HttpStatusError as e: if e.status_code == 404: logger.debug("NotFound (404) for %r, trying next index.", url) continue logger.debug( "Error fetching %r (%s), trying next index.", url, e.status_code ) raise content_type = headers.get("content-type", 
"").lower() try: base_url = urlunparse(urlparse(url)._replace(path="")) parser = _select_parser(content_type, name, index_base_url=base_url) except ValueError as e: raise ValueError(f"Error trying to decode url: {url}") from e return parser(metadata) else: raise ValueError( f"Can't fetch metadata for '{name}'. " "Please make sure you have entered a correct package name " "and correctly specified index_urls (if you changed them)." ) PK`ZZZa�ٝ+�+micropip/package_manager.pyimport builtins from typing import ( # noqa: UP035 List import is necessary due to the `list` method Any, List, ) from . import _mock_package, package_index from ._compat import REPODATA_INFO, REPODATA_PACKAGES from .freeze import freeze_lockfile from .install import install from .list import list_installed_packages from .package import PackageDict from .uninstall import uninstall class PackageManager: """ PackageManager provides an extensible interface for customizing micropip's behavior. Each Manager instance holds its own local state that is independent of other instances. """ def __init__(self) -> None: self.index_urls = package_index.DEFAULT_INDEX_URLS[:] self.repodata_packages: dict[str, dict[str, Any]] = REPODATA_PACKAGES self.repodata_info: dict[str, str] = REPODATA_INFO pass async def install( self, requirements: str | list[str], keep_going: bool = False, deps: bool = True, credentials: str | None = None, pre: bool = False, index_urls: list[str] | str | None = None, *, verbose: bool | int | None = None, ): """Install the given package and all of its dependencies. If a package is not found in the Pyodide repository it will be loaded from PyPI. Micropip can only load pure Python wheels or wasm32/emscripten wheels built by Pyodide. When used in web browsers, downloads from PyPI will be cached. When run in Node.js, packages are currently not cached, and will be re-downloaded each time ``micropip.install`` is run. 
Parameters ---------- requirements : A requirement or list of requirements to install. Each requirement is a string, which should be either a package name or a wheel URI: - If the requirement does not end in ``.whl``, it will be interpreted as a package name. A package with this name must either be present in the Pyodide lock file or on PyPI. - If the requirement ends in ``.whl``, it is a wheel URI. The part of the requirement after the last ``/`` must be a valid wheel name in compliance with the `PEP 427 naming convention <https://www.python.org/dev/peps/pep-0427/#file-format>`_. - If a wheel URI starts with ``emfs:``, it will be interpreted as a path in the Emscripten file system (Pyodide's file system). E.g., ``emfs:../relative/path/wheel.whl`` or ``emfs:/absolute/path/wheel.whl``. In this case, only .whl files are supported. - If a wheel URI requirement starts with ``http:`` or ``https:`` it will be interpreted as a URL. - In node, you can access the native file system using a URI that starts with ``file:``. In the browser this will not work. keep_going : This parameter decides the behavior of the micropip when it encounters a Python package without a pure Python wheel while doing dependency resolution: - If ``False``, an error will be raised on first package with a missing wheel. - If ``True``, the micropip will keep going after the first error, and report a list of errors at the end. deps : If ``True``, install dependencies specified in METADATA file for each package. Otherwise do not install dependencies. credentials : This parameter specifies the value of ``credentials`` when calling the `fetch() <https://developer.mozilla.org/en-US/docs/Web/API/fetch>`__ function which is used to download the package. When not specified, ``fetch()`` is called without ``credentials``. pre : If ``True``, include pre-release and development versions. By default, micropip only finds stable versions. 
index_urls : A list of URLs or a single URL to use as the package index when looking up packages. If None, *https://pypi.org/pypi/{package_name}/json* is used. - The index URL should support the `JSON API <https://warehouse.pypa.io/api-reference/json/>`__ . - The index URL may contain the placeholder {package_name} which will be replaced with the package name when looking up a package. If it does not contain the placeholder, the package name will be appended to the URL. - If a list of URLs is provided, micropip will try each URL in order until it finds a package. If no package is found, an error will be raised. verbose : Print more information about the process. By default, micropip does not change logger level. Setting ``verbose=True`` will print similar information as pip. """ if index_urls is None: index_urls = self.index_urls return await install( requirements, index_urls, keep_going, deps, credentials, pre, verbose=verbose, ) def list(self) -> PackageDict: """Get the dictionary of installed packages. Returns ------- ``PackageDict`` A dictionary of installed packages. >>> import micropip >>> await micropip.install('regex') # doctest: +SKIP >>> package_list = micropip.list() >>> print(package_list) # doctest: +SKIP Name | Version | Source ----------------- | -------- | ------- regex | 2021.7.6 | pyodide >>> "regex" in package_list # doctest: +SKIP True """ return list_installed_packages(self.repodata_packages) def freeze(self) -> str: """Produce a json string which can be used as the contents of the ``pyodide-lock.json`` lock file. If you later load Pyodide with this lock file, you can use :js:func:`pyodide.loadPackage` to load packages that were loaded with :py:mod:`micropip` this time. Loading packages with :js:func:`~pyodide.loadPackage` is much faster and you will always get consistent versions of all your dependencies. You can use your custom lock file by passing an appropriate url to the ``lockFileURL`` of :js:func:`~globalThis.loadPyodide`. 
""" return freeze_lockfile(self.repodata_packages, self.repodata_info) def add_mock_package( self, name: str, version: str, *, modules: dict[str, str | None] | None = None, persistent: bool = False, ): """ Add a mock version of a package to the package dictionary. This means that if it is a dependency, it is skipped on install. By default a single empty module is installed with the same name as the package. You can alternatively give one or more modules to make a set of named modules. The modules parameter is usually a dictionary mapping module name to module text. .. code-block:: python { "mylovely_module":''' def module_method(an_argument): print("This becomes a module level argument") module_value = "this value becomes a module level variable" print("This is run on import of module") ''' } If you are adding the module in non-persistent mode, you can also pass functions which are used to initialize the module on loading (as in `importlib.abc.loader.exec_module` ). This allows you to do things like use `unittest.mock.MagicMock` classes for modules. .. code-block:: python def init_fn(module): module.dict["WOO"]="hello" print("Initing the module now!") ... { "mylovely_module": init_fn } Parameters ---------- name : Package name to add version : Version of the package. This should be a semantic version string, e.g. 1.2.3 modules : Dictionary of module_name:string pairs. The string contains the source of the mock module or is blank for an empty module. persistent : If this is True, modules will be written to the file system, so they persist between runs of python (assuming the file system persists). If it is False, modules will be stored inside micropip in memory only. """ return _mock_package.add_mock_package( name, version, modules=modules, persistent=persistent ) def list_mock_packages(self): """ List all mock packages currently installed. """ return _mock_package.list_mock_packages() def remove_mock_package(self, name: str): """ Remove a mock package. 
""" return _mock_package.remove_mock_package(name) def uninstall( self, packages: str | builtins.list[str], *, verbose: bool | int = False ) -> None: """Uninstall the given packages. This function only supports uninstalling packages that are installed using a wheel file, i.e. packages that have distribution metadata. It is possible to reinstall a package after uninstalling it, but note that modules / functions that are already imported will not be automatically removed from the namespace. So make sure to reload the module after reinstalling by e.g. running `importlib.reload(module)`. Parameters ---------- packages Packages to uninstall. verbose Print more information about the process. By default, micropip is silent. Setting ``verbose=True`` will print similar information as pip. """ return uninstall(packages, verbose=verbose) def set_index_urls(self, urls: List[str] | str): # noqa: UP006 """ Set the index URLs to use when looking up packages. - The index URL should support the `JSON API <https://warehouse.pypa.io/api-reference/json/>`__ . - The index URL may contain the placeholder {package_name} which will be replaced with the package name when looking up a package. If it does not contain the placeholder, the package name will be appended to the URL. - If a list of URLs is provided, micropip will try each URL in order until it finds a package. If no package is found, an error will be raised. Parameters ---------- urls A list of URLs or a single URL to use as the package index. """ if isinstance(urls, str): urls = [urls] self.index_urls = urls[:] PK`ZZZmicropip/py.typedPK`ZZZ~;��,�,micropip/transaction.pyimport asyncio import importlib.metadata import logging import warnings from dataclasses import dataclass, field from importlib.metadata import PackageNotFoundError from urllib.parse import urlparse from packaging.requirements import Requirement from packaging.utils import canonicalize_name from . 
import package_index from ._compat import REPODATA_PACKAGES from ._utils import best_compatible_tag_index, check_compatible from .constants import FAQ_URLS from .package import PackageMetadata from .package_index import ProjectInfo from .wheelinfo import WheelInfo logger = logging.getLogger("micropip") @dataclass class Transaction: ctx: dict[str, str] ctx_extras: list[dict[str, str]] keep_going: bool deps: bool pre: bool fetch_kwargs: dict[str, str] index_urls: list[str] | str locked: dict[str, PackageMetadata] = field(default_factory=dict) wheels: list[WheelInfo] = field(default_factory=list) pyodide_packages: list[PackageMetadata] = field(default_factory=list) failed: list[Requirement] = field(default_factory=list) verbose: bool | int | None = None def __post_init__(self): # If index_urls is None, pyodide-lock.json have to be searched first. # TODO: when PyPI starts to support hosting WASM wheels, this might be deleted. self.search_pyodide_lock_first = ( self.index_urls == package_index.DEFAULT_INDEX_URLS ) async def gather_requirements( self, requirements: list[str] | list[Requirement], ) -> None: requirement_promises = [] for requirement in requirements: requirement_promises.append(self.add_requirement(requirement)) await asyncio.gather(*requirement_promises) async def add_requirement(self, req: str | Requirement) -> None: if isinstance(req, Requirement): return await self.add_requirement_inner(req) if not urlparse(req).path.endswith(".whl"): return await self.add_requirement_inner(Requirement(req)) # custom download location wheel = WheelInfo.from_url(req) check_compatible(wheel.filename) await self.add_wheel(wheel, extras=set(), specifier="") def check_version_satisfied(self, req: Requirement) -> tuple[bool, str]: ver = None try: ver = importlib.metadata.version(req.name) except PackageNotFoundError: pass if req.name in self.locked: ver = self.locked[req.name].version if not ver: return False, "" if req.specifier.contains(ver, prereleases=True): # installed 
version matches, nothing to do return True, ver raise ValueError( f"Requested '{req}', " f"but {req.name}=={ver} is already installed" ) async def add_requirement_inner( self, req: Requirement, ) -> None: """Add a requirement to the transaction. See PEP 508 for a description of the requirements. https://www.python.org/dev/peps/pep-0508 """ for e in req.extras: self.ctx_extras.append({"extra": e}) if self.pre: req.specifier.prereleases = True if req.marker: # handle environment markers # https://www.python.org/dev/peps/pep-0508/#environment-markers # For a requirement being installed as part of an optional feature # via the extra specifier, the evaluation of the marker requires # the extra key in self.ctx to have the value specified in the # primary requirement. # The req.extras attribute is only set for the primary requirement # and hence has to be available during the evaluation of the # dependencies. Thus, we use the self.ctx_extras attribute above to # store all the extra values we come across during the transaction and # attempt the marker evaluation for all of these values. If any of the # evaluations return true we include the dependency. def eval_marker(e: dict[str, str]) -> bool: self.ctx.update(e) # need the assertion here to make mypy happy: # https://github.com/python/mypy/issues/4805 assert req.marker is not None return req.marker.evaluate(self.ctx) self.ctx.update({"extra": ""}) # The current package may have been brought into the transaction # without any of the optional requirement specification, but has # another marker, such as implementation_name. In this scenario, # self.ctx_extras is empty and hence the eval_marker() function # will not be called at all. if not req.marker.evaluate(self.ctx) and not any( [eval_marker(e) for e in self.ctx_extras] ): return # Is some version of this package is already installed? 
req.name = canonicalize_name(req.name) satisfied, ver = self.check_version_satisfied(req) if satisfied: logger.info("Requirement already satisfied: %s (%s)", req, ver) return try: if self.search_pyodide_lock_first: if self._add_requirement_from_pyodide_lock(req): logger.debug("Transaction: package found in lock file: %r", req) return await self._add_requirement_from_package_index(req) else: try: await self._add_requirement_from_package_index(req) except ValueError: logger.debug( "Transaction: package %r not found in index, will search lock file", req, ) # If the requirement is not found in package index, # we still have a chance to find it from pyodide lockfile. if not self._add_requirement_from_pyodide_lock(req): logger.debug( "Transaction: package %r not found in lock file", req ) raise except ValueError: self.failed.append(req) if not self.keep_going: raise def _add_requirement_from_pyodide_lock(self, req: Requirement) -> bool: """ Find requirement from pyodide-lock.json. If the requirement is found, add it to the package list and return True. Otherwise, return False. """ if req.name in REPODATA_PACKAGES and req.specifier.contains( REPODATA_PACKAGES[req.name]["version"], prereleases=True ): version = REPODATA_PACKAGES[req.name]["version"] self.pyodide_packages.append( PackageMetadata(name=req.name, version=str(version), source="pyodide") ) return True return False async def _add_requirement_from_package_index(self, req: Requirement): """ Find requirement from package index. If the requirement is found, add it to the package list and return True. Otherwise, return False. """ metadata = await package_index.query_package( req.name, self.index_urls, self.fetch_kwargs, ) logger.debug("Transaction: got metadata %r for requirement %r", metadata, req) wheel = find_wheel(metadata, req) logger.debug("Transaction: Selected wheel: %r", wheel) # Maybe while we were downloading pypi_json some other branch # installed the wheel? 
satisfied, ver = self.check_version_satisfied(req) if satisfied: logger.info("Requirement already satisfied: %s (%s)", req, ver) await self.add_wheel(wheel, req.extras, specifier=str(req.specifier)) async def add_wheel( self, wheel: WheelInfo, extras: set[str], *, specifier: str = "", ) -> None: """ Download a wheel, and add its dependencies to the transaction. Parameters ---------- wheel The wheel to add. extras Markers for optional dependencies. For example, `micropip.install("pkg[test]")` will pass `{"test"}` as the extras argument. specifier Requirement specifier, used only for logging. For example, `micropip.install("pkg>=1.0.0,!=2.0.0")` will pass `>=1.0.0,!=2.0.0` as the specifier argument. """ normalized_name = canonicalize_name(wheel.name) self.locked[normalized_name] = PackageMetadata( name=wheel.name, version=str(wheel.version), ) logger.info("Collecting %s%s", wheel.name, specifier) logger.info(" Downloading %s", wheel.url.split("/")[-1]) wheel_download_task = asyncio.create_task(wheel.download(self.fetch_kwargs)) if self.deps: # Case 1) If metadata file is available, # we can gather requirements without waiting for the wheel to be downloaded. if wheel.pep658_metadata_available(): try: await wheel.download_pep658_metadata(self.fetch_kwargs) except OSError: # If something goes wrong while downloading the metadata, # we have to wait for the wheel to be downloaded. await wheel_download_task await asyncio.gather( self.gather_requirements(wheel.requires(extras)), wheel_download_task, ) # Case 2) If metadata file is not available, # we have to wait for the wheel to be downloaded. else: await wheel_download_task await self.gather_requirements(wheel.requires(extras)) self.wheels.append(wheel) def find_wheel(metadata: ProjectInfo, req: Requirement) -> WheelInfo: """Parse metadata to find the latest version of pure python wheel. 
Parameters ---------- metadata : ProjectInfo req : Requirement Returns ------- wheel : WheelInfo """ releases = metadata.releases candidate_versions = sorted( req.specifier.filter(releases), reverse=True, ) for ver in candidate_versions: if ver not in releases: warnings.warn( f"The package '{metadata.name}' contains an invalid version: '{ver}'. This version will be skipped", stacklevel=1, ) continue best_wheel = None best_tag_index = float("infinity") wheels = releases[ver] for wheel in wheels: tag_index = best_compatible_tag_index(wheel.tags) if tag_index is not None and tag_index < best_tag_index: best_wheel = wheel best_tag_index = tag_index if best_wheel is not None: return wheel raise ValueError( f"Can't find a pure Python 3 wheel for '{req}'.\n" f"See: {FAQ_URLS['cant_find_wheel']}\n" "You can use `await micropip.install(..., keep_going=True)` " "to get a list of all packages with missing wheels." ) PK`ZZZ-L9��micropip/types.py# Distribution Metadata type (PEP 658) # None = metadata not available # bool = metadata available, but no checksum # dict[str, str] = metadata available with checksum DistributionMetadata = bool | dict[str, str] | None PK`ZZZ)�b�  micropip/uninstall.pyimport importlib import importlib.metadata from importlib.metadata import Distribution from ._compat import loadedPackages from ._utils import get_files_in_distribution, get_root from .logging import setup_logging def uninstall(packages: str | list[str], *, verbose: bool | int = False) -> None: with setup_logging().ctx_level(verbose) as logger: if isinstance(packages, str): packages = [packages] distributions: list[Distribution] = [] for package in packages: try: dist = importlib.metadata.distribution(package) distributions.append(dist) except importlib.metadata.PackageNotFoundError: logger.warning("Skipping '%s' as it is not installed.", package) for dist in distributions: # Note: this value needs to be retrieved before removing files, as # dist.name uses metadata file to get the name 
name = dist.name version = dist.version logger.info("Found existing installation: %s %s", name, version) root = get_root(dist) files = get_files_in_distribution(dist) directories = set() for file in files: if not file.is_file(): if not file.is_relative_to(root): # This file is not in the site-packages directory. Probably one of: # - data_files # - scripts # - entry_points # Since we don't support these, we can ignore them (except for data_files (TODO)) logger.warning( "skipping file '%s' that is relative to root", ) continue # see PR 130, it is likely that this is never triggered since Python 3.12 # as non existing files are not listed by get_files_in_distribution anymore. logger.warning( "A file '%s' listed in the metadata of '%s' does not exist.", file, name, ) continue file.unlink() if file.parent != root: directories.add(file.parent) # Remove directories in reverse hierarchical order for directory in sorted( directories, key=lambda x: len(x.parts), reverse=True ): try: directory.rmdir() except OSError: logger.warning( "A directory '%s' is not empty after uninstallation of '%s'. " "This might cause problems when installing a new version of the package. 
", directory, name, ) if hasattr(loadedPackages, name): delattr(loadedPackages, name) else: # This should not happen, but just in case logger.warning("a package '%s' was not found in loadedPackages.", name) logger.info("Successfully uninstalled %s-%s", name, version) importlib.invalidate_caches() PK`ZZZKo�#�#micropip/wheelinfo.pyimport hashlib import io import json import zipfile from dataclasses import dataclass, field from pathlib import Path from typing import Any, Literal from urllib.parse import ParseResult, urlparse from packaging.requirements import Requirement from packaging.tags import Tag from packaging.version import Version from ._compat import ( fetch_bytes, get_dynlibs, loadDynlibsFromPackage, loadedPackages, ) from ._utils import parse_wheel_filename from .metadata import Metadata, safe_name, wheel_dist_info_dir from .types import DistributionMetadata @dataclass class PackageData: file_name: str package_type: Literal["shared_library", "package"] shared_library: bool @dataclass class WheelInfo: """ WheelInfo represents a wheel file and its metadata (e.g. URL and hash) """ name: str version: Version filename: str build: tuple[int, str] | tuple[()] tags: frozenset[Tag] url: str parsed_url: ParseResult sha256: str | None = None size: int | None = None # Size in bytes, if available (PEP 700) core_metadata: DistributionMetadata = None # Wheel's metadata (PEP 658 / PEP-714) # Fields below are only available after downloading the wheel, i.e. after calling `download()`. _data: bytes | None = field(default=None, repr=False) # Wheel file contents. _metadata: Metadata | None = None # Wheel metadata. _requires: list[Requirement] | None = None # List of requirements. # Path to the .dist-info directory. This is only available after extracting the wheel, i.e. after calling `extract()`. 
_dist_info: Path | None = None def __post_init__(self): assert ( self.url.startwith(p) for p in ("http:", "https:", "emfs:", "file:") ), self.url self._project_name = safe_name(self.name) self.metadata_url = self.url + ".metadata" @classmethod def from_url(cls, url: str) -> "WheelInfo": """Parse wheels URL and extract available metadata See https://www.python.org/dev/peps/pep-0427/#file-name-convention """ parsed_url = urlparse(url) if parsed_url.scheme == "": url = "file:///" + url file_name = Path(parsed_url.path).name name, version, build, tags = parse_wheel_filename(file_name) return WheelInfo( name=name, version=version, filename=file_name, build=build, tags=tags, url=url, parsed_url=parsed_url, ) @classmethod def from_package_index( cls, name: str, filename: str, url: str, version: Version, sha256: str | None, size: int | None, core_metadata: DistributionMetadata = None, ) -> "WheelInfo": """Extract available metadata from response received from package index""" parsed_url = urlparse(url) _, _, build, tags = parse_wheel_filename(filename) return WheelInfo( name=name, version=version, filename=filename, build=build, tags=tags, url=url, parsed_url=parsed_url, sha256=sha256, size=size, core_metadata=core_metadata, ) async def install(self, target: Path) -> None: """ Install the wheel to the target directory. The installation process is as follows: 0. A wheel needs to be downloaded before it can be installed. This is done by calling `download()`. 1. The wheel is validated by comparing its hash with the one provided by the package index. 2. The wheel is extracted to the target directory. 3. The wheel's shared libraries are loaded. 4. The wheel's metadata is set. """ if not self._data: raise RuntimeError( "Micropip internal error: attempted to install wheel before downloading it?" 
) _validate_sha256_checksum(self._data, self.sha256) self._extract(target) await self._load_libraries(target) self._set_installer() async def download(self, fetch_kwargs: dict[str, Any]): if self._data is not None: return self._data = await self._fetch_bytes(self.url, fetch_kwargs) # The wheel's metadata might be downloaded separately from the wheel itself. # If it is not downloaded yet or if the metadata is not available, extract it from the wheel. if self._metadata is None: with zipfile.ZipFile(io.BytesIO(self._data)) as zf: metadata_path = ( Path(wheel_dist_info_dir(zf, self.name)) / Metadata.PKG_INFO ) self._metadata = Metadata(zipfile.Path(zf, str(metadata_path))) def pep658_metadata_available(self) -> bool: """ Check if the wheel's metadata is exposed via PEP 658. """ return self.core_metadata is not None async def download_pep658_metadata( self, fetch_kwargs: dict[str, Any], ) -> None: """ Download the wheel's metadata. If the metadata is not available, return None. """ if self.core_metadata is None: return None data = await self._fetch_bytes(self.metadata_url, fetch_kwargs) match self.core_metadata: case {"sha256": checksum}: # sha256 checksum available _validate_sha256_checksum(data, checksum) case _: # no checksum available pass self._metadata = Metadata(data) def requires(self, extras: set[str]) -> list[Requirement]: """ Get a list of requirements for the wheel. """ if self._metadata is None: raise RuntimeError( "Micropip internal error: attempted to get requirements before downloading the wheel?" 
) requires = self._metadata.requires(extras) self._requires = requires return requires async def _fetch_bytes(self, url: str, fetch_kwargs: dict[str, Any]): if self.parsed_url.scheme not in ("https", "http", "emfs", "file"): # Don't raise ValueError it gets swallowed raise TypeError( f"Cannot download from a non-remote location: {url!r} ({self.parsed_url!r})" ) try: bytes = await fetch_bytes(url, fetch_kwargs) return bytes except OSError as e: if self.parsed_url.hostname in [ "files.pythonhosted.org", "cdn.jsdelivr.net", ]: raise e else: raise ValueError( f"Can't fetch wheel from {url!r}. " "One common reason for this is when the server blocks " "Cross-Origin Resource Sharing (CORS). " "Check if the server is sending the correct 'Access-Control-Allow-Origin' header." ) from e def _extract(self, target: Path) -> None: assert self._data with zipfile.ZipFile(io.BytesIO(self._data)) as zf: zf.extractall(target) self._dist_info = target / wheel_dist_info_dir(zf, self.name) def _set_installer(self) -> None: """ Set the installer metadata in the wheel's .dist-info directory. """ assert self._data wheel_source = "pypi" if self.sha256 is not None else self.url self._write_dist_info("PYODIDE_SOURCE", wheel_source) self._write_dist_info("PYODIDE_URL", self.url) self._write_dist_info("PYODIDE_SHA256", _generate_package_hash(self._data)) self._write_dist_info("INSTALLER", "micropip") if self._requires: self._write_dist_info( "PYODIDE_REQUIRES", json.dumps(sorted(x.name for x in self._requires)) ) setattr(loadedPackages, self._project_name, wheel_source) def _write_dist_info(self, file: str, content: str) -> None: assert self._dist_info (self._dist_info / file).write_text(content) async def _load_libraries(self, target: Path) -> None: """ Compiles shared libraries (WASM modules) in the wheel and loads them. 
""" assert self._data pkg = PackageData( file_name=self.filename, package_type="package", shared_library=False, ) dynlibs = get_dynlibs(io.BytesIO(self._data), ".whl", target) await loadDynlibsFromPackage(pkg, dynlibs) def _validate_sha256_checksum(data: bytes, expected: str | None = None) -> None: if expected is None: # No checksums available, e.g. because installing # from a different location than PyPI. return actual = _generate_package_hash(data) if actual != expected: raise RuntimeError(f"Invalid checksum: expected {expected}, got {actual}") def _generate_package_hash(data: bytes) -> str: return hashlib.sha256(data).hexdigest() PK`ZZZ���ϰ�micropip/_compat/__init__.pyimport sys from .compatibility_layer import CompatibilityLayer compatibility_layer: type[CompatibilityLayer] | None = None IN_BROWSER = "_pyodide_core" in sys.modules if IN_BROWSER: from ._compat_in_pyodide import CompatibilityInPyodide compatibility_layer = CompatibilityInPyodide else: from ._compat_not_in_pyodide import CompatibilityNotInPyodide compatibility_layer = CompatibilityNotInPyodide REPODATA_INFO = compatibility_layer.repodata_info() REPODATA_PACKAGES = compatibility_layer.repodata_packages() fetch_bytes = compatibility_layer.fetch_bytes fetch_string_and_headers = compatibility_layer.fetch_string_and_headers loadedPackages = compatibility_layer.loadedPackages loadDynlibsFromPackage = compatibility_layer.loadDynlibsFromPackage loadPackage = compatibility_layer.loadPackage get_dynlibs = compatibility_layer.get_dynlibs to_js = compatibility_layer.to_js HttpStatusError = compatibility_layer.HttpStatusError __all__ = [ "REPODATA_INFO", "REPODATA_PACKAGES", "fetch_bytes", "fetch_string_and_headers", "loadedPackages", "loadDynlibsFromPackage", "loadPackage", "get_dynlibs", "to_js", "HttpStatusError", ] PK`ZZZ�*g�  &micropip/_compat/_compat_in_pyodide.pyfrom pathlib import Path from typing import TYPE_CHECKING, Any from urllib.parse import urlparse if TYPE_CHECKING: pass from 
pyodide._package_loader import get_dynlibs from pyodide.ffi import IN_BROWSER, to_js from pyodide.http import HttpStatusError, pyfetch from .compatibility_layer import CompatibilityLayer try: import pyodide_js from pyodide_js import loadedPackages, loadPackage from pyodide_js._api import ( # type: ignore[import] loadBinaryFile, loadDynlibsFromPackage, ) REPODATA_PACKAGES = pyodide_js._api.repodata_packages.to_py() REPODATA_INFO = pyodide_js._api.repodata_info.to_py() except ImportError: if IN_BROWSER: raise # Otherwise, this is pytest test collection so let it go. class CompatibilityInPyodide(CompatibilityLayer): class HttpStatusError(Exception): status_code: int message: str def __init__(self, status_code: int, message: str): self.status_code = status_code self.message = message super().__init__(message) @staticmethod def repodata_info() -> dict[str, str]: return REPODATA_INFO @staticmethod def repodata_packages() -> dict[str, dict[str, Any]]: return REPODATA_PACKAGES @staticmethod async def fetch_bytes(url: str, kwargs: dict[str, str]) -> bytes: parsed_url = urlparse(url) if parsed_url.scheme == "emfs": return Path(parsed_url.path).read_bytes() if parsed_url.scheme == "file": return (await loadBinaryFile(parsed_url.path)).to_bytes() return await (await pyfetch(url, **kwargs)).bytes() @staticmethod async def fetch_string_and_headers( url: str, kwargs: dict[str, str] ) -> tuple[str, dict[str, str]]: try: response = await pyfetch(url, **kwargs) response.raise_for_status() except HttpStatusError as e: raise CompatibilityInPyodide.HttpStatusError(e.status, str(e)) from e content = await response.string() headers: dict[str, str] = response.headers return content, headers loadedPackages = loadedPackages get_dynlibs = get_dynlibs loadDynlibsFromPackage = loadDynlibsFromPackage loadPackage = loadPackage to_js = to_js PK`ZZZ�F�A A *micropip/_compat/_compat_not_in_pyodide.pyimport re from pathlib import Path from typing import IO, TYPE_CHECKING, Any from urllib.error import 
HTTPError from urllib.request import Request, urlopen from urllib.response import addinfourl from .compatibility_layer import CompatibilityLayer if TYPE_CHECKING: from ..wheelinfo import PackageData class CompatibilityNotInPyodide(CompatibilityLayer): # Vendored from packaging _canonicalize_regex = re.compile(r"[-_.]+") class HttpStatusError(Exception): status_code: int message: str def __init__(self, status_code: int, message: str): self.status_code = status_code self.message = message super().__init__(message) class loadedPackages(CompatibilityLayer.loadedPackages): @staticmethod def to_py(): return {} @staticmethod def repodata_info() -> dict[str, str]: return {} @staticmethod def repodata_packages() -> dict[str, dict[str, Any]]: return {} @staticmethod def _fetch(url: str, kwargs: dict[str, Any]) -> addinfourl: return urlopen(Request(url, **kwargs)) @staticmethod async def fetch_bytes(url: str, kwargs: dict[str, Any]) -> bytes: return CompatibilityNotInPyodide._fetch(url, kwargs=kwargs).read() @staticmethod async def fetch_string_and_headers( url: str, kwargs: dict[str, Any] ) -> tuple[str, dict[str, str]]: try: response = CompatibilityNotInPyodide._fetch(url, kwargs=kwargs) except HTTPError as e: raise CompatibilityNotInPyodide.HttpStatusError(e.code, str(e)) from e headers = {k.lower(): v for k, v in response.headers.items()} return response.read().decode(), headers @staticmethod def get_dynlibs(archive: IO[bytes], suffix: str, target_dir: Path) -> list[str]: return [] @staticmethod async def loadDynlibsFromPackage( pkg_metadata: "PackageData", dynlibs: list[str] ) -> None: pass @staticmethod async def loadPackage(names: str | list[str]) -> None: pass @staticmethod def to_js( obj: Any, /, *, depth: int = -1, pyproxies=None, create_pyproxies: bool = True, dict_converter=None, default_converter=None, ) -> Any: return obj PK`ZZZm}�rtt'micropip/_compat/compatibility_layer.pyfrom abc import ABC, abstractmethod from pathlib import Path from typing import IO, 
TYPE_CHECKING, Any if TYPE_CHECKING: from ..wheelinfo import PackageData class CompatibilityLayer(ABC): """ CompatibilityLayer represents the interface that must be implemented for each viable environment. All of the following methods / properties must be implemented for use both inside and outside of pyodide. """ class HttpStatusError(ABC, Exception): status_code: int message: str @abstractmethod def __init__(self, status_code: int, message: str): pass class loadedPackages(ABC): @staticmethod @abstractmethod def to_py(): pass @staticmethod @abstractmethod def repodata_info() -> dict[str, str]: pass @staticmethod @abstractmethod def repodata_packages() -> dict[str, dict[str, Any]]: pass @staticmethod @abstractmethod async def fetch_bytes(url: str, kwargs: dict[str, str]) -> bytes: pass @staticmethod @abstractmethod async def fetch_string_and_headers( url: str, kwargs: dict[str, Any] ) -> tuple[str, dict[str, str]]: pass @staticmethod @abstractmethod def get_dynlibs(archive: IO[bytes], suffix: str, target_dir: Path) -> list[str]: pass @staticmethod @abstractmethod async def loadDynlibsFromPackage( pkg_metadata: "PackageData", dynlibs: list[str] ) -> None: pass @staticmethod @abstractmethod async def loadPackage(names: str | list[str]) -> None: pass @staticmethod @abstractmethod def to_js( obj: Any, /, *, depth: int = -1, pyproxies: Any, create_pyproxies: bool = True, dict_converter: Any, default_converter: Any, ) -> Any: pass PK`ZZZmicropip/externals/__init__.pyPK`ZZZ�3\�L"L"(micropip/externals/mousebender/simple.py# Adapted from: https://github.com/brettcannon/mousebender/blob/main/mousebender/simple.py # Only relevant parts are included here. 
import html import html.parser import urllib.parse import warnings from typing import Any, Dict, List, Optional, Union, Literal, TypeAlias, TypedDict import packaging.utils ACCEPT_JSON_V1 = "application/vnd.pypi.simple.v1+json" class UnsupportedAPIVersion(Exception): """The major version of an API response is not supported.""" def __init__(self, version: str) -> None: """Initialize the exception with a message based on the provided version.""" super().__init__(f"Unsupported API major version: {version!r}") class APIVersionWarning(Warning): """The minor version of an API response is not supported.""" def __init__(self, version: str) -> None: """Initialize the warning with a message based on the provided version.""" super().__init__(f"Unsupported API minor version: {version!r}") class UnsupportedMIMEType(Exception): """An unsupported MIME type was provided in a ``Content-Type`` header.""" _Meta_1_0 = TypedDict("_Meta_1_0", {"api-version": Literal["1.0"]}) _Meta_1_1 = TypedDict("_Meta_1_1", {"api-version": Literal["1.1"]}) _HashesDict: TypeAlias = Dict[str, str] _OptionalProjectFileDetails_1_0 = TypedDict( "_OptionalProjectFileDetails_1_0", { "requires-python": str, "dist-info-metadata": Union[bool, _HashesDict], "gpg-sig": bool, "yanked": Union[bool, str], # PEP-714 "core-metadata": Union[bool, _HashesDict], }, total=False, ) class ProjectFileDetails_1_0(_OptionalProjectFileDetails_1_0): """A :class:`~typing.TypedDict` for the ``files`` key of :class:`ProjectDetails_1_0`.""" filename: str url: str hashes: _HashesDict _OptionalProjectFileDetails_1_1 = TypedDict( "_OptionalProjectFileDetails_1_1", { "requires-python": str, "dist-info-metadata": Union[bool, _HashesDict], "gpg-sig": bool, "yanked": Union[bool, str], # PEP 700 "upload-time": str, # PEP 714 "core-metadata": Union[bool, _HashesDict], }, total=False, ) class ProjectFileDetails_1_1(_OptionalProjectFileDetails_1_1): """A :class:`~typing.TypedDict` for the ``files`` key of :class:`ProjectDetails_1_1`.""" 
filename: str url: str hashes: _HashesDict # PEP 700 size: int class ProjectDetails_1_0(TypedDict): """A :class:`~typing.TypedDict` for a project details response (:pep:`691`).""" meta: _Meta_1_0 name: packaging.utils.NormalizedName files: list[ProjectFileDetails_1_0] class ProjectDetails_1_1(TypedDict): """A :class:`~typing.TypedDict` for a project details response (:pep:`700`).""" meta: _Meta_1_1 name: packaging.utils.NormalizedName files: list[ProjectFileDetails_1_1] # PEP 700 versions: List[str] ProjectDetails: TypeAlias = Union[ProjectDetails_1_0, ProjectDetails_1_1] def _check_version(tag: str, attrs: Dict[str, Optional[str]]) -> None: if ( tag == "meta" and attrs.get("name") == "pypi:repository-version" and "content" in attrs and attrs["content"] ): version = attrs["content"] major_version, minor_version = map(int, version.split(".")) if major_version != 1: raise UnsupportedAPIVersion(version) elif minor_version > 1: warnings.warn(APIVersionWarning(version), stacklevel=7) class _ArchiveLinkHTMLParser(html.parser.HTMLParser): def __init__(self) -> None: self.archive_links: List[Dict[str, Any]] = [] super().__init__() def handle_starttag( self, tag: str, attrs_list: list[tuple[str, Optional[str]]] ) -> None: attrs = dict(attrs_list) _check_version(tag, attrs) if tag != "a": return # PEP 503: # The href attribute MUST be a URL that links to the location of the # file for download ... if "href" not in attrs or not attrs["href"]: return full_url: str = attrs["href"] parsed_url = urllib.parse.urlparse(full_url) # PEP 503: # ... the text of the anchor tag MUST match the final path component # (the filename) of the URL. _, _, raw_filename = parsed_url.path.rpartition("/") filename = urllib.parse.unquote(raw_filename) url = urllib.parse.urlunparse((*parsed_url[:5], "")) args: Dict[str, Any] = {"filename": filename, "url": url} # PEP 503: # The URL SHOULD include a hash in the form of a URL fragment with the # following syntax: #<hashname>=<hashvalue> ... 
if parsed_url.fragment: hash_algo, hash_value = parsed_url.fragment.split("=", 1) args["hashes"] = hash_algo.lower(), hash_value # PEP 503: # A repository MAY include a data-requires-python attribute on a file # link. This exposes the Requires-Python metadata field ... # In the attribute value, < and > have to be HTML encoded as &lt; and # &gt;, respectively. if "data-requires-python" in attrs and attrs["data-requires-python"]: requires_python_data = html.unescape(attrs["data-requires-python"]) args["requires-python"] = requires_python_data # PEP 503: # A repository MAY include a data-gpg-sig attribute on a file link with # a value of either true or false ... if "data-gpg-sig" in attrs: args["gpg-sig"] = attrs["data-gpg-sig"] == "true" # PEP 592: # Links in the simple repository MAY have a data-yanked attribute which # may have no value, or may have an arbitrary string as a value. if "data-yanked" in attrs: args["yanked"] = attrs.get("data-yanked") or True # PEP 658: # ... each anchor tag pointing to a distribution MAY have a # data-dist-info-metadata attribute. # PEP 714: # Clients consuming any of the HTML representations of the Simple API # MUST read the PEP 658 metadata from the key data-core-metadata if it # is present. They MAY optionally use the legacy data-dist-info-metadata # if it is present but data-core-metadata is not. metadata_fields = ["data-core-metadata", "data-dist-info-metadata"] if any((metadata_field := field) in attrs for field in metadata_fields): found_metadata = attrs.get(metadata_field) if found_metadata and found_metadata != "true": # The repository SHOULD provide the hash of the Core Metadata # file as the data-dist-info-metadata attribute's value using # the syntax <hashname>=<hashvalue>, where <hashname> is the # lower cased name of the hash function used, and <hashvalue> is # the hex encoded digest. 
algorithm, _, hash_ = found_metadata.partition("=") metadata = (algorithm.lower(), hash_) else: # The repository MAY use true as the attribute's value if a hash # is unavailable. metadata = "", "" args["metadata"] = metadata self.archive_links.append(args) def from_project_details_html(html: str, name: str) -> ProjectDetails_1_0: """Convert the HTML response for a project details page to a :pep:`691` response. Due to HTML project details pages lacking the name of the project, it must be specified via the *name* parameter to fill in the JSON data. """ parser = _ArchiveLinkHTMLParser() parser.feed(html) files: List[ProjectFileDetails_1_0] = [] for archive_link in parser.archive_links: details: ProjectFileDetails_1_0 = { "filename": archive_link["filename"], "url": archive_link["url"], "hashes": {}, } if "hashes" in archive_link: details["hashes"] = dict([archive_link["hashes"]]) if "metadata" in archive_link: algorithm, value = archive_link["metadata"] if algorithm: value = {algorithm: value} else: value = True for key in ["core-metadata", "dist-info-metadata"]: details[key] = value # type: ignore[literal-required] for key in {"requires-python", "yanked", "gpg-sig"}: if key in archive_link: details[key] = archive_link[key] # type: ignore[literal-required] files.append(details) return { "meta": {"api-version": "1.0"}, "name": packaging.utils.canonicalize_name(name), "files": files, }PK`ZZZ�i0UAUA micropip-0.8.0.dist-info/LICENSEMozilla Public License Version 2.0 ================================== 1. Definitions -------------- 1.1. "Contributor" means each individual or legal entity that creates, contributes to the creation of, or owns Covered Software. 1.2. "Contributor Version" means the combination of the Contributions of others (if any) used by a Contributor and that particular Contributor's Contribution. 1.3. "Contribution" means Covered Software of a particular Contributor. 1.4. 
"Covered Software" means Source Code Form to which the initial Contributor has attached the notice in Exhibit A, the Executable Form of such Source Code Form, and Modifications of such Source Code Form, in each case including portions thereof. 1.5. "Incompatible With Secondary Licenses" means (a) that the initial Contributor has attached the notice described in Exhibit B to the Covered Software; or (b) that the Covered Software was made available under the terms of version 1.1 or earlier of the License, but not also under the terms of a Secondary License. 1.6. "Executable Form" means any form of the work other than Source Code Form. 1.7. "Larger Work" means a work that combines Covered Software with other material, in a separate file or files, that is not Covered Software. 1.8. "License" means this document. 1.9. "Licensable" means having the right to grant, to the maximum extent possible, whether at the time of the initial grant or subsequently, any and all of the rights conveyed by this License. 1.10. "Modifications" means any of the following: (a) any file in Source Code Form that results from an addition to, deletion from, or modification of the contents of Covered Software; or (b) any new file in Source Code Form that contains any Covered Software. 1.11. "Patent Claims" of a Contributor means any patent claim(s), including without limitation, method, process, and apparatus claims, in any patent Licensable by such Contributor that would be infringed, but for the grant of the License, by the making, using, selling, offering for sale, having made, import, or transfer of either its Contributions or its Contributor Version. 1.12. "Secondary License" means either the GNU General Public License, Version 2.0, the GNU Lesser General Public License, Version 2.1, the GNU Affero General Public License, Version 3.0, or any later versions of those licenses. 1.13. "Source Code Form" means the form of the work preferred for making modifications. 1.14. 
"You" (or "Your") means an individual or a legal entity exercising rights under this License. For legal entities, "You" includes any entity that controls, is controlled by, or is under common control with You. For purposes of this definition, "control" means (a) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (b) ownership of more than fifty percent (50%) of the outstanding shares or beneficial ownership of such entity. 2. License Grants and Conditions -------------------------------- 2.1. Grants Each Contributor hereby grants You a world-wide, royalty-free, non-exclusive license: (a) under intellectual property rights (other than patent or trademark) Licensable by such Contributor to use, reproduce, make available, modify, display, perform, distribute, and otherwise exploit its Contributions, either on an unmodified basis, with Modifications, or as part of a Larger Work; and (b) under Patent Claims of such Contributor to make, use, sell, offer for sale, have made, import, and otherwise transfer either its Contributions or its Contributor Version. 2.2. Effective Date The licenses granted in Section 2.1 with respect to any Contribution become effective for each Contribution on the date the Contributor first distributes such Contribution. 2.3. Limitations on Grant Scope The licenses granted in this Section 2 are the only rights granted under this License. No additional rights or licenses will be implied from the distribution or licensing of Covered Software under this License. 
Notwithstanding Section 2.1(b) above, no patent license is granted by a Contributor: (a) for any code that a Contributor has removed from Covered Software; or (b) for infringements caused by: (i) Your and any other third party's modifications of Covered Software, or (ii) the combination of its Contributions with other software (except as part of its Contributor Version); or (c) under Patent Claims infringed by Covered Software in the absence of its Contributions. This License does not grant any rights in the trademarks, service marks, or logos of any Contributor (except as may be necessary to comply with the notice requirements in Section 3.4). 2.4. Subsequent Licenses No Contributor makes additional grants as a result of Your choice to distribute the Covered Software under a subsequent version of this License (see Section 10.2) or under the terms of a Secondary License (if permitted under the terms of Section 3.3). 2.5. Representation Each Contributor represents that the Contributor believes its Contributions are its original creation(s) or it has sufficient rights to grant the rights to its Contributions conveyed by this License. 2.6. Fair Use This License is not intended to limit any rights You have under applicable copyright doctrines of fair use, fair dealing, or other equivalents. 2.7. Conditions Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in Section 2.1. 3. Responsibilities ------------------- 3.1. Distribution of Source Form All distribution of Covered Software in Source Code Form, including any Modifications that You create or to which You contribute, must be under the terms of this License. You must inform recipients that the Source Code Form of the Covered Software is governed by the terms of this License, and how they can obtain a copy of this License. You may not attempt to alter or restrict the recipients' rights in the Source Code Form. 3.2. 
Distribution of Executable Form If You distribute Covered Software in Executable Form then: (a) such Covered Software must also be made available in Source Code Form, as described in Section 3.1, and You must inform recipients of the Executable Form how they can obtain a copy of such Source Code Form by reasonable means in a timely manner, at a charge no more than the cost of distribution to the recipient; and (b) You may distribute such Executable Form under the terms of this License, or sublicense it under different terms, provided that the license for the Executable Form does not attempt to limit or alter the recipients' rights in the Source Code Form under this License. 3.3. Distribution of a Larger Work You may create and distribute a Larger Work under terms of Your choice, provided that You also comply with the requirements of this License for the Covered Software. If the Larger Work is a combination of Covered Software with a work governed by one or more Secondary Licenses, and the Covered Software is not Incompatible With Secondary Licenses, this License permits You to additionally distribute such Covered Software under the terms of such Secondary License(s), so that the recipient of the Larger Work may, at their option, further distribute the Covered Software under the terms of either this License or such Secondary License(s). 3.4. Notices You may not remove or alter the substance of any license notices (including copyright notices, patent notices, disclaimers of warranty, or limitations of liability) contained within the Source Code Form of the Covered Software, except that You may alter any license notices to the extent required to remedy known factual inaccuracies. 3.5. Application of Additional Terms You may choose to offer, and to charge a fee for, warranty, support, indemnity or liability obligations to one or more recipients of Covered Software. However, You may do so only on Your own behalf, and not on behalf of any Contributor. 
You must make it absolutely clear that any such warranty, support, indemnity, or liability obligation is offered by You alone, and You hereby agree to indemnify every Contributor for any liability incurred by such Contributor as a result of warranty, support, indemnity or liability terms You offer. You may include additional disclaimers of warranty and limitations of liability specific to any jurisdiction. 4. Inability to Comply Due to Statute or Regulation --------------------------------------------------- If it is impossible for You to comply with any of the terms of this License with respect to some or all of the Covered Software due to statute, judicial order, or regulation then You must: (a) comply with the terms of this License to the maximum extent possible; and (b) describe the limitations and the code they affect. Such description must be placed in a text file included with all distributions of the Covered Software under this License. Except to the extent prohibited by statute or regulation, such description must be sufficiently detailed for a recipient of ordinary skill to be able to understand it. 5. Termination -------------- 5.1. The rights granted under this License will terminate automatically if You fail to comply with any of its terms. However, if You become compliant, then the rights granted under this License from a particular Contributor are reinstated (a) provisionally, unless and until such Contributor explicitly and finally terminates Your grants, and (b) on an ongoing basis, if such Contributor fails to notify You of the non-compliance by some reasonable means prior to 60 days after You have come back into compliance. 
Moreover, Your grants from a particular Contributor are reinstated on an ongoing basis if such Contributor notifies You of the non-compliance by some reasonable means, this is the first time You have received notice of non-compliance with this License from such Contributor, and You become compliant prior to 30 days after Your receipt of the notice. 5.2. If You initiate litigation against any entity by asserting a patent infringement claim (excluding declaratory judgment actions, counter-claims, and cross-claims) alleging that a Contributor Version directly or indirectly infringes any patent, then the rights granted to You by any and all Contributors for the Covered Software under Section 2.1 of this License shall terminate. 5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user license agreements (excluding distributors and resellers) which have been validly granted by You or Your distributors under this License prior to termination shall survive termination. ************************************************************************ * * * 6. Disclaimer of Warranty * * ------------------------- * * * * Covered Software is provided under this License on an "as is" * * basis, without warranty of any kind, either expressed, implied, or * * statutory, including, without limitation, warranties that the * * Covered Software is free of defects, merchantable, fit for a * * particular purpose or non-infringing. The entire risk as to the * * quality and performance of the Covered Software is with You. * * Should any Covered Software prove defective in any respect, You * * (not any Contributor) assume the cost of any necessary servicing, * * repair, or correction. This disclaimer of warranty constitutes an * * essential part of this License. No use of any Covered Software is * * authorized under this License except under this disclaimer. 
* * * ************************************************************************ ************************************************************************ * * * 7. Limitation of Liability * * -------------------------- * * * * Under no circumstances and under no legal theory, whether tort * * (including negligence), contract, or otherwise, shall any * * Contributor, or anyone who distributes Covered Software as * * permitted above, be liable to You for any direct, indirect, * * special, incidental, or consequential damages of any character * * including, without limitation, damages for lost profits, loss of * * goodwill, work stoppage, computer failure or malfunction, or any * * and all other commercial damages or losses, even if such party * * shall have been informed of the possibility of such damages. This * * limitation of liability shall not apply to liability for death or * * personal injury resulting from such party's negligence to the * * extent applicable law prohibits such limitation. Some * * jurisdictions do not allow the exclusion or limitation of * * incidental or consequential damages, so this exclusion and * * limitation may not apply to You. * * * ************************************************************************ 8. Litigation ------------- Any litigation relating to this License may be brought only in the courts of a jurisdiction where the defendant maintains its principal place of business and such litigation shall be governed by laws of that jurisdiction, without reference to its conflict-of-law provisions. Nothing in this Section shall prevent a party's ability to bring cross-claims or counter-claims. 9. Miscellaneous ---------------- This License represents the complete agreement concerning the subject matter hereof. If any provision of this License is held to be unenforceable, such provision shall be reformed only to the extent necessary to make it enforceable. 
Any law or regulation which provides that the language of a contract shall be construed against the drafter shall not be used to construe this License against a Contributor. 10. Versions of the License --------------------------- 10.1. New Versions Mozilla Foundation is the license steward. Except as provided in Section 10.3, no one other than the license steward has the right to modify or publish new versions of this License. Each version will be given a distinguishing version number. 10.2. Effect of New Versions You may distribute the Covered Software under the terms of the version of the License under which You originally received the Covered Software, or under the terms of any subsequent version published by the license steward. 10.3. Modified Versions If you create software not governed by this License, and you want to create a new license for such software, you may create and use a modified version of this License if you rename the license and remove any references to the name of the license steward (except to note that such modified license differs from this License). 10.4. Distributing Source Code Form that is Incompatible With Secondary Licenses If You choose to distribute Source Code Form that is Incompatible With Secondary Licenses under the terms of this version of the License, the notice described in Exhibit B of this License must be attached. Exhibit A - Source Code Form License Notice ------------------------------------------- This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain one at http://mozilla.org/MPL/2.0/. If it is not possible or desirable to put the notice in a particular file, then You may include the notice in a location (such as a LICENSE file in a relevant directory) where a recipient would be likely to look for such a notice. You may add additional accurate notices of copyright ownership. 
Exhibit B - "Incompatible With Secondary Licenses" Notice --------------------------------------------------------- This Source Code Form is "Incompatible With Secondary Licenses", as defined by the Mozilla Public License, v. 2.0. PK`ZZZQ��!�S�S!micropip-0.8.0.dist-info/METADATAMetadata-Version: 2.1 Name: micropip Version: 0.8.0 Summary: A lightweight Python package installer for the web Author: Pyodide developers License: Mozilla Public License Version 2.0 ================================== 1. Definitions -------------- 1.1. "Contributor" means each individual or legal entity that creates, contributes to the creation of, or owns Covered Software. 1.2. "Contributor Version" means the combination of the Contributions of others (if any) used by a Contributor and that particular Contributor's Contribution. 1.3. "Contribution" means Covered Software of a particular Contributor. 1.4. "Covered Software" means Source Code Form to which the initial Contributor has attached the notice in Exhibit A, the Executable Form of such Source Code Form, and Modifications of such Source Code Form, in each case including portions thereof. 1.5. "Incompatible With Secondary Licenses" means (a) that the initial Contributor has attached the notice described in Exhibit B to the Covered Software; or (b) that the Covered Software was made available under the terms of version 1.1 or earlier of the License, but not also under the terms of a Secondary License. 1.6. "Executable Form" means any form of the work other than Source Code Form. 1.7. "Larger Work" means a work that combines Covered Software with other material, in a separate file or files, that is not Covered Software. 1.8. "License" means this document. 1.9. "Licensable" means having the right to grant, to the maximum extent possible, whether at the time of the initial grant or subsequently, any and all of the rights conveyed by this License. 1.10. 
"Modifications" means any of the following: (a) any file in Source Code Form that results from an addition to, deletion from, or modification of the contents of Covered Software; or (b) any new file in Source Code Form that contains any Covered Software. 1.11. "Patent Claims" of a Contributor means any patent claim(s), including without limitation, method, process, and apparatus claims, in any patent Licensable by such Contributor that would be infringed, but for the grant of the License, by the making, using, selling, offering for sale, having made, import, or transfer of either its Contributions or its Contributor Version. 1.12. "Secondary License" means either the GNU General Public License, Version 2.0, the GNU Lesser General Public License, Version 2.1, the GNU Affero General Public License, Version 3.0, or any later versions of those licenses. 1.13. "Source Code Form" means the form of the work preferred for making modifications. 1.14. "You" (or "Your") means an individual or a legal entity exercising rights under this License. For legal entities, "You" includes any entity that controls, is controlled by, or is under common control with You. For purposes of this definition, "control" means (a) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (b) ownership of more than fifty percent (50%) of the outstanding shares or beneficial ownership of such entity. 2. License Grants and Conditions -------------------------------- 2.1. 
Grants Each Contributor hereby grants You a world-wide, royalty-free, non-exclusive license: (a) under intellectual property rights (other than patent or trademark) Licensable by such Contributor to use, reproduce, make available, modify, display, perform, distribute, and otherwise exploit its Contributions, either on an unmodified basis, with Modifications, or as part of a Larger Work; and (b) under Patent Claims of such Contributor to make, use, sell, offer for sale, have made, import, and otherwise transfer either its Contributions or its Contributor Version. 2.2. Effective Date The licenses granted in Section 2.1 with respect to any Contribution become effective for each Contribution on the date the Contributor first distributes such Contribution. 2.3. Limitations on Grant Scope The licenses granted in this Section 2 are the only rights granted under this License. No additional rights or licenses will be implied from the distribution or licensing of Covered Software under this License. Notwithstanding Section 2.1(b) above, no patent license is granted by a Contributor: (a) for any code that a Contributor has removed from Covered Software; or (b) for infringements caused by: (i) Your and any other third party's modifications of Covered Software, or (ii) the combination of its Contributions with other software (except as part of its Contributor Version); or (c) under Patent Claims infringed by Covered Software in the absence of its Contributions. This License does not grant any rights in the trademarks, service marks, or logos of any Contributor (except as may be necessary to comply with the notice requirements in Section 3.4). 2.4. Subsequent Licenses No Contributor makes additional grants as a result of Your choice to distribute the Covered Software under a subsequent version of this License (see Section 10.2) or under the terms of a Secondary License (if permitted under the terms of Section 3.3). 2.5. 
Representation Each Contributor represents that the Contributor believes its Contributions are its original creation(s) or it has sufficient rights to grant the rights to its Contributions conveyed by this License. 2.6. Fair Use This License is not intended to limit any rights You have under applicable copyright doctrines of fair use, fair dealing, or other equivalents. 2.7. Conditions Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in Section 2.1. 3. Responsibilities ------------------- 3.1. Distribution of Source Form All distribution of Covered Software in Source Code Form, including any Modifications that You create or to which You contribute, must be under the terms of this License. You must inform recipients that the Source Code Form of the Covered Software is governed by the terms of this License, and how they can obtain a copy of this License. You may not attempt to alter or restrict the recipients' rights in the Source Code Form. 3.2. Distribution of Executable Form If You distribute Covered Software in Executable Form then: (a) such Covered Software must also be made available in Source Code Form, as described in Section 3.1, and You must inform recipients of the Executable Form how they can obtain a copy of such Source Code Form by reasonable means in a timely manner, at a charge no more than the cost of distribution to the recipient; and (b) You may distribute such Executable Form under the terms of this License, or sublicense it under different terms, provided that the license for the Executable Form does not attempt to limit or alter the recipients' rights in the Source Code Form under this License. 3.3. Distribution of a Larger Work You may create and distribute a Larger Work under terms of Your choice, provided that You also comply with the requirements of this License for the Covered Software. 
If the Larger Work is a combination of Covered Software with a work governed by one or more Secondary Licenses, and the Covered Software is not Incompatible With Secondary Licenses, this License permits You to additionally distribute such Covered Software under the terms of such Secondary License(s), so that the recipient of the Larger Work may, at their option, further distribute the Covered Software under the terms of either this License or such Secondary License(s). 3.4. Notices You may not remove or alter the substance of any license notices (including copyright notices, patent notices, disclaimers of warranty, or limitations of liability) contained within the Source Code Form of the Covered Software, except that You may alter any license notices to the extent required to remedy known factual inaccuracies. 3.5. Application of Additional Terms You may choose to offer, and to charge a fee for, warranty, support, indemnity or liability obligations to one or more recipients of Covered Software. However, You may do so only on Your own behalf, and not on behalf of any Contributor. You must make it absolutely clear that any such warranty, support, indemnity, or liability obligation is offered by You alone, and You hereby agree to indemnify every Contributor for any liability incurred by such Contributor as a result of warranty, support, indemnity or liability terms You offer. You may include additional disclaimers of warranty and limitations of liability specific to any jurisdiction. 4. Inability to Comply Due to Statute or Regulation --------------------------------------------------- If it is impossible for You to comply with any of the terms of this License with respect to some or all of the Covered Software due to statute, judicial order, or regulation then You must: (a) comply with the terms of this License to the maximum extent possible; and (b) describe the limitations and the code they affect. 
Such description must be placed in a text file included with all distributions of the Covered Software under this License. Except to the extent prohibited by statute or regulation, such description must be sufficiently detailed for a recipient of ordinary skill to be able to understand it. 5. Termination -------------- 5.1. The rights granted under this License will terminate automatically if You fail to comply with any of its terms. However, if You become compliant, then the rights granted under this License from a particular Contributor are reinstated (a) provisionally, unless and until such Contributor explicitly and finally terminates Your grants, and (b) on an ongoing basis, if such Contributor fails to notify You of the non-compliance by some reasonable means prior to 60 days after You have come back into compliance. Moreover, Your grants from a particular Contributor are reinstated on an ongoing basis if such Contributor notifies You of the non-compliance by some reasonable means, this is the first time You have received notice of non-compliance with this License from such Contributor, and You become compliant prior to 30 days after Your receipt of the notice. 5.2. If You initiate litigation against any entity by asserting a patent infringement claim (excluding declaratory judgment actions, counter-claims, and cross-claims) alleging that a Contributor Version directly or indirectly infringes any patent, then the rights granted to You by any and all Contributors for the Covered Software under Section 2.1 of this License shall terminate. 5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user license agreements (excluding distributors and resellers) which have been validly granted by You or Your distributors under this License prior to termination shall survive termination. ************************************************************************ * * * 6. 
Disclaimer of Warranty * * ------------------------- * * * * Covered Software is provided under this License on an "as is" * * basis, without warranty of any kind, either expressed, implied, or * * statutory, including, without limitation, warranties that the * * Covered Software is free of defects, merchantable, fit for a * * particular purpose or non-infringing. The entire risk as to the * * quality and performance of the Covered Software is with You. * * Should any Covered Software prove defective in any respect, You * * (not any Contributor) assume the cost of any necessary servicing, * * repair, or correction. This disclaimer of warranty constitutes an * * essential part of this License. No use of any Covered Software is * * authorized under this License except under this disclaimer. * * * ************************************************************************ ************************************************************************ * * * 7. Limitation of Liability * * -------------------------- * * * * Under no circumstances and under no legal theory, whether tort * * (including negligence), contract, or otherwise, shall any * * Contributor, or anyone who distributes Covered Software as * * permitted above, be liable to You for any direct, indirect, * * special, incidental, or consequential damages of any character * * including, without limitation, damages for lost profits, loss of * * goodwill, work stoppage, computer failure or malfunction, or any * * and all other commercial damages or losses, even if such party * * shall have been informed of the possibility of such damages. This * * limitation of liability shall not apply to liability for death or * * personal injury resulting from such party's negligence to the * * extent applicable law prohibits such limitation. Some * * jurisdictions do not allow the exclusion or limitation of * * incidental or consequential damages, so this exclusion and * * limitation may not apply to You. 
* * * ************************************************************************ 8. Litigation ------------- Any litigation relating to this License may be brought only in the courts of a jurisdiction where the defendant maintains its principal place of business and such litigation shall be governed by laws of that jurisdiction, without reference to its conflict-of-law provisions. Nothing in this Section shall prevent a party's ability to bring cross-claims or counter-claims. 9. Miscellaneous ---------------- This License represents the complete agreement concerning the subject matter hereof. If any provision of this License is held to be unenforceable, such provision shall be reformed only to the extent necessary to make it enforceable. Any law or regulation which provides that the language of a contract shall be construed against the drafter shall not be used to construe this License against a Contributor. 10. Versions of the License --------------------------- 10.1. New Versions Mozilla Foundation is the license steward. Except as provided in Section 10.3, no one other than the license steward has the right to modify or publish new versions of this License. Each version will be given a distinguishing version number. 10.2. Effect of New Versions You may distribute the Covered Software under the terms of the version of the License under which You originally received the Covered Software, or under the terms of any subsequent version published by the license steward. 10.3. Modified Versions If you create software not governed by this License, and you want to create a new license for such software, you may create and use a modified version of this License if you rename the license and remove any references to the name of the license steward (except to note that such modified license differs from this License). 10.4. 
Distributing Source Code Form that is Incompatible With Secondary Licenses If You choose to distribute Source Code Form that is Incompatible With Secondary Licenses under the terms of this version of the License, the notice described in Exhibit B of this License must be attached. Exhibit A - Source Code Form License Notice ------------------------------------------- This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain one at http://mozilla.org/MPL/2.0/. If it is not possible or desirable to put the notice in a particular file, then You may include the notice in a location (such as a LICENSE file in a relevant directory) where a recipient would be likely to look for such a notice. You may add additional accurate notices of copyright ownership. Exhibit B - "Incompatible With Secondary Licenses" Notice --------------------------------------------------------- This Source Code Form is "Incompatible With Secondary Licenses", as defined by the Mozilla Public License, v. 2.0. 
Project-URL: Homepage, https://github.com/pyodide/micropip Project-URL: Bug Tracker, https://github.com/pyodide/micropip/issues Classifier: Programming Language :: Python :: 3 Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0) Classifier: Operating System :: OS Independent Requires-Python: >=3.12 Description-Content-Type: text/markdown License-File: LICENSE Requires-Dist: packaging>=23.0 Provides-Extra: test Requires-Dist: pytest-httpserver; extra == "test" Requires-Dist: pytest-pyodide; extra == "test" Requires-Dist: pytest-cov; extra == "test" Requires-Dist: pytest<8.0.0; extra == "test" Requires-Dist: build==0.7.0; extra == "test" Requires-Dist: pyodide-lock==v0.1.0a8; extra == "test" # micropip [![PyPI Latest Release](https://img.shields.io/pypi/v/micropip.svg)](https://pypi.org/project/micropip/) ![GHA](https://github.com/pyodide/micropip/actions/workflows/main.yml/badge.svg) A lightweight Python package installer for the web ## Installation In [Pyodide](https://pyodide.org), you can install micropip, - either implicitly by importing it in the REPL - or explicitly via `pyodide.loadPackage('micropip')`. You can also install by URL from PyPI for instance. ## Usage ```py import micropip await micropip.install(<list-of-packages>) ``` For more information see the [documentation](https://pyodide.org/en/stable/usage/loading-packages.html#micropip). ## License micropip uses the [Mozilla Public License Version 2.0](https://choosealicense.com/licenses/mpl-2.0/). 
PK`ZZZ��p[[micropip-0.8.0.dist-info/WHEELWheel-Version: 1.0 Generator: setuptools (75.6.0) Root-Is-Purelib: true Tag: py3-none-any PK`ZZZ8�� &micropip-0.8.0.dist-info/top_level.txtmicropip PK`ZZZ���c micropip-0.8.0.dist-info/RECORDmicropip/__init__.py,sha256=cE6u4G2HutvCmHJhgnKpzIVc_oQvBfdu0dlB4g6zP7w,790 micropip/_mock_package.py,sha256=65HiYi1hMpxncB2Npr6i8m1_R3WwPm6z4vlHC-2WLaU,6564 micropip/_utils.py,sha256=l9RP79XdY0N7ZyHwU-j3AFzofxoea73oBS4C1OXxJU4,7926 micropip/_version.py,sha256=vspFLRfYI6gAAN7kyihey2lhPos0jxqKaNDWFlKPlmU,411 micropip/constants.py,sha256=RLFxdLNXa4NUQStBSPVACo51uuscZz6sxamHKiStjXw,143 micropip/freeze.py,sha256=FiI9IREB4ZjhIK4CtuE2cYzoLfL9neI9N5k5MFjK8GY,1922 micropip/install.py,sha256=-6uixFRrVuo4OCg6-b7I2PErxfhtfJSFYMq7pxdvGfo,3413 micropip/list.py,sha256=ScYYBfxk5kbtHmlffv2pSKEFHcPQ_COQkEYoAFDTJjE,1471 micropip/logging.py,sha256=2InsMUCPdrVssIQkSgSHSHz2gLXUDz7OFvVhMeWPdv0,4113 micropip/metadata.py,sha256=eLwkH57mrBGWQzP4HLXhnNwXs4FUuIEQBqYskJSOHk8,4153 micropip/package.py,sha256=Z4EXXwJV1Hee340zK-OpBO7UgZBa5AmwxtQyowo8xXM,2182 micropip/package_index.py,sha256=YwRakZ1iTo2YG5mNbcaSiQR9tqZNAYFp3D4I65nVM5k,11454 micropip/package_manager.py,sha256=RJcxdcYYMqgSbdSlZPx_2Nv6y_XIVm38PWTlAie_eRY,11165 micropip/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 micropip/transaction.py,sha256=q-FZ58lnN-4LTZzixABtqggOhDn3fENZYhEzVmrBQJQ,11490 micropip/types.py,sha256=wC57gf486Gx2uaC5Cc2g5rWE9gZds51sC4b-9YOK9FI,220 micropip/uninstall.py,sha256=9qOIazuXurCaXaMmUoDCtXd5TAErbblSdxz2385en4M,3331 micropip/wheelinfo.py,sha256=9c0y9tL_kYKDxkjoMLDQllQPo6A6Kwq9G6h4O5nTzFE,9153 micropip/_compat/__init__.py,sha256=OaXC4p4DcZdcZyUD-nVFc_MabmyTULhHN3S1219gioA,1200 micropip/_compat/_compat_in_pyodide.py,sha256=ydNR4o1lbty7vY4cQkpFPde9CwmwWSWHYlOdRtqWUko,2324 micropip/_compat/_compat_not_in_pyodide.py,sha256=-0ndadJRw45mbBcTijkk2C481GNvpf3F03--eb1kAvY,2369 micropip/_compat/compatibility_layer.py,sha256=dbb7UAwA5ct8vhfGMC1yRnxW9Fa5L15bKsu40xH81so,1908 
micropip/externals/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 micropip/externals/mousebender/simple.py,sha256=ZQ5Zr_vI5eZQ9_QfquJUJLcQ7FuO3hVqujw54A9R-ic,8780 micropip-0.8.0.dist-info/LICENSE,sha256=HyVuytGSiAUQ6ErWBHTqt1iSGHhLmlC8fO7jTCuR8dU,16725 micropip-0.8.0.dist-info/METADATA,sha256=6QxpuzetG4yA9o52wUVC_b22jfhUDXLIK4gyblHD4pI,21376 micropip-0.8.0.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91 micropip-0.8.0.dist-info/top_level.txt,sha256=qvj8YpG46vX6oBHHfIh3jECZhH4ZH3seXIkc-QFaVvY,9 micropip-0.8.0.dist-info/RECORD,, PK`ZZZw9�w�micropip/__init__.pyPK`ZZZ�t���Hmicropip/_mock_package.pyPK`ZZZI׸����#micropip/_utils.pyPK`ZZZ������I<micropip/_version.pyPK`ZZZ��:���>micropip/constants.pyPK`ZZZӃ����>micropip/freeze.pyPK`ZZZ�҄U U ��Fmicropip/install.pyPK`ZZZ��Aɿ��Tmicropip/list.pyPK`ZZZ�q��Ymicropip/logging.pyPK`ZZZ١�99�?jmicropip/metadata.pyPK`ZZZ$������zmicropip/package.pyPK`ZZZ ����,�,�a�micropip/package_index.pyPK`ZZZa�ٝ+�+�V�micropip/package_manager.pyPK`ZZZ�,�micropip/py.typedPK`ZZZ~;��,�,�[�micropip/transaction.pyPK`ZZZ-L9���r micropip/types.pyPK`ZZZ)�b�  �} micropip/uninstall.pyPK`ZZZKo�#�#��micropip/wheelinfo.pyPK`ZZZ���ϰ���;micropip/_compat/__init__.pyPK`ZZZ�*g�  &��@micropip/_compat/_compat_in_pyodide.pyPK`ZZZ�F�A A *��Imicropip/_compat/_compat_not_in_pyodide.pyPK`ZZZm}�rtt'�rSmicropip/_compat/compatibility_layer.pyPK`ZZZ�+[micropip/externals/__init__.pyPK`ZZZ�3\�L"L"(�g[micropip/externals/mousebender/simple.pyPK`ZZZ�i0UAUA ��}micropip-0.8.0.dist-info/LICENSEPK`ZZZQ��!�S�S!���micropip-0.8.0.dist-info/METADATAPK`ZZZ��p[[�Kmicropip-0.8.0.dist-info/WHEELPK`ZZZ8�� &��micropip-0.8.0.dist-info/top_level.txtPK`ZZZ���c �/micropip-0.8.0.dist-info/RECORDPK!�
Memory