diff --git a/news/distlib.vendor.rst b/news/distlib.vendor.rst new file mode 100644 index 00000000000..428113e8b67 --- /dev/null +++ b/news/distlib.vendor.rst @@ -0,0 +1 @@ +Upgrade distlib to 0.3.9 diff --git a/src/pip/_vendor/distlib/__init__.py b/src/pip/_vendor/distlib/__init__.py index e999438fe94..bf0d6c6d30e 100644 --- a/src/pip/_vendor/distlib/__init__.py +++ b/src/pip/_vendor/distlib/__init__.py @@ -6,7 +6,7 @@ # import logging -__version__ = '0.3.8' +__version__ = '0.3.9' class DistlibException(Exception): diff --git a/src/pip/_vendor/distlib/compat.py b/src/pip/_vendor/distlib/compat.py index e93dc27a3eb..ca561dd2e37 100644 --- a/src/pip/_vendor/distlib/compat.py +++ b/src/pip/_vendor/distlib/compat.py @@ -217,8 +217,7 @@ def which(cmd, mode=os.F_OK | os.X_OK, path=None): # Additionally check that `file` is not a directory, as on Windows # directories pass the os.access check. def _access_check(fn, mode): - return (os.path.exists(fn) and os.access(fn, mode) - and not os.path.isdir(fn)) + return (os.path.exists(fn) and os.access(fn, mode) and not os.path.isdir(fn)) # If we're given a path with a directory part, look it up directly rather # than referring to PATH directories. This includes checking relative to the diff --git a/src/pip/_vendor/distlib/database.py b/src/pip/_vendor/distlib/database.py index eb3765f193b..c0f896a7d85 100644 --- a/src/pip/_vendor/distlib/database.py +++ b/src/pip/_vendor/distlib/database.py @@ -20,14 +20,12 @@ from . import DistlibException, resources from .compat import StringIO from .version import get_scheme, UnsupportedVersionError -from .metadata import (Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME, - LEGACY_METADATA_FILENAME) -from .util import (parse_requirement, cached_property, parse_name_and_version, - read_exports, write_exports, CSVReader, CSVWriter) +from .metadata import (Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME, LEGACY_METADATA_FILENAME) +from .util import (parse_requirement, cached_property, parse_name_and_version, read_exports, write_exports, CSVReader, + CSVWriter) __all__ = [ - 'Distribution', 'BaseInstalledDistribution', 'InstalledDistribution', - 'EggInfoDistribution', 'DistributionPath' + 'Distribution', 'BaseInstalledDistribution', 'InstalledDistribution', 'EggInfoDistribution', 'DistributionPath' ] logger = logging.getLogger(__name__) @@ -35,8 +33,7 @@ EXPORTS_FILENAME = 'pydist-exports.json' COMMANDS_FILENAME = 'pydist-commands.json' -DIST_FILES = ('INSTALLER', METADATA_FILENAME, 'RECORD', 'REQUESTED', - 'RESOURCES', EXPORTS_FILENAME, 'SHARED') +DIST_FILES = ('INSTALLER', METADATA_FILENAME, 'RECORD', 'REQUESTED', 'RESOURCES', EXPORTS_FILENAME, 'SHARED') DISTINFO_EXT = '.dist-info' @@ -134,13 +131,9 @@ def _yield_distributions(self): continue try: if self._include_dist and entry.endswith(DISTINFO_EXT): - possible_filenames = [ - METADATA_FILENAME, WHEEL_METADATA_FILENAME, - LEGACY_METADATA_FILENAME - ] + possible_filenames = [METADATA_FILENAME, WHEEL_METADATA_FILENAME, LEGACY_METADATA_FILENAME] for metadata_filename in possible_filenames: - metadata_path = posixpath.join( - entry, metadata_filename) + metadata_path = posixpath.join(entry, metadata_filename) pydist = finder.find(metadata_path) if pydist: break @@ -148,15 +141,11 @@ def _yield_distributions(self): continue with contextlib.closing(pydist.as_stream()) as stream: - metadata = Metadata(fileobj=stream, - scheme='legacy') + metadata = Metadata(fileobj=stream, scheme='legacy') logger.debug('Found %s', r.path) seen.add(r.path) - yield 
new_dist_class(r.path, - metadata=metadata, - env=self) - elif self._include_egg and entry.endswith( - ('.egg-info', '.egg')): + yield new_dist_class(r.path, metadata=metadata, env=self) + elif self._include_egg and entry.endswith(('.egg-info', '.egg')): logger.debug('Found %s', r.path) seen.add(r.path) yield old_dist_class(r.path, self) @@ -274,8 +263,7 @@ def provides_distribution(self, name, version=None): try: matcher = self._scheme.matcher('%s (%s)' % (name, version)) except ValueError: - raise DistlibException('invalid name or version: %r, %r' % - (name, version)) + raise DistlibException('invalid name or version: %r, %r' % (name, version)) for dist in self.get_distributions(): # We hit a problem on Travis where enum34 was installed and doesn't @@ -390,10 +378,8 @@ def provides(self): def _get_requirements(self, req_attr): md = self.metadata reqts = getattr(md, req_attr) - logger.debug('%s: got requirements %r from metadata: %r', self.name, - req_attr, reqts) - return set( - md.get_requirements(reqts, extras=self.extras, env=self.context)) + logger.debug('%s: got requirements %r from metadata: %r', self.name, req_attr, reqts) + return set(md.get_requirements(reqts, extras=self.extras, env=self.context)) @property def run_requires(self): @@ -469,8 +455,7 @@ def __eq__(self, other): if type(other) is not type(self): result = False else: - result = (self.name == other.name and self.version == other.version - and self.source_url == other.source_url) + result = (self.name == other.name and self.version == other.version and self.source_url == other.source_url) return result def __hash__(self): @@ -561,8 +546,7 @@ def __init__(self, path, metadata=None, env=None): if r is None: r = finder.find(LEGACY_METADATA_FILENAME) if r is None: - raise ValueError('no %s found in %s' % - (METADATA_FILENAME, path)) + raise ValueError('no %s found in %s' % (METADATA_FILENAME, path)) with contextlib.closing(r.as_stream()) as stream: metadata = Metadata(fileobj=stream, scheme='legacy') @@ -580,8 +564,7 @@ def __init__(self, path, metadata=None, env=None): self.modules = data.splitlines() def __repr__(self): - return '' % ( - self.name, self.version, self.path) + return '' % (self.name, self.version, self.path) def __str__(self): return "%s %s" % (self.name, self.version) @@ -703,8 +686,7 @@ def write_installed_files(self, paths, prefix, dry_run=False): size = '%d' % os.path.getsize(path) with open(path, 'rb') as fp: hash_value = self.get_hash(fp.read()) - if path.startswith(base) or (base_under_prefix - and path.startswith(prefix)): + if path.startswith(base) or (base_under_prefix and path.startswith(prefix)): path = os.path.relpath(path, base) writer.writerow((path, hash_value, size)) @@ -746,8 +728,7 @@ def check_installed_files(self): with open(path, 'rb') as f: actual_hash = self.get_hash(f.read(), hasher) if actual_hash != hash_value: - mismatches.append( - (path, 'hash', hash_value, actual_hash)) + mismatches.append((path, 'hash', hash_value, actual_hash)) return mismatches @cached_property @@ -829,9 +810,8 @@ def get_distinfo_file(self, path): # it's an absolute path? 
distinfo_dirname, path = path.split(os.sep)[-2:] if distinfo_dirname != self.path.split(os.sep)[-1]: - raise DistlibException( - 'dist-info file %r does not belong to the %r %s ' - 'distribution' % (path, self.name, self.version)) + raise DistlibException('dist-info file %r does not belong to the %r %s ' + 'distribution' % (path, self.name, self.version)) # The file must be relative if path not in DIST_FILES: @@ -857,8 +837,7 @@ def list_distinfo_files(self): yield path def __eq__(self, other): - return (isinstance(other, InstalledDistribution) - and self.path == other.path) + return (isinstance(other, InstalledDistribution) and self.path == other.path) # See http://docs.python.org/reference/datamodel#object.__hash__ __hash__ = object.__hash__ @@ -911,8 +890,7 @@ def parse_requires_data(data): if not line: # pragma: no cover continue if line.startswith('['): # pragma: no cover - logger.warning( - 'Unexpected line: quitting requirement scan: %r', line) + logger.warning('Unexpected line: quitting requirement scan: %r', line) break r = parse_requirement(line) if not r: # pragma: no cover @@ -954,13 +932,11 @@ def parse_requires_path(req_path): else: # FIXME handle the case where zipfile is not available zipf = zipimport.zipimporter(path) - fileobj = StringIO( - zipf.get_data('EGG-INFO/PKG-INFO').decode('utf8')) + fileobj = StringIO(zipf.get_data('EGG-INFO/PKG-INFO').decode('utf8')) metadata = Metadata(fileobj=fileobj, scheme='legacy') try: data = zipf.get_data('EGG-INFO/requires.txt') - tl_data = zipf.get_data('EGG-INFO/top_level.txt').decode( - 'utf-8') + tl_data = zipf.get_data('EGG-INFO/top_level.txt').decode('utf-8') requires = parse_requires_data(data.decode('utf-8')) except IOError: requires = None @@ -990,8 +966,7 @@ def parse_requires_path(req_path): return metadata def __repr__(self): - return '' % (self.name, self.version, - self.path) + return '' % (self.name, self.version, self.path) def __str__(self): return "%s %s" % (self.name, self.version) @@ -1083,8 +1058,7 @@ def list_distinfo_files(self, absolute=False): yield line def __eq__(self, other): - return (isinstance(other, EggInfoDistribution) - and self.path == other.path) + return (isinstance(other, EggInfoDistribution) and self.path == other.path) # See http://docs.python.org/reference/datamodel#object.__hash__ __hash__ = object.__hash__ @@ -1184,8 +1158,7 @@ def to_dot(self, f, skip_disconnected=True): disconnected.append(dist) for other, label in adjs: if label is not None: - f.write('"%s" -> "%s" [label="%s"]\n' % - (dist.name, other.name, label)) + f.write('"%s" -> "%s" [label="%s"]\n' % (dist.name, other.name, label)) else: f.write('"%s" -> "%s"\n' % (dist.name, other.name)) if not skip_disconnected and len(disconnected) > 0: @@ -1225,8 +1198,7 @@ def topological_sort(self): # Remove from the adjacency list of others for k, v in alist.items(): alist[k] = [(d, r) for d, r in v if d not in to_remove] - logger.debug('Moving to result: %s', - ['%s (%s)' % (d.name, d.version) for d in to_remove]) + logger.debug('Moving to result: %s', ['%s (%s)' % (d.name, d.version) for d in to_remove]) result.extend(to_remove) return result, list(alist.keys()) @@ -1261,15 +1233,13 @@ def make_graph(dists, scheme='default'): # now make the edges for dist in dists: - requires = (dist.run_requires | dist.meta_requires - | dist.build_requires | dist.dev_requires) + requires = (dist.run_requires | dist.meta_requires | dist.build_requires | dist.dev_requires) for req in requires: try: matcher = scheme.matcher(req) except UnsupportedVersionError: 
# XXX compat-mode if cannot read the version - logger.warning('could not read version %r - using name only', - req) + logger.warning('could not read version %r - using name only', req) name = req.split()[0] matcher = scheme.matcher(name) diff --git a/src/pip/_vendor/distlib/locators.py b/src/pip/_vendor/distlib/locators.py index f9f0788fc2a..222c1bf3e90 100644 --- a/src/pip/_vendor/distlib/locators.py +++ b/src/pip/_vendor/distlib/locators.py @@ -19,15 +19,12 @@ import zlib from . import DistlibException -from .compat import (urljoin, urlparse, urlunparse, url2pathname, pathname2url, - queue, quote, unescape, build_opener, - HTTPRedirectHandler as BaseRedirectHandler, text_type, - Request, HTTPError, URLError) +from .compat import (urljoin, urlparse, urlunparse, url2pathname, pathname2url, queue, quote, unescape, build_opener, + HTTPRedirectHandler as BaseRedirectHandler, text_type, Request, HTTPError, URLError) from .database import Distribution, DistributionPath, make_dist from .metadata import Metadata, MetadataInvalidError -from .util import (cached_property, ensure_slash, split_filename, get_project_data, - parse_requirement, parse_name_and_version, ServerProxy, - normalize_name) +from .util import (cached_property, ensure_slash, split_filename, get_project_data, parse_requirement, + parse_name_and_version, ServerProxy, normalize_name) from .version import get_scheme, UnsupportedVersionError from .wheel import Wheel, is_compatible @@ -58,6 +55,7 @@ class RedirectHandler(BaseRedirectHandler): """ A class to work around a bug in some Python 3.2.x releases. """ + # There's a bug in the base version for some 3.2.x # (e.g. 3.2.2 on Ubuntu Oneiric). If a Location header # returns e.g. /abc, it bails because it says the scheme '' @@ -80,8 +78,7 @@ def http_error_302(self, req, fp, code, msg, headers): headers.replace_header(key, newurl) else: headers[key] = newurl - return BaseRedirectHandler.http_error_302(self, req, fp, code, msg, - headers) + return BaseRedirectHandler.http_error_302(self, req, fp, code, msg, headers) http_error_301 = http_error_303 = http_error_307 = http_error_302 @@ -92,7 +89,7 @@ class Locator(object): """ source_extensions = ('.tar.gz', '.tar.bz2', '.tar', '.zip', '.tgz', '.tbz') binary_extensions = ('.egg', '.exe', '.whl') - excluded_extensions = ('.pdf',) + excluded_extensions = ('.pdf', ) # A list of tags indicating which wheels you want to match. The default # value of None matches against the tags compatible with the running @@ -100,7 +97,7 @@ class Locator(object): # instance to a list of tuples (pyver, abi, arch) which you want to match. wheel_tags = None - downloadable_extensions = source_extensions + ('.whl',) + downloadable_extensions = source_extensions + ('.whl', ) def __init__(self, scheme='default'): """ @@ -200,8 +197,7 @@ def score_url(self, url): is_downloadable = basename.endswith(self.downloadable_extensions) if is_wheel: compatible = is_compatible(Wheel(basename), self.wheel_tags) - return (t.scheme == 'https', 'pypi.org' in t.netloc, - is_downloadable, is_wheel, compatible, basename) + return (t.scheme == 'https', 'pypi.org' in t.netloc, is_downloadable, is_wheel, compatible, basename) def prefer_url(self, url1, url2): """ @@ -239,14 +235,14 @@ def convert_url_to_download_info(self, url, project_name): If it is, a dictionary is returned with keys "name", "version", "filename" and "url"; otherwise, None is returned. 
""" + def same_project(name1, name2): return normalize_name(name1) == normalize_name(name2) result = None scheme, netloc, path, params, query, frag = urlparse(url) if frag.lower().startswith('egg='): # pragma: no cover - logger.debug('%s: version hint in fragment: %r', - project_name, frag) + logger.debug('%s: version hint in fragment: %r', project_name, frag) m = HASHER_HASH.match(frag) if m: algo, digest = m.groups() @@ -270,10 +266,8 @@ def same_project(name1, name2): 'name': wheel.name, 'version': wheel.version, 'filename': wheel.filename, - 'url': urlunparse((scheme, netloc, origpath, - params, query, '')), - 'python-version': ', '.join( - ['.'.join(list(v[2:])) for v in wheel.pyver]), + 'url': urlunparse((scheme, netloc, origpath, params, query, '')), + 'python-version': ', '.join(['.'.join(list(v[2:])) for v in wheel.pyver]), } except Exception: # pragma: no cover logger.warning('invalid path for wheel: %s', path) @@ -294,8 +288,7 @@ def same_project(name1, name2): 'name': name, 'version': version, 'filename': filename, - 'url': urlunparse((scheme, netloc, origpath, - params, query, '')), + 'url': urlunparse((scheme, netloc, origpath, params, query, '')), } if pyver: # pragma: no cover result['python-version'] = pyver @@ -371,7 +364,7 @@ def locate(self, requirement, prereleases=False): self.matcher = matcher = scheme.matcher(r.requirement) logger.debug('matcher: %s (%s)', matcher, type(matcher).__name__) versions = self.get_project(r.name) - if len(versions) > 2: # urls and digests keys are present + if len(versions) > 2: # urls and digests keys are present # sometimes, versions are invalid slist = [] vcls = matcher.version_class @@ -412,6 +405,7 @@ class PyPIRPCLocator(Locator): This locator uses XML-RPC to locate distributions. It therefore cannot be used with simple mirrors (that only mirror file content). """ + def __init__(self, url, **kwargs): """ Initialise an instance. @@ -461,6 +455,7 @@ class PyPIJSONLocator(Locator): This locator uses PyPI's JSON interface. It's very limited in functionality and probably not worth using. """ + def __init__(self, url, **kwargs): super(PyPIJSONLocator, self).__init__(**kwargs) self.base_url = ensure_slash(url) @@ -498,7 +493,7 @@ def _get_project(self, name): # Now get other releases for version, infos in d['releases'].items(): if version == md.version: - continue # already done + continue # already done omd = Metadata(scheme=self.scheme) omd.name = md.name omd.version = version @@ -511,6 +506,8 @@ def _get_project(self, name): odist.digests[url] = self._get_digest(info) result['urls'].setdefault(version, set()).add(url) result['digests'][url] = self._get_digest(info) + + # for info in urls: # md.source_url = info['url'] # dist.digest = self._get_digest(info) @@ -534,7 +531,8 @@ class Page(object): # or immediately followed by a "rel" attribute. The attribute values can be # declared with double quotes, single quotes or no quotes - which leads to # the length of the expression. - _href = re.compile(""" + _href = re.compile( + """ (rel\\s*=\\s*(?:"(?P[^"]*)"|'(?P[^']*)'|(?P[^>\\s\n]*))\\s+)? href\\s*=\\s*(?:"(?P[^"]*)"|'(?P[^']*)'|(?P[^>\\s\n]*)) (\\s+rel\\s*=\\s*(?:"(?P[^"]*)"|'(?P[^']*)'|(?P[^>\\s\n]*)))? @@ -561,17 +559,16 @@ def links(self): about their "rel" attribute, for determining which ones to treat as downloads and which ones to queue for further scraping. """ + def clean(url): "Tidy up an URL." 
scheme, netloc, path, params, query, frag = urlparse(url) - return urlunparse((scheme, netloc, quote(path), - params, query, frag)) + return urlunparse((scheme, netloc, quote(path), params, query, frag)) result = set() for match in self._href.finditer(self.data): d = match.groupdict('') - rel = (d['rel1'] or d['rel2'] or d['rel3'] or - d['rel4'] or d['rel5'] or d['rel6']) + rel = (d['rel1'] or d['rel2'] or d['rel3'] or d['rel4'] or d['rel5'] or d['rel6']) url = d['url1'] or d['url2'] or d['url3'] url = urljoin(self.base_url, url) url = unescape(url) @@ -645,7 +642,7 @@ def _wait_threads(self): # Note that you need two loops, since you can't say which # thread will get each sentinel for t in self._threads: - self._to_fetch.put(None) # sentinel + self._to_fetch.put(None) # sentinel for t in self._threads: t.join() self._threads = [] @@ -693,7 +690,7 @@ def _process_download(self, url): info = self.convert_url_to_download_info(url, self.project_name) logger.debug('process_download: %s -> %s', url, info) if info: - with self._lock: # needed because self.result is shared + with self._lock: # needed because self.result is shared self._update_version_data(self.result, info) return info @@ -703,8 +700,7 @@ def _should_queue(self, link, referrer, rel): particular "rel" attribute should be queued for scraping. """ scheme, netloc, path, _, _, _ = urlparse(link) - if path.endswith(self.source_extensions + self.binary_extensions + - self.excluded_extensions): + if path.endswith(self.source_extensions + self.binary_extensions + self.excluded_extensions): result = False elif self.skip_externals and not link.startswith(self.base_url): result = False @@ -722,8 +718,7 @@ def _should_queue(self, link, referrer, rel): result = False else: result = True - logger.debug('should_queue: %s (%s) from %s -> %s', link, rel, - referrer, result) + logger.debug('should_queue: %s (%s) from %s -> %s', link, rel, referrer, result) return result def _fetch(self): @@ -738,14 +733,13 @@ def _fetch(self): try: if url: page = self.get_page(url) - if page is None: # e.g. after an error + if page is None: # e.g. after an error continue for link, rel in page.links: if link not in self._seen: try: self._seen.add(link) - if (not self._process_download(link) and - self._should_queue(link, url, rel)): + if (not self._process_download(link) and self._should_queue(link, url, rel)): logger.debug('Queueing %s from %s', link, url) self._to_fetch.put(link) except MetadataInvalidError: # e.g. 
invalid versions @@ -793,7 +787,7 @@ def get_page(self, url): data = resp.read() encoding = headers.get('Content-Encoding') if encoding: - decoder = self.decoders[encoding] # fail if not found + decoder = self.decoders[encoding] # fail if not found data = decoder(data) encoding = 'utf-8' m = CHARSET.search(content_type) @@ -802,7 +796,7 @@ def get_page(self, url): try: data = data.decode(encoding) except UnicodeError: # pragma: no cover - data = data.decode('latin-1') # fallback + data = data.decode('latin-1') # fallback result = Page(data, final_url) self._page_cache[final_url] = result except HTTPError as e: @@ -815,7 +809,7 @@ def get_page(self, url): except Exception as e: # pragma: no cover logger.exception('Fetch failed: %s: %s', url, e) finally: - self._page_cache[url] = result # even if None (failure) + self._page_cache[url] = result # even if None (failure) return result _distname_re = re.compile(']*>([^<]+)<') @@ -869,9 +863,7 @@ def _get_project(self, name): for fn in files: if self.should_include(fn, root): fn = os.path.join(root, fn) - url = urlunparse(('file', '', - pathname2url(os.path.abspath(fn)), - '', '', '')) + url = urlunparse(('file', '', pathname2url(os.path.abspath(fn)), '', '', '')) info = self.convert_url_to_download_info(url, name) if info: self._update_version_data(result, info) @@ -888,9 +880,7 @@ def get_distribution_names(self): for fn in files: if self.should_include(fn, root): fn = os.path.join(root, fn) - url = urlunparse(('file', '', - pathname2url(os.path.abspath(fn)), - '', '', '')) + url = urlunparse(('file', '', pathname2url(os.path.abspath(fn)), '', '', '')) info = self.convert_url_to_download_info(url, None) if info: result.add(info['name']) @@ -906,6 +896,7 @@ class JSONLocator(Locator): require archive downloads before dependencies can be determined! As you might imagine, that can be slow. """ + def get_distribution_names(self): """ Return all the distribution names known to this locator. @@ -922,9 +913,9 @@ def _get_project(self, name): # We don't store summary in project metadata as it makes # the data bigger for no benefit during dependency # resolution - dist = make_dist(data['name'], info['version'], - summary=data.get('summary', - 'Placeholder for summary'), + dist = make_dist(data['name'], + info['version'], + summary=data.get('summary', 'Placeholder for summary'), scheme=self.scheme) md = dist.metadata md.source_url = info['url'] @@ -943,6 +934,7 @@ class DistPathLocator(Locator): This locator finds installed distributions in a path. It can be useful for adding to an :class:`AggregatingLocator`. """ + def __init__(self, distpath, **kwargs): """ Initialise an instance. @@ -960,8 +952,12 @@ def _get_project(self, name): else: result = { dist.version: dist, - 'urls': {dist.version: set([dist.source_url])}, - 'digests': {dist.version: set([None])} + 'urls': { + dist.version: set([dist.source_url]) + }, + 'digests': { + dist.version: set([None]) + } } return result @@ -970,6 +966,7 @@ class AggregatingLocator(Locator): """ This class allows you to chain and/or merge a list of locators. """ + def __init__(self, *locators, **kwargs): """ Initialise an instance. @@ -1058,10 +1055,9 @@ def get_distribution_names(self): # We use a legacy scheme simply because most of the dists on PyPI use legacy # versions which don't conform to PEP 440. 
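The comment above is the crux of this configuration: many distributions on PyPI carry versions that PEP 440 rejects, so the default locator matches them with distlib's 'legacy' (setuptools-style) scheme rather than the normalized one. A minimal sketch of the difference, assuming distlib's public get_scheme API (the same one this module imports); the version string is only illustrative:

from distlib.version import get_scheme

legacy = get_scheme('legacy')       # setuptools-style: accepts almost any string
normalized = get_scheme('default')  # 'default' aliases the PEP 440 'normalized' scheme

v = '1.0-dev-r1234'                      # pre-PEP-440 style still found on PyPI
print(legacy.is_valid_version(v))        # True
print(normalized.is_valid_version(v))    # expected False: not valid under PEP 440
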
default_locator = AggregatingLocator( - # JSONLocator(), # don't use as PEP 426 is withdrawn - SimpleScrapingLocator('https://pypi.org/simple/', - timeout=3.0), - scheme='legacy') + # JSONLocator(), # don't use as PEP 426 is withdrawn + SimpleScrapingLocator('https://pypi.org/simple/', timeout=3.0), + scheme='legacy') locate = default_locator.locate @@ -1137,7 +1133,7 @@ def find_providers(self, reqt): :return: A set of distribution which can fulfill the requirement. """ matcher = self.get_matcher(reqt) - name = matcher.key # case-insensitive + name = matcher.key # case-insensitive result = set() provided = self.provided if name in provided: @@ -1179,8 +1175,7 @@ def try_to_replace(self, provider, other, problems): unmatched.add(s) if unmatched: # can't replace other with provider - problems.add(('cantreplace', provider, other, - frozenset(unmatched))) + problems.add(('cantreplace', provider, other, frozenset(unmatched))) result = False else: # can replace other with provider @@ -1233,8 +1228,7 @@ def find(self, requirement, meta_extras=None, prereleases=False): dist = odist = requirement logger.debug('passed %s as requirement', odist) else: - dist = odist = self.locator.locate(requirement, - prereleases=prereleases) + dist = odist = self.locator.locate(requirement, prereleases=prereleases) if dist is None: raise DistlibException('Unable to locate %r' % requirement) logger.debug('located %s', odist) @@ -1244,7 +1238,7 @@ def find(self, requirement, meta_extras=None, prereleases=False): install_dists = set([odist]) while todo: dist = todo.pop() - name = dist.key # case-insensitive + name = dist.key # case-insensitive if name not in self.dists_by_name: self.add_distribution(dist) else: @@ -1281,8 +1275,7 @@ def find(self, requirement, meta_extras=None, prereleases=False): providers.add(provider) if r in ireqts and dist in install_dists: install_dists.add(provider) - logger.debug('Adding %s to install_dists', - provider.name_and_version) + logger.debug('Adding %s to install_dists', provider.name_and_version) for p in providers: name = p.key if name not in self.dists_by_name: @@ -1297,7 +1290,6 @@ def find(self, requirement, meta_extras=None, prereleases=False): for dist in dists: dist.build_time_dependency = dist not in install_dists if dist.build_time_dependency: - logger.debug('%s is a build-time dependency only.', - dist.name_and_version) + logger.debug('%s is a build-time dependency only.', dist.name_and_version) logger.debug('find done for %s', odist) return dists, problems diff --git a/src/pip/_vendor/distlib/markers.py b/src/pip/_vendor/distlib/markers.py index 1514d460e70..3f5632be47c 100644 --- a/src/pip/_vendor/distlib/markers.py +++ b/src/pip/_vendor/distlib/markers.py @@ -23,8 +23,7 @@ __all__ = ['interpret'] -_VERSION_PATTERN = re.compile( - r'((\d+(\.\d+)*\w*)|\'(\d+(\.\d+)*\w*)\'|\"(\d+(\.\d+)*\w*)\")') +_VERSION_PATTERN = re.compile(r'((\d+(\.\d+)*\w*)|\'(\d+(\.\d+)*\w*)\'|\"(\d+(\.\d+)*\w*)\")') _VERSION_MARKERS = {'python_version', 'python_full_version'} @@ -82,13 +81,12 @@ def evaluate(self, expr, context): elhs = expr['lhs'] erhs = expr['rhs'] if _is_literal(expr['lhs']) and _is_literal(expr['rhs']): - raise SyntaxError('invalid comparison: %s %s %s' % - (elhs, op, erhs)) + raise SyntaxError('invalid comparison: %s %s %s' % (elhs, op, erhs)) lhs = self.evaluate(elhs, context) rhs = self.evaluate(erhs, context) - if ((_is_version_marker(elhs) or _is_version_marker(erhs)) - and op in ('<', '<=', '>', '>=', '===', '==', '!=', '~=')): + if ((_is_version_marker(elhs) or 
_is_version_marker(erhs)) and + op in ('<', '<=', '>', '>=', '===', '==', '!=', '~=')): lhs = LV(lhs) rhs = LV(rhs) elif _is_version_marker(elhs) and op in ('in', 'not in'): @@ -111,8 +109,7 @@ def format_full_version(info): return version if hasattr(sys, 'implementation'): - implementation_version = format_full_version( - sys.implementation.version) + implementation_version = format_full_version(sys.implementation.version) implementation_name = sys.implementation.name else: implementation_version = '0' @@ -156,11 +153,9 @@ def interpret(marker, execution_context=None): try: expr, rest = parse_marker(marker) except Exception as e: - raise SyntaxError('Unable to interpret marker syntax: %s: %s' % - (marker, e)) + raise SyntaxError('Unable to interpret marker syntax: %s: %s' % (marker, e)) if rest and rest[0] != '#': - raise SyntaxError('unexpected trailing data in marker: %s: %s' % - (marker, rest)) + raise SyntaxError('unexpected trailing data in marker: %s: %s' % (marker, rest)) context = dict(DEFAULT_CONTEXT) if execution_context: context.update(execution_context) diff --git a/src/pip/_vendor/distlib/metadata.py b/src/pip/_vendor/distlib/metadata.py index 7189aeef229..ce9a34b3e24 100644 --- a/src/pip/_vendor/distlib/metadata.py +++ b/src/pip/_vendor/distlib/metadata.py @@ -15,7 +15,6 @@ import logging import re - from . import DistlibException, __version__ from .compat import StringIO, string_types, text_type from .markers import interpret @@ -40,6 +39,7 @@ class MetadataUnrecognizedVersionError(DistlibException): class MetadataInvalidError(DistlibException): """A metadata value is invalid""" + # public API of this module __all__ = ['Metadata', 'PKG_INFO_ENCODING', 'PKG_INFO_PREFERRED_VERSION'] @@ -52,53 +52,38 @@ class MetadataInvalidError(DistlibException): _LINE_PREFIX_1_2 = re.compile('\n \\|') _LINE_PREFIX_PRE_1_2 = re.compile('\n ') -_241_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', - 'Summary', 'Description', - 'Keywords', 'Home-page', 'Author', 'Author-email', - 'License') - -_314_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', - 'Supported-Platform', 'Summary', 'Description', - 'Keywords', 'Home-page', 'Author', 'Author-email', - 'License', 'Classifier', 'Download-URL', 'Obsoletes', +_241_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', 'Summary', 'Description', 'Keywords', 'Home-page', + 'Author', 'Author-email', 'License') + +_314_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', 'Supported-Platform', 'Summary', 'Description', + 'Keywords', 'Home-page', 'Author', 'Author-email', 'License', 'Classifier', 'Download-URL', 'Obsoletes', 'Provides', 'Requires') -_314_MARKERS = ('Obsoletes', 'Provides', 'Requires', 'Classifier', - 'Download-URL') +_314_MARKERS = ('Obsoletes', 'Provides', 'Requires', 'Classifier', 'Download-URL') -_345_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', - 'Supported-Platform', 'Summary', 'Description', - 'Keywords', 'Home-page', 'Author', 'Author-email', - 'Maintainer', 'Maintainer-email', 'License', - 'Classifier', 'Download-URL', 'Obsoletes-Dist', - 'Project-URL', 'Provides-Dist', 'Requires-Dist', +_345_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', 'Supported-Platform', 'Summary', 'Description', + 'Keywords', 'Home-page', 'Author', 'Author-email', 'Maintainer', 'Maintainer-email', 'License', + 'Classifier', 'Download-URL', 'Obsoletes-Dist', 'Project-URL', 'Provides-Dist', 'Requires-Dist', 'Requires-Python', 'Requires-External') -_345_MARKERS = ('Provides-Dist', 
'Requires-Dist', 'Requires-Python', - 'Obsoletes-Dist', 'Requires-External', 'Maintainer', - 'Maintainer-email', 'Project-URL') +_345_MARKERS = ('Provides-Dist', 'Requires-Dist', 'Requires-Python', 'Obsoletes-Dist', 'Requires-External', + 'Maintainer', 'Maintainer-email', 'Project-URL') -_426_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', - 'Supported-Platform', 'Summary', 'Description', - 'Keywords', 'Home-page', 'Author', 'Author-email', - 'Maintainer', 'Maintainer-email', 'License', - 'Classifier', 'Download-URL', 'Obsoletes-Dist', - 'Project-URL', 'Provides-Dist', 'Requires-Dist', - 'Requires-Python', 'Requires-External', 'Private-Version', - 'Obsoleted-By', 'Setup-Requires-Dist', 'Extension', - 'Provides-Extra') +_426_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', 'Supported-Platform', 'Summary', 'Description', + 'Keywords', 'Home-page', 'Author', 'Author-email', 'Maintainer', 'Maintainer-email', 'License', + 'Classifier', 'Download-URL', 'Obsoletes-Dist', 'Project-URL', 'Provides-Dist', 'Requires-Dist', + 'Requires-Python', 'Requires-External', 'Private-Version', 'Obsoleted-By', 'Setup-Requires-Dist', + 'Extension', 'Provides-Extra') -_426_MARKERS = ('Private-Version', 'Provides-Extra', 'Obsoleted-By', - 'Setup-Requires-Dist', 'Extension') +_426_MARKERS = ('Private-Version', 'Provides-Extra', 'Obsoleted-By', 'Setup-Requires-Dist', 'Extension') # See issue #106: Sometimes 'Requires' and 'Provides' occur wrongly in # the metadata. Include them in the tuple literal below to allow them # (for now). # Ditto for Obsoletes - see issue #140. -_566_FIELDS = _426_FIELDS + ('Description-Content-Type', - 'Requires', 'Provides', 'Obsoletes') +_566_FIELDS = _426_FIELDS + ('Description-Content-Type', 'Requires', 'Provides', 'Obsoletes') -_566_MARKERS = ('Description-Content-Type',) +_566_MARKERS = ('Description-Content-Type', ) _643_MARKERS = ('Dynamic', 'License-File') @@ -135,6 +120,7 @@ def _version2fieldlist(version): def _best_version(fields): """Detect the best version depending on the fields used.""" + def _has_marker(keys, markers): return any(marker in keys for marker in markers) @@ -163,12 +149,12 @@ def _has_marker(keys, markers): possible_versions.remove('2.2') logger.debug('Removed 2.2 due to %s', key) # if key not in _426_FIELDS and '2.0' in possible_versions: - # possible_versions.remove('2.0') - # logger.debug('Removed 2.0 due to %s', key) + # possible_versions.remove('2.0') + # logger.debug('Removed 2.0 due to %s', key) # possible_version contains qualified versions if len(possible_versions) == 1: - return possible_versions[0] # found ! + return possible_versions[0] # found ! 
elif len(possible_versions) == 0: logger.debug('Out of options - unknown metadata set: %s', fields) raise MetadataConflictError('Unknown metadata set') @@ -199,28 +185,25 @@ def _has_marker(keys, markers): if is_2_1: return '2.1' # if is_2_2: - # return '2.2' + # return '2.2' return '2.2' + # This follows the rules about transforming keys as described in # https://www.python.org/dev/peps/pep-0566/#id17 -_ATTR2FIELD = { - name.lower().replace("-", "_"): name for name in _ALL_FIELDS -} +_ATTR2FIELD = {name.lower().replace("-", "_"): name for name in _ALL_FIELDS} _FIELD2ATTR = {field: attr for attr, field in _ATTR2FIELD.items()} _PREDICATE_FIELDS = ('Requires-Dist', 'Obsoletes-Dist', 'Provides-Dist') -_VERSIONS_FIELDS = ('Requires-Python',) -_VERSION_FIELDS = ('Version',) -_LISTFIELDS = ('Platform', 'Classifier', 'Obsoletes', - 'Requires', 'Provides', 'Obsoletes-Dist', - 'Provides-Dist', 'Requires-Dist', 'Requires-External', - 'Project-URL', 'Supported-Platform', 'Setup-Requires-Dist', +_VERSIONS_FIELDS = ('Requires-Python', ) +_VERSION_FIELDS = ('Version', ) +_LISTFIELDS = ('Platform', 'Classifier', 'Obsoletes', 'Requires', 'Provides', 'Obsoletes-Dist', 'Provides-Dist', + 'Requires-Dist', 'Requires-External', 'Project-URL', 'Supported-Platform', 'Setup-Requires-Dist', 'Provides-Extra', 'Extension', 'License-File') -_LISTTUPLEFIELDS = ('Project-URL',) +_LISTTUPLEFIELDS = ('Project-URL', ) -_ELEMENTSFIELD = ('Keywords',) +_ELEMENTSFIELD = ('Keywords', ) _UNICODEFIELDS = ('Author', 'Maintainer', 'Summary', 'Description') @@ -252,10 +235,10 @@ class LegacyMetadata(object): - *mapping* is a dict-like object - *scheme* is a version scheme name """ + # TODO document the mapping API and UNKNOWN default key - def __init__(self, path=None, fileobj=None, mapping=None, - scheme='default'): + def __init__(self, path=None, fileobj=None, mapping=None, scheme='default'): if [path, fileobj, mapping].count(None) < 2: raise TypeError('path, fileobj and mapping are exclusive') self._fields = {} @@ -290,8 +273,7 @@ def __delitem__(self, name): raise KeyError(name) def __contains__(self, name): - return (name in self._fields or - self._convert_name(name) in self._fields) + return (name in self._fields or self._convert_name(name) in self._fields) def _convert_name(self, name): if name in _ALL_FIELDS: @@ -319,12 +301,12 @@ def __getattr__(self, name): # Public API # -# dependencies = property(_get_dependencies, _set_dependencies) - def get_fullname(self, filesafe=False): - """Return the distribution name with version. + """ + Return the distribution name with version. - If filesafe is true, return a filename-escaped form.""" + If filesafe is true, return a filename-escaped form. + """ return _get_name_and_version(self['Name'], self['Version'], filesafe) def is_field(self, name): @@ -415,6 +397,7 @@ def update(self, other=None, **kwargs): Keys that don't match a metadata field or that have an empty value are dropped. 
""" + def _set(key, value): if key in _ATTR2FIELD and value: self.set(self._convert_name(key), value) @@ -437,14 +420,12 @@ def set(self, name, value): """Control then set a metadata field.""" name = self._convert_name(name) - if ((name in _ELEMENTSFIELD or name == 'Platform') and - not isinstance(value, (list, tuple))): + if ((name in _ELEMENTSFIELD or name == 'Platform') and not isinstance(value, (list, tuple))): if isinstance(value, string_types): value = [v.strip() for v in value.split(',')] else: value = [] - elif (name in _LISTFIELDS and - not isinstance(value, (list, tuple))): + elif (name in _LISTFIELDS and not isinstance(value, (list, tuple))): if isinstance(value, string_types): value = [value] else: @@ -458,18 +439,14 @@ def set(self, name, value): for v in value: # check that the values are valid if not scheme.is_valid_matcher(v.split(';')[0]): - logger.warning( - "'%s': '%s' is not valid (field '%s')", - project_name, v, name) + logger.warning("'%s': '%s' is not valid (field '%s')", project_name, v, name) # FIXME this rejects UNKNOWN, is that right? elif name in _VERSIONS_FIELDS and value is not None: if not scheme.is_valid_constraint_list(value): - logger.warning("'%s': '%s' is not a valid version (field '%s')", - project_name, value, name) + logger.warning("'%s': '%s' is not a valid version (field '%s')", project_name, value, name) elif name in _VERSION_FIELDS and value is not None: if not scheme.is_valid_version(value): - logger.warning("'%s': '%s' is not a valid version (field '%s')", - project_name, value, name) + logger.warning("'%s': '%s' is not a valid version (field '%s')", project_name, value, name) if name in _UNICODEFIELDS: if name == 'Description': @@ -539,10 +516,8 @@ def are_valid_constraints(value): return True for fields, controller in ((_PREDICATE_FIELDS, are_valid_constraints), - (_VERSIONS_FIELDS, - scheme.is_valid_constraint_list), - (_VERSION_FIELDS, - scheme.is_valid_version)): + (_VERSIONS_FIELDS, scheme.is_valid_constraint_list), (_VERSION_FIELDS, + scheme.is_valid_version)): for field in fields: value = self.get(field, None) if value is not None and not controller(value): @@ -598,8 +573,7 @@ def items(self): return [(key, self[key]) for key in self.keys()] def __repr__(self): - return '<%s %s %s>' % (self.__class__.__name__, self.name, - self.version) + return '<%s %s %s>' % (self.__class__.__name__, self.name, self.version) METADATA_FILENAME = 'pydist.json' @@ -631,7 +605,7 @@ class Metadata(object): MANDATORY_KEYS = { 'name': (), 'version': (), - 'summary': ('legacy',), + 'summary': ('legacy', ), } INDEX_KEYS = ('name version license summary description author ' @@ -644,22 +618,21 @@ class Metadata(object): SYNTAX_VALIDATORS = { 'metadata_version': (METADATA_VERSION_MATCHER, ()), - 'name': (NAME_MATCHER, ('legacy',)), - 'version': (VERSION_MATCHER, ('legacy',)), - 'summary': (SUMMARY_MATCHER, ('legacy',)), - 'dynamic': (FIELDNAME_MATCHER, ('legacy',)), + 'name': (NAME_MATCHER, ('legacy', )), + 'version': (VERSION_MATCHER, ('legacy', )), + 'summary': (SUMMARY_MATCHER, ('legacy', )), + 'dynamic': (FIELDNAME_MATCHER, ('legacy', )), } __slots__ = ('_legacy', '_data', 'scheme') - def __init__(self, path=None, fileobj=None, mapping=None, - scheme='default'): + def __init__(self, path=None, fileobj=None, mapping=None, scheme='default'): if [path, fileobj, mapping].count(None) < 2: raise TypeError('path, fileobj and mapping are exclusive') self._legacy = None self._data = None self.scheme = scheme - #import pdb; pdb.set_trace() + # import pdb; 
pdb.set_trace() if mapping is not None: try: self._validate_mapping(mapping, scheme) @@ -693,8 +666,7 @@ def __init__(self, path=None, fileobj=None, mapping=None, # The ValueError comes from the json.load - if that # succeeds and we get a validation error, we want # that to propagate - self._legacy = LegacyMetadata(fileobj=StringIO(data), - scheme=scheme) + self._legacy = LegacyMetadata(fileobj=StringIO(data), scheme=scheme) self.validate() common_keys = set(('name', 'version', 'license', 'keywords', 'summary')) @@ -732,8 +704,7 @@ def __getattribute__(self, key): result = self._legacy.get(lk) else: value = None if maker is None else maker() - if key not in ('commands', 'exports', 'modules', 'namespaces', - 'classifiers'): + if key not in ('commands', 'exports', 'modules', 'namespaces', 'classifiers'): result = self._data.get(key, value) else: # special cases for PEP 459 @@ -770,8 +741,7 @@ def _validate_value(self, key, value, scheme=None): m = pattern.match(value) if not m: raise MetadataInvalidError("'%s' is an invalid value for " - "the '%s' property" % (value, - key)) + "the '%s' property" % (value, key)) def __setattr__(self, key, value): self._validate_value(key, value) @@ -783,8 +753,7 @@ def __setattr__(self, key, value): if lk is None: raise NotImplementedError self._legacy[lk] = value - elif key not in ('commands', 'exports', 'modules', 'namespaces', - 'classifiers'): + elif key not in ('commands', 'exports', 'modules', 'namespaces', 'classifiers'): self._data[key] = value else: # special cases for PEP 459 @@ -872,8 +841,7 @@ def get_requirements(self, reqts, extras=None, env=None): # A recursive call, but it should terminate since 'test' # has been removed from the extras reqts = self._data.get('%s_requires' % key, []) - result.extend(self.get_requirements(reqts, extras=extras, - env=env)) + result.extend(self.get_requirements(reqts, extras=extras, env=env)) return result @property @@ -914,8 +882,7 @@ def validate(self): if self._legacy: missing, warnings = self._legacy.check(True) if missing or warnings: - logger.warning('Metadata: missing: %s, warnings: %s', - missing, warnings) + logger.warning('Metadata: missing: %s, warnings: %s', missing, warnings) else: self._validate_mapping(self._data, self.scheme) @@ -932,9 +899,8 @@ def _from_legacy(self): 'metadata_version': self.METADATA_VERSION, 'generator': self.GENERATOR, } - lmd = self._legacy.todict(True) # skip missing ones - for k in ('name', 'version', 'license', 'summary', 'description', - 'classifier'): + lmd = self._legacy.todict(True) # skip missing ones + for k in ('name', 'version', 'license', 'summary', 'description', 'classifier'): if k in lmd: if k == 'classifier': nk = 'classifiers' @@ -945,14 +911,13 @@ def _from_legacy(self): if kw == ['']: kw = [] result['keywords'] = kw - keys = (('requires_dist', 'run_requires'), - ('setup_requires_dist', 'build_requires')) + keys = (('requires_dist', 'run_requires'), ('setup_requires_dist', 'build_requires')) for ok, nk in keys: if ok in lmd and lmd[ok]: result[nk] = [{'requires': lmd[ok]}] result['provides'] = self.provides - author = {} - maintainer = {} + # author = {} + # maintainer = {} return result LEGACY_MAPPING = { @@ -969,6 +934,7 @@ def _from_legacy(self): } def _to_legacy(self): + def process_entries(entries): reqts = set() for e in entries: @@ -1037,12 +1003,10 @@ def write(self, path=None, fileobj=None, legacy=False, skip_unknown=True): else: d = self._data if fileobj: - json.dump(d, fileobj, ensure_ascii=True, indent=2, - sort_keys=True) + json.dump(d, 
fileobj, ensure_ascii=True, indent=2, sort_keys=True) else: with codecs.open(path, 'w', 'utf-8') as f: - json.dump(d, f, ensure_ascii=True, indent=2, - sort_keys=True) + json.dump(d, f, ensure_ascii=True, indent=2, sort_keys=True) def add_requirements(self, requirements): if self._legacy: @@ -1055,7 +1019,7 @@ def add_requirements(self, requirements): always = entry break if always is None: - always = { 'requires': requirements } + always = {'requires': requirements} run_requires.insert(0, always) else: rset = set(always['requires']) | set(requirements) @@ -1064,5 +1028,4 @@ def add_requirements(self, requirements): def __repr__(self): name = self.name or '(no name)' version = self.version or 'no version' - return '<%s %s %s (%s)>' % (self.__class__.__name__, - self.metadata_version, name, version) + return '<%s %s %s (%s)>' % (self.__class__.__name__, self.metadata_version, name, version) diff --git a/src/pip/_vendor/distlib/scripts.py b/src/pip/_vendor/distlib/scripts.py index 0982e6b726e..b1fc705b7e6 100644 --- a/src/pip/_vendor/distlib/scripts.py +++ b/src/pip/_vendor/distlib/scripts.py @@ -15,8 +15,7 @@ from .compat import sysconfig, detect_encoding, ZipFile from .resources import finder -from .util import (FileOperator, get_export_entry, convert_path, - get_executable, get_platform, in_venv) +from .util import (FileOperator, get_export_entry, convert_path, get_executable, get_platform, in_venv) logger = logging.getLogger(__name__) @@ -57,15 +56,16 @@ # location where it was imported from. So we load everything into memory in # advance. -# Issue 31: don't hardcode an absolute package name, but -# determine it relative to the current package -distlib_package = __name__.rsplit('.', 1)[0] +if os.name == 'nt' or (os.name == 'java' and os._name == 'nt'): + # Issue 31: don't hardcode an absolute package name, but + # determine it relative to the current package + DISTLIB_PACKAGE = __name__.rsplit('.', 1)[0] -WRAPPERS = { - r.name: r.bytes - for r in finder(distlib_package).iterator("") - if r.name.endswith(".exe") -} + WRAPPERS = { + r.name: r.bytes + for r in finder(DISTLIB_PACKAGE).iterator("") + if r.name.endswith(".exe") + } def enquote_executable(executable): @@ -97,25 +97,18 @@ class ScriptMaker(object): executable = None # for shebangs - def __init__(self, - source_dir, - target_dir, - add_launchers=True, - dry_run=False, - fileop=None): + def __init__(self, source_dir, target_dir, add_launchers=True, dry_run=False, fileop=None): self.source_dir = source_dir self.target_dir = target_dir self.add_launchers = add_launchers self.force = False self.clobber = False # It only makes sense to set mode bits on POSIX. - self.set_mode = (os.name == 'posix') or (os.name == 'java' - and os._name == 'posix') + self.set_mode = (os.name == 'posix') or (os.name == 'java' and os._name == 'posix') self.variants = set(('', 'X.Y')) self._fileop = fileop or FileOperator(dry_run) - self._is_nt = os.name == 'nt' or (os.name == 'java' - and os._name == 'nt') + self._is_nt = os.name == 'nt' or (os.name == 'java' and os._name == 'nt') self.version_info = sys.version_info def _get_alternate_executable(self, executable, options): @@ -177,15 +170,14 @@ def _build_shebang(self, executable, post_interp): max_shebang_length = 512 else: max_shebang_length = 127 - simple_shebang = ((b' ' not in executable) - and (shebang_length <= max_shebang_length)) + simple_shebang = ((b' ' not in executable) and (shebang_length <= max_shebang_length)) if simple_shebang: result = b'#!' 
+ executable + post_interp + b'\n' else: result = b'#!/bin/sh\n' result += b"'''exec' " + executable + post_interp + b' "$0" "$@"\n' - result += b"' '''" + result += b"' '''\n" return result def _get_shebang(self, encoding, post_interp=b'', options=None): @@ -196,21 +188,17 @@ def _get_shebang(self, encoding, post_interp=b'', options=None): elif not sysconfig.is_python_build(): executable = get_executable() elif in_venv(): # pragma: no cover - executable = os.path.join( - sysconfig.get_path('scripts'), - 'python%s' % sysconfig.get_config_var('EXE')) + executable = os.path.join(sysconfig.get_path('scripts'), 'python%s' % sysconfig.get_config_var('EXE')) else: # pragma: no cover if os.name == 'nt': # for Python builds from source on Windows, no Python executables with # a version suffix are created, so we use python.exe - executable = os.path.join( - sysconfig.get_config_var('BINDIR'), - 'python%s' % (sysconfig.get_config_var('EXE'))) + executable = os.path.join(sysconfig.get_config_var('BINDIR'), + 'python%s' % (sysconfig.get_config_var('EXE'))) else: executable = os.path.join( sysconfig.get_config_var('BINDIR'), - 'python%s%s' % (sysconfig.get_config_var('VERSION'), - sysconfig.get_config_var('EXE'))) + 'python%s%s' % (sysconfig.get_config_var('VERSION'), sysconfig.get_config_var('EXE'))) if options: executable = self._get_alternate_executable(executable, options) @@ -234,8 +222,8 @@ def _get_shebang(self, encoding, post_interp=b'', options=None): # check that the shebang is decodable using utf-8. executable = executable.encode('utf-8') # in case of IronPython, play safe and enable frames support - if (sys.platform == 'cli' and '-X:Frames' not in post_interp - and '-X:FullFrames' not in post_interp): # pragma: no cover + if (sys.platform == 'cli' and '-X:Frames' not in post_interp and + '-X:FullFrames' not in post_interp): # pragma: no cover post_interp += b' -X:Frames' shebang = self._build_shebang(executable, post_interp) # Python parser starts to read a script using UTF-8 until @@ -246,8 +234,7 @@ def _get_shebang(self, encoding, post_interp=b'', options=None): try: shebang.decode('utf-8') except UnicodeDecodeError: # pragma: no cover - raise ValueError('The shebang (%r) is not decodable from utf-8' % - shebang) + raise ValueError('The shebang (%r) is not decodable from utf-8' % shebang) # If the script is encoded to a custom encoding (use a # #coding:xxx cookie), the shebang has to be decodable from # the script encoding too. 
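One behavioural change in scripts.py is easy to miss among the reformatting: _build_shebang now terminates the /bin/sh fallback with a newline itself (the b"' '''\n" line above), and the _write_script hunk below correspondingly drops the code that appended os.linesep to the shebang. A self-contained sketch of the same shebang logic, under the assumption that it mirrors the method above (build_shebang is a hypothetical free function, not distlib's API):

import sys

def build_shebang(executable: bytes, post_interp: bytes = b'') -> bytes:
    # POSIX kernels truncate over-long #! lines (traditionally 127 bytes,
    # 512 on macOS) and handle spaces in the interpreter path poorly, so
    # awkward paths fall back to a /bin/sh trampoline.
    shebang_length = len(executable) + len(post_interp) + 3  # b'#!' and b'\n'
    max_shebang_length = 512 if sys.platform == 'darwin' else 127
    if b' ' not in executable and shebang_length <= max_shebang_length:
        return b'#!' + executable + post_interp + b'\n'
    # /bin/sh execs the real interpreter from line 2; when that interpreter
    # then reads the script, the same lines parse as a harmless string.
    result = b'#!/bin/sh\n'
    result += b"'''exec' " + executable + post_interp + b' "$0" "$@"\n'
    result += b"' '''\n"  # 0.3.9 adds the trailing newline here
    return result

# build_shebang(b'/usr/bin/python3') == b'#!/usr/bin/python3\n'
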
@@ -256,15 +243,12 @@ def _get_shebang(self, encoding, post_interp=b'', options=None): shebang.decode(encoding) except UnicodeDecodeError: # pragma: no cover raise ValueError('The shebang (%r) is not decodable ' - 'from the script encoding (%r)' % - (shebang, encoding)) + 'from the script encoding (%r)' % (shebang, encoding)) return shebang def _get_script_text(self, entry): return self.script_template % dict( - module=entry.prefix, - import_name=entry.suffix.split('.')[0], - func=entry.suffix) + module=entry.prefix, import_name=entry.suffix.split('.')[0], func=entry.suffix) manifest = _DEFAULT_MANIFEST @@ -274,9 +258,6 @@ def get_manifest(self, exename): def _write_script(self, names, shebang, script_bytes, filenames, ext): use_launcher = self.add_launchers and self._is_nt - linesep = os.linesep.encode('utf-8') - if not shebang.endswith(linesep): - shebang += linesep if not use_launcher: script_bytes = shebang + script_bytes else: # pragma: no cover @@ -289,8 +270,7 @@ def _write_script(self, names, shebang, script_bytes, filenames, ext): source_date_epoch = os.environ.get('SOURCE_DATE_EPOCH') if source_date_epoch: date_time = time.gmtime(int(source_date_epoch))[:6] - zinfo = ZipInfo(filename='__main__.py', - date_time=date_time) + zinfo = ZipInfo(filename='__main__.py', date_time=date_time) zf.writestr(zinfo, script_bytes) else: zf.writestr('__main__.py', script_bytes) @@ -321,8 +301,7 @@ def _write_script(self, names, shebang, script_bytes, filenames, ext): except Exception: pass # still in use - ignore error else: - if self._is_nt and not outname.endswith( - '.' + ext): # pragma: no cover + if self._is_nt and not outname.endswith('.' + ext): # pragma: no cover outname = '%s.%s' % (outname, ext) if os.path.exists(outname) and not self.clobber: logger.warning('Skipping existing file %s', outname) @@ -341,9 +320,7 @@ def get_script_filenames(self, name): if 'X' in self.variants: result.add('%s%s' % (name, self.version_info[0])) if 'X.Y' in self.variants: - result.add('%s%s%s.%s' % - (name, self.variant_separator, self.version_info[0], - self.version_info[1])) + result.add('%s%s%s.%s' % (name, self.variant_separator, self.version_info[0], self.version_info[1])) return result def _make_script(self, entry, filenames, options=None): @@ -398,8 +375,7 @@ def _copy_script(self, script, filenames): self._fileop.set_executable_mode([outname]) filenames.append(outname) else: - logger.info('copying and adjusting %s -> %s', script, - self.target_dir) + logger.info('copying and adjusting %s -> %s', script, self.target_dir) if not self._fileop.dry_run: encoding, lines = detect_encoding(f.readline) f.seek(0) @@ -421,8 +397,7 @@ def dry_run(self): def dry_run(self, value): self._fileop.dry_run = value - if os.name == 'nt' or (os.name == 'java' - and os._name == 'nt'): # pragma: no cover + if os.name == 'nt' or (os.name == 'java' and os._name == 'nt'): # pragma: no cover # Executable launcher support. # Launchers are from https://bitbucket.org/vinay.sajip/simple_launcher/ @@ -435,7 +410,7 @@ def _get_launcher(self, kind): name = '%s%s%s.exe' % (kind, bits, platform_suffix) if name not in WRAPPERS: msg = ('Unable to find resource %s in package %s' % - (name, distlib_package)) + (name, DISTLIB_PACKAGE)) raise ValueError(msg) return WRAPPERS[name] diff --git a/src/pip/_vendor/distlib/util.py b/src/pip/_vendor/distlib/util.py index ba58858d0fb..0d5bd7a8bf3 100644 --- a/src/pip/_vendor/distlib/util.py +++ b/src/pip/_vendor/distlib/util.py @@ -31,11 +31,9 @@ import time from . 
import DistlibException -from .compat import (string_types, text_type, shutil, raw_input, StringIO, - cache_from_source, urlopen, urljoin, httplib, xmlrpclib, - HTTPHandler, BaseConfigurator, valid_ident, - Container, configparser, URLError, ZipFile, fsdecode, - unquote, urlparse) +from .compat import (string_types, text_type, shutil, raw_input, StringIO, cache_from_source, urlopen, urljoin, httplib, + xmlrpclib, HTTPHandler, BaseConfigurator, valid_ident, Container, configparser, URLError, ZipFile, + fsdecode, unquote, urlparse) logger = logging.getLogger(__name__) @@ -88,8 +86,7 @@ def marker_var(remaining): else: m = STRING_CHUNK.match(remaining) if not m: - raise SyntaxError('error in string literal: %s' % - remaining) + raise SyntaxError('error in string literal: %s' % remaining) parts.append(m.groups()[0]) remaining = remaining[m.end():] else: @@ -210,8 +207,7 @@ def get_versions(ver_remaining): ver_remaining = ver_remaining[m.end():] m = VERSION_IDENTIFIER.match(ver_remaining) if not m: - raise SyntaxError('invalid version: %s' % - ver_remaining) + raise SyntaxError('invalid version: %s' % ver_remaining) v = m.groups()[0] versions.append((op, v)) ver_remaining = ver_remaining[m.end():] @@ -224,8 +220,7 @@ def get_versions(ver_remaining): break m = COMPARE_OP.match(ver_remaining) if not m: - raise SyntaxError('invalid constraint: %s' % - ver_remaining) + raise SyntaxError('invalid constraint: %s' % ver_remaining) if not versions: versions = None return versions, ver_remaining @@ -235,8 +230,7 @@ def get_versions(ver_remaining): else: i = remaining.find(')', 1) if i < 0: - raise SyntaxError('unterminated parenthesis: %s' % - remaining) + raise SyntaxError('unterminated parenthesis: %s' % remaining) s = remaining[1:i] remaining = remaining[i + 1:].lstrip() # As a special diversion from PEP 508, allow a version number @@ -267,14 +261,8 @@ def get_versions(ver_remaining): if not versions: rs = distname else: - rs = '%s %s' % (distname, ', '.join( - ['%s %s' % con for con in versions])) - return Container(name=distname, - extras=extras, - constraints=versions, - marker=mark_expr, - url=uri, - requirement=rs) + rs = '%s %s' % (distname, ', '.join(['%s %s' % con for con in versions])) + return Container(name=distname, extras=extras, constraints=versions, marker=mark_expr, url=uri, requirement=rs) def get_resources_dests(resources_root, rules): @@ -524,8 +512,7 @@ def newer(self, source, target): second will have the same "age". 
""" if not os.path.exists(source): - raise DistlibException("file '%r' does not exist" % - os.path.abspath(source)) + raise DistlibException("file '%r' does not exist" % os.path.abspath(source)) if not os.path.exists(target): return True @@ -601,12 +588,7 @@ def ensure_dir(self, path): if self.record: self.dirs_created.add(path) - def byte_compile(self, - path, - optimize=False, - force=False, - prefix=None, - hashed_invalidation=False): + def byte_compile(self, path, optimize=False, force=False, prefix=None, hashed_invalidation=False): dpath = cache_from_source(path, not optimize) logger.info('Byte-compiling %s to %s', path, dpath) if not self.dry_run: @@ -617,12 +599,11 @@ def byte_compile(self, assert path.startswith(prefix) diagpath = path[len(prefix):] compile_kwargs = {} - if hashed_invalidation and hasattr(py_compile, - 'PycInvalidationMode'): - compile_kwargs[ - 'invalidation_mode'] = py_compile.PycInvalidationMode.CHECKED_HASH - py_compile.compile(path, dpath, diagpath, True, - **compile_kwargs) # raise error + if hashed_invalidation and hasattr(py_compile, 'PycInvalidationMode'): + if not isinstance(hashed_invalidation, py_compile.PycInvalidationMode): + hashed_invalidation = py_compile.PycInvalidationMode.CHECKED_HASH + compile_kwargs['invalidation_mode'] = hashed_invalidation + py_compile.compile(path, dpath, diagpath, True, **compile_kwargs) # raise error self.record_as_written(dpath) return dpath @@ -716,16 +697,14 @@ def value(self): return resolve(self.prefix, self.suffix) def __repr__(self): # pragma: no cover - return '' % (self.name, self.prefix, - self.suffix, self.flags) + return '' % (self.name, self.prefix, self.suffix, self.flags) def __eq__(self, other): if not isinstance(other, ExportEntry): result = False else: - result = (self.name == other.name and self.prefix == other.prefix - and self.suffix == other.suffix - and self.flags == other.flags) + result = (self.name == other.name and self.prefix == other.prefix and self.suffix == other.suffix and + self.flags == other.flags) return result __hash__ = object.__hash__ @@ -810,7 +789,7 @@ def get_cache_base(suffix=None): return os.path.join(result, suffix) -def path_to_cache_dir(path): +def path_to_cache_dir(path, use_abspath=True): """ Convert an absolute path to a directory name for use in a cache. @@ -820,7 +799,7 @@ def path_to_cache_dir(path): #. Any occurrence of ``os.sep`` is replaced with ``'--'``. #. ``'.cache'`` is appended. """ - d, p = os.path.splitdrive(os.path.abspath(path)) + d, p = os.path.splitdrive(os.path.abspath(path) if use_abspath else path) if d: d = d.replace(':', '---') p = p.replace(os.sep, '--') @@ -865,9 +844,8 @@ def is_string_sequence(seq): return result -PROJECT_NAME_AND_VERSION = re.compile( - '([a-z0-9_]+([.-][a-z_][a-z0-9_]*)*)-' - '([a-z0-9_.+-]+)', re.I) +PROJECT_NAME_AND_VERSION = re.compile('([a-z0-9_]+([.-][a-z_][a-z0-9_]*)*)-' + '([a-z0-9_.+-]+)', re.I) PYTHON_VERSION = re.compile(r'-py(\d\.?\d?)') @@ -1003,11 +981,11 @@ def __init__(self, base): logger.warning('Directory \'%s\' is not private', base) self.base = os.path.abspath(os.path.normpath(base)) - def prefix_to_dir(self, prefix): + def prefix_to_dir(self, prefix, use_abspath=True): """ Converts a resource prefix to a directory name in the cache. 
""" - return path_to_cache_dir(prefix) + return path_to_cache_dir(prefix, use_abspath=use_abspath) def clear(self): """ @@ -1092,8 +1070,7 @@ def publish(self, event, *args, **kwargs): logger.exception('Exception during event publication') value = None result.append(value) - logger.debug('publish %s: args = %s, kwargs = %s, result = %s', event, - args, kwargs, result) + logger.debug('publish %s: args = %s, kwargs = %s, result = %s', event, args, kwargs, result) return result @@ -1145,8 +1122,7 @@ def remove(self, pred, succ): raise ValueError('%r not a successor of %r' % (succ, pred)) def is_step(self, step): - return (step in self._preds or step in self._succs - or step in self._nodes) + return (step in self._preds or step in self._succs or step in self._nodes) def get_steps(self, final): if not self.is_step(final): @@ -1242,8 +1218,7 @@ def dot(self): # Unarchiving functionality for zip, tar, tgz, tbz, whl # -ARCHIVE_EXTENSIONS = ('.tar.gz', '.tar.bz2', '.tar', '.zip', '.tgz', '.tbz', - '.whl') +ARCHIVE_EXTENSIONS = ('.tar.gz', '.tar.bz2', '.tar', '.zip', '.tgz', '.tbz', '.whl') def unarchive(archive_filename, dest_dir, format=None, check=True): @@ -1474,8 +1449,7 @@ def _iglob(path_glob): if ssl: - from .compat import (HTTPSHandler as BaseHTTPSHandler, match_hostname, - CertificateError) + from .compat import (HTTPSHandler as BaseHTTPSHandler, match_hostname, CertificateError) # # HTTPSConnection which verifies certificates/matches domains @@ -1487,8 +1461,7 @@ class HTTPSConnection(httplib.HTTPSConnection): # noinspection PyPropertyAccess def connect(self): - sock = socket.create_connection((self.host, self.port), - self.timeout) + sock = socket.create_connection((self.host, self.port), self.timeout) if getattr(self, '_tunnel_host', False): self.sock = sock self._tunnel() @@ -1543,9 +1516,8 @@ def https_open(self, req): return self.do_open(self._conn_maker, req) except URLError as e: if 'certificate verify failed' in str(e.reason): - raise CertificateError( - 'Unable to verify server certificate ' - 'for %s' % req.host) + raise CertificateError('Unable to verify server certificate ' + 'for %s' % req.host) else: raise @@ -1561,9 +1533,8 @@ def https_open(self, req): class HTTPSOnlyHandler(HTTPSHandler, HTTPHandler): def http_open(self, req): - raise URLError( - 'Unexpected HTTP request on what should be a secure ' - 'connection: %s' % req) + raise URLError('Unexpected HTTP request on what should be a secure ' + 'connection: %s' % req) # @@ -1598,8 +1569,7 @@ def make_connection(self, host): kwargs['timeout'] = self.timeout if not self._connection or host != self._connection[0]: self._extra_headers = eh - self._connection = host, httplib.HTTPSConnection( - h, None, **kwargs) + self._connection = host, httplib.HTTPSConnection(h, None, **kwargs) return self._connection[1] @@ -1789,10 +1759,7 @@ def reader(self, stream, context): stream.close() def run_command(self, cmd, **kwargs): - p = subprocess.Popen(cmd, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - **kwargs) + p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs) t1 = threading.Thread(target=self.reader, args=(p.stdout, 'stdout')) t1.start() t2 = threading.Thread(target=self.reader, args=(p.stderr, 'stderr')) @@ -1847,10 +1814,7 @@ def read(self): if 'distutils' in sections: # let's get the list of servers index_servers = config.get('distutils', 'index-servers') - _servers = [ - server.strip() for server in index_servers.split('\n') - if server.strip() != '' - ] + _servers = [server.strip() for 
server in index_servers.split('\n') if server.strip() != ''] if _servers == []: # nothing set, let's try to get the default pypi if 'pypi' in sections: _servers = ['pypi']
@@ -1861,9 +1825,7 @@ def read(self): result['username'] = config.get(server, 'username') # optional params - for key, default in (('repository', - self.DEFAULT_REPOSITORY), - ('realm', self.DEFAULT_REALM), + for key, default in (('repository', self.DEFAULT_REPOSITORY), ('realm', self.DEFAULT_REALM), ('password', None)): if config.has_option(server, key): result[key] = config.get(server, key)
@@ -1873,11 +1835,9 @@ def read(self): # work around people having "repository" for the "pypi" # section of their config set to the HTTP (rather than # HTTPS) URL - if (server == 'pypi' and repository - in (self.DEFAULT_REPOSITORY, 'pypi')): + if (server == 'pypi' and repository in (self.DEFAULT_REPOSITORY, 'pypi')): result['repository'] = self.DEFAULT_REPOSITORY - elif (result['server'] != repository - and result['repository'] != repository): + elif (result['server'] != repository and result['repository'] != repository): result = {} elif 'server-login' in sections: # old format
@@ -2003,8 +1963,7 @@ def get_host_platform(): from distutils import sysconfig except ImportError: import sysconfig - osname, release, machine = _osx_support.get_platform_osx( - sysconfig.get_config_vars(), osname, release, machine) + osname, release, machine = _osx_support.get_platform_osx(sysconfig.get_config_vars(), osname, release, machine) return '%s-%s-%s' % (osname, release, machine)
diff --git a/src/pip/_vendor/distlib/version.py b/src/pip/_vendor/distlib/version.py
index 14171ac938d..d70a96ef51e 100644
--- a/src/pip/_vendor/distlib/version.py
+++ b/src/pip/_vendor/distlib/version.py
@@ -619,8 +619,7 @@ def parse(self, s): def is_prerelease(self): result = False for x in self._parts: - if (isinstance(x, string_types) and x.startswith('*') and - x < '*final'): + if (isinstance(x, string_types) and x.startswith('*') and x < '*final'): result = True break return result
diff --git a/src/pip/_vendor/distlib/wheel.py b/src/pip/_vendor/distlib/wheel.py
index 4a5a30e1d8d..62ab10fb3ad 100644
--- a/src/pip/_vendor/distlib/wheel.py
+++ b/src/pip/_vendor/distlib/wheel.py
@@ -25,9 +25,8 @@ from .compat import sysconfig, ZipFile, fsdecode, text_type, filter from .database import InstalledDistribution from .metadata import Metadata, WHEEL_METADATA_FILENAME, LEGACY_METADATA_FILENAME -from .util import (FileOperator, convert_path, CSVReader, CSVWriter, Cache, - cached_property, get_cache_base, read_exports, tempdir, - get_platform) +from .util import (FileOperator, convert_path, CSVReader, CSVWriter, Cache, cached_property, get_cache_base, + read_exports, tempdir, get_platform) from .version import NormalizedVersion, UnsupportedVersionError logger = logging.getLogger(__name__)
@@ -88,8 +87,7 @@ def _derive_abi(): \.whl$ ''', re.IGNORECASE | re.VERBOSE) -NAME_VERSION_RE = re.compile( - r''' +NAME_VERSION_RE = re.compile(r''' (?P<nm>[^-]+) -(?P<vn>\d+[^-]*) (-(?P<bn>\d+[^-]*))?$
@@ -235,8 +233,7 @@ def filename(self): arch = '.'.join(self.arch) # replace - with _ as a local version separator version = self.version.replace('-', '_') - return '%s-%s%s-%s-%s-%s.whl' % (self.name, version, buildver, pyver, - abi, arch) + return '%s-%s%s-%s-%s-%s.whl' % (self.name, version, buildver, pyver, abi, arch) @property def exists(self):
@@ -334,8 +331,7 @@ def get_hash(self, data, hash_kind=None): try: hasher = getattr(hashlib, hash_kind) except AttributeError: - raise DistlibException('Unsupported hash 
algorithm: %r' % - hash_kind) + raise DistlibException('Unsupported hash algorithm: %r' % hash_kind) result = hasher(data).digest() result = base64.urlsafe_b64encode(result).rstrip(b'=').decode('ascii') return hash_kind, result @@ -513,7 +509,7 @@ def install(self, paths, maker, **kwargs): installed, and the headers, scripts, data and dist-info metadata are not written. If kwarg ``bytecode_hashed_invalidation`` is True, written bytecode will try to use file-hash based invalidation (PEP-552) on - supported interpreter versions (CPython 2.7+). + supported interpreter versions (CPython 3.7+). The return value is a :class:`InstalledDistribution` instance unless ``options.lib_only`` is True, in which case the return value is ``None``. @@ -522,8 +518,7 @@ def install(self, paths, maker, **kwargs): dry_run = maker.dry_run warner = kwargs.get('warner') lib_only = kwargs.get('lib_only', False) - bc_hashed_invalidation = kwargs.get('bytecode_hashed_invalidation', - False) + bc_hashed_invalidation = kwargs.get('bytecode_hashed_invalidation', False) pathname = os.path.join(self.dirname, self.filename) name_ver = '%s-%s' % (self.name, self.version) @@ -602,8 +597,7 @@ def install(self, paths, maker, **kwargs): if lib_only and u_arcname.startswith((info_pfx, data_pfx)): logger.debug('lib_only: skipping %s', u_arcname) continue - is_script = (u_arcname.startswith(script_pfx) - and not u_arcname.endswith('.exe')) + is_script = (u_arcname.startswith(script_pfx) and not u_arcname.endswith('.exe')) if u_arcname.startswith(data_pfx): _, where, rp = u_arcname.split('/', 2) @@ -622,8 +616,7 @@ def install(self, paths, maker, **kwargs): # So ... manually preserve permission bits as given in zinfo if os.name == 'posix': # just set the normal permission bits - os.chmod(outfile, - (zinfo.external_attr >> 16) & 0x1FF) + os.chmod(outfile, (zinfo.external_attr >> 16) & 0x1FF) outfiles.append(outfile) # Double check the digest of the written file if not dry_run and row[1]: @@ -636,15 +629,12 @@ def install(self, paths, maker, **kwargs): '%s' % outfile) if bc and outfile.endswith('.py'): try: - pyc = fileop.byte_compile( - outfile, - hashed_invalidation=bc_hashed_invalidation) + pyc = fileop.byte_compile(outfile, hashed_invalidation=bc_hashed_invalidation) outfiles.append(pyc) except Exception: # Don't give up if byte-compilation fails, # but log it and perhaps warn the user - logger.warning('Byte-compilation failed', - exc_info=True) + logger.warning('Byte-compilation failed', exc_info=True) else: fn = os.path.basename(convert_path(arcname)) workname = os.path.join(workdir, fn) @@ -732,8 +722,7 @@ def install(self, paths, maker, **kwargs): outfiles.append(p) # Write RECORD - dist.write_installed_files(outfiles, paths['prefix'], - dry_run) + dist.write_installed_files(outfiles, paths['prefix'], dry_run) return dist except Exception: # pragma: no cover logger.exception('installation failed.') @@ -746,8 +735,7 @@ def _get_dylib_cache(self): global cache if cache is None: # Use native string to avoid issues on 2.x: see Python #20140. 
- base = os.path.join(get_cache_base(), str('dylib-cache'), - '%s.%s' % sys.version_info[:2]) + base = os.path.join(get_cache_base(), str('dylib-cache'), '%s.%s' % sys.version_info[:2]) cache = Cache(base) return cache @@ -764,7 +752,7 @@ def _get_extensions(self): wf = wrapper(bf) extensions = json.load(wf) cache = self._get_dylib_cache() - prefix = cache.prefix_to_dir(pathname) + prefix = cache.prefix_to_dir(self.filename, use_abspath=False) cache_base = os.path.join(cache.base, prefix) if not os.path.isdir(cache_base): os.makedirs(cache_base) @@ -774,8 +762,7 @@ def _get_extensions(self): extract = True else: file_time = os.stat(dest).st_mtime - file_time = datetime.datetime.fromtimestamp( - file_time) + file_time = datetime.datetime.fromtimestamp(file_time) info = zf.getinfo(relpath) wheel_time = datetime.datetime(*info.date_time) extract = wheel_time > file_time @@ -924,12 +911,10 @@ def update_version(version, path): else: parts = [int(s) for s in version[i + 1:].split('.')] parts[-1] += 1 - updated = '%s+%s' % (version[:i], '.'.join( - str(i) for i in parts)) + updated = '%s+%s' % (version[:i], '.'.join(str(i) for i in parts)) except UnsupportedVersionError: - logger.debug( - 'Cannot update non-compliant (PEP-440) ' - 'version %r', version) + logger.debug('Cannot update non-compliant (PEP-440) ' + 'version %r', version) if updated: md = Metadata(path=path) md.version = updated @@ -971,14 +956,11 @@ def update_version(version, path): update_version(current_version, path) # Decide where the new wheel goes. if dest_dir is None: - fd, newpath = tempfile.mkstemp(suffix='.whl', - prefix='wheel-update-', - dir=workdir) + fd, newpath = tempfile.mkstemp(suffix='.whl', prefix='wheel-update-', dir=workdir) os.close(fd) else: if not os.path.isdir(dest_dir): - raise DistlibException('Not a directory: %r' % - dest_dir) + raise DistlibException('Not a directory: %r' % dest_dir) newpath = os.path.join(dest_dir, self.filename) archive_paths = list(path_map.items()) distinfo = os.path.join(workdir, info_dir) @@ -1005,11 +987,20 @@ def compatible_tags(): """ Return (pyver, abi, arch) tuples compatible with this Python. 
""" - versions = [VER_SUFFIX] - major = VER_SUFFIX[0] - for minor in range(sys.version_info[1] - 1, -1, -1): - versions.append(''.join([major, str(minor)])) + class _Version: + def __init__(self, major, minor): + self.major = major + self.major_minor = (major, minor) + self.string = ''.join((str(major), str(minor))) + + def __str__(self): + return self.string + + versions = [ + _Version(sys.version_info.major, minor_version) + for minor_version in range(sys.version_info.minor, -1, -1) + ] abis = [] for suffix in _get_suffixes(): if suffix.startswith('.abi'): @@ -1045,35 +1036,45 @@ def compatible_tags(): minor -= 1 # Most specific - our Python version, ABI and arch - for abi in abis: - for arch in arches: - result.append((''.join((IMP_PREFIX, versions[0])), abi, arch)) - # manylinux - if abi != 'none' and sys.platform.startswith('linux'): - arch = arch.replace('linux_', '') - parts = _get_glibc_version() - if len(parts) == 2: - if parts >= (2, 5): - result.append((''.join((IMP_PREFIX, versions[0])), abi, - 'manylinux1_%s' % arch)) - if parts >= (2, 12): - result.append((''.join((IMP_PREFIX, versions[0])), abi, - 'manylinux2010_%s' % arch)) - if parts >= (2, 17): - result.append((''.join((IMP_PREFIX, versions[0])), abi, - 'manylinux2014_%s' % arch)) - result.append( - (''.join((IMP_PREFIX, versions[0])), abi, - 'manylinux_%s_%s_%s' % (parts[0], parts[1], arch))) + for i, version_object in enumerate(versions): + version = str(version_object) + add_abis = [] + + if i == 0: + add_abis = abis + + if IMP_PREFIX == 'cp' and version_object.major_minor >= (3, 2): + limited_api_abi = 'abi' + str(version_object.major) + if limited_api_abi not in add_abis: + add_abis.append(limited_api_abi) + + for abi in add_abis: + for arch in arches: + result.append((''.join((IMP_PREFIX, version)), abi, arch)) + # manylinux + if abi != 'none' and sys.platform.startswith('linux'): + arch = arch.replace('linux_', '') + parts = _get_glibc_version() + if len(parts) == 2: + if parts >= (2, 5): + result.append((''.join((IMP_PREFIX, version)), abi, 'manylinux1_%s' % arch)) + if parts >= (2, 12): + result.append((''.join((IMP_PREFIX, version)), abi, 'manylinux2010_%s' % arch)) + if parts >= (2, 17): + result.append((''.join((IMP_PREFIX, version)), abi, 'manylinux2014_%s' % arch)) + result.append((''.join( + (IMP_PREFIX, version)), abi, 'manylinux_%s_%s_%s' % (parts[0], parts[1], arch))) # where no ABI / arch dependency, but IMP_PREFIX dependency - for i, version in enumerate(versions): + for i, version_object in enumerate(versions): + version = str(version_object) result.append((''.join((IMP_PREFIX, version)), 'none', 'any')) if i == 0: result.append((''.join((IMP_PREFIX, version[0])), 'none', 'any')) # no IMP_PREFIX, ABI or arch dependency - for i, version in enumerate(versions): + for i, version_object in enumerate(versions): + version = str(version_object) result.append((''.join(('py', version)), 'none', 'any')) if i == 0: result.append((''.join(('py', version[0])), 'none', 'any')) diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index 8f9e3773cb3..eff20408e93 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -1,5 +1,5 @@ CacheControl==0.14.0 -distlib==0.3.8 +distlib==0.3.9 distro==1.9.0 msgpack==1.0.8 packaging==24.1 diff --git a/tools/vendoring/patches/distlib.patch b/tools/vendoring/patches/distlib.patch deleted file mode 100644 index 962eab1d74f..00000000000 --- a/tools/vendoring/patches/distlib.patch +++ /dev/null @@ -1,60 +0,0 @@ -diff --git 
a/src/pip/_vendor/distlib/scripts.py b/src/pip/_vendor/distlib/scripts.py -index cfa45d2af..e16292b83 100644 ---- a/src/pip/_vendor/distlib/scripts.py -+++ b/src/pip/_vendor/distlib/scripts.py -@@ -49,6 +49,24 @@ if __name__ == '__main__': - sys.exit(%(func)s()) - ''' - -+# Pre-fetch the contents of all executable wrapper stubs. -+# This is to address https://github.com/pypa/pip/issues/12666. -+# When updating pip, we rename the old pip in place before installing the -+# new version. If we try to fetch a wrapper *after* that rename, the finder -+# machinery will be confused as the package is no longer available at the -+# location where it was imported from. So we load everything into memory in -+# advance. -+ -+# Issue 31: don't hardcode an absolute package name, but -+# determine it relative to the current package -+distlib_package = __name__.rsplit('.', 1)[0] -+ -+WRAPPERS = { -+ r.name: r.bytes -+ for r in finder(distlib_package).iterator("") -+ if r.name.endswith(".exe") -+} -+ - - def enquote_executable(executable): - if ' ' in executable: -@@ -164,6 +164,12 @@ class ScriptMaker(object): - """ - if os.name != 'posix': - simple_shebang = True -+ elif getattr(sys, "cross_compiling", False): -+ # In a cross-compiling environment, the shebang will likely be a -+ # script; this *must* be invoked with the "safe" version of the -+ # shebang, or else using os.exec() to run the entry script will -+ # fail, raising "OSError 8 [Errno 8] Exec format error". -+ simple_shebang = False - else: - # Add 3 for '#!' prefix and newline suffix. - shebang_length = len(executable) + len(post_interp) + 3 -@@ -409,15 +427,11 @@ class ScriptMaker(object): - bits = '32' - platform_suffix = '-arm' if get_platform() == 'win-arm64' else '' - name = '%s%s%s.exe' % (kind, bits, platform_suffix) -- # Issue 31: don't hardcode an absolute package name, but -- # determine it relative to the current package -- distlib_package = __name__.rsplit('.', 1)[0] -- resource = finder(distlib_package).find(name) -- if not resource: -+ if name not in WRAPPERS: - msg = ('Unable to find resource %s in package %s' % - (name, distlib_package)) - raise ValueError(msg) -- return resource.bytes -+ return WRAPPERS[name] - - # Public API follows -
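
A note for reviewers on the byte_compile rework in util.py above: 0.3.9 now forwards a caller-supplied py_compile.PycInvalidationMode unchanged, while any other truthy hashed_invalidation value still selects CHECKED_HASH. A minimal sketch of the underlying stdlib call on CPython 3.7+ (PEP 552); the source path is illustrative, not taken from this diff:

    import py_compile

    # Truthy flag, as before: hash-based pycs, verified at import time.
    py_compile.compile('example/module.py', doraise=True,
                       invalidation_mode=py_compile.PycInvalidationMode.CHECKED_HASH)

    # New in 0.3.9: an explicit mode is passed through as-is.
    py_compile.compile('example/module.py', doraise=True,
                       invalidation_mode=py_compile.PycInvalidationMode.UNCHECKED_HASH)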
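
Similarly, _get_extensions in wheel.py now keys the dylib cache on the wheel's own filename (use_abspath=False) rather than its absolute path, so the cache location no longer depends on where the wheel sits on disk. A hedged sketch of the resulting directory names, following the rules quoted in the path_to_cache_dir docstring; the POSIX separators and wheel names are illustrative:

    from pip._vendor.distlib.util import path_to_cache_dir

    # Default: the absolute path is flattened, each os.sep becoming '--'.
    path_to_cache_dir('/home/user/wheels/foo.whl')
    # -> '--home--user--wheels--foo.whl.cache'

    # With use_abspath=False, only the string given is transformed.
    path_to_cache_dir('foo-1.0-py3-none-any.whl', use_abspath=False)
    # -> 'foo-1.0-py3-none-any.whl.cache'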