"""
Support for installing and building the "wheel" binary package format.
"""
from __future__ import absolute_import

import compileall
import csv
import errno
import functools
import hashlib
import logging
import os
import os.path
import re
import shutil
import stat
import sys
import tempfile
import warnings

from base64 import urlsafe_b64encode
from email.parser import Parser

from pip._vendor.six import StringIO

import pip
from pip.download import path_to_url, unpack_url
from pip.exceptions import InvalidWheelFilename, UnsupportedWheel
from pip.locations import distutils_scheme, PIP_DELETE_MARKER_FILENAME
from pip import pep425tags
from pip.utils import (
    call_subprocess, ensure_dir, make_path_relative, captured_stdout,
    rmtree)
from pip.utils.logging import indent_log
from pip._vendor.distlib.scripts import ScriptMaker
from pip._vendor import pkg_resources
from pip._vendor.six.moves import configparser


wheel_ext = '.whl'

VERSION_COMPATIBLE = (1, 0)


logger = logging.getLogger(__name__)


class WheelCache(object):
    """A cache of wheels for future installs."""

    def __init__(self, cache_dir, format_control):
        """Create a wheel cache.

        :param cache_dir: The root of the cache.
        :param format_control: A pip.index.FormatControl object to limit
            binaries being read from the cache.
        """
        self._cache_dir = os.path.expanduser(cache_dir) if cache_dir else None
        self._format_control = format_control

    def cached_wheel(self, link, package_name):
        return cached_wheel(
            self._cache_dir, link, self._format_control, package_name)


def _cache_for_link(cache_dir, link):
    """
    Return a directory to store cached wheels in for link.

    Because there are M wheels for any one sdist, we provide a directory
    to cache them in, and then consult that directory when looking up
    cache hits.

    We only insert things into the cache if they have plausible version
    numbers, so that we don't contaminate the cache with things that were
    not unique. E.g. ./package might have dozens of installs done for it
    and build a version of 0.0...and if we built and cached a wheel, we'd
    end up using the same wheel even if the source has been edited.

    :param cache_dir: The cache_dir being used by pip.
    :param link: The link of the sdist for which this will cache wheels.
    """

    # We want to generate a URL to use as our cache key; we don't want to
    # just re-use the URL because it might have other items in the fragment
    # and we don't care about those.
    key_parts = [link.url_without_fragment]
    if link.hash_name is not None and link.hash is not None:
        key_parts.append("=".join([link.hash_name, link.hash]))
    key_url = "#".join(key_parts)

    # Encode our key url with sha224, we'll use this because it has similar
    # security properties to sha256, but with a shorter total output (and
    # thus less secure). However the differences don't make a lot of
    # difference for our use case here.
    hashed = hashlib.sha224(key_url.encode()).hexdigest()

    # We want to nest the directories some to prevent having a ton of top
    # level directories where we might run out of sub directories on some FS.
    parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]

    # Inside of the base location for cached wheels, expand our parts and
    # join them all together.
    return os.path.join(cache_dir, "wheels", *parts)
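
# Illustrative sketch of the layout produced by _cache_for_link (hypothetical
# values): a link such as "https://example.com/pkg-1.0.tar.gz" is hashed with
# sha224, the hex digest is split as [0:2], [2:4], [4:6], [6:], and any wheels
# built from that sdist end up under
#   <cache_dir>/wheels/<aa>/<bb>/<cc>/<rest-of-digest>/pkg-1.0-*.whl
# cached_wheel() below consults that same directory when looking for a hit.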


def cached_wheel(cache_dir, link, format_control, package_name):
    if not cache_dir:
        return link
    if not link:
        return link
    if link.is_wheel:
        return link
    if not link.is_artifact:
        return link
    if not package_name:
        return link
    canonical_name = pkg_resources.safe_name(package_name).lower()
    formats = pip.index.fmt_ctl_formats(format_control, canonical_name)
    if "binary" not in formats:
        return link
    root = _cache_for_link(cache_dir, link)
    try:
        wheel_names = os.listdir(root)
    except OSError as e:
        if e.errno in (errno.ENOENT, errno.ENOTDIR):
            return link
        raise
    candidates = []
    for wheel_name in wheel_names:
        try:
            wheel = Wheel(wheel_name)
        except InvalidWheelFilename:
            continue
        if not wheel.supported():
            # Built for a different python/arch/etc
            continue
        candidates.append((wheel.support_index_min(), wheel_name))
    if not candidates:
        return link
    candidates.sort()
    path = os.path.join(root, candidates[0][1])
    return pip.index.Link(path_to_url(path), trusted=True)
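
# When several cached wheels match, cached_wheel() prefers the one whose file
# tags appear earliest in pep425tags.supported_tags (via support_index_min),
# so e.g. an interpreter-specific wheel would typically win over a matching
# pure-Python one if both happen to be cached.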


def rehash(path, algo='sha256', blocksize=1 << 20):
    """Return (hash, length) for path using hashlib.new(algo)"""
    h = hashlib.new(algo)
    length = 0
    with open(path, 'rb') as f:
        block = f.read(blocksize)
        while block:
            length += len(block)
            h.update(block)
            block = f.read(blocksize)
    digest = 'sha256=' + urlsafe_b64encode(
        h.digest()
    ).decode('latin1').rstrip('=')
    return (digest, length)
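
# rehash() produces the (hash, length) pair written into RECORD rows; an
# illustrative result for an empty file would be
#   ('sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU', 0)
# with the base64 padding '=' stripped, as the wheel RECORD format expects.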


def open_for_csv(name, mode):
    if sys.version_info[0] < 3:
        nl = {}
        bin = 'b'
    else:
        nl = {'newline': ''}
        bin = ''
    return open(name, mode + bin, **nl)
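
# The csv module needs binary-mode files on Python 2 but text mode with
# newline='' on Python 3; open_for_csv() hides that difference for the
# RECORD rewriting done in move_wheel_files() below.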


def fix_script(path):
    """Replace #!python with #!/path/to/python
    Return True if file was changed."""
    # XXX RECORD hashes will need to be updated
    if os.path.isfile(path):
        with open(path, 'rb') as script:
            firstline = script.readline()
            if not firstline.startswith(b'#!python'):
                return False
            exename = sys.executable.encode(sys.getfilesystemencoding())
            firstline = b'#!' + exename + os.linesep.encode("ascii")
            rest = script.read()
        with open(path, 'wb') as script:
            script.write(firstline)
            script.write(rest)
        return True


dist_info_re = re.compile(r"""^(?P<namever>(?P<name>.+?)(-(?P<ver>\d.+?))?)
                                \.dist-info$""", re.VERBOSE)
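
# dist_info_re matches directory names such as "simple-0.1.dist-info" (an
# illustrative example), capturing name == "simple" and ver == "0.1"; a bare
# "simple.dist-info" also matches, with the version group absent.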


def root_is_purelib(name, wheeldir):
    """
    Return True if the extracted wheel in wheeldir should go into purelib.
    """
    name_folded = name.replace("-", "_")
    for item in os.listdir(wheeldir):
        match = dist_info_re.match(item)
        if match and match.group('name') == name_folded:
            with open(os.path.join(wheeldir, item, 'WHEEL')) as wheel:
                for line in wheel:
                    line = line.lower().rstrip()
                    if line == "root-is-purelib: true":
                        return True
    return False
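
# A WHEEL metadata file containing "Root-Is-Purelib: true" (matched
# case-insensitively above) sends the unpacked tree to scheme['purelib'];
# anything else falls through to scheme['platlib'] in move_wheel_files()
# below.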


def get_entrypoints(filename):
    if not os.path.exists(filename):
        return {}, {}

    # This is done because you can pass a string to entry_points wrappers
    # which means that they may or may not be valid INI files. The attempt
    # here is to strip leading and trailing whitespace in order to make them
    # valid INI files.
    with open(filename) as fp:
        data = StringIO()
        for line in fp:
            data.write(line.strip())
            data.write("\n")
        data.seek(0)

    cp = configparser.RawConfigParser()
    cp.readfp(data)

    console = {}
    gui = {}
    if cp.has_section('console_scripts'):
        console = dict(cp.items('console_scripts'))
    if cp.has_section('gui_scripts'):
        gui = dict(cp.items('gui_scripts'))
    return console, gui
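
# Illustrative input/output for get_entrypoints() (hypothetical project): an
# entry_points.txt containing
#
#   [console_scripts]
#   mytool = mypkg.cli:main
#
# yields ({'mytool': 'mypkg.cli:main'}, {}) -- console scripts first, GUI
# scripts second.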


def move_wheel_files(name, req, wheeldir, user=False, home=None, root=None,
                     pycompile=True, scheme=None, isolated=False,
                     strip_file_prefix=None):
    """Install a wheel"""

    if not scheme:
        scheme = distutils_scheme(
            name, user=user, home=home, root=root, isolated=isolated
        )

    if root_is_purelib(name, wheeldir):
        lib_dir = scheme['purelib']
    else:
        lib_dir = scheme['platlib']

    info_dir = []
    data_dirs = []
    source = wheeldir.rstrip(os.path.sep) + os.path.sep

    # Record details of the files moved
    #   installed = files copied from the wheel to the destination
    #   changed = files changed while installing (scripts #! line typically)
    #   generated = files newly generated during the install (script wrappers)
    installed = {}
    changed = set()
    generated = []

    # Compile all of the pyc files that we're going to be installing
    if pycompile:
        with captured_stdout() as stdout:
            with warnings.catch_warnings():
                warnings.filterwarnings('ignore')
                compileall.compile_dir(source, force=True, quiet=True)
        logger.debug(stdout.getvalue())

    def normpath(src, p):
        return make_path_relative(src, p).replace(os.path.sep, '/')

    def record_installed(srcfile, destfile, modified=False):
        """Map archive RECORD paths to installation RECORD paths."""
        oldpath = normpath(srcfile, wheeldir)
        newpath = normpath(destfile, lib_dir)
        installed[oldpath] = newpath
        if modified:
            changed.add(destfile)

    def clobber(source, dest, is_base, fixer=None, filter=None):
        ensure_dir(dest)  # common for the 'include' path

        for dir, subdirs, files in os.walk(source):
            basedir = dir[len(source):].lstrip(os.path.sep)
            destdir = os.path.join(dest, basedir)
            if is_base and basedir.split(os.path.sep, 1)[0].endswith('.data'):
                continue
            for s in subdirs:
                destsubdir = os.path.join(dest, basedir, s)
                if is_base and basedir == '' and destsubdir.endswith('.data'):
                    data_dirs.append(s)
                    continue
                elif (is_base and
                        s.endswith('.dist-info') and
                        # is self.req.project_name case preserving?
                        s.lower().startswith(
                            req.project_name.replace('-', '_').lower())):
                    assert not info_dir, 'Multiple .dist-info directories'
                    info_dir.append(destsubdir)
            for f in files:
                # Skip unwanted files
                if filter and filter(f):
                    continue
                srcfile = os.path.join(dir, f)
                destfile = os.path.join(dest, basedir, f)
                # directory creation is lazy and after the file filtering
                # above to ensure we don't install empty dirs; empty dirs
                # can't be uninstalled.
                ensure_dir(destdir)

                # We use copyfile (not move, copy, or copy2) to be extra sure
                # that we are not moving directories over (copyfile fails for
                # directories) as well as to ensure that we are not copying
                # over any metadata because we want more control over what
                # metadata we actually copy over.
                shutil.copyfile(srcfile, destfile)

                # Copy over the metadata for the file, currently this only
                # includes the atime and mtime.
                st = os.stat(srcfile)
                if hasattr(os, "utime"):
                    os.utime(destfile, (st.st_atime, st.st_mtime))

                # If our file is executable, then make our destination file
                # executable.
                if os.access(srcfile, os.X_OK):
                    st = os.stat(srcfile)
                    permissions = (
                        st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
                    )
                    os.chmod(destfile, permissions)

                changed = False
                if fixer:
                    changed = fixer(destfile)
                record_installed(srcfile, destfile, changed)

    clobber(source, lib_dir, True)

    assert info_dir, "%s .dist-info directory not found" % req

    # Get the defined entry points
    ep_file = os.path.join(info_dir[0], 'entry_points.txt')
    console, gui = get_entrypoints(ep_file)

    def is_entrypoint_wrapper(name):
        # EP, EP.exe and EP-script.py are scripts generated for
        # entry point EP by setuptools
        if name.lower().endswith('.exe'):
            matchname = name[:-4]
        elif name.lower().endswith('-script.py'):
            matchname = name[:-10]
        elif name.lower().endswith(".pya"):
            matchname = name[:-4]
        else:
            matchname = name
        # Ignore setuptools-generated scripts
        return (matchname in console or matchname in gui)

    for datadir in data_dirs:
        fixer = None
        filter = None
        for subdir in os.listdir(os.path.join(wheeldir, datadir)):
            fixer = None
            if subdir == 'scripts':
                fixer = fix_script
                filter = is_entrypoint_wrapper
            source = os.path.join(wheeldir, datadir, subdir)
            dest = scheme[subdir]
            clobber(source, dest, False, fixer=fixer, filter=filter)

    maker = ScriptMaker(None, scheme['scripts'])

    # Ensure old scripts are overwritten.
    # See https://github.com/pypa/pip/issues/1800
    maker.clobber = True

    # Ensure we don't generate any variants for scripts because this is
    # almost never what somebody wants.
    # See https://bitbucket.org/pypa/distlib/issue/35/
    maker.variants = set(('', ))

    # This is required because otherwise distlib creates scripts that are
    # not executable.
    # See https://bitbucket.org/pypa/distlib/issue/32/
    maker.set_mode = True

    # Simplify the script and fix the fact that the default script swallows
    # every single stack trace.
    # See https://bitbucket.org/pypa/distlib/issue/34/
    # See https://bitbucket.org/pypa/distlib/issue/33/
    def _get_script_text(entry):
        return maker.script_template % {
            "module": entry.prefix,
            "import_name": entry.suffix.split(".")[0],
            "func": entry.suffix,
        }

    maker._get_script_text = _get_script_text
    maker.script_template = """# -*- coding: utf-8 -*-
import re
import sys

from %(module)s import %(import_name)s

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(%(func)s())
"""
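
    # With the template above, a hypothetical entry point
    # "mytool = mypkg.cli:main" produces a wrapper that does
    # "from mypkg.cli import main" followed by "sys.exit(main())", instead
    # of distlib's default wrapper, which swallows the traceback.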

    # Special case pip and setuptools to generate versioned wrappers
    #
    # The issue is that some projects (specifically, pip and setuptools) use
    # code in setup.py to create "versioned" entry points - pip2.7 on Python
    # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into
    # the wheel metadata at build time, and so if the wheel is installed with
    # a *different* version of Python the entry points will be wrong. The
    # correct fix for this is to enhance the metadata to be able to describe
    # such versioned entry points, but that won't happen till Metadata 2.0 is
    # available.
    # In the meantime, projects using versioned entry points will either have
    # incorrect versioned entry points, or they will not be able to
    # distribute "universal" wheels (i.e., they will need a wheel per Python
    # version).
    #
    # Because setuptools and pip are bundled with _ensurepip and virtualenv,
    # we need to use universal wheels. So, as a stopgap until Metadata 2.0,
    # we override the versioned entry points in the wheel and generate the
    # correct ones. This code is purely a short-term measure until Metadata
    # 2.0 is available.
    #
    # To add to the level of hack in this section of code, in order to
    # support ensurepip this code will look for an ``ENSUREPIP_OPTIONS``
    # environment variable which will control which version scripts get
    # installed.
    #
    # ENSUREPIP_OPTIONS=altinstall
    #   - Only pipX.Y and easy_install-X.Y will be generated and installed
    # ENSUREPIP_OPTIONS=install
    #   - pipX.Y, pipX, easy_install-X.Y will be generated and installed.
    #     Note that this option applies whenever ENSUREPIP_OPTIONS is set to
    #     anything other than altinstall.
    # DEFAULT
    #   - The default behavior is to install pip, pipX, pipX.Y, easy_install
    #     and easy_install-X.Y.
    pip_script = console.pop('pip', None)
    if pip_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            spec = 'pip = ' + pip_script
            generated.extend(maker.make(spec))

        if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
            spec = 'pip%s = %s' % (sys.version[:1], pip_script)
            generated.extend(maker.make(spec))

        spec = 'pip%s = %s' % (sys.version[:3], pip_script)
        generated.extend(maker.make(spec))

        # Delete any other versioned pip entry points
        pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)]
        for k in pip_ep:
            del console[k]
    easy_install_script = console.pop('easy_install', None)
    if easy_install_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            spec = 'easy_install = ' + easy_install_script
            generated.extend(maker.make(spec))

        spec = 'easy_install-%s = %s' % (sys.version[:3],
                                         easy_install_script)
        generated.extend(maker.make(spec))
        # Delete any other versioned easy_install entry points
        easy_install_ep = [
            k for k in console if re.match(r'easy_install(-\d\.\d)?$', k)
        ]
        for k in easy_install_ep:
            del console[k]

    # Generate the console and GUI entry points specified in the wheel
    if len(console) > 0:
        generated.extend(
            maker.make_multiple(['%s = %s' % kv for kv in console.items()])
        )
    if len(gui) > 0:
        generated.extend(
            maker.make_multiple(
                ['%s = %s' % kv for kv in gui.items()],
                {'gui': True}
            )
        )

    record = os.path.join(info_dir[0], 'RECORD')
    temp_record = os.path.join(info_dir[0], 'RECORD.pip')
    with open_for_csv(record, 'r') as record_in:
        with open_for_csv(temp_record, 'w+') as record_out:
            reader = csv.reader(record_in)
            writer = csv.writer(record_out)
            for row in reader:
                row[0] = installed.pop(row[0], row[0])
                if row[0] in changed:
                    row[1], row[2] = rehash(row[0])
                writer.writerow(row)
            for f in generated:
                h, l = rehash(f)
                if strip_file_prefix and f.startswith(strip_file_prefix):
                    f = os.path.join(
                        os.sep, os.path.relpath(f, strip_file_prefix))
                writer.writerow((f, h, l))
            for f in installed:
                writer.writerow((installed[f], '', ''))
    shutil.move(temp_record, record)


def _unique(fn):
    @functools.wraps(fn)
    def unique(*args, **kw):
        seen = set()
        for item in fn(*args, **kw):
            if item not in seen:
                seen.add(item)
                yield item
    return unique


# TODO: this goes somewhere besides the wheel module
@_unique
def uninstallation_paths(dist):
    """
    Yield all the uninstallation paths for dist based on RECORD-without-.pyc

    Yield paths to all the files in RECORD. For each .py file in RECORD, add
    the .pyc in the same directory.

    UninstallPathSet.add() takes care of the __pycache__ .pyc.
    """
    from pip.utils import FakeFile  # circular import
    r = csv.reader(FakeFile(dist.get_metadata_lines('RECORD')))
    for row in r:
        path = os.path.join(dist.location, row[0])
        yield path
        if path.endswith('.py'):
            dn, fn = os.path.split(path)
            base = fn[:-3]
            path = os.path.join(dn, base + '.pyc')
            yield path
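
# Illustrative output of uninstallation_paths() (hypothetical RECORD): a row
# for "mypkg/__init__.py" yields both
#   <dist.location>/mypkg/__init__.py
#   <dist.location>/mypkg/__init__.pyc
# while non-.py rows (e.g. "mypkg/data.txt") yield only their own path.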


def wheel_version(source_dir):
    """
    Return the Wheel-Version of an extracted wheel, if possible.

    Otherwise, return False if we couldn't parse / extract it.
    """
    try:
        dist = [d for d in pkg_resources.find_on_path(None, source_dir)][0]

        wheel_data = dist.get_metadata('WHEEL')
        wheel_data = Parser().parsestr(wheel_data)

        version = wheel_data['Wheel-Version'].strip()
        version = tuple(map(int, version.split('.')))
        return version
    except:
        return False


def check_compatibility(version, name):
    """
    Raises errors or warns if called with an incompatible Wheel-Version.

    Pip should refuse to install a Wheel-Version that's a major series
    ahead of what it's compatible with (e.g. 2.0 > 1.1); and warn when
    installing a version that is only a minor version ahead (e.g. 1.2 > 1.1).

    version: a 2-tuple representing a Wheel-Version (Major, Minor)
    name: name of wheel or package to raise exception about

    :raises UnsupportedWheel: when an incompatible Wheel-Version is given
    """
    if not version:
        raise UnsupportedWheel(
            "%s is in an unsupported or invalid wheel" % name
        )
    if version[0] > VERSION_COMPATIBLE[0]:
        raise UnsupportedWheel(
            "%s's Wheel-Version (%s) is not compatible with this version "
            "of pip" % (name, '.'.join(map(str, version)))
        )
    elif version > VERSION_COMPATIBLE:
        logger.warning(
            'Installing from a newer Wheel-Version (%s)',
            '.'.join(map(str, version)),
        )
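
# With VERSION_COMPATIBLE == (1, 0), check_compatibility() behaves roughly as
# follows (illustrative): (1, 0) passes silently, (1, 9) only logs a warning,
# and (2, 0) raises UnsupportedWheel.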


class Wheel(object):
    """A wheel file"""

    # TODO: maybe move the install code into this class

    wheel_file_re = re.compile(
        r"""^(?P<namever>(?P<name>.+?)-(?P<ver>\d.*?))
        ((-(?P<build>\d.*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?)
        \.whl|\.dist-info)$""",
        re.VERBOSE
    )

    def __init__(self, filename):
        """
        :raises InvalidWheelFilename: when the filename is invalid for a wheel
        """
        wheel_info = self.wheel_file_re.match(filename)
        if not wheel_info:
            raise InvalidWheelFilename(
                "%s is not a valid wheel filename." % filename
            )
        self.filename = filename
        self.name = wheel_info.group('name').replace('_', '-')
        # we'll assume "_" means "-" due to wheel naming scheme
        # (https://github.com/pypa/pip/issues/1150)
        self.version = wheel_info.group('ver').replace('_', '-')
        self.pyversions = wheel_info.group('pyver').split('.')
        self.abis = wheel_info.group('abi').split('.')
        self.plats = wheel_info.group('plat').split('.')

        # All the tag combinations from this file
        self.file_tags = set(
            (x, y, z) for x in self.pyversions
            for y in self.abis for z in self.plats
        )

    def support_index_min(self, tags=None):
        """
        Return the lowest index that one of the wheel's file_tag combinations
        achieves in the supported_tags list e.g. if there are 8 supported
        tags, and one of the file tags is first in the list, then return 0.
        Returns None if the wheel is not supported.
        """
        if tags is None:  # for mock
            tags = pep425tags.supported_tags
        indexes = [tags.index(c) for c in self.file_tags if c in tags]
        return min(indexes) if indexes else None

    def supported(self, tags=None):
        """Is this wheel supported on this system?"""
        if tags is None:  # for mock
            tags = pep425tags.supported_tags
        return bool(set(tags).intersection(self.file_tags))
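
# Illustrative use of Wheel (hypothetical filename):
#
#   w = Wheel("simple-0.1-py2.py3-none-any.whl")
#   w.name              -> 'simple'
#   w.version           -> '0.1'
#   sorted(w.file_tags) -> [('py2', 'none', 'any'), ('py3', 'none', 'any')]
#
# supported() and support_index_min() compare these tags against
# pep425tags.supported_tags for the running interpreter.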


class WheelBuilder(object):
    """Build wheels from a RequirementSet."""

    def __init__(self, requirement_set, finder, build_options=None,
                 global_options=None):
        self.requirement_set = requirement_set
        self.finder = finder
        self._cache_root = requirement_set._wheel_cache._cache_dir
        self._wheel_dir = requirement_set.wheel_download_dir
        self.build_options = build_options or []
        self.global_options = global_options or []

    def _build_one(self, req, output_dir):
        """Build one wheel.

        :return: The filename of the built wheel, or None if the build
            failed.
        """
        tempd = tempfile.mkdtemp('pip-wheel-')
        try:
            if self.__build_one(req, tempd):
                try:
                    wheel_name = os.listdir(tempd)[0]
                    wheel_path = os.path.join(output_dir, wheel_name)
                    shutil.move(os.path.join(tempd, wheel_name), wheel_path)
                    logger.info('Stored in directory: %s', output_dir)
                    return wheel_path
                except:
                    return None
            return None
        finally:
            rmtree(tempd)

    def __build_one(self, req, tempd):
        base_args = [
            sys.executable, '-c',
            "import setuptools;__file__=%r;"
            "exec(compile(open(__file__).read().replace('\\r\\n', '\\n'), "
            "__file__, 'exec'))" % req.setup_py
        ] + list(self.global_options)

        logger.info('Running setup.py bdist_wheel for %s', req.name)
        logger.debug('Destination directory: %s', tempd)
        wheel_args = base_args + ['bdist_wheel', '-d', tempd] \
            + self.build_options
        try:
            call_subprocess(wheel_args, cwd=req.source_dir, show_stdout=False)
            return True
        except:
            logger.error('Failed building wheel for %s', req.name)
            return False

    def build(self, autobuilding=False):
        """Build wheels.

        :param autobuilding: If True, replace the sdist we built from with
            the newly built wheel, in preparation for installation.
        :return: True if all the wheels built correctly.
        """
        assert self._wheel_dir or (autobuilding and self._cache_root)
        # unpack sdists and construct the req set
        self.requirement_set.prepare_files(self.finder)

        reqset = self.requirement_set.requirements.values()

        buildset = []
        for req in reqset:
            if req.constraint:
                continue
            if req.is_wheel:
                if not autobuilding:
                    logger.info(
                        'Skipping %s, due to already being wheel.', req.name)
            elif req.editable:
                if not autobuilding:
                    logger.info(
                        'Skipping bdist_wheel for %s, due to being editable',
                        req.name)
            elif autobuilding and req.link and not req.link.is_artifact:
                pass
            elif autobuilding and not req.source_dir:
                pass
            else:
                if autobuilding:
                    link = req.link
                    base, ext = link.splitext()
                    if pip.index.egg_info_matches(base, None, link) is None:
                        # Doesn't look like a package - don't autobuild a
                        # wheel because we'll have no way to look up the
                        # result sanely
                        continue
                    if "binary" not in pip.index.fmt_ctl_formats(
                            self.finder.format_control,
                            pkg_resources.safe_name(req.name).lower()):
                        logger.info(
                            "Skipping bdist_wheel for %s, due to binaries "
                            "being disabled for it.", req.name)
                        continue
                buildset.append(req)

        if not buildset:
            return True

        # Build the wheels.
        logger.info(
            'Building wheels for collected packages: %s',
            ', '.join([req.name for req in buildset]),
        )
        with indent_log():
            build_success, build_failure = [], []
            for req in buildset:
                if autobuilding:
                    output_dir = _cache_for_link(self._cache_root, req.link)
                    ensure_dir(output_dir)
                else:
                    output_dir = self._wheel_dir
                wheel_file = self._build_one(req, output_dir)
                if wheel_file:
                    build_success.append(req)
                    if autobuilding:
                        # XXX: This is mildly duplicative with prepare_files,
                        # but not close enough to pull out to a single common
                        # method.
                        # The code below assumes temporary source dirs -
                        # prevent it doing bad things.
                        if req.source_dir and not os.path.exists(os.path.join(
                                req.source_dir, PIP_DELETE_MARKER_FILENAME)):
                            raise AssertionError(
                                "bad source dir - missing marker")
                        # Delete the source we built the wheel from
                        req.remove_temporary_source()
                        # set the build directory again - name is known from
                        # the work prepare_files did.
                        req.source_dir = req.build_location(
                            self.requirement_set.build_dir)
                        # Update the link for this.
                        req.link = pip.index.Link(
                            path_to_url(wheel_file), trusted=True)
                        assert req.link.is_wheel
                        # extract the wheel into the dir
                        unpack_url(
                            req.link, req.source_dir, None, False,
                            session=self.requirement_set.session)
                else:
                    build_failure.append(req)

        # notify success/failure
        if build_success:
            logger.info(
                'Successfully built %s',
                ' '.join([req.name for req in build_success]),
            )
        if build_failure:
            logger.info(
                'Failed to build %s',
                ' '.join([req.name for req in build_failure]),
            )
        # Return True if all builds were successful
        return len(build_failure) == 0