Upgrade all modules
This commit is contained in:
parent 3764c4fb07
commit 920d1361df
@@ -1 +1 @@
-Subproject commit 4d64ab11f621d0a3d87100a564e83b348d89b48e
+Subproject commit 91e79dd5d323f2f3a59f1ee0d752f1fe8eb8accf
@@ -88,8 +88,8 @@ __import__('pkg_resources.extern.packaging.markers')
 __metaclass__ = type


-if (3, 0) < sys.version_info < (3, 4):
-    raise RuntimeError("Python 3.4 or later is required")
+if (3, 0) < sys.version_info < (3, 5):
+    raise RuntimeError("Python 3.5 or later is required")

 if six.PY2:
     # Those builtin exceptions are only defined in Python 3
@@ -333,7 +333,7 @@ class UnknownExtra(ResolutionError):

 _provider_factories = {}

-PY_MAJOR = sys.version[:3]
+PY_MAJOR = '{}.{}'.format(*sys.version_info)
 EGG_DIST = 3
 BINARY_DIST = 2
 SOURCE_DIST = 1
@@ -3109,6 +3109,7 @@ class Requirement(packaging.requirements.Requirement):
         self.extras = tuple(map(safe_extra, self.extras))
         self.hashCmp = (
             self.key,
+            self.url,
             self.specifier,
             frozenset(self.extras),
             str(self.marker) if self.marker else None,
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: setuptools
-Version: 41.2.0
+Version: 44.1.1
 Summary: Easily download, build, install, upgrade, and uninstall Python packages
 Home-page: https://github.com/pypa/setuptools
 Author: Python Packaging Authority
@@ -16,15 +16,15 @@ Classifier: Operating System :: OS Independent
 Classifier: Programming Language :: Python :: 2
 Classifier: Programming Language :: Python :: 2.7
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.4
 Classifier: Programming Language :: Python :: 3.5
 Classifier: Programming Language :: Python :: 3.6
 Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
 Classifier: Topic :: Software Development :: Libraries :: Python Modules
 Classifier: Topic :: System :: Archiving :: Packaging
 Classifier: Topic :: System :: Systems Administration
 Classifier: Topic :: Utilities
-Requires-Python: >=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*
+Requires-Python: !=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7
 Description-Content-Type: text/x-rst; charset=UTF-8
 Provides-Extra: certs
 Requires-Dist: certifi (==2016.9.26) ; extra == 'certs'
@@ -67,8 +67,17 @@ To report a security vulnerability, please use the
 Tidelift will coordinate the fix and disclosure.

+For Enterprise
+==============
+
+Available as part of the Tidelift Subscription.
+
+Setuptools and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use.
+
+`Learn more <https://tidelift.com/subscription/pkg/pypi-setuptools?utm_source=pypi-setuptools&utm_medium=referral&utm_campaign=github>`_.
+
 Code of Conduct
----------------
+===============

 Everyone interacting in the setuptools project's codebases, issue trackers,
 chat rooms, and mailing lists is expected to follow the
@@ -1,8 +1,8 @@
|
|||
../../Scripts/easy_install-2.7.exe,sha256=6LE-Yf8B6Kd7I4a7lNe5oPD3CCXOTyL3A6QsdtgHB5M,103273
|
||||
../../Scripts/easy_install.exe,sha256=6LE-Yf8B6Kd7I4a7lNe5oPD3CCXOTyL3A6QsdtgHB5M,103273
|
||||
../../Scripts/easy_install-2.7.exe,sha256=yU4KAVS04ChFCSX0FXKPyk4g3VdSNetKGQl_7tskFsM,106409
|
||||
../../Scripts/easy_install.exe,sha256=yU4KAVS04ChFCSX0FXKPyk4g3VdSNetKGQl_7tskFsM,106409
|
||||
easy_install.py,sha256=MDC9vt5AxDsXX5qcKlBz2TnW6Tpuv_AobnfhCJ9X3PM,126
|
||||
easy_install.pyc,,
|
||||
pkg_resources/__init__.py,sha256=6Kq6B-JSGEFSg_2FAnl6Lmoa5by2RyjDxPBiWuAh-dw,108309
|
||||
pkg_resources/__init__.py,sha256=0IssxXPnaDKpYZRra8Ime0JG4hwosQljItGD0bnIkGk,108349
|
||||
pkg_resources/__init__.pyc,,
|
||||
pkg_resources/_vendor/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
pkg_resources/_vendor/__init__.pyc,,
|
||||
|
@@ -34,38 +34,45 @@ pkg_resources/extern/__init__.py,sha256=cHiEfHuLmm6rs5Ve_ztBfMI7Lr31vss-D4wkqF5x
|
|||
pkg_resources/extern/__init__.pyc,,
|
||||
pkg_resources/py31compat.py,sha256=-WQ0e4c3RG_acdhwC3gLiXhP_lg4G5q7XYkZkQg0gxU,558
|
||||
pkg_resources/py31compat.pyc,,
|
||||
setuptools-41.2.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
setuptools-41.2.0.dist-info/LICENSE,sha256=wyo6w5WvYyHv0ovnPQagDw22q4h9HCHU_sRhKNIFbVo,1078
|
||||
setuptools-41.2.0.dist-info/METADATA,sha256=_t0DvK_bLLT2yjvvNOCXiSIoNJvjX6dTWT8fLNVrUQk,3305
|
||||
setuptools-41.2.0.dist-info/RECORD,,
|
||||
setuptools-41.2.0.dist-info/WHEEL,sha256=8zNYZbwQSXoB9IfXOjPfeNwvAsALAjffgk27FqvCWbo,110
|
||||
setuptools-41.2.0.dist-info/dependency_links.txt,sha256=HlkCFkoK5TbZ5EMLbLKYhLcY_E31kBWD8TqW2EgmatQ,239
|
||||
setuptools-41.2.0.dist-info/entry_points.txt,sha256=jBqCYDlVjl__sjYFGXo1JQGIMAYFJE-prYWUtnMZEew,2990
|
||||
setuptools-41.2.0.dist-info/top_level.txt,sha256=2HUXVVwA4Pff1xgTFr3GsTXXKaPaO6vlG6oNJ_4u4Tg,38
|
||||
setuptools-41.2.0.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
|
||||
setuptools/__init__.py,sha256=WBpCcn2lvdckotabeae1TTYonPOcgCIF3raD2zRWzBc,7283
|
||||
setuptools-44.1.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
setuptools-44.1.1.dist-info/LICENSE,sha256=wyo6w5WvYyHv0ovnPQagDw22q4h9HCHU_sRhKNIFbVo,1078
|
||||
setuptools-44.1.1.dist-info/METADATA,sha256=QOFp76x006RHV1cCrq4t20hBKrG7Wub1kvvbuDE17IA,3704
|
||||
setuptools-44.1.1.dist-info/RECORD,,
|
||||
setuptools-44.1.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
setuptools-44.1.1.dist-info/WHEEL,sha256=8zNYZbwQSXoB9IfXOjPfeNwvAsALAjffgk27FqvCWbo,110
|
||||
setuptools-44.1.1.dist-info/dependency_links.txt,sha256=HlkCFkoK5TbZ5EMLbLKYhLcY_E31kBWD8TqW2EgmatQ,239
|
||||
setuptools-44.1.1.dist-info/entry_points.txt,sha256=fwogkZeakIfxMNWog7kxcsbjwnMAMkpCpQv8x1ZjQ70,3206
|
||||
setuptools-44.1.1.dist-info/top_level.txt,sha256=2HUXVVwA4Pff1xgTFr3GsTXXKaPaO6vlG6oNJ_4u4Tg,38
|
||||
setuptools-44.1.1.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
|
||||
setuptools/__init__.py,sha256=0SDEgF1acybGfdxJvyjgeAPKC-3-hoYrLKRw8y4YmLQ,7795
|
||||
setuptools/__init__.pyc,,
|
||||
setuptools/_deprecation_warning.py,sha256=jU9-dtfv6cKmtQJOXN8nP1mm7gONw5kKEtiPtbwnZyI,218
|
||||
setuptools/_deprecation_warning.pyc,,
|
||||
setuptools/_imp.py,sha256=jloslOkxrTKbobgemfP94YII0nhqiJzE1bRmCTZ1a5I,2223
|
||||
setuptools/_imp.pyc,,
|
||||
setuptools/_vendor/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
setuptools/_vendor/__init__.pyc,,
|
||||
setuptools/_vendor/packaging/__about__.py,sha256=zkcCPTN_6TcLW0Nrlg0176-R1QQ_WVPTm8sz1R4-HjM,720
|
||||
setuptools/_vendor/ordered_set.py,sha256=dbaCcs27dyN9gnMWGF5nA_BrVn6Q-NrjKYJpV9_fgBs,15130
|
||||
setuptools/_vendor/ordered_set.pyc,,
|
||||
setuptools/_vendor/packaging/__about__.py,sha256=CpuMSyh1V7adw8QMjWKkY3LtdqRUkRX4MgJ6nF4stM0,744
|
||||
setuptools/_vendor/packaging/__about__.pyc,,
|
||||
setuptools/_vendor/packaging/__init__.py,sha256=_vNac5TrzwsrzbOFIbF-5cHqc_Y2aPT2D7zrIR06BOo,513
|
||||
setuptools/_vendor/packaging/__init__.py,sha256=6enbp5XgRfjBjsI9-bn00HjHf5TH21PDMOKkJW8xw-w,562
|
||||
setuptools/_vendor/packaging/__init__.pyc,,
|
||||
setuptools/_vendor/packaging/_compat.py,sha256=Vi_A0rAQeHbU-a9X0tt1yQm9RqkgQbDSxzRw8WlU9kA,860
|
||||
setuptools/_vendor/packaging/_compat.py,sha256=Ugdm-qcneSchW25JrtMIKgUxfEEBcCAz6WrEeXeqz9o,865
|
||||
setuptools/_vendor/packaging/_compat.pyc,,
|
||||
setuptools/_vendor/packaging/_structures.py,sha256=RImECJ4c_wTlaTYYwZYLHEiebDMaAJmK1oPARhw1T5o,1416
|
||||
setuptools/_vendor/packaging/_structures.py,sha256=pVd90XcXRGwpZRB_qdFuVEibhCHpX_bL5zYr9-N0mc8,1416
|
||||
setuptools/_vendor/packaging/_structures.pyc,,
|
||||
setuptools/_vendor/packaging/markers.py,sha256=Gvpk9EY20yKaMTiKgQZ8yFEEpodqVgVYtfekoic1Yts,8239
|
||||
setuptools/_vendor/packaging/markers.py,sha256=-meFl9Fr9V8rF5Rduzgett5EHK9wBYRUqssAV2pj0lw,8268
|
||||
setuptools/_vendor/packaging/markers.pyc,,
|
||||
setuptools/_vendor/packaging/requirements.py,sha256=t44M2HVWtr8phIz2OhnILzuGT3rTATaovctV1dpnVIg,4343
|
||||
setuptools/_vendor/packaging/requirements.py,sha256=3dwIJekt8RRGCUbgxX8reeAbgmZYjb0wcCRtmH63kxI,4742
|
||||
setuptools/_vendor/packaging/requirements.pyc,,
|
||||
setuptools/_vendor/packaging/specifiers.py,sha256=SAMRerzO3fK2IkFZCaZkuwZaL_EGqHNOz4pni4vhnN0,28025
|
||||
setuptools/_vendor/packaging/specifiers.py,sha256=0ZzQpcUnvrQ6LjR-mQRLzMr8G6hdRv-mY0VSf_amFtI,27778
|
||||
setuptools/_vendor/packaging/specifiers.pyc,,
|
||||
setuptools/_vendor/packaging/utils.py,sha256=3m6WvPm6NNxE8rkTGmn0r75B_GZSGg7ikafxHsBN1WA,421
|
||||
setuptools/_vendor/packaging/tags.py,sha256=EPLXhO6GTD7_oiWEO1U0l0PkfR8R_xivpMDHXnsTlts,12933
|
||||
setuptools/_vendor/packaging/tags.pyc,,
|
||||
setuptools/_vendor/packaging/utils.py,sha256=VaTC0Ei7zO2xl9ARiWmz2YFLFt89PuuhLbAlXMyAGms,1520
|
||||
setuptools/_vendor/packaging/utils.pyc,,
|
||||
setuptools/_vendor/packaging/version.py,sha256=OwGnxYfr2ghNzYx59qWIBkrK3SnB6n-Zfd1XaLpnnM0,11556
|
||||
setuptools/_vendor/packaging/version.py,sha256=Npdwnb8OHedj_2L86yiUqscujb7w_i5gmSK1PhOAFzg,11978
|
||||
setuptools/_vendor/packaging/version.pyc,,
|
||||
setuptools/_vendor/pyparsing.py,sha256=tmrp-lu-qO1i75ZzIN5A12nKRRD1Cm4Vpk-5LR9rims,232055
|
||||
setuptools/_vendor/pyparsing.pyc,,
|
||||
|
@@ -73,16 +80,16 @@ setuptools/_vendor/six.py,sha256=A6hdJZVjI3t_geebZ9BzUvwRrIXo0lfwzQlM2LcKyas,300
|
|||
setuptools/_vendor/six.pyc,,
|
||||
setuptools/archive_util.py,sha256=kw8Ib_lKjCcnPKNbS7h8HztRVK0d5RacU3r_KRdVnmM,6592
|
||||
setuptools/archive_util.pyc,,
|
||||
setuptools/build_meta.py,sha256=-9Nmj9YdbW4zX3TssPJZhsENrTa4fw3k86Jm1cdKMik,9597
|
||||
setuptools/build_meta.py,sha256=MQWILThG6texTPLol6icwM83h8V8TLbg0QCacFRt33k,9887
|
||||
setuptools/build_meta.pyc,,
|
||||
setuptools/cli-32.exe,sha256=dfEuovMNnA2HLa3jRfMPVi5tk4R7alCbpTvuxtCyw0Y,65536
|
||||
setuptools/cli-64.exe,sha256=KLABu5pyrnokJCv6skjXZ6GsXeyYHGcqOUT3oHI3Xpo,74752
|
||||
setuptools/cli.exe,sha256=dfEuovMNnA2HLa3jRfMPVi5tk4R7alCbpTvuxtCyw0Y,65536
|
||||
setuptools/command/__init__.py,sha256=NWzJ0A1BEengZpVeqUyWLNm2bk4P3F4iL5QUErHy7kA,594
|
||||
setuptools/command/__init__.py,sha256=QCAuA9whnq8Bnoc0bBaS6Lw_KAUO0DiHYZQXEMNn5hg,568
|
||||
setuptools/command/__init__.pyc,,
|
||||
setuptools/command/alias.py,sha256=KjpE0sz_SDIHv3fpZcIQK-sCkJz-SrC6Gmug6b9Nkc8,2426
|
||||
setuptools/command/alias.pyc,,
|
||||
setuptools/command/bdist_egg.py,sha256=be-IBpr1zhS9i6GjKANJgzkbH3ChImdWY7S-j0r2BK8,18167
|
||||
setuptools/command/bdist_egg.py,sha256=nnfV8Ah8IRC_Ifv5Loa9FdxL66MVbyDXwy-foP810zM,18185
|
||||
setuptools/command/bdist_egg.pyc,,
|
||||
setuptools/command/bdist_rpm.py,sha256=B7l0TnzCGb-0nLlm6rS00jWLkojASwVmdhW2w5Qz_Ak,1508
|
||||
setuptools/command/bdist_rpm.pyc,,
|
||||
|
@@ -90,19 +97,19 @@ setuptools/command/bdist_wininst.py,sha256=_6dz3lpB1tY200LxKPLM7qgwTCceOMgaWFF-j
|
|||
setuptools/command/bdist_wininst.pyc,,
|
||||
setuptools/command/build_clib.py,sha256=bQ9aBr-5ZSO-9fGsGsDLz0mnnFteHUZnftVLkhvHDq0,4484
|
||||
setuptools/command/build_clib.pyc,,
|
||||
setuptools/command/build_ext.py,sha256=Ib42YUGksBswm2mL5xmQPF6NeTA6HcqrvAtEgFCv32A,13019
|
||||
setuptools/command/build_ext.py,sha256=8k4kJcOp_ZMxZ1sZYmle5_OAtNYLXGjrbWOj-IPjvwY,13023
|
||||
setuptools/command/build_ext.pyc,,
|
||||
setuptools/command/build_py.py,sha256=yWyYaaS9F3o9JbIczn064A5g1C5_UiKRDxGaTqYbtLE,9596
|
||||
setuptools/command/build_py.pyc,,
|
||||
setuptools/command/develop.py,sha256=MQlnGS6uP19erK2JCNOyQYoYyquk3PADrqrrinqqLtA,8184
|
||||
setuptools/command/develop.py,sha256=B3-ImHP30Bxnqx-s_9Dp-fxtHhE245ACE1W5hmKY9xE,8188
|
||||
setuptools/command/develop.pyc,,
|
||||
setuptools/command/dist_info.py,sha256=5t6kOfrdgALT-P3ogss6PF9k-Leyesueycuk3dUyZnI,960
|
||||
setuptools/command/dist_info.pyc,,
|
||||
setuptools/command/easy_install.py,sha256=telww7CuPsoTtvlpY-ktnZGT85cZ6xGCGZa0vHvFJ-Q,87273
|
||||
setuptools/command/easy_install.py,sha256=lY0fTvseKfEbEeHUOypWmbk5akh7-MEgRNObDgrroOY,87548
|
||||
setuptools/command/easy_install.pyc,,
|
||||
setuptools/command/egg_info.py,sha256=w73EdxYSOk2gsaAiHGL2dZrCldoPiuRr2eTfqcFvCds,25570
|
||||
setuptools/command/egg_info.py,sha256=WezqoeF0of9FlAy8s20And7ieqLhQNcUrb6ryUAUX2s,25574
|
||||
setuptools/command/egg_info.pyc,,
|
||||
setuptools/command/install.py,sha256=a0EZpL_A866KEdhicTGbuyD_TYl1sykfzdrri-zazT4,4683
|
||||
setuptools/command/install.py,sha256=8doMxeQEDoK4Eco0mO2WlXXzzp9QnsGJQ7Z7yWkZPG8,4705
|
||||
setuptools/command/install.pyc,,
|
||||
setuptools/command/install_egg_info.py,sha256=bMgeIeRiXzQ4DAGPV1328kcjwQjHjOWU4FngAWLV78Q,2203
|
||||
setuptools/command/install_egg_info.pyc,,
|
||||
|
@@ -113,61 +120,63 @@ setuptools/command/install_scripts.pyc,,
|
|||
setuptools/command/launcher manifest.xml,sha256=xlLbjWrB01tKC0-hlVkOKkiSPbzMml2eOPtJ_ucCnbE,628
|
||||
setuptools/command/py36compat.py,sha256=SzjZcOxF7zdFUT47Zv2n7AM3H8koDys_0OpS-n9gIfc,4986
|
||||
setuptools/command/py36compat.pyc,,
|
||||
setuptools/command/register.py,sha256=LO3MvYKPE8dN1m-KkrBRHC68ZFoPvA_vI8Xgp7vv6zI,534
|
||||
setuptools/command/register.py,sha256=kk3DxXCb5lXTvqnhfwx2g6q7iwbUmgTyXUCaBooBOUk,468
|
||||
setuptools/command/register.pyc,,
|
||||
setuptools/command/rotate.py,sha256=co5C1EkI7P0GGT6Tqz-T2SIj2LBJTZXYELpmao6d4KQ,2164
|
||||
setuptools/command/rotate.pyc,,
|
||||
setuptools/command/saveopts.py,sha256=za7QCBcQimKKriWcoCcbhxPjUz30gSB74zuTL47xpP4,658
|
||||
setuptools/command/saveopts.pyc,,
|
||||
setuptools/command/sdist.py,sha256=gr5hFrDzUtGfp_0tu0sllzIyr3jMQegIkFmlDauQJxw,7388
|
||||
setuptools/command/sdist.py,sha256=14kBw_QOZ9L_RQDqgf9DAlEuoj0zC30X5mfDWeiyZwU,8092
|
||||
setuptools/command/sdist.pyc,,
|
||||
setuptools/command/setopt.py,sha256=NTWDyx-gjDF-txf4dO577s7LOzHVoKR0Mq33rFxaRr8,5085
|
||||
setuptools/command/setopt.pyc,,
|
||||
setuptools/command/test.py,sha256=oePJ49u17ENKtrM-rOrrLlRhtNnrzcSr0IW-gE9XVq0,9285
|
||||
setuptools/command/test.py,sha256=MahF7jRBGYwgM-fdPBLyBpfCqR5ZXSGexPVR-afV3Vc,9610
|
||||
setuptools/command/test.pyc,,
|
||||
setuptools/command/upload.py,sha256=GxtNkIl7SA0r8mACkbDcSCN1m2_WPppK9gZXJmQSiow,6811
|
||||
setuptools/command/upload.py,sha256=XT3YFVfYPAmA5qhGg0euluU98ftxRUW-PzKcODMLxUs,462
|
||||
setuptools/command/upload.pyc,,
|
||||
setuptools/command/upload_docs.py,sha256=oXiGplM_cUKLwE4CWWw98RzCufAu8tBhMC97GegFcms,7311
|
||||
setuptools/command/upload_docs.py,sha256=O137bN9dt_sELevf4vwuwWRkogHf4bPXc-jNRxVQaiw,7315
|
||||
setuptools/command/upload_docs.pyc,,
|
||||
setuptools/config.py,sha256=lz19l1AtoHctpp1_tbYZv176nrEj4Gpf7ykNIYTIkAQ,20425
|
||||
setuptools/config.py,sha256=6SB2OY3qcooOJmG_rsK_s0pKBsorBlDpfMJUyzjQIGk,20575
|
||||
setuptools/config.pyc,,
|
||||
setuptools/dep_util.py,sha256=fgixvC1R7sH3r13ktyf7N0FALoqEXL1cBarmNpSEoWg,935
|
||||
setuptools/dep_util.pyc,,
|
||||
setuptools/depends.py,sha256=hC8QIDcM3VDpRXvRVA6OfL9AaQfxvhxHcN_w6sAyNq8,5837
|
||||
setuptools/depends.py,sha256=qt2RWllArRvhnm8lxsyRpcthEZYp4GHQgREl1q0LkFw,5517
|
||||
setuptools/depends.pyc,,
|
||||
setuptools/dist.py,sha256=MRrBrgBFEwzUvrJrIgW79IepDuAeRxetGuSPky-MawQ,50248
|
||||
setuptools/dist.py,sha256=oP2a6DEi5kP8cK3d34ccEPx7FvrcCBkrJWof5ftE1oI,49877
|
||||
setuptools/dist.pyc,,
|
||||
setuptools/errors.py,sha256=MVOcv381HNSajDgEUWzOQ4J6B5BHCBMSjHfaWcEwA1o,524
|
||||
setuptools/errors.pyc,,
|
||||
setuptools/extension.py,sha256=uc6nHI-MxwmNCNPbUiBnybSyqhpJqjbhvOQ-emdvt_E,1729
|
||||
setuptools/extension.pyc,,
|
||||
setuptools/extern/__init__.py,sha256=TxeNKFMSfBMzBpBDiHx8Dh3RzsdVmvWaXhtZ03DZMs0,2499
|
||||
setuptools/extern/__init__.py,sha256=4q9gtShB1XFP6CisltsyPqtcfTO6ZM9Lu1QBl3l-qmo,2514
|
||||
setuptools/extern/__init__.pyc,,
|
||||
setuptools/glibc.py,sha256=X64VvGPL2AbURKwYRsWJOXXGAYOiF_v2qixeTkAULuU,3146
|
||||
setuptools/glibc.pyc,,
|
||||
setuptools/glob.py,sha256=o75cHrOxYsvn854thSxE0x9k8JrKDuhP_rRXlVB00Q4,5084
|
||||
setuptools/glob.pyc,,
|
||||
setuptools/gui-32.exe,sha256=XBr0bHMA6Hpz2s9s9Bzjl-PwXfa9nH4ie0rFn4V2kWA,65536
|
||||
setuptools/gui-64.exe,sha256=aYKMhX1IJLn4ULHgWX0sE0yREUt6B3TEHf_jOw6yNyE,75264
|
||||
setuptools/gui.exe,sha256=XBr0bHMA6Hpz2s9s9Bzjl-PwXfa9nH4ie0rFn4V2kWA,65536
|
||||
setuptools/installer.py,sha256=TCFRonRo01I79zo-ucf3Ymhj8TenPlmhMijN916aaJs,5337
|
||||
setuptools/installer.pyc,,
|
||||
setuptools/launch.py,sha256=sd7ejwhBocCDx_wG9rIs0OaZ8HtmmFU8ZC6IR_S0Lvg,787
|
||||
setuptools/launch.pyc,,
|
||||
setuptools/lib2to3_ex.py,sha256=t5e12hbR2pi9V4ezWDTB4JM-AISUnGOkmcnYHek3xjg,2013
|
||||
setuptools/lib2to3_ex.pyc,,
|
||||
setuptools/monkey.py,sha256=FGc9fffh7gAxMLFmJs2DW_OYWpBjkdbNS2n14UAK4NA,5264
|
||||
setuptools/monkey.pyc,,
|
||||
setuptools/msvc.py,sha256=uuRFaZzjJt5Fv3ZmyKUUuLtjx12_8G9RILigGec4irI,40838
|
||||
setuptools/msvc.py,sha256=8baJ6aYgCA4TRdWQQi185qB9dnU8FaP4wgpbmd7VODs,46751
|
||||
setuptools/msvc.pyc,,
|
||||
setuptools/namespaces.py,sha256=F0Nrbv8KCT2OrO7rwa03om4N4GZKAlnce-rr-cgDQa8,3199
|
||||
setuptools/namespaces.pyc,,
|
||||
setuptools/package_index.py,sha256=F9LBC-hQ5fkjeEVflxif0mo_DzRMrepahdFTPenOtGM,40587
|
||||
setuptools/package_index.py,sha256=6pb-B1POtHyLycAbkDETk4fO-Qv8_sY-rjTXhUOoh6k,40605
|
||||
setuptools/package_index.pyc,,
|
||||
setuptools/pep425tags.py,sha256=o_D_WVeWcXZiI2xjPSg7pouGOvaWRgGRxEDK9DzAXIA,10861
|
||||
setuptools/pep425tags.pyc,,
|
||||
setuptools/py27compat.py,sha256=3mwxRMDk5Q5O1rSXOERbQDXhFqwDJhhUitfMW_qpUCo,536
|
||||
setuptools/py27compat.py,sha256=tvmer0Tn-wk_JummCkoM22UIjpjL-AQ8uUiOaqTs8sI,1496
|
||||
setuptools/py27compat.pyc,,
|
||||
setuptools/py31compat.py,sha256=h2rtZghOfwoGYd8sQ0-auaKiF3TcL3qX0bX3VessqcE,838
|
||||
setuptools/py31compat.pyc,,
|
||||
setuptools/py33compat.py,sha256=SMF9Z8wnGicTOkU1uRNwZ_kz5Z_bj29PUBbqdqeeNsc,1330
|
||||
setuptools/py33compat.pyc,,
|
||||
setuptools/py34compat.py,sha256=KYOd6ybRxjBW8NJmYD8t_UyyVmysppFXqHpFLdslGXU,245
|
||||
setuptools/py34compat.pyc,,
|
||||
setuptools/sandbox.py,sha256=9UbwfEL5QY436oMI1LtFWohhoZ-UzwHvGyZjUH_qhkw,14276
|
||||
setuptools/sandbox.pyc,,
|
||||
setuptools/script (dev).tmpl,sha256=RUzQzCQUaXtwdLtYHWYbIQmOaES5Brqq1FvUA_tu-5I,218
|
||||
|
@@ -180,7 +189,7 @@ setuptools/unicode_utils.py,sha256=NOiZ_5hD72A6w-4wVj8awHFM3n51Kmw1Ic_vx15XFqw,9
|
|||
setuptools/unicode_utils.pyc,,
|
||||
setuptools/version.py,sha256=og_cuZQb0QI6ukKZFfZWPlr1HgJBPPn2vO2m_bI9ZTE,144
|
||||
setuptools/version.pyc,,
|
||||
setuptools/wheel.py,sha256=94uqXsOaKt91d9hW5z6ZppZmNSs_nO66R4uiwhcr4V0,8094
|
||||
setuptools/wheel.py,sha256=zct-SEj5_LoHg6XELt2cVRdulsUENenCdS1ekM7TlZA,8455
|
||||
setuptools/wheel.pyc,,
|
||||
setuptools/windows_support.py,sha256=5GrfqSP2-dLGJoZTq2g6dCKkyQxxa2n5IQiXlJCoYEE,714
|
||||
setuptools/windows_support.pyc,,
|
|
@@ -1,6 +1,6 @@
|
|||
[console_scripts]
|
||||
easy_install = setuptools.command.easy_install:main
|
||||
easy_install-3.6 = setuptools.command.easy_install:main
|
||||
easy_install-3.8 = setuptools.command.easy_install:main
|
||||
|
||||
[distutils.commands]
|
||||
alias = setuptools.command.alias:alias
|
||||
|
@@ -18,13 +18,11 @@ install = setuptools.command.install:install
|
|||
install_egg_info = setuptools.command.install_egg_info:install_egg_info
|
||||
install_lib = setuptools.command.install_lib:install_lib
|
||||
install_scripts = setuptools.command.install_scripts:install_scripts
|
||||
register = setuptools.command.register:register
|
||||
rotate = setuptools.command.rotate:rotate
|
||||
saveopts = setuptools.command.saveopts:saveopts
|
||||
sdist = setuptools.command.sdist:sdist
|
||||
setopt = setuptools.command.setopt:setopt
|
||||
test = setuptools.command.test:test
|
||||
upload = setuptools.command.upload:upload
|
||||
upload_docs = setuptools.command.upload_docs:upload_docs
|
||||
|
||||
[distutils.setup_keywords]
|
||||
|
@@ -60,6 +58,12 @@ namespace_packages.txt = setuptools.command.egg_info:overwrite_arg
|
|||
requires.txt = setuptools.command.egg_info:write_requirements
|
||||
top_level.txt = setuptools.command.egg_info:write_toplevel_names
|
||||
|
||||
[setuptools.finalize_distribution_options]
|
||||
2to3_doctests = setuptools.dist:Distribution._finalize_2to3_doctests
|
||||
features = setuptools.dist:Distribution._finalize_feature_opts
|
||||
keywords = setuptools.dist:Distribution._finalize_setup_keywords
|
||||
parent_finalize = setuptools.dist:_Distribution.finalize_options
|
||||
|
||||
[setuptools.installation]
|
||||
eggsecutable = setuptools.command.easy_install:bootstrap
|
||||
|
|
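Editor's note: the new [setuptools.finalize_distribution_options] group above is the plugin hook introduced by this setuptools version; each entry point names a callable that receives the Distribution instance while finalize_options() runs. As a rough, hypothetical illustration of how a third-party plugin would attach to it (the project, module, and function names below are made up, not part of this commit):

from setuptools import setup

setup(
    name="my-dist-tweaker",          # hypothetical plugin project
    version="0.1",
    py_modules=["my_dist_tweaker"],  # module providing a finalize(dist) callable
    entry_points={
        # setuptools calls every callable registered in this group with the
        # Distribution object, letting plugins adjust options late.
        "setuptools.finalize_distribution_options": [
            "my_tweak = my_dist_tweaker:finalize",
        ],
    },
)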
@@ -129,10 +129,27 @@ if PY3:
|
|||
def _install_setup_requires(attrs):
|
||||
# Note: do not use `setuptools.Distribution` directly, as
|
||||
# our PEP 517 backend patch `distutils.core.Distribution`.
|
||||
dist = distutils.core.Distribution(dict(
|
||||
(k, v) for k, v in attrs.items()
|
||||
if k in ('dependency_links', 'setup_requires')
|
||||
))
|
||||
class MinimalDistribution(distutils.core.Distribution):
|
||||
"""
|
||||
A minimal version of a distribution for supporting the
|
||||
fetch_build_eggs interface.
|
||||
"""
|
||||
def __init__(self, attrs):
|
||||
_incl = 'dependency_links', 'setup_requires'
|
||||
filtered = {
|
||||
k: attrs[k]
|
||||
for k in set(_incl) & set(attrs)
|
||||
}
|
||||
distutils.core.Distribution.__init__(self, filtered)
|
||||
|
||||
def finalize_options(self):
|
||||
"""
|
||||
Disable finalize_options to avoid building the working set.
|
||||
Ref #2158.
|
||||
"""
|
||||
|
||||
dist = MinimalDistribution(attrs)
|
||||
|
||||
# Honor setup.cfg's options.
|
||||
dist.parse_config_files(ignore_option_errors=True)
|
||||
if dist.setup_requires:
|
||||
|
|
|
@@ -0,0 +1,73 @@
|
|||
"""
|
||||
Re-implementation of find_module and get_frozen_object
|
||||
from the deprecated imp module.
|
||||
"""
|
||||
|
||||
import os
|
||||
import importlib.util
|
||||
import importlib.machinery
|
||||
|
||||
from .py34compat import module_from_spec
|
||||
|
||||
|
||||
PY_SOURCE = 1
|
||||
PY_COMPILED = 2
|
||||
C_EXTENSION = 3
|
||||
C_BUILTIN = 6
|
||||
PY_FROZEN = 7
|
||||
|
||||
|
||||
def find_module(module, paths=None):
|
||||
"""Just like 'imp.find_module()', but with package support"""
|
||||
spec = importlib.util.find_spec(module, paths)
|
||||
if spec is None:
|
||||
raise ImportError("Can't find %s" % module)
|
||||
if not spec.has_location and hasattr(spec, 'submodule_search_locations'):
|
||||
spec = importlib.util.spec_from_loader('__init__.py', spec.loader)
|
||||
|
||||
kind = -1
|
||||
file = None
|
||||
static = isinstance(spec.loader, type)
|
||||
if spec.origin == 'frozen' or static and issubclass(
|
||||
spec.loader, importlib.machinery.FrozenImporter):
|
||||
kind = PY_FROZEN
|
||||
path = None  # imp compatibility
|
||||
suffix = mode = ''  # imp compatibility
|
||||
elif spec.origin == 'built-in' or static and issubclass(
|
||||
spec.loader, importlib.machinery.BuiltinImporter):
|
||||
kind = C_BUILTIN
|
||||
path = None  # imp compatibility
|
||||
suffix = mode = ''  # imp compatibility
|
||||
elif spec.has_location:
|
||||
path = spec.origin
|
||||
suffix = os.path.splitext(path)[1]
|
||||
mode = 'r' if suffix in importlib.machinery.SOURCE_SUFFIXES else 'rb'
|
||||
|
||||
if suffix in importlib.machinery.SOURCE_SUFFIXES:
|
||||
kind = PY_SOURCE
|
||||
elif suffix in importlib.machinery.BYTECODE_SUFFIXES:
|
||||
kind = PY_COMPILED
|
||||
elif suffix in importlib.machinery.EXTENSION_SUFFIXES:
|
||||
kind = C_EXTENSION
|
||||
|
||||
if kind in {PY_SOURCE, PY_COMPILED}:
|
||||
file = open(path, mode)
|
||||
else:
|
||||
path = None
|
||||
suffix = mode = ''
|
||||
|
||||
return file, path, (suffix, mode, kind)
|
||||
|
||||
|
||||
def get_frozen_object(module, paths=None):
|
||||
spec = importlib.util.find_spec(module, paths)
|
||||
if not spec:
|
||||
raise ImportError("Can't find %s" % module)
|
||||
return spec.loader.get_code(module)
|
||||
|
||||
|
||||
def get_module(module, paths, info):
|
||||
spec = importlib.util.find_spec(module, paths)
|
||||
if not spec:
|
||||
raise ImportError("Can't find %s" % module)
|
||||
return module_from_spec(spec)
|
|
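Editor's note: the new setuptools/_imp.py above re-creates the return convention of the deprecated imp.find_module() on top of importlib. A minimal usage sketch, not part of the commit; 'json' is just an arbitrary stdlib module used for illustration, and the import assumes this setuptools version is installed:

from setuptools import _imp

# find_module() mirrors imp.find_module(): an open file object (for source or
# bytecode modules), the path, and a (suffix, mode, kind) description tuple.
file, path, (suffix, mode, kind) = _imp.find_module('json')
print(path, suffix, mode, kind == _imp.PY_SOURCE)
if file is not None:
    file.close()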
@@ -0,0 +1,488 @@
|
|||
"""
|
||||
An OrderedSet is a custom MutableSet that remembers its order, so that every
|
||||
entry has an index that can be looked up.
|
||||
|
||||
Based on a recipe originally posted to ActiveState Recipes by Raymond Hettiger,
|
||||
and released under the MIT license.
|
||||
"""
|
||||
import itertools as it
|
||||
from collections import deque
|
||||
|
||||
try:
|
||||
# Python 3
|
||||
from collections.abc import MutableSet, Sequence
|
||||
except ImportError:
|
||||
# Python 2.7
|
||||
from collections import MutableSet, Sequence
|
||||
|
||||
SLICE_ALL = slice(None)
|
||||
__version__ = "3.1"
|
||||
|
||||
|
||||
def is_iterable(obj):
|
||||
"""
|
||||
Are we being asked to look up a list of things, instead of a single thing?
|
||||
We check for the `__iter__` attribute so that this can cover types that
|
||||
don't have to be known by this module, such as NumPy arrays.
|
||||
|
||||
Strings, however, should be considered as atomic values to look up, not
|
||||
iterables. The same goes for tuples, since they are immutable and therefore
|
||||
valid entries.
|
||||
|
||||
We don't need to check for the Python 2 `unicode` type, because it doesn't
|
||||
have an `__iter__` attribute anyway.
|
||||
"""
|
||||
return (
|
||||
hasattr(obj, "__iter__")
|
||||
and not isinstance(obj, str)
|
||||
and not isinstance(obj, tuple)
|
||||
)
|
||||
|
||||
|
||||
class OrderedSet(MutableSet, Sequence):
|
||||
"""
|
||||
An OrderedSet is a custom MutableSet that remembers its order, so that
|
||||
every entry has an index that can be looked up.
|
||||
|
||||
Example:
|
||||
>>> OrderedSet([1, 1, 2, 3, 2])
|
||||
OrderedSet([1, 2, 3])
|
||||
"""
|
||||
|
||||
def __init__(self, iterable=None):
|
||||
self.items = []
|
||||
self.map = {}
|
||||
if iterable is not None:
|
||||
self |= iterable
|
||||
|
||||
def __len__(self):
|
||||
"""
|
||||
Returns the number of unique elements in the ordered set
|
||||
|
||||
Example:
|
||||
>>> len(OrderedSet([]))
|
||||
0
|
||||
>>> len(OrderedSet([1, 2]))
|
||||
2
|
||||
"""
|
||||
return len(self.items)
|
||||
|
||||
def __getitem__(self, index):
|
||||
"""
|
||||
Get the item at a given index.
|
||||
|
||||
If `index` is a slice, you will get back that slice of items, as a
|
||||
new OrderedSet.
|
||||
|
||||
If `index` is a list or a similar iterable, you'll get a list of
|
||||
items corresponding to those indices. This is similar to NumPy's
|
||||
"fancy indexing". The result is not an OrderedSet because you may ask
|
||||
for duplicate indices, and the number of elements returned should be
|
||||
the number of elements asked for.
|
||||
|
||||
Example:
|
||||
>>> oset = OrderedSet([1, 2, 3])
|
||||
>>> oset[1]
|
||||
2
|
||||
"""
|
||||
if isinstance(index, slice) and index == SLICE_ALL:
|
||||
return self.copy()
|
||||
elif is_iterable(index):
|
||||
return [self.items[i] for i in index]
|
||||
elif hasattr(index, "__index__") or isinstance(index, slice):
|
||||
result = self.items[index]
|
||||
if isinstance(result, list):
|
||||
return self.__class__(result)
|
||||
else:
|
||||
return result
|
||||
else:
|
||||
raise TypeError("Don't know how to index an OrderedSet by %r" % index)
|
||||
|
||||
def copy(self):
|
||||
"""
|
||||
Return a shallow copy of this object.
|
||||
|
||||
Example:
|
||||
>>> this = OrderedSet([1, 2, 3])
|
||||
>>> other = this.copy()
|
||||
>>> this == other
|
||||
True
|
||||
>>> this is other
|
||||
False
|
||||
"""
|
||||
return self.__class__(self)
|
||||
|
||||
def __getstate__(self):
|
||||
if len(self) == 0:
|
||||
# The state can't be an empty list.
|
||||
# We need to return a truthy value, or else __setstate__ won't be run.
|
||||
#
|
||||
# This could have been done more gracefully by always putting the state
|
||||
# in a tuple, but this way is backwards- and forwards- compatible with
|
||||
# previous versions of OrderedSet.
|
||||
return (None,)
|
||||
else:
|
||||
return list(self)
|
||||
|
||||
def __setstate__(self, state):
|
||||
if state == (None,):
|
||||
self.__init__([])
|
||||
else:
|
||||
self.__init__(state)
|
||||
|
||||
def __contains__(self, key):
|
||||
"""
|
||||
Test if the item is in this ordered set
|
||||
|
||||
Example:
|
||||
>>> 1 in OrderedSet([1, 3, 2])
|
||||
True
|
||||
>>> 5 in OrderedSet([1, 3, 2])
|
||||
False
|
||||
"""
|
||||
return key in self.map
|
||||
|
||||
def add(self, key):
|
||||
"""
|
||||
Add `key` as an item to this OrderedSet, then return its index.
|
||||
|
||||
If `key` is already in the OrderedSet, return the index it already
|
||||
had.
|
||||
|
||||
Example:
|
||||
>>> oset = OrderedSet()
|
||||
>>> oset.append(3)
|
||||
0
|
||||
>>> print(oset)
|
||||
OrderedSet([3])
|
||||
"""
|
||||
if key not in self.map:
|
||||
self.map[key] = len(self.items)
|
||||
self.items.append(key)
|
||||
return self.map[key]
|
||||
|
||||
append = add
|
||||
|
||||
def update(self, sequence):
|
||||
"""
|
||||
Update the set with the given iterable sequence, then return the index
|
||||
of the last element inserted.
|
||||
|
||||
Example:
|
||||
>>> oset = OrderedSet([1, 2, 3])
|
||||
>>> oset.update([3, 1, 5, 1, 4])
|
||||
4
|
||||
>>> print(oset)
|
||||
OrderedSet([1, 2, 3, 5, 4])
|
||||
"""
|
||||
item_index = None
|
||||
try:
|
||||
for item in sequence:
|
||||
item_index = self.add(item)
|
||||
except TypeError:
|
||||
raise ValueError(
|
||||
"Argument needs to be an iterable, got %s" % type(sequence)
|
||||
)
|
||||
return item_index
|
||||
|
||||
def index(self, key):
|
||||
"""
|
||||
Get the index of a given entry, raising an IndexError if it's not
|
||||
present.
|
||||
|
||||
`key` can be an iterable of entries that is not a string, in which case
|
||||
this returns a list of indices.
|
||||
|
||||
Example:
|
||||
>>> oset = OrderedSet([1, 2, 3])
|
||||
>>> oset.index(2)
|
||||
1
|
||||
"""
|
||||
if is_iterable(key):
|
||||
return [self.index(subkey) for subkey in key]
|
||||
return self.map[key]
|
||||
|
||||
# Provide some compatibility with pd.Index
|
||||
get_loc = index
|
||||
get_indexer = index
|
||||
|
||||
def pop(self):
|
||||
"""
|
||||
Remove and return the last element from the set.
|
||||
|
||||
Raises KeyError if the set is empty.
|
||||
|
||||
Example:
|
||||
>>> oset = OrderedSet([1, 2, 3])
|
||||
>>> oset.pop()
|
||||
3
|
||||
"""
|
||||
if not self.items:
|
||||
raise KeyError("Set is empty")
|
||||
|
||||
elem = self.items[-1]
|
||||
del self.items[-1]
|
||||
del self.map[elem]
|
||||
return elem
|
||||
|
||||
def discard(self, key):
|
||||
"""
|
||||
Remove an element. Do not raise an exception if absent.
|
||||
|
||||
The MutableSet mixin uses this to implement the .remove() method, which
|
||||
*does* raise an error when asked to remove a non-existent item.
|
||||
|
||||
Example:
|
||||
>>> oset = OrderedSet([1, 2, 3])
|
||||
>>> oset.discard(2)
|
||||
>>> print(oset)
|
||||
OrderedSet([1, 3])
|
||||
>>> oset.discard(2)
|
||||
>>> print(oset)
|
||||
OrderedSet([1, 3])
|
||||
"""
|
||||
if key in self:
|
||||
i = self.map[key]
|
||||
del self.items[i]
|
||||
del self.map[key]
|
||||
for k, v in self.map.items():
|
||||
if v >= i:
|
||||
self.map[k] = v - 1
|
||||
|
||||
def clear(self):
|
||||
"""
|
||||
Remove all items from this OrderedSet.
|
||||
"""
|
||||
del self.items[:]
|
||||
self.map.clear()
|
||||
|
||||
def __iter__(self):
|
||||
"""
|
||||
Example:
|
||||
>>> list(iter(OrderedSet([1, 2, 3])))
|
||||
[1, 2, 3]
|
||||
"""
|
||||
return iter(self.items)
|
||||
|
||||
def __reversed__(self):
|
||||
"""
|
||||
Example:
|
||||
>>> list(reversed(OrderedSet([1, 2, 3])))
|
||||
[3, 2, 1]
|
||||
"""
|
||||
return reversed(self.items)
|
||||
|
||||
def __repr__(self):
|
||||
if not self:
|
||||
return "%s()" % (self.__class__.__name__,)
|
||||
return "%s(%r)" % (self.__class__.__name__, list(self))
|
||||
|
||||
def __eq__(self, other):
|
||||
"""
|
||||
Returns true if the containers have the same items. If `other` is a
|
||||
Sequence, then order is checked, otherwise it is ignored.
|
||||
|
||||
Example:
|
||||
>>> oset = OrderedSet([1, 3, 2])
|
||||
>>> oset == [1, 3, 2]
|
||||
True
|
||||
>>> oset == [1, 2, 3]
|
||||
False
|
||||
>>> oset == [2, 3]
|
||||
False
|
||||
>>> oset == OrderedSet([3, 2, 1])
|
||||
False
|
||||
"""
|
||||
# In Python 2 deque is not a Sequence, so treat it as one for
|
||||
# consistent behavior with Python 3.
|
||||
if isinstance(other, (Sequence, deque)):
|
||||
# Check that this OrderedSet contains the same elements, in the
|
||||
# same order, as the other object.
|
||||
return list(self) == list(other)
|
||||
try:
|
||||
other_as_set = set(other)
|
||||
except TypeError:
|
||||
# If `other` can't be converted into a set, it's not equal.
|
||||
return False
|
||||
else:
|
||||
return set(self) == other_as_set
|
||||
|
||||
def union(self, *sets):
|
||||
"""
|
||||
Combines all unique items.
|
||||
Each item's order is defined by its first appearance.
|
||||
|
||||
Example:
|
||||
>>> oset = OrderedSet.union(OrderedSet([3, 1, 4, 1, 5]), [1, 3], [2, 0])
|
||||
>>> print(oset)
|
||||
OrderedSet([3, 1, 4, 5, 2, 0])
|
||||
>>> oset.union([8, 9])
|
||||
OrderedSet([3, 1, 4, 5, 2, 0, 8, 9])
|
||||
>>> oset | {10}
|
||||
OrderedSet([3, 1, 4, 5, 2, 0, 10])
|
||||
"""
|
||||
cls = self.__class__ if isinstance(self, OrderedSet) else OrderedSet
|
||||
containers = map(list, it.chain([self], sets))
|
||||
items = it.chain.from_iterable(containers)
|
||||
return cls(items)
|
||||
|
||||
def __and__(self, other):
|
||||
# the parent implementation of this is backwards
|
||||
return self.intersection(other)
|
||||
|
||||
def intersection(self, *sets):
|
||||
"""
|
||||
Returns elements in common between all sets. Order is defined only
|
||||
by the first set.
|
||||
|
||||
Example:
|
||||
>>> oset = OrderedSet.intersection(OrderedSet([0, 1, 2, 3]), [1, 2, 3])
|
||||
>>> print(oset)
|
||||
OrderedSet([1, 2, 3])
|
||||
>>> oset.intersection([2, 4, 5], [1, 2, 3, 4])
|
||||
OrderedSet([2])
|
||||
>>> oset.intersection()
|
||||
OrderedSet([1, 2, 3])
|
||||
"""
|
||||
cls = self.__class__ if isinstance(self, OrderedSet) else OrderedSet
|
||||
if sets:
|
||||
common = set.intersection(*map(set, sets))
|
||||
items = (item for item in self if item in common)
|
||||
else:
|
||||
items = self
|
||||
return cls(items)
|
||||
|
||||
def difference(self, *sets):
|
||||
"""
|
||||
Returns all elements that are in this set but not the others.
|
||||
|
||||
Example:
|
||||
>>> OrderedSet([1, 2, 3]).difference(OrderedSet([2]))
|
||||
OrderedSet([1, 3])
|
||||
>>> OrderedSet([1, 2, 3]).difference(OrderedSet([2]), OrderedSet([3]))
|
||||
OrderedSet([1])
|
||||
>>> OrderedSet([1, 2, 3]) - OrderedSet([2])
|
||||
OrderedSet([1, 3])
|
||||
>>> OrderedSet([1, 2, 3]).difference()
|
||||
OrderedSet([1, 2, 3])
|
||||
"""
|
||||
cls = self.__class__
|
||||
if sets:
|
||||
other = set.union(*map(set, sets))
|
||||
items = (item for item in self if item not in other)
|
||||
else:
|
||||
items = self
|
||||
return cls(items)
|
||||
|
||||
def issubset(self, other):
|
||||
"""
|
||||
Report whether another set contains this set.
|
||||
|
||||
Example:
|
||||
>>> OrderedSet([1, 2, 3]).issubset({1, 2})
|
||||
False
|
||||
>>> OrderedSet([1, 2, 3]).issubset({1, 2, 3, 4})
|
||||
True
|
||||
>>> OrderedSet([1, 2, 3]).issubset({1, 4, 3, 5})
|
||||
False
|
||||
"""
|
||||
if len(self) > len(other): # Fast check for obvious cases
|
||||
return False
|
||||
return all(item in other for item in self)
|
||||
|
||||
def issuperset(self, other):
|
||||
"""
|
||||
Report whether this set contains another set.
|
||||
|
||||
Example:
|
||||
>>> OrderedSet([1, 2]).issuperset([1, 2, 3])
|
||||
False
|
||||
>>> OrderedSet([1, 2, 3, 4]).issuperset({1, 2, 3})
|
||||
True
|
||||
>>> OrderedSet([1, 4, 3, 5]).issuperset({1, 2, 3})
|
||||
False
|
||||
"""
|
||||
if len(self) < len(other): # Fast check for obvious cases
|
||||
return False
|
||||
return all(item in self for item in other)
|
||||
|
||||
def symmetric_difference(self, other):
|
||||
"""
|
||||
Return the symmetric difference of two OrderedSets as a new set.
|
||||
That is, the new set will contain all elements that are in exactly
|
||||
one of the sets.
|
||||
|
||||
Their order will be preserved, with elements from `self` preceding
|
||||
elements from `other`.
|
||||
|
||||
Example:
|
||||
>>> this = OrderedSet([1, 4, 3, 5, 7])
|
||||
>>> other = OrderedSet([9, 7, 1, 3, 2])
|
||||
>>> this.symmetric_difference(other)
|
||||
OrderedSet([4, 5, 9, 2])
|
||||
"""
|
||||
cls = self.__class__ if isinstance(self, OrderedSet) else OrderedSet
|
||||
diff1 = cls(self).difference(other)
|
||||
diff2 = cls(other).difference(self)
|
||||
return diff1.union(diff2)
|
||||
|
||||
def _update_items(self, items):
|
||||
"""
|
||||
Replace the 'items' list of this OrderedSet with a new one, updating
|
||||
self.map accordingly.
|
||||
"""
|
||||
self.items = items
|
||||
self.map = {item: idx for (idx, item) in enumerate(items)}
|
||||
|
||||
def difference_update(self, *sets):
|
||||
"""
|
||||
Update this OrderedSet to remove items from one or more other sets.
|
||||
|
||||
Example:
|
||||
>>> this = OrderedSet([1, 2, 3])
|
||||
>>> this.difference_update(OrderedSet([2, 4]))
|
||||
>>> print(this)
|
||||
OrderedSet([1, 3])
|
||||
|
||||
>>> this = OrderedSet([1, 2, 3, 4, 5])
|
||||
>>> this.difference_update(OrderedSet([2, 4]), OrderedSet([1, 4, 6]))
|
||||
>>> print(this)
|
||||
OrderedSet([3, 5])
|
||||
"""
|
||||
items_to_remove = set()
|
||||
for other in sets:
|
||||
items_to_remove |= set(other)
|
||||
self._update_items([item for item in self.items if item not in items_to_remove])
|
||||
|
||||
def intersection_update(self, other):
|
||||
"""
|
||||
Update this OrderedSet to keep only items in another set, preserving
|
||||
their order in this set.
|
||||
|
||||
Example:
|
||||
>>> this = OrderedSet([1, 4, 3, 5, 7])
|
||||
>>> other = OrderedSet([9, 7, 1, 3, 2])
|
||||
>>> this.intersection_update(other)
|
||||
>>> print(this)
|
||||
OrderedSet([1, 3, 7])
|
||||
"""
|
||||
other = set(other)
|
||||
self._update_items([item for item in self.items if item in other])
|
||||
|
||||
def symmetric_difference_update(self, other):
|
||||
"""
|
||||
Update this OrderedSet to remove items from another set, then
|
||||
add items from the other set that were not present in this set.
|
||||
|
||||
Example:
|
||||
>>> this = OrderedSet([1, 4, 3, 5, 7])
|
||||
>>> other = OrderedSet([9, 7, 1, 3, 2])
|
||||
>>> this.symmetric_difference_update(other)
|
||||
>>> print(this)
|
||||
OrderedSet([4, 5, 9, 2])
|
||||
"""
|
||||
items_to_add = [item for item in other if item not in self]
|
||||
items_to_remove = set(other)
|
||||
self._update_items(
|
||||
[item for item in self.items if item not in items_to_remove] + items_to_add
|
||||
)
|
|
@@ -4,18 +4,24 @@
|
|||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
__all__ = [
|
||||
"__title__", "__summary__", "__uri__", "__version__", "__author__",
|
||||
"__email__", "__license__", "__copyright__",
|
||||
"__title__",
|
||||
"__summary__",
|
||||
"__uri__",
|
||||
"__version__",
|
||||
"__author__",
|
||||
"__email__",
|
||||
"__license__",
|
||||
"__copyright__",
|
||||
]
|
||||
|
||||
__title__ = "packaging"
|
||||
__summary__ = "Core utilities for Python packages"
|
||||
__uri__ = "https://github.com/pypa/packaging"
|
||||
|
||||
__version__ = "16.8"
|
||||
__version__ = "19.2"
|
||||
|
||||
__author__ = "Donald Stufft and individual contributors"
|
||||
__email__ = "donald@stufft.io"
|
||||
|
||||
__license__ = "BSD or Apache License, Version 2.0"
|
||||
__copyright__ = "Copyright 2014-2016 %s" % __author__
|
||||
__copyright__ = "Copyright 2014-2019 %s" % __author__
|
||||
|
|
|
@@ -4,11 +4,23 @@
|
|||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
from .__about__ import (
|
||||
__author__, __copyright__, __email__, __license__, __summary__, __title__,
|
||||
__uri__, __version__
|
||||
__author__,
|
||||
__copyright__,
|
||||
__email__,
|
||||
__license__,
|
||||
__summary__,
|
||||
__title__,
|
||||
__uri__,
|
||||
__version__,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"__title__", "__summary__", "__uri__", "__version__", "__author__",
|
||||
"__email__", "__license__", "__copyright__",
|
||||
"__title__",
|
||||
"__summary__",
|
||||
"__uri__",
|
||||
"__version__",
|
||||
"__author__",
|
||||
"__email__",
|
||||
"__license__",
|
||||
"__copyright__",
|
||||
]
|
||||
|
|
|
@@ -12,9 +12,9 @@ PY3 = sys.version_info[0] == 3
|
|||
# flake8: noqa
|
||||
|
||||
if PY3:
|
||||
string_types = str,
|
||||
string_types = (str,)
|
||||
else:
|
||||
string_types = basestring,
|
||||
string_types = (basestring,)
|
||||
|
||||
|
||||
def with_metaclass(meta, *bases):
|
||||
|
@@ -27,4 +27,5 @@ def with_metaclass(meta, *bases):
|
|||
class metaclass(meta):
|
||||
def __new__(cls, name, this_bases, d):
|
||||
return meta(name, bases, d)
|
||||
return type.__new__(metaclass, 'temporary_class', (), {})
|
||||
|
||||
return type.__new__(metaclass, "temporary_class", (), {})
|
||||
|
|
|
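Editor's note: the with_metaclass() helper shown above (only reformatted by this commit) evaluates a class body once under the real metaclass so the same code runs on Python 2 and 3; the vendored specifiers module uses it for BaseSpecifier. A minimal sketch of the pattern, assuming the helper is imported from the vendored packaging._compat module:

import abc
from setuptools._vendor.packaging._compat import with_metaclass

class Base(with_metaclass(abc.ABCMeta, object)):
    # Equivalent to `class Base(object, metaclass=abc.ABCMeta)` on Python 3.
    @abc.abstractmethod
    def run(self):
        """Subclasses must implement run()."""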
@@ -5,7 +5,6 @@ from __future__ import absolute_import, division, print_function
|
|||
|
||||
|
||||
class Infinity(object):
|
||||
|
||||
def __repr__(self):
|
||||
return "Infinity"
|
||||
|
||||
|
@@ -33,11 +32,11 @@ class Infinity(object):
|
|||
def __neg__(self):
|
||||
return NegativeInfinity
|
||||
|
||||
|
||||
Infinity = Infinity()
|
||||
|
||||
|
||||
class NegativeInfinity(object):
|
||||
|
||||
def __repr__(self):
|
||||
return "-Infinity"
|
||||
|
||||
|
@@ -65,4 +64,5 @@ class NegativeInfinity(object):
|
|||
def __neg__(self):
|
||||
return Infinity
|
||||
|
||||
|
||||
NegativeInfinity = NegativeInfinity()
|
||||
|
|
|
@@ -17,8 +17,11 @@ from .specifiers import Specifier, InvalidSpecifier
|
|||
|
||||
|
||||
__all__ = [
|
||||
"InvalidMarker", "UndefinedComparison", "UndefinedEnvironmentName",
|
||||
"Marker", "default_environment",
|
||||
"InvalidMarker",
|
||||
"UndefinedComparison",
|
||||
"UndefinedEnvironmentName",
|
||||
"Marker",
|
||||
"default_environment",
|
||||
]
|
||||
|
||||
|
||||
|
@@ -42,7 +45,6 @@ class UndefinedEnvironmentName(ValueError):
|
|||
|
||||
|
||||
class Node(object):
|
||||
|
||||
def __init__(self, value):
|
||||
self.value = value
|
||||
|
||||
|
@@ -57,62 +59,52 @@ class Node(object):
|
|||
|
||||
|
||||
class Variable(Node):
|
||||
|
||||
def serialize(self):
|
||||
return str(self)
|
||||
|
||||
|
||||
class Value(Node):
|
||||
|
||||
def serialize(self):
|
||||
return '"{0}"'.format(self)
|
||||
|
||||
|
||||
class Op(Node):
|
||||
|
||||
def serialize(self):
|
||||
return str(self)
|
||||
|
||||
|
||||
VARIABLE = (
|
||||
L("implementation_version") |
|
||||
L("platform_python_implementation") |
|
||||
L("implementation_name") |
|
||||
L("python_full_version") |
|
||||
L("platform_release") |
|
||||
L("platform_version") |
|
||||
L("platform_machine") |
|
||||
L("platform_system") |
|
||||
L("python_version") |
|
||||
L("sys_platform") |
|
||||
L("os_name") |
|
||||
L("os.name") | # PEP-345
|
||||
L("sys.platform") | # PEP-345
|
||||
L("platform.version") | # PEP-345
|
||||
L("platform.machine") | # PEP-345
|
||||
L("platform.python_implementation") | # PEP-345
|
||||
L("python_implementation") | # undocumented setuptools legacy
|
||||
L("extra")
|
||||
L("implementation_version")
|
||||
| L("platform_python_implementation")
|
||||
| L("implementation_name")
|
||||
| L("python_full_version")
|
||||
| L("platform_release")
|
||||
| L("platform_version")
|
||||
| L("platform_machine")
|
||||
| L("platform_system")
|
||||
| L("python_version")
|
||||
| L("sys_platform")
|
||||
| L("os_name")
|
||||
| L("os.name")
|
||||
| L("sys.platform") # PEP-345
|
||||
| L("platform.version") # PEP-345
|
||||
| L("platform.machine") # PEP-345
|
||||
| L("platform.python_implementation") # PEP-345
|
||||
| L("python_implementation") # PEP-345
|
||||
| L("extra") # undocumented setuptools legacy
|
||||
)
|
||||
ALIASES = {
|
||||
'os.name': 'os_name',
|
||||
'sys.platform': 'sys_platform',
|
||||
'platform.version': 'platform_version',
|
||||
'platform.machine': 'platform_machine',
|
||||
'platform.python_implementation': 'platform_python_implementation',
|
||||
'python_implementation': 'platform_python_implementation'
|
||||
"os.name": "os_name",
|
||||
"sys.platform": "sys_platform",
|
||||
"platform.version": "platform_version",
|
||||
"platform.machine": "platform_machine",
|
||||
"platform.python_implementation": "platform_python_implementation",
|
||||
"python_implementation": "platform_python_implementation",
|
||||
}
|
||||
VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0])))
|
||||
|
||||
VERSION_CMP = (
|
||||
L("===") |
|
||||
L("==") |
|
||||
L(">=") |
|
||||
L("<=") |
|
||||
L("!=") |
|
||||
L("~=") |
|
||||
L(">") |
|
||||
L("<")
|
||||
L("===") | L("==") | L(">=") | L("<=") | L("!=") | L("~=") | L(">") | L("<")
|
||||
)
|
||||
|
||||
MARKER_OP = VERSION_CMP | L("not in") | L("in")
|
||||
|
@@ -152,8 +144,11 @@ def _format_marker(marker, first=True):
|
|||
# where the single item is itself its own list. In that case we want to skip
|
||||
# the rest of this function so that we don't get extraneous () on the
|
||||
# outside.
|
||||
if (isinstance(marker, list) and len(marker) == 1 and
|
||||
isinstance(marker[0], (list, tuple))):
|
||||
if (
|
||||
isinstance(marker, list)
|
||||
and len(marker) == 1
|
||||
and isinstance(marker[0], (list, tuple))
|
||||
):
|
||||
return _format_marker(marker[0])
|
||||
|
||||
if isinstance(marker, list):
|
||||
|
@@ -239,20 +234,20 @@ def _evaluate_markers(markers, environment):
|
|||
|
||||
|
||||
def format_full_version(info):
|
||||
version = '{0.major}.{0.minor}.{0.micro}'.format(info)
|
||||
version = "{0.major}.{0.minor}.{0.micro}".format(info)
|
||||
kind = info.releaselevel
|
||||
if kind != 'final':
|
||||
if kind != "final":
|
||||
version += kind[0] + str(info.serial)
|
||||
return version
|
||||
|
||||
|
||||
def default_environment():
|
||||
if hasattr(sys, 'implementation'):
|
||||
if hasattr(sys, "implementation"):
|
||||
iver = format_full_version(sys.implementation.version)
|
||||
implementation_name = sys.implementation.name
|
||||
else:
|
||||
iver = '0'
|
||||
implementation_name = ''
|
||||
iver = "0"
|
||||
implementation_name = ""
|
||||
|
||||
return {
|
||||
"implementation_name": implementation_name,
|
||||
|
@@ -264,19 +259,19 @@ def default_environment():
|
|||
"platform_version": platform.version(),
|
||||
"python_full_version": platform.python_version(),
|
||||
"platform_python_implementation": platform.python_implementation(),
|
||||
"python_version": platform.python_version()[:3],
|
||||
"python_version": ".".join(platform.python_version_tuple()[:2]),
|
||||
"sys_platform": sys.platform,
|
||||
}
|
||||
|
||||
|
||||
class Marker(object):
|
||||
|
||||
def __init__(self, marker):
|
||||
try:
|
||||
self._markers = _coerce_parse_result(MARKER.parseString(marker))
|
||||
except ParseException as e:
|
||||
err_str = "Invalid marker: {0!r}, parse error at {1!r}".format(
|
||||
marker, marker[e.loc:e.loc + 8])
|
||||
marker, marker[e.loc : e.loc + 8]
|
||||
)
|
||||
raise InvalidMarker(err_str)
|
||||
|
||||
def __str__(self):
|
||||
|
|
|
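Editor's note: beyond the mechanical reformatting, one behavioural fix in the markers.py hunks above is default_environment()'s python_version: slicing platform.python_version()[:3] silently truncates once minor versions reach two digits, which the new python_version_tuple()-based code avoids. A quick illustration (plain Python, not taken from the diff):

version = "3.10.4"                        # e.g. what platform.python_version() returns on Python 3.10
print(version[:3])                        # "3.1"  -- the old, wrong value
print(".".join(version.split(".")[:2]))   # "3.10" -- what the new code computes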
@@ -38,8 +38,8 @@ IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END))
|
|||
NAME = IDENTIFIER("name")
|
||||
EXTRA = IDENTIFIER
|
||||
|
||||
URI = Regex(r'[^ ]+')("url")
|
||||
URL = (AT + URI)
|
||||
URI = Regex(r"[^ ]+")("url")
|
||||
URL = AT + URI
|
||||
|
||||
EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA)
|
||||
EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras")
|
||||
|
@@ -48,28 +48,31 @@ VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE)
|
|||
VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE)
|
||||
|
||||
VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY
|
||||
VERSION_MANY = Combine(VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE),
|
||||
joinString=",", adjacent=False)("_raw_spec")
|
||||
VERSION_MANY = Combine(
|
||||
VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE), joinString=",", adjacent=False
|
||||
)("_raw_spec")
|
||||
_VERSION_SPEC = Optional(((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY))
|
||||
_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or '')
|
||||
_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or "")
|
||||
|
||||
VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier")
|
||||
VERSION_SPEC.setParseAction(lambda s, l, t: t[1])
|
||||
|
||||
MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker")
|
||||
MARKER_EXPR.setParseAction(
|
||||
lambda s, l, t: Marker(s[t._original_start:t._original_end])
|
||||
lambda s, l, t: Marker(s[t._original_start : t._original_end])
|
||||
)
|
||||
MARKER_SEPERATOR = SEMICOLON
|
||||
MARKER = MARKER_SEPERATOR + MARKER_EXPR
|
||||
MARKER_SEPARATOR = SEMICOLON
|
||||
MARKER = MARKER_SEPARATOR + MARKER_EXPR
|
||||
|
||||
VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER)
|
||||
URL_AND_MARKER = URL + Optional(MARKER)
|
||||
|
||||
NAMED_REQUIREMENT = \
|
||||
NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER)
|
||||
NAMED_REQUIREMENT = NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER)
|
||||
|
||||
REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd
|
||||
# setuptools.extern.pyparsing isn't thread safe during initialization, so we do it eagerly, see
|
||||
# issue #104
|
||||
REQUIREMENT.parseString("x[]")
|
||||
|
||||
|
||||
class Requirement(object):
|
||||
|
@@ -90,15 +93,21 @@ class Requirement(object):
|
|||
req = REQUIREMENT.parseString(requirement_string)
|
||||
except ParseException as e:
|
||||
raise InvalidRequirement(
|
||||
"Invalid requirement, parse error at \"{0!r}\"".format(
|
||||
requirement_string[e.loc:e.loc + 8]))
|
||||
'Parse error at "{0!r}": {1}'.format(
|
||||
requirement_string[e.loc : e.loc + 8], e.msg
|
||||
)
|
||||
)
|
||||
|
||||
self.name = req.name
|
||||
if req.url:
|
||||
parsed_url = urlparse.urlparse(req.url)
|
||||
if not (parsed_url.scheme and parsed_url.netloc) or (
|
||||
not parsed_url.scheme and not parsed_url.netloc):
|
||||
raise InvalidRequirement("Invalid URL given")
|
||||
if parsed_url.scheme == "file":
|
||||
if urlparse.urlunparse(parsed_url) != req.url:
|
||||
raise InvalidRequirement("Invalid URL given")
|
||||
elif not (parsed_url.scheme and parsed_url.netloc) or (
|
||||
not parsed_url.scheme and not parsed_url.netloc
|
||||
):
|
||||
raise InvalidRequirement("Invalid URL: {0}".format(req.url))
|
||||
self.url = req.url
|
||||
else:
|
||||
self.url = None
|
||||
|
@@ -117,6 +126,8 @@ class Requirement(object):
|
|||
|
||||
if self.url:
|
||||
parts.append("@ {0}".format(self.url))
|
||||
if self.marker:
|
||||
parts.append(" ")
|
||||
|
||||
if self.marker:
|
||||
parts.append("; {0}".format(self.marker))
|
||||
|
|
|
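Editor's note: the requirements.py hunks above mainly tighten URL validation for direct references and improve the parse-error message. For context, a short sketch of how the vendored Requirement class is used, assuming it is reachable through setuptools.extern, the usual alias for the vendored packaging:

from setuptools.extern.packaging.requirements import Requirement

req = Requirement('requests[security]>=2.8.1,==2.8.*; python_version < "2.7"')
print(req.name)              # requests
print(sorted(req.extras))    # ['security']
print(str(req.specifier))    # ==2.8.*,>=2.8.1  (specifiers render sorted)
print(req.marker)            # python_version < "2.7"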
@@ -19,7 +19,6 @@ class InvalidSpecifier(ValueError):
|
|||
|
||||
|
||||
class BaseSpecifier(with_metaclass(abc.ABCMeta, object)):
|
||||
|
||||
@abc.abstractmethod
|
||||
def __str__(self):
|
||||
"""
|
||||
|
@@ -84,10 +83,7 @@ class _IndividualSpecifier(BaseSpecifier):
|
|||
if not match:
|
||||
raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec))
|
||||
|
||||
self._spec = (
|
||||
match.group("operator").strip(),
|
||||
match.group("version").strip(),
|
||||
)
|
||||
self._spec = (match.group("operator").strip(), match.group("version").strip())
|
||||
|
||||
# Store whether or not this Specifier should accept prereleases
|
||||
self._prereleases = prereleases
|
||||
|
@@ -99,11 +95,7 @@ class _IndividualSpecifier(BaseSpecifier):
|
|||
else ""
|
||||
)
|
||||
|
||||
return "<{0}({1!r}{2})>".format(
|
||||
self.__class__.__name__,
|
||||
str(self),
|
||||
pre,
|
||||
)
|
||||
return "<{0}({1!r}{2})>".format(self.__class__.__name__, str(self), pre)
|
||||
|
||||
def __str__(self):
|
||||
return "{0}{1}".format(*self._spec)
|
||||
|
@@ -194,11 +186,12 @@ class _IndividualSpecifier(BaseSpecifier):
|
|||
# If our version is a prerelease, and we were not set to allow
|
||||
# prereleases, then we'll store it for later in case nothing
|
||||
# else matches this specifier.
|
||||
if (parsed_version.is_prerelease and not
|
||||
(prereleases or self.prereleases)):
|
||||
if parsed_version.is_prerelease and not (
|
||||
prereleases or self.prereleases
|
||||
):
|
||||
found_prereleases.append(version)
|
||||
# Either this is not a prerelease, or we should have been
|
||||
# accepting prereleases from the begining.
|
||||
# accepting prereleases from the beginning.
|
||||
else:
|
||||
yielded = True
|
||||
yield version
|
||||
|
@@ -213,8 +206,7 @@ class _IndividualSpecifier(BaseSpecifier):
|
|||
|
||||
class LegacySpecifier(_IndividualSpecifier):
|
||||
|
||||
_regex_str = (
|
||||
r"""
|
||||
_regex_str = r"""
|
||||
(?P<operator>(==|!=|<=|>=|<|>))
|
||||
\s*
|
||||
(?P<version>
|
||||
|
@@ -225,10 +217,8 @@ class LegacySpecifier(_IndividualSpecifier):
|
|||
# them, and a comma since it's a version separator.
|
||||
)
|
||||
"""
|
||||
)
|
||||
|
||||
_regex = re.compile(
|
||||
r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
|
||||
_regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
|
||||
|
||||
_operators = {
|
||||
"==": "equal",
|
||||
|
@@ -269,13 +259,13 @@ def _require_version_compare(fn):
|
|||
if not isinstance(prospective, Version):
|
||||
return False
|
||||
return fn(self, prospective, spec)
|
||||
|
||||
return wrapped
|
||||
|
||||
|
||||
class Specifier(_IndividualSpecifier):
|
||||
|
||||
_regex_str = (
|
||||
r"""
|
||||
_regex_str = r"""
|
||||
(?P<operator>(~=|==|!=|<=|>=|<|>|===))
|
||||
(?P<version>
|
||||
(?:
|
||||
|
@@ -367,10 +357,8 @@ class Specifier(_IndividualSpecifier):
|
|||
)
|
||||
)
|
||||
"""
|
||||
)
|
||||
|
||||
_regex = re.compile(
|
||||
r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
|
||||
_regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
|
||||
|
||||
_operators = {
|
||||
"~=": "compatible",
|
||||
|
@@ -397,8 +385,7 @@ class Specifier(_IndividualSpecifier):
|
|||
prefix = ".".join(
|
||||
list(
|
||||
itertools.takewhile(
|
||||
lambda x: (not x.startswith("post") and not
|
||||
x.startswith("dev")),
|
||||
lambda x: (not x.startswith("post") and not x.startswith("dev")),
|
||||
_version_split(spec),
|
||||
)
|
||||
)[:-1]
|
||||
|
@@ -407,8 +394,9 @@ class Specifier(_IndividualSpecifier):
|
|||
# Add the prefix notation to the end of our string
|
||||
prefix += ".*"
|
||||
|
||||
return (self._get_operator(">=")(prospective, spec) and
|
||||
self._get_operator("==")(prospective, prefix))
|
||||
return self._get_operator(">=")(prospective, spec) and self._get_operator("==")(
|
||||
prospective, prefix
|
||||
)
|
||||
|
||||
@_require_version_compare
|
||||
def _compare_equal(self, prospective, spec):
|
||||
|
@@ -428,7 +416,7 @@ class Specifier(_IndividualSpecifier):
|
|||
# Shorten the prospective version to be the same length as the spec
|
||||
# so that we can determine if the specifier is a prefix of the
|
||||
# prospective version or not.
|
||||
prospective = prospective[:len(spec)]
|
||||
prospective = prospective[: len(spec)]
|
||||
|
||||
# Pad out our two sides with zeros so that they both equal the same
|
||||
# length.
|
||||
|
@@ -503,7 +491,7 @@ class Specifier(_IndividualSpecifier):
|
|||
return False
|
||||
|
||||
# Ensure that we do not allow a local version of the version mentioned
|
||||
# in the specifier, which is techincally greater than, to match.
|
||||
# in the specifier, which is technically greater than, to match.
|
||||
if prospective.local is not None:
|
||||
if Version(prospective.base_version) == Version(spec.base_version):
|
||||
return False
|
||||
|
@@ -567,27 +555,17 @@ def _pad_version(left, right):
|
|||
right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))
|
||||
|
||||
# Get the rest of our versions
|
||||
left_split.append(left[len(left_split[0]):])
|
||||
right_split.append(right[len(right_split[0]):])
|
||||
left_split.append(left[len(left_split[0]) :])
|
||||
right_split.append(right[len(right_split[0]) :])
|
||||
|
||||
# Insert our padding
|
||||
left_split.insert(
|
||||
1,
|
||||
["0"] * max(0, len(right_split[0]) - len(left_split[0])),
|
||||
)
|
||||
right_split.insert(
|
||||
1,
|
||||
["0"] * max(0, len(left_split[0]) - len(right_split[0])),
|
||||
)
|
||||
left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0])))
|
||||
right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0])))
|
||||
|
||||
return (
|
||||
list(itertools.chain(*left_split)),
|
||||
list(itertools.chain(*right_split)),
|
||||
)
|
||||
return (list(itertools.chain(*left_split)), list(itertools.chain(*right_split)))
|
||||
|
||||
|
||||
class SpecifierSet(BaseSpecifier):
|
||||
|
||||
def __init__(self, specifiers="", prereleases=None):
|
||||
# Split on , to break each indidivual specifier into it's own item, and
|
||||
# strip each item to remove leading/trailing whitespace.
|
||||
|
@@ -721,10 +699,7 @@ class SpecifierSet(BaseSpecifier):
|
|||
# given version is contained within all of them.
|
||||
# Note: This use of all() here means that an empty set of specifiers
|
||||
# will always return True, this is an explicit design decision.
|
||||
return all(
|
||||
s.contains(item, prereleases=prereleases)
|
||||
for s in self._specs
|
||||
)
|
||||
return all(s.contains(item, prereleases=prereleases) for s in self._specs)
|
||||
|
||||
def filter(self, iterable, prereleases=None):
|
||||
# Determine if we're forcing a prerelease or not, if we're not forcing
|
||||
|
|
|
@@ -0,0 +1,404 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import distutils.util
|
||||
|
||||
try:
|
||||
from importlib.machinery import EXTENSION_SUFFIXES
|
||||
except ImportError: # pragma: no cover
|
||||
import imp
|
||||
|
||||
EXTENSION_SUFFIXES = [x[0] for x in imp.get_suffixes()]
|
||||
del imp
|
||||
import platform
|
||||
import re
|
||||
import sys
|
||||
import sysconfig
|
||||
import warnings
|
||||
|
||||
|
||||
INTERPRETER_SHORT_NAMES = {
|
||||
"python": "py", # Generic.
|
||||
"cpython": "cp",
|
||||
"pypy": "pp",
|
||||
"ironpython": "ip",
|
||||
"jython": "jy",
|
||||
}
|
||||
|
||||
|
||||
_32_BIT_INTERPRETER = sys.maxsize <= 2 ** 32
|
||||
|
||||
|
||||
class Tag(object):
|
||||
|
||||
__slots__ = ["_interpreter", "_abi", "_platform"]
|
||||
|
||||
def __init__(self, interpreter, abi, platform):
|
||||
self._interpreter = interpreter.lower()
|
||||
self._abi = abi.lower()
|
||||
self._platform = platform.lower()
|
||||
|
||||
@property
|
||||
def interpreter(self):
|
||||
return self._interpreter
|
||||
|
||||
@property
|
||||
def abi(self):
|
||||
return self._abi
|
||||
|
||||
@property
|
||||
def platform(self):
|
||||
return self._platform
|
||||
|
||||
def __eq__(self, other):
|
||||
return (
|
||||
(self.platform == other.platform)
|
||||
and (self.abi == other.abi)
|
||||
and (self.interpreter == other.interpreter)
|
||||
)
|
||||
|
||||
def __hash__(self):
|
||||
return hash((self._interpreter, self._abi, self._platform))
|
||||
|
||||
def __str__(self):
|
||||
return "{}-{}-{}".format(self._interpreter, self._abi, self._platform)
|
||||
|
||||
def __repr__(self):
|
||||
return "<{self} @ {self_id}>".format(self=self, self_id=id(self))
|
||||
|
||||
|
||||
def parse_tag(tag):
|
||||
tags = set()
|
||||
interpreters, abis, platforms = tag.split("-")
|
||||
for interpreter in interpreters.split("."):
|
||||
for abi in abis.split("."):
|
||||
for platform_ in platforms.split("."):
|
||||
tags.add(Tag(interpreter, abi, platform_))
|
||||
return frozenset(tags)
|
||||
|
||||
|
||||
def _normalize_string(string):
|
||||
return string.replace(".", "_").replace("-", "_")
|
||||
|
||||
|
||||
def _cpython_interpreter(py_version):
|
||||
# TODO: Is using py_version_nodot for interpreter version critical?
|
||||
return "cp{major}{minor}".format(major=py_version[0], minor=py_version[1])
|
||||
|
||||
|
||||
def _cpython_abis(py_version):
|
||||
abis = []
|
||||
version = "{}{}".format(*py_version[:2])
|
||||
debug = pymalloc = ucs4 = ""
|
||||
with_debug = sysconfig.get_config_var("Py_DEBUG")
|
||||
has_refcount = hasattr(sys, "gettotalrefcount")
|
||||
# Windows doesn't set Py_DEBUG, so checking for support of debug-compiled
|
||||
# extension modules is the best option.
|
||||
# https://github.com/pypa/pip/issues/3383#issuecomment-173267692
|
||||
has_ext = "_d.pyd" in EXTENSION_SUFFIXES
|
||||
if with_debug or (with_debug is None and (has_refcount or has_ext)):
|
||||
debug = "d"
|
||||
if py_version < (3, 8):
|
||||
with_pymalloc = sysconfig.get_config_var("WITH_PYMALLOC")
|
||||
if with_pymalloc or with_pymalloc is None:
|
||||
pymalloc = "m"
|
||||
if py_version < (3, 3):
|
||||
unicode_size = sysconfig.get_config_var("Py_UNICODE_SIZE")
|
||||
if unicode_size == 4 or (
|
||||
unicode_size is None and sys.maxunicode == 0x10FFFF
|
||||
):
|
||||
ucs4 = "u"
|
||||
elif debug:
|
||||
# Debug builds can also load "normal" extension modules.
|
||||
# We can also assume no UCS-4 or pymalloc requirement.
|
||||
abis.append("cp{version}".format(version=version))
|
||||
abis.insert(
|
||||
0,
|
||||
"cp{version}{debug}{pymalloc}{ucs4}".format(
|
||||
version=version, debug=debug, pymalloc=pymalloc, ucs4=ucs4
|
||||
),
|
||||
)
|
||||
return abis
|
||||
|
||||
|
||||
def _cpython_tags(py_version, interpreter, abis, platforms):
|
||||
for abi in abis:
|
||||
for platform_ in platforms:
|
||||
yield Tag(interpreter, abi, platform_)
|
||||
for tag in (Tag(interpreter, "abi3", platform_) for platform_ in platforms):
|
||||
yield tag
|
||||
for tag in (Tag(interpreter, "none", platform_) for platform_ in platforms):
|
||||
yield tag
|
||||
# PEP 384 was first implemented in Python 3.2.
|
||||
for minor_version in range(py_version[1] - 1, 1, -1):
|
||||
for platform_ in platforms:
|
||||
interpreter = "cp{major}{minor}".format(
|
||||
major=py_version[0], minor=minor_version
|
||||
)
|
||||
yield Tag(interpreter, "abi3", platform_)
|
||||
|
||||
|
||||
def _pypy_interpreter():
|
||||
return "pp{py_major}{pypy_major}{pypy_minor}".format(
|
||||
py_major=sys.version_info[0],
|
||||
pypy_major=sys.pypy_version_info.major,
|
||||
pypy_minor=sys.pypy_version_info.minor,
|
||||
)
|
||||
|
||||
|
||||
def _generic_abi():
|
||||
abi = sysconfig.get_config_var("SOABI")
|
||||
if abi:
|
||||
return _normalize_string(abi)
|
||||
else:
|
||||
return "none"
|
||||
|
||||
|
||||
def _pypy_tags(py_version, interpreter, abi, platforms):
|
||||
for tag in (Tag(interpreter, abi, platform) for platform in platforms):
|
||||
yield tag
|
||||
for tag in (Tag(interpreter, "none", platform) for platform in platforms):
|
||||
yield tag
|
||||
|
||||
|
||||
def _generic_tags(interpreter, py_version, abi, platforms):
|
||||
for tag in (Tag(interpreter, abi, platform) for platform in platforms):
|
||||
yield tag
|
||||
if abi != "none":
|
||||
tags = (Tag(interpreter, "none", platform_) for platform_ in platforms)
|
||||
for tag in tags:
|
||||
yield tag
|
||||
|
||||
|
||||
def _py_interpreter_range(py_version):
|
||||
"""
|
||||
Yield Python versions in descending order.
|
||||
|
||||
After the latest version, the major-only version will be yielded, and then
|
||||
all following versions up to 'end'.
|
||||
"""
|
||||
yield "py{major}{minor}".format(major=py_version[0], minor=py_version[1])
|
||||
yield "py{major}".format(major=py_version[0])
|
||||
for minor in range(py_version[1] - 1, -1, -1):
|
||||
yield "py{major}{minor}".format(major=py_version[0], minor=minor)
|
||||
|
||||
|
||||
def _independent_tags(interpreter, py_version, platforms):
|
||||
"""
|
||||
Return the sequence of tags that are consistent across implementations.
|
||||
|
||||
The tags consist of:
|
||||
- py*-none-<platform>
|
||||
- <interpreter>-none-any
|
||||
- py*-none-any
|
||||
"""
|
||||
for version in _py_interpreter_range(py_version):
|
||||
for platform_ in platforms:
|
||||
yield Tag(version, "none", platform_)
|
||||
yield Tag(interpreter, "none", "any")
|
||||
for version in _py_interpreter_range(py_version):
|
||||
yield Tag(version, "none", "any")
|
||||
|
||||
|
||||
def _mac_arch(arch, is_32bit=_32_BIT_INTERPRETER):
|
||||
if not is_32bit:
|
||||
return arch
|
||||
|
||||
if arch.startswith("ppc"):
|
||||
return "ppc"
|
||||
|
||||
return "i386"
|
||||
|
||||
|
||||
def _mac_binary_formats(version, cpu_arch):
|
||||
formats = [cpu_arch]
|
||||
if cpu_arch == "x86_64":
|
||||
if version < (10, 4):
|
||||
return []
|
||||
formats.extend(["intel", "fat64", "fat32"])
|
||||
|
||||
elif cpu_arch == "i386":
|
||||
if version < (10, 4):
|
||||
return []
|
||||
formats.extend(["intel", "fat32", "fat"])
|
||||
|
||||
elif cpu_arch == "ppc64":
|
||||
# TODO: Need to care about 32-bit PPC for ppc64 through 10.2?
|
||||
if version > (10, 5) or version < (10, 4):
|
||||
return []
|
||||
formats.append("fat64")
|
||||
|
||||
elif cpu_arch == "ppc":
|
||||
if version > (10, 6):
|
||||
return []
|
||||
formats.extend(["fat32", "fat"])
|
||||
|
||||
formats.append("universal")
|
||||
return formats
|
||||
|
||||
|
||||
def _mac_platforms(version=None, arch=None):
|
||||
version_str, _, cpu_arch = platform.mac_ver()
|
||||
if version is None:
|
||||
version = tuple(map(int, version_str.split(".")[:2]))
|
||||
if arch is None:
|
||||
arch = _mac_arch(cpu_arch)
|
||||
platforms = []
|
||||
for minor_version in range(version[1], -1, -1):
|
||||
compat_version = version[0], minor_version
|
||||
binary_formats = _mac_binary_formats(compat_version, arch)
|
||||
for binary_format in binary_formats:
|
||||
platforms.append(
|
||||
"macosx_{major}_{minor}_{binary_format}".format(
|
||||
major=compat_version[0],
|
||||
minor=compat_version[1],
|
||||
binary_format=binary_format,
|
||||
)
|
||||
)
|
||||
return platforms
|
||||
|
||||
|
||||
# From PEP 513.
|
||||
def _is_manylinux_compatible(name, glibc_version):
|
||||
# Check for presence of _manylinux module.
|
||||
try:
|
||||
import _manylinux
|
||||
|
||||
return bool(getattr(_manylinux, name + "_compatible"))
|
||||
except (ImportError, AttributeError):
|
||||
# Fall through to heuristic check below.
|
||||
pass
|
||||
|
||||
return _have_compatible_glibc(*glibc_version)
|
||||
|
||||
|
||||
def _glibc_version_string():
|
||||
# Returns glibc version string, or None if not using glibc.
|
||||
import ctypes
|
||||
|
||||
# ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
|
||||
# manpage says, "If filename is NULL, then the returned handle is for the
|
||||
# main program". This way we can let the linker do the work to figure out
|
||||
# which libc our process is actually using.
|
||||
process_namespace = ctypes.CDLL(None)
|
||||
try:
|
||||
gnu_get_libc_version = process_namespace.gnu_get_libc_version
|
||||
except AttributeError:
|
||||
# Symbol doesn't exist -> therefore, we are not linked to
|
||||
# glibc.
|
||||
return None
|
||||
|
||||
# Call gnu_get_libc_version, which returns a string like "2.5"
|
||||
gnu_get_libc_version.restype = ctypes.c_char_p
|
||||
version_str = gnu_get_libc_version()
|
||||
# py2 / py3 compatibility:
|
||||
if not isinstance(version_str, str):
|
||||
version_str = version_str.decode("ascii")
|
||||
|
||||
return version_str
|
||||
|
||||
|
||||
# Separated out from have_compatible_glibc for easier unit testing.
|
||||
def _check_glibc_version(version_str, required_major, minimum_minor):
|
||||
# Parse string and check against requested version.
|
||||
#
|
||||
# We use a regexp instead of str.split because we want to discard any
|
||||
# random junk that might come after the minor version -- this might happen
|
||||
# in patched/forked versions of glibc (e.g. Linaro's version of glibc
|
||||
# uses version strings like "2.20-2014.11"). See gh-3588.
|
||||
m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
|
||||
if not m:
|
||||
warnings.warn(
|
||||
"Expected glibc version with 2 components major.minor,"
|
||||
" got: %s" % version_str,
|
||||
RuntimeWarning,
|
||||
)
|
||||
return False
|
||||
return (
|
||||
int(m.group("major")) == required_major
|
||||
and int(m.group("minor")) >= minimum_minor
|
||||
)
|
||||
|
||||
|
||||
def _have_compatible_glibc(required_major, minimum_minor):
|
||||
version_str = _glibc_version_string()
|
||||
if version_str is None:
|
||||
return False
|
||||
return _check_glibc_version(version_str, required_major, minimum_minor)
|
||||
|
||||
|
||||
def _linux_platforms(is_32bit=_32_BIT_INTERPRETER):
|
||||
linux = _normalize_string(distutils.util.get_platform())
|
||||
if linux == "linux_x86_64" and is_32bit:
|
||||
linux = "linux_i686"
|
||||
manylinux_support = (
|
||||
("manylinux2014", (2, 17)), # CentOS 7 w/ glibc 2.17 (PEP 599)
|
||||
("manylinux2010", (2, 12)), # CentOS 6 w/ glibc 2.12 (PEP 571)
|
||||
("manylinux1", (2, 5)), # CentOS 5 w/ glibc 2.5 (PEP 513)
|
||||
)
|
||||
manylinux_support_iter = iter(manylinux_support)
|
||||
for name, glibc_version in manylinux_support_iter:
|
||||
if _is_manylinux_compatible(name, glibc_version):
|
||||
platforms = [linux.replace("linux", name)]
|
||||
break
|
||||
else:
|
||||
platforms = []
|
||||
# Support for a later manylinux implies support for an earlier version.
|
||||
platforms += [linux.replace("linux", name) for name, _ in manylinux_support_iter]
|
||||
platforms.append(linux)
|
||||
return platforms
|
||||
|
||||
|
||||
def _generic_platforms():
|
||||
platform = _normalize_string(distutils.util.get_platform())
|
||||
return [platform]
|
||||
|
||||
|
||||
def _interpreter_name():
|
||||
name = platform.python_implementation().lower()
|
||||
return INTERPRETER_SHORT_NAMES.get(name) or name
|
||||
|
||||
|
||||
def _generic_interpreter(name, py_version):
|
||||
version = sysconfig.get_config_var("py_version_nodot")
|
||||
if not version:
|
||||
version = "".join(map(str, py_version[:2]))
|
||||
return "{name}{version}".format(name=name, version=version)
|
||||
|
||||
|
||||
def sys_tags():
|
||||
"""
|
||||
Returns the sequence of tag triples for the running interpreter.
|
||||
|
||||
The order of the sequence corresponds to priority order for the
|
||||
interpreter, from most to least important.
|
||||
"""
|
||||
py_version = sys.version_info[:2]
|
||||
interpreter_name = _interpreter_name()
|
||||
if platform.system() == "Darwin":
|
||||
platforms = _mac_platforms()
|
||||
elif platform.system() == "Linux":
|
||||
platforms = _linux_platforms()
|
||||
else:
|
||||
platforms = _generic_platforms()
|
||||
|
||||
if interpreter_name == "cp":
|
||||
interpreter = _cpython_interpreter(py_version)
|
||||
abis = _cpython_abis(py_version)
|
||||
for tag in _cpython_tags(py_version, interpreter, abis, platforms):
|
||||
yield tag
|
||||
elif interpreter_name == "pp":
|
||||
interpreter = _pypy_interpreter()
|
||||
abi = _generic_abi()
|
||||
for tag in _pypy_tags(py_version, interpreter, abi, platforms):
|
||||
yield tag
|
||||
else:
|
||||
interpreter = _generic_interpreter(interpreter_name, py_version)
|
||||
abi = _generic_abi()
|
||||
for tag in _generic_tags(interpreter, py_version, abi, platforms):
|
||||
yield tag
|
||||
for tag in _independent_tags(interpreter, py_version, platforms):
|
||||
yield tag
|
|
@@ -5,6 +5,8 @@ from __future__ import absolute_import, division, print_function
|
|||
|
||||
import re
|
||||
|
||||
from .version import InvalidVersion, Version
|
||||
|
||||
|
||||
_canonicalize_regex = re.compile(r"[-_.]+")
|
||||
|
||||
|
@@ -12,3 +14,44 @@ _canonicalize_regex = re.compile(r"[-_.]+")
|
|||
def canonicalize_name(name):
|
||||
# This is taken from PEP 503.
|
||||
return _canonicalize_regex.sub("-", name).lower()
|
||||
|
||||
|
||||
def canonicalize_version(version):
|
||||
"""
|
||||
This is very similar to Version.__str__, but has one subtle differences
|
||||
with the way it handles the release segment.
|
||||
"""
|
||||
|
||||
try:
|
||||
version = Version(version)
|
||||
except InvalidVersion:
|
||||
# Legacy versions cannot be normalized
|
||||
return version
|
||||
|
||||
parts = []
|
||||
|
||||
# Epoch
|
||||
if version.epoch != 0:
|
||||
parts.append("{0}!".format(version.epoch))
|
||||
|
||||
# Release segment
|
||||
# NB: This strips trailing '.0's to normalize
|
||||
parts.append(re.sub(r"(\.0)+$", "", ".".join(str(x) for x in version.release)))
|
||||
|
||||
# Pre-release
|
||||
if version.pre is not None:
|
||||
parts.append("".join(str(x) for x in version.pre))
|
||||
|
||||
# Post-release
|
||||
if version.post is not None:
|
||||
parts.append(".post{0}".format(version.post))
|
||||
|
||||
# Development release
|
||||
if version.dev is not None:
|
||||
parts.append(".dev{0}".format(version.dev))
|
||||
|
||||
# Local version segment
|
||||
if version.local is not None:
|
||||
parts.append("+{0}".format(version.local))
|
||||
|
||||
return "".join(parts)
|
||||
|
|
|
@@ -10,14 +10,11 @@ import re
|
|||
from ._structures import Infinity
|
||||
|
||||
|
||||
__all__ = [
|
||||
"parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"
|
||||
]
|
||||
__all__ = ["parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"]
|
||||
|
||||
|
||||
_Version = collections.namedtuple(
|
||||
"_Version",
|
||||
["epoch", "release", "dev", "pre", "post", "local"],
|
||||
"_Version", ["epoch", "release", "dev", "pre", "post", "local"]
|
||||
)
|
||||
|
||||
|
||||
|
@@ -40,7 +37,6 @@ class InvalidVersion(ValueError):
|
|||
|
||||
|
||||
class _BaseVersion(object):
|
||||
|
||||
def __hash__(self):
|
||||
return hash(self._key)
|
||||
|
||||
|
@@ -70,7 +66,6 @@ class _BaseVersion(object):
|
|||
|
||||
|
||||
class LegacyVersion(_BaseVersion):
|
||||
|
||||
def __init__(self, version):
|
||||
self._version = str(version)
|
||||
self._key = _legacy_cmpkey(self._version)
|
||||
|
@@ -89,6 +84,26 @@ class LegacyVersion(_BaseVersion):
|
|||
def base_version(self):
|
||||
return self._version
|
||||
|
||||
@property
|
||||
def epoch(self):
|
||||
return -1
|
||||
|
||||
@property
|
||||
def release(self):
|
||||
return None
|
||||
|
||||
@property
|
||||
def pre(self):
|
||||
return None
|
||||
|
||||
@property
|
||||
def post(self):
|
||||
return None
|
||||
|
||||
@property
|
||||
def dev(self):
|
||||
return None
|
||||
|
||||
@property
|
||||
def local(self):
|
||||
return None
|
||||
|
@@ -101,13 +116,19 @@ class LegacyVersion(_BaseVersion):
|
|||
def is_postrelease(self):
|
||||
return False
|
||||
|
||||
@property
|
||||
def is_devrelease(self):
|
||||
return False
|
||||
|
||||
_legacy_version_component_re = re.compile(
|
||||
r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE,
|
||||
)
|
||||
|
||||
_legacy_version_component_re = re.compile(r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE)
|
||||
|
||||
_legacy_version_replacement_map = {
|
||||
"pre": "c", "preview": "c", "-": "final-", "rc": "c", "dev": "@",
|
||||
"pre": "c",
|
||||
"preview": "c",
|
||||
"-": "final-",
|
||||
"rc": "c",
|
||||
"dev": "@",
|
||||
}
|
||||
|
||||
|
||||
|
@@ -154,6 +175,7 @@ def _legacy_cmpkey(version):
|
|||
|
||||
return epoch, parts
|
||||
|
||||
|
||||
# Deliberately not anchored to the start and end of the string, to make it
|
||||
# easier for 3rd party code to reuse
|
||||
VERSION_PATTERN = r"""
|
||||
|
@@ -190,10 +212,7 @@ VERSION_PATTERN = r"""
|
|||
|
||||
class Version(_BaseVersion):
|
||||
|
||||
_regex = re.compile(
|
||||
r"^\s*" + VERSION_PATTERN + r"\s*$",
|
||||
re.VERBOSE | re.IGNORECASE,
|
||||
)
|
||||
_regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
|
||||
|
||||
def __init__(self, version):
|
||||
# Validate the version and parse it into pieces
|
||||
|
@@ -205,18 +224,11 @@ class Version(_BaseVersion):
|
|||
self._version = _Version(
|
||||
epoch=int(match.group("epoch")) if match.group("epoch") else 0,
|
||||
release=tuple(int(i) for i in match.group("release").split(".")),
|
||||
pre=_parse_letter_version(
|
||||
match.group("pre_l"),
|
||||
match.group("pre_n"),
|
||||
),
|
||||
pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
|
||||
post=_parse_letter_version(
|
||||
match.group("post_l"),
|
||||
match.group("post_n1") or match.group("post_n2"),
|
||||
),
|
||||
dev=_parse_letter_version(
|
||||
match.group("dev_l"),
|
||||
match.group("dev_n"),
|
||||
match.group("post_l"), match.group("post_n1") or match.group("post_n2")
|
||||
),
|
||||
dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
|
||||
local=_parse_local_version(match.group("local")),
|
||||
)
|
||||
|
||||
|
@@ -237,32 +249,57 @@ class Version(_BaseVersion):
|
|||
parts = []
|
||||
|
||||
# Epoch
|
||||
if self._version.epoch != 0:
|
||||
parts.append("{0}!".format(self._version.epoch))
|
||||
if self.epoch != 0:
|
||||
parts.append("{0}!".format(self.epoch))
|
||||
|
||||
# Release segment
|
||||
parts.append(".".join(str(x) for x in self._version.release))
|
||||
parts.append(".".join(str(x) for x in self.release))
|
||||
|
||||
# Pre-release
|
||||
if self._version.pre is not None:
|
||||
parts.append("".join(str(x) for x in self._version.pre))
|
||||
if self.pre is not None:
|
||||
parts.append("".join(str(x) for x in self.pre))
|
||||
|
||||
# Post-release
|
||||
if self._version.post is not None:
|
||||
parts.append(".post{0}".format(self._version.post[1]))
|
||||
if self.post is not None:
|
||||
parts.append(".post{0}".format(self.post))
|
||||
|
||||
# Development release
|
||||
if self._version.dev is not None:
|
||||
parts.append(".dev{0}".format(self._version.dev[1]))
|
||||
if self.dev is not None:
|
||||
parts.append(".dev{0}".format(self.dev))
|
||||
|
||||
# Local version segment
|
||||
if self._version.local is not None:
|
||||
parts.append(
|
||||
"+{0}".format(".".join(str(x) for x in self._version.local))
|
||||
)
|
||||
if self.local is not None:
|
||||
parts.append("+{0}".format(self.local))
|
||||
|
||||
return "".join(parts)
|
||||
|
||||
@property
|
||||
def epoch(self):
|
||||
return self._version.epoch
|
||||
|
||||
@property
|
||||
def release(self):
|
||||
return self._version.release
|
||||
|
||||
@property
|
||||
def pre(self):
|
||||
return self._version.pre
|
||||
|
||||
@property
|
||||
def post(self):
|
||||
return self._version.post[1] if self._version.post else None
|
||||
|
||||
@property
|
||||
def dev(self):
|
||||
return self._version.dev[1] if self._version.dev else None
|
||||
|
||||
@property
|
||||
def local(self):
|
||||
if self._version.local:
|
||||
return ".".join(str(x) for x in self._version.local)
|
||||
else:
|
||||
return None
|
||||
|
||||
@property
|
||||
def public(self):
|
||||
return str(self).split("+", 1)[0]
|
||||
|
@@ -272,27 +309,25 @@ class Version(_BaseVersion):
|
|||
parts = []
|
||||
|
||||
# Epoch
|
||||
if self._version.epoch != 0:
|
||||
parts.append("{0}!".format(self._version.epoch))
|
||||
if self.epoch != 0:
|
||||
parts.append("{0}!".format(self.epoch))
|
||||
|
||||
# Release segment
|
||||
parts.append(".".join(str(x) for x in self._version.release))
|
||||
parts.append(".".join(str(x) for x in self.release))
|
||||
|
||||
return "".join(parts)
|
||||
|
||||
@property
|
||||
def local(self):
|
||||
version_string = str(self)
|
||||
if "+" in version_string:
|
||||
return version_string.split("+", 1)[1]
|
||||
|
||||
@property
|
||||
def is_prerelease(self):
|
||||
return bool(self._version.dev or self._version.pre)
|
||||
return self.dev is not None or self.pre is not None
|
||||
|
||||
@property
|
||||
def is_postrelease(self):
|
||||
return bool(self._version.post)
|
||||
return self.post is not None
|
||||
|
||||
@property
|
||||
def is_devrelease(self):
|
||||
return self.dev is not None
|
||||
|
||||
|
||||
def _parse_letter_version(letter, number):
|
||||
|
@@ -326,7 +361,7 @@ def _parse_letter_version(letter, number):
|
|||
return letter, int(number)
|
||||
|
||||
|
||||
_local_version_seperators = re.compile(r"[\._-]")
|
||||
_local_version_separators = re.compile(r"[\._-]")
|
||||
|
||||
|
||||
def _parse_local_version(local):
|
||||
|
@@ -336,7 +371,7 @@ def _parse_local_version(local):
|
|||
if local is not None:
|
||||
return tuple(
|
||||
part.lower() if not part.isdigit() else int(part)
|
||||
for part in _local_version_seperators.split(local)
|
||||
for part in _local_version_separators.split(local)
|
||||
)
|
||||
|
||||
|
||||
|
@@ -347,12 +382,7 @@ def _cmpkey(epoch, release, pre, post, dev, local):
|
|||
# re-reverse it back into the correct order and make it a tuple and use
|
||||
# that for our sorting key.
|
||||
release = tuple(
|
||||
reversed(list(
|
||||
itertools.dropwhile(
|
||||
lambda x: x == 0,
|
||||
reversed(release),
|
||||
)
|
||||
))
|
||||
reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
|
||||
)
|
||||
|
||||
# We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
|
||||
|
@@ -385,9 +415,6 @@ def _cmpkey(epoch, release, pre, post, dev, local):
|
|||
# - Numeric segments sort numerically
|
||||
# - Shorter versions sort before longer versions when the prefixes
|
||||
# match exactly
|
||||
local = tuple(
|
||||
(i, "") if isinstance(i, int) else (-Infinity, i)
|
||||
for i in local
|
||||
)
|
||||
local = tuple((i, "") if isinstance(i, int) else (-Infinity, i) for i in local)
|
||||
|
||||
return epoch, release, pre, post, dev, local
|
||||
|
|
|
@@ -232,6 +232,12 @@ class _BuildMetaLegacyBackend(_BuildMetaBackend):
|
|||
if script_dir not in sys.path:
|
||||
sys.path.insert(0, script_dir)
|
||||
|
||||
# Some setup.py scripts (e.g. in pygame and numpy) use sys.argv[0] to
|
||||
# get the directory of the source code. They expect it to refer to the
|
||||
# setup.py script.
|
||||
sys_argv_0 = sys.argv[0]
|
||||
sys.argv[0] = setup_script
|
||||
|
||||
try:
|
||||
super(_BuildMetaLegacyBackend,
|
||||
self).run_setup(setup_script=setup_script)
|
||||
|
@@ -242,6 +248,7 @@ class _BuildMetaLegacyBackend(_BuildMetaBackend):
|
|||
# the original path so that the path manipulation does not persist
|
||||
# within the hook after run_setup is called.
|
||||
sys.path[:] = sys_path
|
||||
sys.argv[0] = sys_argv_0
|
||||
|
||||
# The primary backend
|
||||
_BACKEND = _BuildMetaBackend()
|
||||
|
|
|
@@ -2,8 +2,7 @@ __all__ = [
|
|||
'alias', 'bdist_egg', 'bdist_rpm', 'build_ext', 'build_py', 'develop',
|
||||
'easy_install', 'egg_info', 'install', 'install_lib', 'rotate', 'saveopts',
|
||||
'sdist', 'setopt', 'test', 'install_egg_info', 'install_scripts',
|
||||
'register', 'bdist_wininst', 'upload_docs', 'upload', 'build_clib',
|
||||
'dist_info',
|
||||
'bdist_wininst', 'upload_docs', 'build_clib', 'dist_info',
|
||||
]
|
||||
|
||||
from distutils.command.bdist import bdist
|
||||
|
|
|
@@ -284,7 +284,7 @@ class bdist_egg(Command):
|
|||
"or refer to a module" % (ep,)
|
||||
)
|
||||
|
||||
pyver = sys.version[:3]
|
||||
pyver = '{}.{}'.format(*sys.version_info)
|
||||
pkg = ep.module_name
|
||||
full = '.'.join(ep.attrs)
|
||||
base = ep.attrs[0]
|
||||
|
|
|
@@ -113,7 +113,7 @@ class build_ext(_build_ext):
|
|||
if fullname in self.ext_map:
|
||||
ext = self.ext_map[fullname]
|
||||
use_abi3 = (
|
||||
six.PY3
|
||||
not six.PY2
|
||||
and getattr(ext, 'py_limited_api')
|
||||
and get_abi3_suffix()
|
||||
)
|
||||
|
|
|
@@ -108,7 +108,7 @@ class develop(namespaces.DevelopInstaller, easy_install):
|
|||
return path_to_setup
|
||||
|
||||
def install_for_development(self):
|
||||
if six.PY3 and getattr(self.distribution, 'use_2to3', False):
|
||||
if not six.PY2 and getattr(self.distribution, 'use_2to3', False):
|
||||
# If we run 2to3 we can not do this inplace:
|
||||
|
||||
# Ensure metadata is up-to-date
|
||||
|
|
|
@@ -241,7 +241,7 @@ class easy_install(Command):
|
|||
"""
|
||||
Render the Setuptools version and installation details, then exit.
|
||||
"""
|
||||
ver = sys.version[:3]
|
||||
ver = '{}.{}'.format(*sys.version_info)
|
||||
dist = get_distribution('setuptools')
|
||||
tmpl = 'setuptools {dist.version} from {dist.location} (Python {ver})'
|
||||
print(tmpl.format(**locals()))
|
||||
|
@@ -410,7 +410,13 @@ class easy_install(Command):
|
|||
]
|
||||
self._expand_attrs(dirs)
|
||||
|
||||
def run(self):
|
||||
def run(self, show_deprecation=True):
|
||||
if show_deprecation:
|
||||
self.announce(
|
||||
"WARNING: The easy_install command is deprecated "
|
||||
"and will be removed in a future version."
|
||||
, log.WARN,
|
||||
)
|
||||
if self.verbose != self.distribution.verbose:
|
||||
log.set_verbosity(self.verbose)
|
||||
try:
|
||||
|
@@ -1180,8 +1186,7 @@ class easy_install(Command):
|
|||
# to the setup.cfg file.
|
||||
ei_opts = self.distribution.get_option_dict('easy_install').copy()
|
||||
fetch_directives = (
|
||||
'find_links', 'site_dirs', 'index_url', 'optimize',
|
||||
'site_dirs', 'allow_hosts',
|
||||
'find_links', 'site_dirs', 'index_url', 'optimize', 'allow_hosts',
|
||||
)
|
||||
fetch_options = {}
|
||||
for key, val in ei_opts.items():
|
||||
|
@@ -1412,7 +1417,7 @@ def get_site_dirs():
|
|||
os.path.join(
|
||||
prefix,
|
||||
"lib",
|
||||
"python" + sys.version[:3],
|
||||
"python{}.{}".format(*sys.version_info),
|
||||
"site-packages",
|
||||
),
|
||||
os.path.join(prefix, "lib", "site-python"),
|
||||
|
@@ -1433,7 +1438,7 @@ def get_site_dirs():
|
|||
home,
|
||||
'Library',
|
||||
'Python',
|
||||
sys.version[:3],
|
||||
'{}.{}'.format(*sys.version_info),
|
||||
'site-packages',
|
||||
)
|
||||
sitedirs.append(home_sp)
|
||||
|
@@ -1562,7 +1567,7 @@ def get_exe_prefixes(exe_filename):
|
|||
continue
|
||||
if parts[0].upper() in ('PURELIB', 'PLATLIB'):
|
||||
contents = z.read(name)
|
||||
if six.PY3:
|
||||
if not six.PY2:
|
||||
contents = contents.decode()
|
||||
for pth in yield_lines(contents):
|
||||
pth = pth.strip().replace('\\', '/')
|
||||
|
|
|
@@ -266,7 +266,7 @@ class egg_info(InfoCommon, Command):
|
|||
to the file.
|
||||
"""
|
||||
log.info("writing %s to %s", what, filename)
|
||||
if six.PY3:
|
||||
if not six.PY2:
|
||||
data = data.encode("utf-8")
|
||||
if not self.dry_run:
|
||||
f = open(filename, 'wb')
|
||||
|
|
|
@@ -114,7 +114,7 @@ class install(orig.install):
|
|||
args.insert(0, setuptools.bootstrap_install_from)
|
||||
|
||||
cmd.args = args
|
||||
cmd.run()
|
||||
cmd.run(show_deprecation=False)
|
||||
setuptools.bootstrap_install_from = None
|
||||
|
||||
|
||||
|
|
|
@@ -1,18 +1,18 @@
|
|||
from distutils import log
|
||||
import distutils.command.register as orig
|
||||
|
||||
from setuptools.errors import RemovedCommandError
|
||||
|
||||
|
||||
class register(orig.register):
|
||||
__doc__ = orig.register.__doc__
|
||||
"""Formerly used to register packages on PyPI."""
|
||||
|
||||
def run(self):
|
||||
try:
|
||||
# Make sure that we are using valid current name/version info
|
||||
self.run_command('egg_info')
|
||||
orig.register.run(self)
|
||||
finally:
|
||||
self.announce(
|
||||
"WARNING: Registering is deprecated, use twine to "
|
||||
"upload instead (https://pypi.org/p/twine/)",
|
||||
log.WARN
|
||||
)
|
||||
msg = (
|
||||
"The register command has been removed, use twine to upload "
|
||||
+ "instead (https://pypi.org/p/twine)"
|
||||
)
|
||||
|
||||
self.announce("ERROR: " + msg, log.ERROR)
|
||||
|
||||
raise RemovedCommandError(msg)
|
||||
|
|
|
@@ -5,7 +5,7 @@ import sys
|
|||
import io
|
||||
import contextlib
|
||||
|
||||
from setuptools.extern import six
|
||||
from setuptools.extern import six, ordered_set
|
||||
|
||||
from .py36compat import sdist_add_defaults
|
||||
|
||||
|
@@ -121,19 +121,40 @@ class sdist(sdist_add_defaults, orig.sdist):
|
|||
if has_leaky_handle:
|
||||
read_template = __read_template_hack
|
||||
|
||||
def _add_defaults_optional(self):
|
||||
if six.PY2:
|
||||
sdist_add_defaults._add_defaults_optional(self)
|
||||
else:
|
||||
super()._add_defaults_optional()
|
||||
if os.path.isfile('pyproject.toml'):
|
||||
self.filelist.append('pyproject.toml')
|
||||
|
||||
def _add_defaults_python(self):
|
||||
"""getting python files"""
|
||||
if self.distribution.has_pure_modules():
|
||||
build_py = self.get_finalized_command('build_py')
|
||||
self.filelist.extend(build_py.get_source_files())
|
||||
# This functionality is incompatible with include_package_data, and
|
||||
# will in fact create an infinite recursion if include_package_data
|
||||
# is True. Use of include_package_data will imply that
|
||||
# distutils-style automatic handling of package_data is disabled
|
||||
if not self.distribution.include_package_data:
|
||||
for _, src_dir, _, filenames in build_py.data_files:
|
||||
self.filelist.extend([os.path.join(src_dir, filename)
|
||||
for filename in filenames])
|
||||
self._add_data_files(self._safe_data_files(build_py))
|
||||
|
||||
def _safe_data_files(self, build_py):
|
||||
"""
|
||||
Extracting data_files from build_py is known to cause
|
||||
infinite recursion errors when `include_package_data`
|
||||
is enabled, so suppress it in that case.
|
||||
"""
|
||||
if self.distribution.include_package_data:
|
||||
return ()
|
||||
return build_py.data_files
|
||||
|
||||
def _add_data_files(self, data_files):
|
||||
"""
|
||||
Add data files as found in build_py.data_files.
|
||||
"""
|
||||
self.filelist.extend(
|
||||
os.path.join(src_dir, name)
|
||||
for _, src_dir, _, filenames in data_files
|
||||
for name in filenames
|
||||
)
|
||||
|
||||
def _add_defaults_data_files(self):
|
||||
try:
|
||||
|
@@ -186,7 +207,7 @@ class sdist(sdist_add_defaults, orig.sdist):
|
|||
manifest = open(self.manifest, 'rb')
|
||||
for line in manifest:
|
||||
# The manifest must contain UTF-8. See #303.
|
||||
if six.PY3:
|
||||
if not six.PY2:
|
||||
try:
|
||||
line = line.decode('UTF-8')
|
||||
except UnicodeDecodeError:
|
||||
|
@@ -200,10 +221,12 @@ class sdist(sdist_add_defaults, orig.sdist):
|
|||
manifest.close()
|
||||
|
||||
def check_license(self):
|
||||
"""Checks if license_file' is configured and adds it to
|
||||
'self.filelist' if the value contains a valid path.
|
||||
"""Checks if license_file' or 'license_files' is configured and adds any
|
||||
valid paths to 'self.filelist'.
|
||||
"""
|
||||
|
||||
files = ordered_set.OrderedSet()
|
||||
|
||||
opts = self.distribution.get_option_dict('metadata')
|
||||
|
||||
# ignore the source of the value
|
||||
|
@@ -211,11 +234,19 @@ class sdist(sdist_add_defaults, orig.sdist):
|
|||
|
||||
if license_file is None:
|
||||
log.debug("'license_file' option was not specified")
|
||||
return
|
||||
else:
|
||||
files.add(license_file)
|
||||
|
||||
if not os.path.exists(license_file):
|
||||
log.warn("warning: Failed to find the configured license file '%s'",
|
||||
license_file)
|
||||
return
|
||||
try:
|
||||
files.update(self.distribution.metadata.license_files)
|
||||
except TypeError:
|
||||
log.warn("warning: 'license_files' option is malformed")
|
||||
|
||||
self.filelist.append(license_file)
|
||||
for f in files:
|
||||
if not os.path.exists(f):
|
||||
log.warn(
|
||||
"warning: Failed to find the configured license file '%s'",
|
||||
f)
|
||||
files.remove(f)
|
||||
|
||||
self.filelist.extend(files)
|
||||
|
|
|
@@ -74,7 +74,7 @@ class NonDataProperty:
|
|||
class test(Command):
|
||||
"""Command to run unit tests after in-place build"""
|
||||
|
||||
description = "run unit tests after in-place build"
|
||||
description = "run unit tests after in-place build (deprecated)"
|
||||
|
||||
user_options = [
|
||||
('test-module=', 'm', "Run 'test_suite' in specified module"),
|
||||
|
@@ -129,7 +129,7 @@ class test(Command):
|
|||
|
||||
@contextlib.contextmanager
|
||||
def project_on_sys_path(self, include_dists=[]):
|
||||
with_2to3 = six.PY3 and getattr(self.distribution, 'use_2to3', False)
|
||||
with_2to3 = not six.PY2 and getattr(self.distribution, 'use_2to3', False)
|
||||
|
||||
if with_2to3:
|
||||
# If we run 2to3 we can not do this inplace:
|
||||
|
@@ -214,6 +214,14 @@ class test(Command):
|
|||
return itertools.chain(ir_d, tr_d, er_d)
|
||||
|
||||
def run(self):
|
||||
self.announce(
|
||||
"WARNING: Testing via this command is deprecated and will be "
|
||||
"removed in a future version. Users looking for a generic test "
|
||||
"entry point independent of test runner are encouraged to use "
|
||||
"tox.",
|
||||
log.WARN,
|
||||
)
|
||||
|
||||
installed_dists = self.install_dists(self.distribution)
|
||||
|
||||
cmd = ' '.join(self._argv)
|
||||
|
@@ -232,7 +240,7 @@ class test(Command):
|
|||
# Purge modules under test from sys.modules. The test loader will
|
||||
# re-import them from the build location. Required when 2to3 is used
|
||||
# with namespace packages.
|
||||
if six.PY3 and getattr(self.distribution, 'use_2to3', False):
|
||||
if not six.PY2 and getattr(self.distribution, 'use_2to3', False):
|
||||
module = self.test_suite.split('.')[0]
|
||||
if module in _namespace_packages:
|
||||
del_modules = []
|
||||
|
|
|
@@ -1,196 +1,17 @@
|
|||
import io
|
||||
import os
|
||||
import hashlib
|
||||
import getpass
|
||||
|
||||
from base64 import standard_b64encode
|
||||
|
||||
from distutils import log
|
||||
from distutils.command import upload as orig
|
||||
from distutils.spawn import spawn
|
||||
|
||||
from distutils.errors import DistutilsError
|
||||
|
||||
from setuptools.extern.six.moves.urllib.request import urlopen, Request
|
||||
from setuptools.extern.six.moves.urllib.error import HTTPError
|
||||
from setuptools.extern.six.moves.urllib.parse import urlparse
|
||||
from setuptools.errors import RemovedCommandError
|
||||
|
||||
|
||||
class upload(orig.upload):
|
||||
"""
|
||||
Override default upload behavior to obtain password
|
||||
in a variety of different ways.
|
||||
"""
|
||||
"""Formerly used to upload packages to PyPI."""
|
||||
|
||||
def run(self):
|
||||
try:
|
||||
orig.upload.run(self)
|
||||
finally:
|
||||
self.announce(
|
||||
"WARNING: Uploading via this command is deprecated, use twine "
|
||||
"to upload instead (https://pypi.org/p/twine/)",
|
||||
log.WARN
|
||||
)
|
||||
|
||||
def finalize_options(self):
|
||||
orig.upload.finalize_options(self)
|
||||
self.username = (
|
||||
self.username or
|
||||
getpass.getuser()
|
||||
)
|
||||
# Attempt to obtain password. Short circuit evaluation at the first
|
||||
# sign of success.
|
||||
self.password = (
|
||||
self.password or
|
||||
self._load_password_from_keyring() or
|
||||
self._prompt_for_password()
|
||||
msg = (
|
||||
"The upload command has been removed, use twine to upload "
|
||||
+ "instead (https://pypi.org/p/twine)"
|
||||
)
|
||||
|
||||
def upload_file(self, command, pyversion, filename):
|
||||
# Makes sure the repository URL is compliant
|
||||
schema, netloc, url, params, query, fragments = \
|
||||
urlparse(self.repository)
|
||||
if params or query or fragments:
|
||||
raise AssertionError("Incompatible url %s" % self.repository)
|
||||
|
||||
if schema not in ('http', 'https'):
|
||||
raise AssertionError("unsupported schema " + schema)
|
||||
|
||||
# Sign if requested
|
||||
if self.sign:
|
||||
gpg_args = ["gpg", "--detach-sign", "-a", filename]
|
||||
if self.identity:
|
||||
gpg_args[2:2] = ["--local-user", self.identity]
|
||||
spawn(gpg_args,
|
||||
dry_run=self.dry_run)
|
||||
|
||||
# Fill in the data - send all the meta-data in case we need to
|
||||
# register a new release
|
||||
with open(filename, 'rb') as f:
|
||||
content = f.read()
|
||||
|
||||
meta = self.distribution.metadata
|
||||
|
||||
data = {
|
||||
# action
|
||||
':action': 'file_upload',
|
||||
'protocol_version': '1',
|
||||
|
||||
# identify release
|
||||
'name': meta.get_name(),
|
||||
'version': meta.get_version(),
|
||||
|
||||
# file content
|
||||
'content': (os.path.basename(filename), content),
|
||||
'filetype': command,
|
||||
'pyversion': pyversion,
|
||||
'md5_digest': hashlib.md5(content).hexdigest(),
|
||||
|
||||
# additional meta-data
|
||||
'metadata_version': str(meta.get_metadata_version()),
|
||||
'summary': meta.get_description(),
|
||||
'home_page': meta.get_url(),
|
||||
'author': meta.get_contact(),
|
||||
'author_email': meta.get_contact_email(),
|
||||
'license': meta.get_licence(),
|
||||
'description': meta.get_long_description(),
|
||||
'keywords': meta.get_keywords(),
|
||||
'platform': meta.get_platforms(),
|
||||
'classifiers': meta.get_classifiers(),
|
||||
'download_url': meta.get_download_url(),
|
||||
# PEP 314
|
||||
'provides': meta.get_provides(),
|
||||
'requires': meta.get_requires(),
|
||||
'obsoletes': meta.get_obsoletes(),
|
||||
}
|
||||
|
||||
data['comment'] = ''
|
||||
|
||||
if self.sign:
|
||||
data['gpg_signature'] = (os.path.basename(filename) + ".asc",
|
||||
open(filename+".asc", "rb").read())
|
||||
|
||||
# set up the authentication
|
||||
user_pass = (self.username + ":" + self.password).encode('ascii')
|
||||
# The exact encoding of the authentication string is debated.
|
||||
# Anyway PyPI only accepts ascii for both username or password.
|
||||
auth = "Basic " + standard_b64encode(user_pass).decode('ascii')
|
||||
|
||||
# Build up the MIME payload for the POST data
|
||||
boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
|
||||
sep_boundary = b'\r\n--' + boundary.encode('ascii')
|
||||
end_boundary = sep_boundary + b'--\r\n'
|
||||
body = io.BytesIO()
|
||||
for key, value in data.items():
|
||||
title = '\r\nContent-Disposition: form-data; name="%s"' % key
|
||||
# handle multiple entries for the same name
|
||||
if not isinstance(value, list):
|
||||
value = [value]
|
||||
for value in value:
|
||||
if type(value) is tuple:
|
||||
title += '; filename="%s"' % value[0]
|
||||
value = value[1]
|
||||
else:
|
||||
value = str(value).encode('utf-8')
|
||||
body.write(sep_boundary)
|
||||
body.write(title.encode('utf-8'))
|
||||
body.write(b"\r\n\r\n")
|
||||
body.write(value)
|
||||
body.write(end_boundary)
|
||||
body = body.getvalue()
|
||||
|
||||
msg = "Submitting %s to %s" % (filename, self.repository)
|
||||
self.announce(msg, log.INFO)
|
||||
|
||||
# build the Request
|
||||
headers = {
|
||||
'Content-type': 'multipart/form-data; boundary=%s' % boundary,
|
||||
'Content-length': str(len(body)),
|
||||
'Authorization': auth,
|
||||
}
|
||||
|
||||
request = Request(self.repository, data=body,
|
||||
headers=headers)
|
||||
# send the data
|
||||
try:
|
||||
result = urlopen(request)
|
||||
status = result.getcode()
|
||||
reason = result.msg
|
||||
except HTTPError as e:
|
||||
status = e.code
|
||||
reason = e.msg
|
||||
except OSError as e:
|
||||
self.announce(str(e), log.ERROR)
|
||||
raise
|
||||
|
||||
if status == 200:
|
||||
self.announce('Server response (%s): %s' % (status, reason),
|
||||
log.INFO)
|
||||
if self.show_response:
|
||||
text = getattr(self, '_read_pypi_response',
|
||||
lambda x: None)(result)
|
||||
if text is not None:
|
||||
msg = '\n'.join(('-' * 75, text, '-' * 75))
|
||||
self.announce(msg, log.INFO)
|
||||
else:
|
||||
msg = 'Upload failed (%s): %s' % (status, reason)
|
||||
self.announce(msg, log.ERROR)
|
||||
raise DistutilsError(msg)
|
||||
|
||||
def _load_password_from_keyring(self):
|
||||
"""
|
||||
Attempt to load password from keyring. Suppress Exceptions.
|
||||
"""
|
||||
try:
|
||||
keyring = __import__('keyring')
|
||||
return keyring.get_password(self.repository, self.username)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
def _prompt_for_password(self):
|
||||
"""
|
||||
Prompt for a password on the tty. Suppress Exceptions.
|
||||
"""
|
||||
try:
|
||||
return getpass.getpass()
|
||||
except (Exception, KeyboardInterrupt):
|
||||
pass
|
||||
self.announce("ERROR: " + msg, log.ERROR)
|
||||
raise RemovedCommandError(msg)
|
||||
|
|
|
@@ -24,7 +24,7 @@ from .upload import upload
|
|||
|
||||
|
||||
def _encode(s):
|
||||
errors = 'surrogateescape' if six.PY3 else 'strict'
|
||||
errors = 'strict' if six.PY2 else 'surrogateescape'
|
||||
return s.encode('utf-8', errors)
|
||||
|
||||
|
||||
|
@@ -153,7 +153,7 @@ class upload_docs(upload):
|
|||
# set up the authentication
|
||||
credentials = _encode(self.username + ':' + self.password)
|
||||
credentials = standard_b64encode(credentials)
|
||||
if six.PY3:
|
||||
if not six.PY2:
|
||||
credentials = credentials.decode('ascii')
|
||||
auth = "Basic " + credentials
|
||||
|
||||
|
|
|
@@ -12,6 +12,7 @@ from importlib import import_module
|
|||
|
||||
from distutils.errors import DistutilsOptionError, DistutilsFileError
|
||||
from setuptools.extern.packaging.version import LegacyVersion, parse
|
||||
from setuptools.extern.packaging.specifiers import SpecifierSet
|
||||
from setuptools.extern.six import string_types, PY3
|
||||
|
||||
|
||||
|
@@ -482,6 +483,7 @@ class ConfigMetadataHandler(ConfigHandler):
|
|||
'obsoletes': parse_list,
|
||||
'classifiers': self._get_parser_compound(parse_file, parse_list),
|
||||
'license': exclude_files_parser('license'),
|
||||
'license_files': parse_list,
|
||||
'description': parse_file,
|
||||
'long_description': parse_file,
|
||||
'version': self._parse_version,
|
||||
|
@@ -554,6 +556,7 @@ class ConfigOptionsHandler(ConfigHandler):
|
|||
'packages': self._parse_packages,
|
||||
'entry_points': self._parse_file,
|
||||
'py_modules': parse_list,
|
||||
'python_requires': SpecifierSet,
|
||||
}
|
||||
|
||||
def _parse_packages(self, value):
|
||||
|
|
|
@@ -1,11 +1,13 @@
|
|||
import sys
|
||||
import imp
|
||||
import marshal
|
||||
import contextlib
|
||||
from distutils.version import StrictVersion
|
||||
from imp import PKG_DIRECTORY, PY_COMPILED, PY_SOURCE, PY_FROZEN
|
||||
|
||||
from .py33compat import Bytecode
|
||||
|
||||
from .py27compat import find_module, PY_COMPILED, PY_FROZEN, PY_SOURCE
|
||||
from . import py27compat
|
||||
|
||||
|
||||
__all__ = [
|
||||
'Require', 'find_module', 'get_module_constant', 'extract_constant'
|
||||
|
@@ -15,7 +17,8 @@ __all__ = [
|
|||
class Require:
|
||||
"""A prerequisite to building or installing a distribution"""
|
||||
|
||||
def __init__(self, name, requested_version, module, homepage='',
|
||||
def __init__(
|
||||
self, name, requested_version, module, homepage='',
|
||||
attribute=None, format=None):
|
||||
|
||||
if format is None and requested_version is not None:
|
||||
|
@@ -79,23 +82,15 @@ class Require:
|
|||
return self.version_ok(version)
|
||||
|
||||
|
||||
def find_module(module, paths=None):
|
||||
"""Just like 'imp.find_module()', but with package support"""
|
||||
def maybe_close(f):
|
||||
@contextlib.contextmanager
|
||||
def empty():
|
||||
yield
|
||||
return
|
||||
if not f:
|
||||
return empty()
|
||||
|
||||
parts = module.split('.')
|
||||
|
||||
while parts:
|
||||
part = parts.pop(0)
|
||||
f, path, (suffix, mode, kind) = info = imp.find_module(part, paths)
|
||||
|
||||
if kind == PKG_DIRECTORY:
|
||||
parts = parts or ['__init__']
|
||||
paths = [path]
|
||||
|
||||
elif parts:
|
||||
raise ImportError("Can't find %r in %s" % (parts, module))
|
||||
|
||||
return info
|
||||
return contextlib.closing(f)
|
||||
|
||||
|
||||
def get_module_constant(module, symbol, default=-1, paths=None):
|
||||
|
@@ -106,28 +101,23 @@ def get_module_constant(module, symbol, default=-1, paths=None):
|
|||
constant. Otherwise, return 'default'."""
|
||||
|
||||
try:
|
||||
f, path, (suffix, mode, kind) = find_module(module, paths)
|
||||
f, path, (suffix, mode, kind) = info = find_module(module, paths)
|
||||
except ImportError:
|
||||
# Module doesn't exist
|
||||
return None
|
||||
|
||||
try:
|
||||
with maybe_close(f):
|
||||
if kind == PY_COMPILED:
|
||||
f.read(8) # skip magic & date
|
||||
code = marshal.load(f)
|
||||
elif kind == PY_FROZEN:
|
||||
code = imp.get_frozen_object(module)
|
||||
code = py27compat.get_frozen_object(module, paths)
|
||||
elif kind == PY_SOURCE:
|
||||
code = compile(f.read(), path, 'exec')
|
||||
else:
|
||||
# Not something we can parse; we'll have to import it. :(
|
||||
if module not in sys.modules:
|
||||
imp.load_module(module, f, path, (suffix, mode, kind))
|
||||
return getattr(sys.modules[module], symbol, None)
|
||||
|
||||
finally:
|
||||
if f:
|
||||
f.close()
|
||||
imported = py27compat.get_module(module, paths, info)
|
||||
return getattr(imported, symbol, None)
|
||||
|
||||
return extract_constant(code, symbol, default)
|
||||
|
||||
|
|
|
@@ -27,6 +27,7 @@ from distutils.version import StrictVersion
|
|||
|
||||
from setuptools.extern import six
|
||||
from setuptools.extern import packaging
|
||||
from setuptools.extern import ordered_set
|
||||
from setuptools.extern.six.moves import map, filter, filterfalse
|
||||
|
||||
from . import SetuptoolsDeprecationWarning
|
||||
|
@@ -407,7 +408,8 @@ class Distribution(_Distribution):
|
|||
_DISTUTILS_UNSUPPORTED_METADATA = {
|
||||
'long_description_content_type': None,
|
||||
'project_urls': dict,
|
||||
'provides_extras': set,
|
||||
'provides_extras': ordered_set.OrderedSet,
|
||||
'license_files': ordered_set.OrderedSet,
|
||||
}
|
||||
|
||||
_patched_dist = None
|
||||
|
@@ -569,7 +571,7 @@ class Distribution(_Distribution):
|
|||
from setuptools.extern.six.moves.configparser import ConfigParser
|
||||
|
||||
# Ignore install directory options if we have a venv
|
||||
if six.PY3 and sys.prefix != sys.base_prefix:
|
||||
if not six.PY2 and sys.prefix != sys.base_prefix:
|
||||
ignore_options = [
|
||||
'install-base', 'install-platbase', 'install-lib',
|
||||
'install-platlib', 'install-purelib', 'install-headers',
|
||||
|
@@ -591,7 +593,7 @@ class Distribution(_Distribution):
|
|||
with io.open(filename, encoding='utf-8') as reader:
|
||||
if DEBUG:
|
||||
self.announce(" reading {filename}".format(**locals()))
|
||||
(parser.read_file if six.PY3 else parser.readfp)(reader)
|
||||
(parser.readfp if six.PY2 else parser.read_file)(reader)
|
||||
for section in parser.sections():
|
||||
options = parser.options(section)
|
||||
opt_dict = self.get_option_dict(section)
|
||||
|
@@ -634,7 +636,7 @@ class Distribution(_Distribution):
|
|||
|
||||
Ref #1653
|
||||
"""
|
||||
if six.PY3:
|
||||
if not six.PY2:
|
||||
return val
|
||||
try:
|
||||
return val.encode()
|
||||
|
@@ -723,15 +725,28 @@ class Distribution(_Distribution):
|
|||
return resolved_dists
|
||||
|
||||
def finalize_options(self):
|
||||
_Distribution.finalize_options(self)
|
||||
if self.features:
|
||||
self._set_global_opts_from_features()
|
||||
"""
|
||||
Allow plugins to apply arbitrary operations to the
|
||||
distribution. Each hook may optionally define a 'order'
|
||||
to influence the order of execution. Smaller numbers
|
||||
go first and the default is 0.
|
||||
"""
|
||||
group = 'setuptools.finalize_distribution_options'
|
||||
|
||||
def by_order(hook):
|
||||
return getattr(hook, 'order', 0)
|
||||
eps = map(lambda e: e.load(), pkg_resources.iter_entry_points(group))
|
||||
for ep in sorted(eps, key=by_order):
|
||||
ep(self)
|
||||
|
||||
def _finalize_setup_keywords(self):
|
||||
for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'):
|
||||
value = getattr(self, ep.name, None)
|
||||
if value is not None:
|
||||
ep.require(installer=self.fetch_build_egg)
|
||||
ep.load()(self, ep.name, value)
|
||||
|
||||
def _finalize_2to3_doctests(self):
|
||||
if getattr(self, 'convert_2to3_doctests', None):
|
||||
# XXX may convert to set here when we can rely on set being builtin
|
||||
self.convert_2to3_doctests = [
|
||||
|
@@ -758,36 +773,15 @@ class Distribution(_Distribution):
|
|||
|
||||
def fetch_build_egg(self, req):
|
||||
"""Fetch an egg needed for building"""
|
||||
from setuptools.command.easy_install import easy_install
|
||||
dist = self.__class__({'script_args': ['easy_install']})
|
||||
opts = dist.get_option_dict('easy_install')
|
||||
opts.clear()
|
||||
opts.update(
|
||||
(k, v)
|
||||
for k, v in self.get_option_dict('easy_install').items()
|
||||
if k in (
|
||||
# don't use any other settings
|
||||
'find_links', 'site_dirs', 'index_url',
|
||||
'optimize', 'site_dirs', 'allow_hosts',
|
||||
))
|
||||
if self.dependency_links:
|
||||
links = self.dependency_links[:]
|
||||
if 'find_links' in opts:
|
||||
links = opts['find_links'][1] + links
|
||||
opts['find_links'] = ('setup', links)
|
||||
install_dir = self.get_egg_cache_dir()
|
||||
cmd = easy_install(
|
||||
dist, args=["x"], install_dir=install_dir,
|
||||
exclude_scripts=True,
|
||||
always_copy=False, build_directory=None, editable=False,
|
||||
upgrade=False, multi_version=True, no_report=True, user=False
|
||||
)
|
||||
cmd.ensure_finalized()
|
||||
return cmd.easy_install(req)
|
||||
from setuptools.installer import fetch_build_egg
|
||||
return fetch_build_egg(self, req)
|
||||
|
||||
def _set_global_opts_from_features(self):
|
||||
def _finalize_feature_opts(self):
|
||||
"""Add --with-X/--without-X options based on optional features"""
|
||||
|
||||
if not self.features:
|
||||
return
|
||||
|
||||
go = []
|
||||
no = self.negative_opt.copy()
|
||||
|
||||
|
|
|
@@ -0,0 +1,16 @@
|
|||
"""setuptools.errors
|
||||
|
||||
Provides exceptions used by setuptools modules.
|
||||
"""
|
||||
|
||||
from distutils.errors import DistutilsError
|
||||
|
||||
|
||||
class RemovedCommandError(DistutilsError, RuntimeError):
|
||||
"""Error used for commands that have been removed in setuptools.
|
||||
|
||||
Since ``setuptools`` is built on ``distutils``, simply removing a command
|
||||
from ``setuptools`` will make the behavior fall back to ``distutils``; this
|
||||
error is raised if a command exists in ``distutils`` but has been actively
|
||||
removed in ``setuptools``.
|
||||
"""
|
|
@@ -69,5 +69,5 @@ class VendorImporter:
|
|||
sys.meta_path.append(self)
|
||||
|
||||
|
||||
names = 'six', 'packaging', 'pyparsing',
|
||||
names = 'six', 'packaging', 'pyparsing', 'ordered_set',
|
||||
VendorImporter(__name__, names, 'setuptools._vendor').install()
|
||||
|
|
|
@@ -1,86 +0,0 @@
|
|||
# This file originally from pip:
|
||||
# https://github.com/pypa/pip/blob/8f4f15a5a95d7d5b511ceaee9ed261176c181970/src/pip/_internal/utils/glibc.py
|
||||
from __future__ import absolute_import
|
||||
|
||||
import ctypes
|
||||
import re
|
||||
import warnings
|
||||
|
||||
|
||||
def glibc_version_string():
|
||||
"Returns glibc version string, or None if not using glibc."
|
||||
|
||||
# ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
|
||||
# manpage says, "If filename is NULL, then the returned handle is for the
|
||||
# main program". This way we can let the linker do the work to figure out
|
||||
# which libc our process is actually using.
|
||||
process_namespace = ctypes.CDLL(None)
|
||||
try:
|
||||
gnu_get_libc_version = process_namespace.gnu_get_libc_version
|
||||
except AttributeError:
|
||||
# Symbol doesn't exist -> therefore, we are not linked to
|
||||
# glibc.
|
||||
return None
|
||||
|
||||
# Call gnu_get_libc_version, which returns a string like "2.5"
|
||||
gnu_get_libc_version.restype = ctypes.c_char_p
|
||||
version_str = gnu_get_libc_version()
|
||||
# py2 / py3 compatibility:
|
||||
if not isinstance(version_str, str):
|
||||
version_str = version_str.decode("ascii")
|
||||
|
||||
return version_str
|
||||
|
||||
|
||||
# Separated out from have_compatible_glibc for easier unit testing
|
||||
def check_glibc_version(version_str, required_major, minimum_minor):
|
||||
# Parse string and check against requested version.
|
||||
#
|
||||
# We use a regexp instead of str.split because we want to discard any
|
||||
# random junk that might come after the minor version -- this might happen
|
||||
# in patched/forked versions of glibc (e.g. Linaro's version of glibc
|
||||
# uses version strings like "2.20-2014.11"). See gh-3588.
|
||||
m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
|
||||
if not m:
|
||||
warnings.warn("Expected glibc version with 2 components major.minor,"
|
||||
" got: %s" % version_str, RuntimeWarning)
|
||||
return False
|
||||
return (int(m.group("major")) == required_major and
|
||||
int(m.group("minor")) >= minimum_minor)
|
||||
|
||||
|
||||
def have_compatible_glibc(required_major, minimum_minor):
|
||||
version_str = glibc_version_string()
|
||||
if version_str is None:
|
||||
return False
|
||||
return check_glibc_version(version_str, required_major, minimum_minor)
|
||||
|
||||
|
||||
# platform.libc_ver regularly returns completely nonsensical glibc
|
||||
# versions. E.g. on my computer, platform says:
|
||||
#
|
||||
# ~$ python2.7 -c 'import platform; print(platform.libc_ver())'
|
||||
# ('glibc', '2.7')
|
||||
# ~$ python3.5 -c 'import platform; print(platform.libc_ver())'
|
||||
# ('glibc', '2.9')
|
||||
#
|
||||
# But the truth is:
|
||||
#
|
||||
# ~$ ldd --version
|
||||
# ldd (Debian GLIBC 2.22-11) 2.22
|
||||
#
|
||||
# This is unfortunate, because it means that the linehaul data on libc
|
||||
# versions that was generated by pip 8.1.2 and earlier is useless and
|
||||
# misleading. Solution: instead of using platform, use our code that actually
|
||||
# works.
|
||||
def libc_ver():
|
||||
"""Try to determine the glibc version
|
||||
|
||||
Returns a tuple of strings (lib, version) which default to empty strings
|
||||
in case the lookup fails.
|
||||
"""
|
||||
glibc_version = glibc_version_string()
|
||||
if glibc_version is None:
|
||||
return ("", "")
|
||||
else:
|
||||
return ("glibc", glibc_version)
|
|
@@ -0,0 +1,150 @@
|
|||
import glob
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
from distutils import log
|
||||
from distutils.errors import DistutilsError
|
||||
|
||||
import pkg_resources
|
||||
from setuptools.command.easy_install import easy_install
|
||||
from setuptools.extern import six
|
||||
from setuptools.wheel import Wheel
|
||||
|
||||
from .py31compat import TemporaryDirectory
|
||||
|
||||
|
||||
def _fixup_find_links(find_links):
|
||||
"""Ensure find-links option end-up being a list of strings."""
|
||||
if isinstance(find_links, six.string_types):
|
||||
return find_links.split()
|
||||
assert isinstance(find_links, (tuple, list))
|
||||
return find_links
|
||||
|
||||
|
||||
def _legacy_fetch_build_egg(dist, req):
|
||||
"""Fetch an egg needed for building.
|
||||
|
||||
Legacy path using EasyInstall.
|
||||
"""
|
||||
tmp_dist = dist.__class__({'script_args': ['easy_install']})
|
||||
opts = tmp_dist.get_option_dict('easy_install')
|
||||
opts.clear()
|
||||
opts.update(
|
||||
(k, v)
|
||||
for k, v in dist.get_option_dict('easy_install').items()
|
||||
if k in (
|
||||
# don't use any other settings
|
||||
'find_links', 'site_dirs', 'index_url',
|
||||
'optimize', 'site_dirs', 'allow_hosts',
|
||||
))
|
||||
if dist.dependency_links:
|
||||
links = dist.dependency_links[:]
|
||||
if 'find_links' in opts:
|
||||
links = _fixup_find_links(opts['find_links'][1]) + links
|
||||
opts['find_links'] = ('setup', links)
|
||||
install_dir = dist.get_egg_cache_dir()
|
||||
cmd = easy_install(
|
||||
tmp_dist, args=["x"], install_dir=install_dir,
|
||||
exclude_scripts=True,
|
||||
always_copy=False, build_directory=None, editable=False,
|
||||
upgrade=False, multi_version=True, no_report=True, user=False
|
||||
)
|
||||
cmd.ensure_finalized()
|
||||
return cmd.easy_install(req)
|
||||
|
||||
|
||||
def fetch_build_egg(dist, req):
|
||||
"""Fetch an egg needed for building.
|
||||
|
||||
Use pip/wheel to fetch/build a wheel."""
|
||||
# Check pip is available.
|
||||
try:
|
||||
pkg_resources.get_distribution('pip')
|
||||
except pkg_resources.DistributionNotFound:
|
||||
dist.announce(
|
||||
'WARNING: The pip package is not available, falling back '
|
||||
'to EasyInstall for handling setup_requires/test_requires; '
|
||||
'this is deprecated and will be removed in a future version.'
|
||||
, log.WARN
|
||||
)
|
||||
return _legacy_fetch_build_egg(dist, req)
|
||||
# Warn if wheel is not.
|
||||
try:
|
||||
pkg_resources.get_distribution('wheel')
|
||||
except pkg_resources.DistributionNotFound:
|
||||
dist.announce('WARNING: The wheel package is not available.', log.WARN)
|
||||
# Ignore environment markers; if supplied, it is required.
|
||||
req = strip_marker(req)
|
||||
# Take easy_install options into account, but do not override relevant
|
||||
# pip environment variables (like PIP_INDEX_URL or PIP_QUIET); they'll
|
||||
# take precedence.
|
||||
opts = dist.get_option_dict('easy_install')
|
||||
if 'allow_hosts' in opts:
|
||||
raise DistutilsError('the `allow-hosts` option is not supported '
|
||||
'when using pip to install requirements.')
|
||||
if 'PIP_QUIET' in os.environ or 'PIP_VERBOSE' in os.environ:
|
||||
quiet = False
|
||||
else:
|
||||
quiet = True
|
||||
if 'PIP_INDEX_URL' in os.environ:
|
||||
index_url = None
|
||||
elif 'index_url' in opts:
|
||||
index_url = opts['index_url'][1]
|
||||
else:
|
||||
index_url = None
|
||||
if 'find_links' in opts:
|
||||
find_links = _fixup_find_links(opts['find_links'][1])[:]
|
||||
else:
|
||||
find_links = []
|
||||
if dist.dependency_links:
|
||||
find_links.extend(dist.dependency_links)
|
||||
eggs_dir = os.path.realpath(dist.get_egg_cache_dir())
|
||||
environment = pkg_resources.Environment()
|
||||
for egg_dist in pkg_resources.find_distributions(eggs_dir):
|
||||
if egg_dist in req and environment.can_add(egg_dist):
|
||||
return egg_dist
|
||||
with TemporaryDirectory() as tmpdir:
|
||||
cmd = [
|
||||
sys.executable, '-m', 'pip',
|
||||
'--disable-pip-version-check',
|
||||
'wheel', '--no-deps',
|
||||
'-w', tmpdir,
|
||||
]
|
||||
if quiet:
|
||||
cmd.append('--quiet')
|
||||
if index_url is not None:
|
||||
cmd.extend(('--index-url', index_url))
|
||||
if find_links is not None:
|
||||
for link in find_links:
|
||||
cmd.extend(('--find-links', link))
|
||||
# If requirement is a PEP 508 direct URL, directly pass
|
||||
# the URL to pip, as `req @ url` does not work on the
|
||||
# command line.
|
||||
if req.url:
|
||||
cmd.append(req.url)
|
||||
else:
|
||||
cmd.append(str(req))
|
||||
try:
|
||||
subprocess.check_call(cmd)
|
||||
except subprocess.CalledProcessError as e:
|
||||
raise DistutilsError(str(e))
|
||||
wheel = Wheel(glob.glob(os.path.join(tmpdir, '*.whl'))[0])
|
||||
dist_location = os.path.join(eggs_dir, wheel.egg_name())
|
||||
wheel.install_as_egg(dist_location)
|
||||
dist_metadata = pkg_resources.PathMetadata(
|
||||
dist_location, os.path.join(dist_location, 'EGG-INFO'))
|
||||
dist = pkg_resources.Distribution.from_filename(
|
||||
dist_location, metadata=dist_metadata)
|
||||
return dist
|
||||
|
||||
|
||||
def strip_marker(req):
|
||||
"""
|
||||
Return a new requirement without the environment marker to avoid
|
||||
calling pip with something like `babel; extra == "i18n"`, which
|
||||
would always be ignored.
|
||||
"""
|
||||
# create a copy to avoid mutating the input
|
||||
req = pkg_resources.Requirement.parse(str(req))
|
||||
req.marker = None
|
||||
return req
|
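
The new fetch_build_egg() above shells out to pip rather than EasyInstall. A rough sketch of the command it assembles for an ordinary requirement (the temporary directory and the requirement shown are illustrative placeholders, not values from the diff):

import sys

# Mirrors the cmd list built in fetch_build_egg() above.
cmd = [
    sys.executable, '-m', 'pip',
    '--disable-pip-version-check',
    'wheel', '--no-deps',
    '-w', '/tmp/egg-cache-build',   # illustrative; the real code uses TemporaryDirectory()
    '--quiet',                      # omitted when PIP_QUIET or PIP_VERBOSE is set
    'pytest-runner>=2.0',           # illustrative; req.url is passed instead for direct-URL requirements
]
print(' '.join(cmd))
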
File diff suppressed because it is too large
@@ -46,7 +46,7 @@ __all__ = [
_SOCKET_TIMEOUT = 15

_tmpl = "setuptools/{setuptools.__version__} Python-urllib/{py_major}"
user_agent = _tmpl.format(py_major=sys.version[:3], setuptools=setuptools)
user_agent = _tmpl.format(py_major='{}.{}'.format(*sys.version_info), setuptools=setuptools)


def parse_requirement_arg(spec):
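
The user-agent change avoids slicing the version string, which truncates incorrectly once minor versions reach two digits:

import sys

# On Python 3.10.x:
print(sys.version[:3])                    # "3.1"  -- the old, wrong value
print('{}.{}'.format(*sys.version_info))  # "3.10" -- what the new code reports
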
@@ -1,319 +0,0 @@
# This file originally from pip:
# https://github.com/pypa/pip/blob/8f4f15a5a95d7d5b511ceaee9ed261176c181970/src/pip/_internal/pep425tags.py
"""Generate and work with PEP 425 Compatibility Tags."""
from __future__ import absolute_import

import distutils.util
from distutils import log
import platform
import re
import sys
import sysconfig
import warnings
from collections import OrderedDict

from .extern import six

from . import glibc

_osx_arch_pat = re.compile(r'(.+)_(\d+)_(\d+)_(.+)')


def get_config_var(var):
    try:
        return sysconfig.get_config_var(var)
    except IOError as e:  # Issue #1074
        warnings.warn("{}".format(e), RuntimeWarning)
        return None


def get_abbr_impl():
    """Return abbreviated implementation name."""
    if hasattr(sys, 'pypy_version_info'):
        pyimpl = 'pp'
    elif sys.platform.startswith('java'):
        pyimpl = 'jy'
    elif sys.platform == 'cli':
        pyimpl = 'ip'
    else:
        pyimpl = 'cp'
    return pyimpl


def get_impl_ver():
    """Return implementation version."""
    impl_ver = get_config_var("py_version_nodot")
    if not impl_ver or get_abbr_impl() == 'pp':
        impl_ver = ''.join(map(str, get_impl_version_info()))
    return impl_ver


def get_impl_version_info():
    """Return sys.version_info-like tuple for use in decrementing the minor
    version."""
    if get_abbr_impl() == 'pp':
        # as per https://github.com/pypa/pip/issues/2882
        return (sys.version_info[0], sys.pypy_version_info.major,
                sys.pypy_version_info.minor)
    else:
        return sys.version_info[0], sys.version_info[1]


def get_impl_tag():
    """
    Returns the Tag for this specific implementation.
    """
    return "{}{}".format(get_abbr_impl(), get_impl_ver())


def get_flag(var, fallback, expected=True, warn=True):
    """Use a fallback method for determining SOABI flags if the needed config
    var is unset or unavailable."""
    val = get_config_var(var)
    if val is None:
        if warn:
            log.debug("Config variable '%s' is unset, Python ABI tag may "
                      "be incorrect", var)
        return fallback()
    return val == expected


def get_abi_tag():
    """Return the ABI tag based on SOABI (if available) or emulate SOABI
    (CPython 2, PyPy)."""
    soabi = get_config_var('SOABI')
    impl = get_abbr_impl()
    if not soabi and impl in {'cp', 'pp'} and hasattr(sys, 'maxunicode'):
        d = ''
        m = ''
        u = ''
        if get_flag('Py_DEBUG',
                    lambda: hasattr(sys, 'gettotalrefcount'),
                    warn=(impl == 'cp')):
            d = 'd'
        if get_flag('WITH_PYMALLOC',
                    lambda: impl == 'cp',
                    warn=(impl == 'cp')):
            m = 'm'
        if get_flag('Py_UNICODE_SIZE',
                    lambda: sys.maxunicode == 0x10ffff,
                    expected=4,
                    warn=(impl == 'cp' and
                          six.PY2)) \
                and six.PY2:
            u = 'u'
        abi = '%s%s%s%s%s' % (impl, get_impl_ver(), d, m, u)
    elif soabi and soabi.startswith('cpython-'):
        abi = 'cp' + soabi.split('-')[1]
    elif soabi:
        abi = soabi.replace('.', '_').replace('-', '_')
    else:
        abi = None
    return abi


def _is_running_32bit():
    return sys.maxsize == 2147483647


def get_platform():
    """Return our platform name 'win32', 'linux_x86_64'"""
    if sys.platform == 'darwin':
        # distutils.util.get_platform() returns the release based on the value
        # of MACOSX_DEPLOYMENT_TARGET on which Python was built, which may
        # be significantly older than the user's current machine.
        release, _, machine = platform.mac_ver()
        split_ver = release.split('.')

        if machine == "x86_64" and _is_running_32bit():
            machine = "i386"
        elif machine == "ppc64" and _is_running_32bit():
            machine = "ppc"

        return 'macosx_{}_{}_{}'.format(split_ver[0], split_ver[1], machine)

    # XXX remove distutils dependency
    result = distutils.util.get_platform().replace('.', '_').replace('-', '_')
    if result == "linux_x86_64" and _is_running_32bit():
        # 32 bit Python program (running on a 64 bit Linux): pip should only
        # install and run 32 bit compiled extensions in that case.
        result = "linux_i686"

    return result


def is_manylinux1_compatible():
    # Only Linux, and only x86-64 / i686
    if get_platform() not in {"linux_x86_64", "linux_i686"}:
        return False

    # Check for presence of _manylinux module
    try:
        import _manylinux
        return bool(_manylinux.manylinux1_compatible)
    except (ImportError, AttributeError):
        # Fall through to heuristic check below
        pass

    # Check glibc version. CentOS 5 uses glibc 2.5.
    return glibc.have_compatible_glibc(2, 5)


def get_darwin_arches(major, minor, machine):
    """Return a list of supported arches (including group arches) for
    the given major, minor and machine architecture of a macOS machine.
    """
    arches = []

    def _supports_arch(major, minor, arch):
        # Looking at the application support for macOS versions in the chart
        # provided by https://en.wikipedia.org/wiki/OS_X#Versions it appears
        # our timeline looks roughly like:
        #
        # 10.0 - Introduces ppc support.
        # 10.4 - Introduces ppc64, i386, and x86_64 support, however the ppc64
        #        and x86_64 support is CLI only, and cannot be used for GUI
        #        applications.
        # 10.5 - Extends ppc64 and x86_64 support to cover GUI applications.
        # 10.6 - Drops support for ppc64
        # 10.7 - Drops support for ppc
        #
        # Given that we do not know if we're installing a CLI or a GUI
        # application, we must be conservative and assume it might be a GUI
        # application and behave as if ppc64 and x86_64 support did not occur
        # until 10.5.
        #
        # Note: The above information is taken from the "Application support"
        #       column in the chart not the "Processor support" since I believe
        #       that we care about what instruction sets an application can use
        #       not which processors the OS supports.
        if arch == 'ppc':
            return (major, minor) <= (10, 5)
        if arch == 'ppc64':
            return (major, minor) == (10, 5)
        if arch == 'i386':
            return (major, minor) >= (10, 4)
        if arch == 'x86_64':
            return (major, minor) >= (10, 5)
        if arch in groups:
            for garch in groups[arch]:
                if _supports_arch(major, minor, garch):
                    return True
        return False

    groups = OrderedDict([
        ("fat", ("i386", "ppc")),
        ("intel", ("x86_64", "i386")),
        ("fat64", ("x86_64", "ppc64")),
        ("fat32", ("x86_64", "i386", "ppc")),
    ])

    if _supports_arch(major, minor, machine):
        arches.append(machine)

    for garch in groups:
        if machine in groups[garch] and _supports_arch(major, minor, garch):
            arches.append(garch)

    arches.append('universal')

    return arches


def get_supported(versions=None, noarch=False, platform=None,
                  impl=None, abi=None):
    """Return a list of supported tags for each version specified in
    `versions`.

    :param versions: a list of string versions, of the form ["33", "32"],
        or None. The first version will be assumed to support our ABI.
    :param platform: specify the exact platform you want valid
        tags for, or None. If None, use the local system platform.
    :param impl: specify the exact implementation you want valid
        tags for, or None. If None, use the local interpreter impl.
    :param abi: specify the exact abi you want valid
        tags for, or None. If None, use the local interpreter abi.
    """
    supported = []

    # Versions must be given with respect to the preference
    if versions is None:
        versions = []
        version_info = get_impl_version_info()
        major = version_info[:-1]
        # Support all previous minor Python versions.
        for minor in range(version_info[-1], -1, -1):
            versions.append(''.join(map(str, major + (minor,))))

    impl = impl or get_abbr_impl()

    abis = []

    abi = abi or get_abi_tag()
    if abi:
        abis[0:0] = [abi]

    abi3s = set()
    import imp
    for suffix in imp.get_suffixes():
        if suffix[0].startswith('.abi'):
            abi3s.add(suffix[0].split('.', 2)[1])

    abis.extend(sorted(list(abi3s)))

    abis.append('none')

    if not noarch:
        arch = platform or get_platform()
        if arch.startswith('macosx'):
            # support macosx-10.6-intel on macosx-10.9-x86_64
            match = _osx_arch_pat.match(arch)
            if match:
                name, major, minor, actual_arch = match.groups()
                tpl = '{}_{}_%i_%s'.format(name, major)
                arches = []
                for m in reversed(range(int(minor) + 1)):
                    for a in get_darwin_arches(int(major), m, actual_arch):
                        arches.append(tpl % (m, a))
            else:
                # arch pattern didn't match (?!)
                arches = [arch]
        elif platform is None and is_manylinux1_compatible():
            arches = [arch.replace('linux', 'manylinux1'), arch]
        else:
            arches = [arch]

        # Current version, current API (built specifically for our Python):
        for abi in abis:
            for arch in arches:
                supported.append(('%s%s' % (impl, versions[0]), abi, arch))

        # abi3 modules compatible with older version of Python
        for version in versions[1:]:
            # abi3 was introduced in Python 3.2
            if version in {'31', '30'}:
                break
            for abi in abi3s:   # empty set if not Python 3
                for arch in arches:
                    supported.append(("%s%s" % (impl, version), abi, arch))

        # Has binaries, does not use the Python API:
        for arch in arches:
            supported.append(('py%s' % (versions[0][0]), 'none', arch))

    # No abi / arch, but requires our implementation:
    supported.append(('%s%s' % (impl, versions[0]), 'none', 'any'))
    # Tagged specifically as being cross-version compatible
    # (with just the major version specified)
    supported.append(('%s%s' % (impl, versions[0][0]), 'none', 'any'))

    # No abi / arch, generic Python
    for i, version in enumerate(versions):
        supported.append(('py%s' % (version,), 'none', 'any'))
        if i == 0:
            supported.append(('py%s' % (version[0]), 'none', 'any'))

    return supported


implementation_tag = get_impl_tag()
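
Before this commit the module removed above could be queried directly for the interpreter's tags; the wheel.py change later in this diff switches callers to packaging's sys_tags() instead. A pre-upgrade sketch only (setuptools.pep425tags is deleted by this commit):

from setuptools import pep425tags

print(pep425tags.implementation_tag)       # e.g. "cp37"
for tag in pep425tags.get_supported()[:3]:
    print(tag)                             # e.g. ('cp37', 'cp37m', 'manylinux1_x86_64')
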
@@ -2,6 +2,7 @@
Compatibility Support for Python 2.7 and earlier
"""

import sys
import platform

from setuptools.extern import six

@@ -26,3 +27,34 @@ linux_py2_ascii = (

rmtree_safe = str if linux_py2_ascii else lambda x: x
"""Workaround for http://bugs.python.org/issue24672"""


try:
    from ._imp import find_module, PY_COMPILED, PY_FROZEN, PY_SOURCE
    from ._imp import get_frozen_object, get_module
except ImportError:
    import imp
    from imp import PY_COMPILED, PY_FROZEN, PY_SOURCE  # noqa

    def find_module(module, paths=None):
        """Just like 'imp.find_module()', but with package support"""
        parts = module.split('.')
        while parts:
            part = parts.pop(0)
            f, path, (suffix, mode, kind) = info = imp.find_module(part, paths)

            if kind == imp.PKG_DIRECTORY:
                parts = parts or ['__init__']
                paths = [path]

            elif parts:
                raise ImportError("Can't find %r in %s" % (parts, module))

        return info

    def get_frozen_object(module, paths):
        return imp.get_frozen_object(module)

    def get_module(module, paths, info):
        imp.load_module(module, *info)
        return sys.modules[module]
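
The imp-based fallback above only runs when the new `_imp` helper cannot be imported (in practice, Python 2); it resolves dotted names one package at a time. A rough Python 2 usage sketch of the fallback find_module() defined above:

f, path, description = find_module('email.mime.text')
print(path)   # filesystem location of the submodule, resolved package by package
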
@@ -0,0 +1,13 @@
import importlib

try:
    import importlib.util
except ImportError:
    pass


try:
    module_from_spec = importlib.util.module_from_spec
except AttributeError:
    def module_from_spec(spec):
        return spec.loader.load_module(spec.name)
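
The shim above exposes a single module_from_spec name regardless of Python version; on 3.5+ it is simply the standard-library function. A standalone illustration using only the standard library:

import importlib.util

spec = importlib.util.find_spec('json')
mod = importlib.util.module_from_spec(spec)   # what the shim resolves to on Python 3.5+
spec.loader.exec_module(mod)
print(mod.dumps({'ok': True}))
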
@@ -1,6 +1,7 @@
"""Wheels support."""

from distutils.util import get_platform
from distutils import log
import email
import itertools
import os

@@ -11,9 +12,9 @@ import zipfile
import pkg_resources
import setuptools
from pkg_resources import parse_version
from setuptools.extern.packaging.tags import sys_tags
from setuptools.extern.packaging.utils import canonicalize_name
from setuptools.extern.six import PY3
from setuptools import pep425tags
from setuptools.command.egg_info import write_requirements


@@ -76,7 +77,7 @@ class Wheel:

    def is_compatible(self):
        '''Is the wheel compatible with the current platform?'''
        supported_tags = pep425tags.get_supported()
        supported_tags = set((t.interpreter, t.abi, t.platform) for t in sys_tags())
        return next((True for t in self.tags() if t in supported_tags), False)

    def egg_name(self):

@@ -162,11 +163,17 @@ class Wheel:
                extras_require=extras_require,
            ),
        )
        write_requirements(
            setup_dist.get_command_obj('egg_info'),
            None,
            os.path.join(egg_info, 'requires.txt'),
        )
        # Temporarily disable info traces.
        log_threshold = log._global_log.threshold
        log.set_threshold(log.WARN)
        try:
            write_requirements(
                setup_dist.get_command_obj('egg_info'),
                None,
                os.path.join(egg_info, 'requires.txt'),
            )
        finally:
            log.set_threshold(log_threshold)

    @staticmethod
    def _move_data_entries(destination_eggdir, dist_data):

@@ -206,6 +213,8 @@ class Wheel:
            for mod in namespace_packages:
                mod_dir = os.path.join(destination_eggdir, *mod.split('.'))
                mod_init = os.path.join(mod_dir, '__init__.py')
                if os.path.exists(mod_dir) and not os.path.exists(mod_init):
                if not os.path.exists(mod_dir):
                    os.mkdir(mod_dir)
                if not os.path.exists(mod_init):
                    with open(mod_init, 'w') as fp:
                        fp.write(NAMESPACE_PACKAGE_INIT)
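
The is_compatible() change replaces the removed pep425tags module with the vendored packaging library; the equivalent check in isolation:

from setuptools.extern.packaging.tags import sys_tags

# Same tag normalization as in is_compatible() above.
supported = {(t.interpreter, t.abi, t.platform) for t in sys_tags()}
print(('py3', 'none', 'any') in supported)   # True on any modern CPython 3
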
Binary file not shown.
Binary file not shown.