WQBYW3K4L4DFULCVEC3PNFGBDHESVBPON576F7ZTVR3P3HQ4C2KAC chriscummingshellojimbilllyYAYwith errors&*()!@%shit
{"last_check":"2017-11-28T03:03:08Z","pypi_version":"9.0.1"}
"""Append module search paths for third-party packages to sys.path.***************************************************************** This module is automatically imported during initialization. *****************************************************************In earlier versions of Python (up to 1.5a3), scripts or modules thatneeded to use site-specific modules would place ``import site''somewhere near the top of their code. Because of the automaticimport, this is no longer necessary (but code that does it stillworks).This will append site-specific paths to the module search path. OnUnix, it starts with sys.prefix and sys.exec_prefix (if different) andappends lib/python<version>/site-packages as well as lib/site-python.It also supports the Debian convention oflib/python<version>/dist-packages. On other platforms (mainly Mac andWindows), it uses just sys.prefix (and sys.exec_prefix, if different,but this is unlikely). The resulting directories, if they exist, areappended to sys.path, and also inspected for path configuration files.FOR DEBIAN, this sys.path is augmented with directories in /usr/local.Local addons go into /usr/local/lib/python<version>/site-packages(resp. /usr/local/lib/site-python), Debian addons install into/usr/{lib,share}/python<version>/dist-packages.A path configuration file is a file whose name has the form<package>.pth; its contents are additional directories (one per line)to be added to sys.path. Non-existing directories (ornon-directories) are never added to sys.path; no directory is added tosys.path more than once. Blank lines and lines beginning with'#' are skipped. Lines starting with 'import' are executed.For example, suppose sys.prefix and sys.exec_prefix are set to/usr/local and there is a directory /usr/local/lib/python2.X/site-packageswith three subdirectories, foo, bar and spam, and two pathconfiguration files, foo.pth and bar.pth. 
Assume foo.pth contains thefollowing:# foo package configurationfoobarbletchand bar.pth contains:# bar package configurationbarThen the following directories are added to sys.path, in this order:/usr/local/lib/python2.X/site-packages/bar/usr/local/lib/python2.X/site-packages/fooNote that bletch is omitted because it doesn't exist; bar precedes foobecause bar.pth comes alphabetically before foo.pth; and spam isomitted because it is not mentioned in either path configuration file.After these path manipulations, an attempt is made to import a modulenamed sitecustomize, which can perform arbitrary additionalsite-specific customizations. If this import fails with anImportError exception, it is silently ignored."""import sysimport ostry:import __builtin__ as builtinsexcept ImportError:import builtinstry:setexcept NameError:from sets import Set as set# Prefixes for site-packages; add additional prefixes like /usr/local herePREFIXES = [sys.prefix, sys.exec_prefix]# Enable per user site-packages directory# set it to False to disable the feature or True to force the featureENABLE_USER_SITE = None# for distutils.commands.installUSER_SITE = NoneUSER_BASE = None_is_64bit = (getattr(sys, 'maxsize', None) or getattr(sys, 'maxint')) > 2**32_is_pypy = hasattr(sys, 'pypy_version_info')_is_jython = sys.platform[:4] == 'java'if _is_jython:ModuleType = type(os)def makepath(*paths):dir = os.path.join(*paths)if _is_jython and (dir == '__classpath__' ordir.startswith('__pyclasspath__')):return dir, dirdir = os.path.abspath(dir)return dir, os.path.normcase(dir)def abs__file__():"""Set all module' __file__ attribute to an absolute path"""for m in sys.modules.values():if ((_is_jython and not isinstance(m, ModuleType)) orhasattr(m, '__loader__')):# only modules need the abspath in Jython. 
and don't mess# with a PEP 302-supplied __file__continuef = getattr(m, '__file__', None)if f is None:continuem.__file__ = os.path.abspath(f)def removeduppaths():""" Remove duplicate entries from sys.path along with making themabsolute"""# This ensures that the initial path provided by the interpreter contains# only absolute pathnames, even if we're running from the build directory.L = []known_paths = set()for dir in sys.path:# Filter out duplicate paths (on case-insensitive file systems also# if they only differ in case); turn relative paths into absolute# paths.dir, dircase = makepath(dir)if not dircase in known_paths:L.append(dir)known_paths.add(dircase)sys.path[:] = Lreturn known_paths# XXX This should not be part of site.py, since it is needed even when# using the -S option for Python. See http://www.python.org/sf/586680def addbuilddir():"""Append ./build/lib.<platform> in case we're running in the build dir(especially for Guido :-)"""from distutils.util import get_platforms = "build/lib.%s-%.3s" % (get_platform(), sys.version)if hasattr(sys, 'gettotalrefcount'):s += '-pydebug's = os.path.join(os.path.dirname(sys.path[-1]), s)sys.path.append(s)def _init_pathinfo():"""Return a set containing all existing directory entries from sys.path"""d = set()for dir in sys.path:try:if os.path.isdir(dir):dir, dircase = makepath(dir)d.add(dircase)except TypeError:continuereturn ddef addpackage(sitedir, name, known_paths):"""Add a new path to known_paths by combining sitedir and 'name' or executesitedir if it starts with 'import'"""if known_paths is None:_init_pathinfo()reset = 1else:reset = 0fullname = os.path.join(sitedir, name)try:f = open(fullname, "rU")except IOError:returntry:for line in f:if line.startswith("#"):continueif line.startswith("import"):exec(line)continueline = line.rstrip()dir, dircase = makepath(sitedir, line)if not dircase in known_paths and os.path.exists(dir):sys.path.append(dir)known_paths.add(dircase)finally:f.close()if reset:known_paths = Nonereturn 
known_pathsdef addsitedir(sitedir, known_paths=None):"""Add 'sitedir' argument to sys.path if missing and handle .pth files in'sitedir'"""if known_paths is None:known_paths = _init_pathinfo()reset = 1else:reset = 0sitedir, sitedircase = makepath(sitedir)if not sitedircase in known_paths:sys.path.append(sitedir) # Add path componenttry:names = os.listdir(sitedir)except os.error:returnnames.sort()for name in names:if name.endswith(os.extsep + "pth"):addpackage(sitedir, name, known_paths)if reset:known_paths = Nonereturn known_pathsdef addsitepackages(known_paths, sys_prefix=sys.prefix, exec_prefix=sys.exec_prefix):"""Add site-packages (and possibly site-python) to sys.path"""prefixes = [os.path.join(sys_prefix, "local"), sys_prefix]if exec_prefix != sys_prefix:prefixes.append(os.path.join(exec_prefix, "local"))for prefix in prefixes:if prefix:if sys.platform in ('os2emx', 'riscos') or _is_jython:sitedirs = [os.path.join(prefix, "Lib", "site-packages")]elif _is_pypy:sitedirs = [os.path.join(prefix, 'site-packages')]elif sys.platform == 'darwin' and prefix == sys_prefix:if prefix.startswith("/System/Library/Frameworks/"): # Apple's Pythonsitedirs = [os.path.join("/Library/Python", sys.version[:3], "site-packages"),os.path.join(prefix, "Extras", "lib", "python")]else: # any other Python distros on OSX work this waysitedirs = [os.path.join(prefix, "lib","python" + sys.version[:3], "site-packages")]elif os.sep == '/':sitedirs = [os.path.join(prefix,"lib","python" + sys.version[:3],"site-packages"),os.path.join(prefix, "lib", "site-python"),os.path.join(prefix, "python" + sys.version[:3], "lib-dynload")]lib64_dir = os.path.join(prefix, "lib64", "python" + sys.version[:3], "site-packages")if (os.path.exists(lib64_dir) andos.path.realpath(lib64_dir) not in [os.path.realpath(p) for p in sitedirs]):if _is_64bit:sitedirs.insert(0, lib64_dir)else:sitedirs.append(lib64_dir)try:# sys.getobjects only available in --with-pydebug buildsys.getobjectssitedirs.insert(0, 
os.path.join(sitedirs[0], 'debug'))except AttributeError:pass# Debian-specific dist-packages directories:sitedirs.append(os.path.join(prefix, "local/lib","python" + sys.version[:3],"dist-packages"))if sys.version[0] == '2':sitedirs.append(os.path.join(prefix, "lib","python" + sys.version[:3],"dist-packages"))else:sitedirs.append(os.path.join(prefix, "lib","python" + sys.version[0],"dist-packages"))sitedirs.append(os.path.join(prefix, "lib", "dist-python"))else:sitedirs = [prefix, os.path.join(prefix, "lib", "site-packages")]if sys.platform == 'darwin':# for framework builds *only* we add the standard Apple# locations. Currently only per-user, but /Library and# /Network/Library could be added tooif 'Python.framework' in prefix:home = os.environ.get('HOME')if home:sitedirs.append(os.path.join(home,'Library','Python',sys.version[:3],'site-packages'))for sitedir in sitedirs:if os.path.isdir(sitedir):addsitedir(sitedir, known_paths)return Nonedef check_enableusersite():"""Check if user site directory is safe for inclusionThe function tests for the command line flag (including environment var),process uid/gid equal to effective uid/gid.None: Disabled for security reasonsFalse: Disabled by user (command line option)True: Safe and enabled"""if hasattr(sys, 'flags') and getattr(sys.flags, 'no_user_site', False):return Falseif hasattr(os, "getuid") and hasattr(os, "geteuid"):# check process uid == effective uidif os.geteuid() != os.getuid():return Noneif hasattr(os, "getgid") and hasattr(os, "getegid"):# check process gid == effective gidif os.getegid() != os.getgid():return Nonereturn Truedef addusersitepackages(known_paths):"""Add a per user site-package to sys.pathEach user has its own python directory with site-packages in thehome directory.USER_BASE is the root directory for all Python versionsUSER_SITE is the user specific site-packages directoryUSER_SITE/.. 
can be used for data."""global USER_BASE, USER_SITE, ENABLE_USER_SITEenv_base = os.environ.get("PYTHONUSERBASE", None)def joinuser(*args):return os.path.expanduser(os.path.join(*args))#if sys.platform in ('os2emx', 'riscos'):# # Don't know what to put here# USER_BASE = ''# USER_SITE = ''if os.name == "nt":base = os.environ.get("APPDATA") or "~"if env_base:USER_BASE = env_baseelse:USER_BASE = joinuser(base, "Python")USER_SITE = os.path.join(USER_BASE,"Python" + sys.version[0] + sys.version[2],"site-packages")else:if env_base:USER_BASE = env_baseelse:USER_BASE = joinuser("~", ".local")USER_SITE = os.path.join(USER_BASE, "lib","python" + sys.version[:3],"site-packages")if ENABLE_USER_SITE and os.path.isdir(USER_SITE):addsitedir(USER_SITE, known_paths)if ENABLE_USER_SITE:for dist_libdir in ("lib", "local/lib"):user_site = os.path.join(USER_BASE, dist_libdir,"python" + sys.version[:3],"dist-packages")if os.path.isdir(user_site):addsitedir(user_site, known_paths)return known_pathsdef setBEGINLIBPATH():"""The OS/2 EMX port has optional extension modules that do double dutyas DLLs (and must use the .DLL file extension) for other extensions.The library search path needs to be amended so these will be foundduring module import. Use BEGINLIBPATH so that these are at the startof the library search path."""dllpath = os.path.join(sys.prefix, "Lib", "lib-dynload")libpath = os.environ['BEGINLIBPATH'].split(';')if libpath[-1]:libpath.append(dllpath)else:libpath[-1] = dllpathos.environ['BEGINLIBPATH'] = ';'.join(libpath)def setquit():"""Define new built-ins 'quit' and 'exit'.These are simply strings that display a hint on how to exit."""if os.sep == ':':eof = 'Cmd-Q'elif os.sep == '\\':eof = 'Ctrl-Z plus Return'else:eof = 'Ctrl-D (i.e. 
EOF)'class Quitter(object):def __init__(self, name):self.name = namedef __repr__(self):return 'Use %s() or %s to exit' % (self.name, eof)def __call__(self, code=None):# Shells like IDLE catch the SystemExit, but listen when their# stdin wrapper is closed.try:sys.stdin.close()except:passraise SystemExit(code)builtins.quit = Quitter('quit')builtins.exit = Quitter('exit')class _Printer(object):"""interactive prompt objects for printing the license text, a list ofcontributors and the copyright notice."""MAXLINES = 23def __init__(self, name, data, files=(), dirs=()):self.__name = nameself.__data = dataself.__files = filesself.__dirs = dirsself.__lines = Nonedef __setup(self):if self.__lines:returndata = Nonefor dir in self.__dirs:for filename in self.__files:filename = os.path.join(dir, filename)try:fp = open(filename, "rU")data = fp.read()fp.close()breakexcept IOError:passif data:breakif not data:data = self.__dataself.__lines = data.split('\n')self.__linecnt = len(self.__lines)def __repr__(self):self.__setup()if len(self.__lines) <= self.MAXLINES:return "\n".join(self.__lines)else:return "Type %s() to see the full %s text" % ((self.__name,)*2)def __call__(self):self.__setup()prompt = 'Hit Return for more, or q (and Return) to quit: 'lineno = 0while 1:try:for i in range(lineno, lineno + self.MAXLINES):print(self.__lines[i])except IndexError:breakelse:lineno += self.MAXLINESkey = Nonewhile key is None:try:key = raw_input(prompt)except NameError:key = input(prompt)if key not in ('', 'q'):key = Noneif key == 'q':breakdef setcopyright():"""Set 'copyright' and 'credits' in __builtin__"""builtins.copyright = _Printer("copyright", sys.copyright)if _is_jython:builtins.credits = _Printer("credits","Jython is maintained by the Jython developers (www.jython.org).")elif _is_pypy:builtins.credits = _Printer("credits","PyPy is maintained by the PyPy developers: http://pypy.org/")else:builtins.credits = _Printer("credits", """\Thanks to CWI, CNRI, BeOpen.com, Zope Corporation and a 
cast of thousandsfor supporting Python development. See www.python.org for more information.""")here = os.path.dirname(os.__file__)builtins.license = _Printer("license", "See http://www.python.org/%.3s/license.html" % sys.version,["LICENSE.txt", "LICENSE"],[os.path.join(here, os.pardir), here, os.curdir])class _Helper(object):"""Define the built-in 'help'.This is a wrapper around pydoc.help (with a twist)."""def __repr__(self):return "Type help() for interactive help, " \"or help(object) for help about object."def __call__(self, *args, **kwds):import pydocreturn pydoc.help(*args, **kwds)def sethelper():builtins.help = _Helper()def aliasmbcs():"""On Windows, some default encodings are not provided by Python,while they are always available as "mbcs" in each locale. Makethem usable by aliasing to "mbcs" in such a case."""if sys.platform == 'win32':import locale, codecsenc = locale.getdefaultlocale()[1]if enc.startswith('cp'): # "cp***" ?try:codecs.lookup(enc)except LookupError:import encodingsencodings._cache[enc] = encodings._unknownencodings.aliases.aliases[enc] = 'mbcs'def setencoding():"""Set the string encoding used by the Unicode implementation. 
Thedefault is 'ascii', but if you're willing to experiment, you canchange this."""encoding = "ascii" # Default value set by _PyUnicode_Init()if 0:# Enable to support locale aware default string encodings.import localeloc = locale.getdefaultlocale()if loc[1]:encoding = loc[1]if 0:# Enable to switch off string to Unicode coercion and implicit# Unicode to string conversion.encoding = "undefined"if encoding != "ascii":# On Non-Unicode builds this will raise an AttributeError...sys.setdefaultencoding(encoding) # Needs Python Unicode build !def execsitecustomize():"""Run custom site specific code, if available."""try:import sitecustomizeexcept ImportError:passdef virtual_install_main_packages():f = open(os.path.join(os.path.dirname(__file__), 'orig-prefix.txt'))sys.real_prefix = f.read().strip()f.close()pos = 2hardcoded_relative_dirs = []if sys.path[0] == '':pos += 1if _is_jython:paths = [os.path.join(sys.real_prefix, 'Lib')]elif _is_pypy:if sys.version_info > (3, 2):cpyver = '%d' % sys.version_info[0]elif sys.pypy_version_info >= (1, 5):cpyver = '%d.%d' % sys.version_info[:2]else:cpyver = '%d.%d.%d' % sys.version_info[:3]paths = [os.path.join(sys.real_prefix, 'lib_pypy'),os.path.join(sys.real_prefix, 'lib-python', cpyver)]if sys.pypy_version_info < (1, 9):paths.insert(1, os.path.join(sys.real_prefix,'lib-python', 'modified-%s' % cpyver))hardcoded_relative_dirs = paths[:] # for the special 'darwin' case below## This is hardcoded in the Python executable, but relative to sys.prefix:for path in paths[:]:plat_path = os.path.join(path, 'plat-%s' % sys.platform)if os.path.exists(plat_path):paths.append(plat_path)elif sys.platform == 'win32':paths = [os.path.join(sys.real_prefix, 'Lib'), os.path.join(sys.real_prefix, 'DLLs')]else:paths = [os.path.join(sys.real_prefix, 'lib', 'python'+sys.version[:3])]hardcoded_relative_dirs = paths[:] # for the special 'darwin' case belowlib64_path = os.path.join(sys.real_prefix, 'lib64', 'python'+sys.version[:3])if 
os.path.exists(lib64_path):if _is_64bit:paths.insert(0, lib64_path)else:paths.append(lib64_path)# This is hardcoded in the Python executable, but relative to# sys.prefix. Debian change: we need to add the multiarch triplet# here, which is where the real stuff lives. As per PEP 421, in# Python 3.3+, this lives in sys.implementation, while in Python 2.7# it lives in sys.try:arch = getattr(sys, 'implementation', sys)._multiarchexcept AttributeError:# This is a non-multiarch aware Python. Fallback to the old way.arch = sys.platformplat_path = os.path.join(sys.real_prefix, 'lib','python'+sys.version[:3],'plat-%s' % arch)if os.path.exists(plat_path):paths.append(plat_path)# This is hardcoded in the Python executable, but# relative to sys.prefix, so we have to fix up:for path in list(paths):tk_dir = os.path.join(path, 'lib-tk')if os.path.exists(tk_dir):paths.append(tk_dir)# These are hardcoded in the Apple's Python executable,# but relative to sys.prefix, so we have to fix them up:if sys.platform == 'darwin':hardcoded_paths = [os.path.join(relative_dir, module)for relative_dir in hardcoded_relative_dirsfor module in ('plat-darwin', 'plat-mac', 'plat-mac/lib-scriptpackages')]for path in hardcoded_paths:if os.path.exists(path):paths.append(path)sys.path.extend(paths)def force_global_eggs_after_local_site_packages():"""Force easy_installed eggs in the global environment to get placedin sys.path after all packages inside the virtualenv. Thismaintains the "least surprise" result that packages in thevirtualenv always mask global packages, never the other wayaround."""egginsert = getattr(sys, '__egginsert', 0)for i, path in enumerate(sys.path):if i > egginsert and path.startswith(sys.prefix):egginsert = isys.__egginsert = egginsert + 1def virtual_addsitepackages(known_paths):force_global_eggs_after_local_site_packages()return addsitepackages(known_paths, sys_prefix=sys.real_prefix)def fixclasspath():"""Adjust the special classpath sys.path entries for Jython. 
Theseentries should follow the base virtualenv lib directories."""paths = []classpaths = []for path in sys.path:if path == '__classpath__' or path.startswith('__pyclasspath__'):classpaths.append(path)else:paths.append(path)sys.path = pathssys.path.extend(classpaths)def execusercustomize():"""Run custom user specific code, if available."""try:import usercustomizeexcept ImportError:passdef main():global ENABLE_USER_SITEvirtual_install_main_packages()abs__file__()paths_in_sys = removeduppaths()if (os.name == "posix" and sys.path andos.path.basename(sys.path[-1]) == "Modules"):addbuilddir()if _is_jython:fixclasspath()GLOBAL_SITE_PACKAGES = not os.path.exists(os.path.join(os.path.dirname(__file__), 'no-global-site-packages.txt'))if not GLOBAL_SITE_PACKAGES:ENABLE_USER_SITE = Falseif ENABLE_USER_SITE is None:ENABLE_USER_SITE = check_enableusersite()paths_in_sys = addsitepackages(paths_in_sys)paths_in_sys = addusersitepackages(paths_in_sys)if GLOBAL_SITE_PACKAGES:paths_in_sys = virtual_addsitepackages(paths_in_sys)if sys.platform == 'os2emx':setBEGINLIBPATH()setquit()setcopyright()sethelper()aliasmbcs()setencoding()execsitecustomize()if ENABLE_USER_SITE:execusercustomize()# Remove sys.setdefaultencoding() so that users cannot change the# encoding after initialization. 
The test for presence is needed when# this module is run as a script, because this code is executed twice.if hasattr(sys, "setdefaultencoding"):del sys.setdefaultencodingmain()def _script():help = """\%s [--user-base] [--user-site]Without arguments print some useful informationWith arguments print the value of USER_BASE and/or USER_SITE separatedby '%s'.Exit codes with --user-base or --user-site:0 - user site directory is enabled1 - user site directory is disabled by user2 - uses site directory is disabled by super useror for security reasons>2 - unknown error"""args = sys.argv[1:]if not args:print("sys.path = [")for dir in sys.path:print(" %r," % (dir,))print("]")def exists(path):if os.path.isdir(path):return "exists"else:return "doesn't exist"print("USER_BASE: %r (%s)" % (USER_BASE, exists(USER_BASE)))print("USER_SITE: %r (%s)" % (USER_SITE, exists(USER_BASE)))print("ENABLE_USER_SITE: %r" % ENABLE_USER_SITE)sys.exit(0)buffer = []if '--user-base' in args:buffer.append(USER_BASE)if '--user-site' in args:buffer.append(USER_SITE)if buffer:print(os.pathsep.join(buffer))if ENABLE_USER_SITE:sys.exit(0)elif ENABLE_USER_SITE is False:sys.exit(1)elif ENABLE_USER_SITE is None:sys.exit(2)else:sys.exit(3)else:import textwrapprint(textwrap.dedent(help % (sys.argv[0], os.pathsep)))sys.exit(10)if __name__ == '__main__':_script()
wheel
{"classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6"], "description_content_type": "UNKNOWN", "extensions": {"python.commands": {"wrap_console": {"wheel": "wheel.tool:main"}}, "python.details": {"contacts": [{"email": "alex.gronholm@nextday.fi", "name": "Alex Gr\u00f6nholm", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst", "license": "LICENSE.txt"}, "project_urls": {"Home": "https://github.com/pypa/wheel"}}, "python.exports": {"console_scripts": {"wheel": "wheel.tool:main"}, "distutils.commands": {"bdist_wheel": "wheel.bdist_wheel:bdist_wheel"}}}, "extras": ["faster-signatures", "signatures", "test", "tool"], "generator": "bdist_wheel (0.30.0)", "keywords": ["wheel", "packaging"], "license": "MIT", "metadata_version": "2.0", "name": "wheel", "run_requires": [{"extra": "faster-signatures", "requires": ["ed25519ll"]}, {"extra": "test", "requires": ["jsonschema", "pytest (>=3.0.0)", "pytest-cov"]}, {"extra": "signatures", "requires": ["keyring", "keyrings.alt"]}, {"environment": "sys_platform!=\"win32\"", "extra": "signatures", "requires": ["pyxdg"]}], "summary": "A built-package format for Python.", "version": "0.30.0"}
[console_scripts]wheel = wheel.tool:main[distutils.commands]bdist_wheel = wheel.bdist_wheel:bdist_wheel
Wheel-Version: 1.0Generator: bdist_wheel (0.30.0)Root-Is-Purelib: trueTag: py2-none-anyTag: py3-none-any
wheel/__init__.py,sha256=ja92NKda3sstt4uKroYgFATu736whcI33p3GJNdslLQ,96wheel/__main__.py,sha256=K--m7mq-27NO0fm-a8KlthkucCe0w_-0hVxL3uDujkU,419wheel/archive.py,sha256=oEv42UnpxkoFMKcLXQ9RD8a8oic4X3oe2_H5FAgJ7_M,2376wheel/bdist_wheel.py,sha256=qKWdyvpkdmuLB4_GGIZsjmlcMLZuZDd8tRvaQI0w_eo,18852wheel/decorator.py,sha256=U2K77ZZ8x3x5vSIGCcEeh8GAxB6rABB7AlDwRukaoCk,541wheel/egg2wheel.py,sha256=me4Iaz4idCvS-xjfAzfb2dXXlXx_w6AgLjH6hi1Bt1A,3043wheel/install.py,sha256=zYQ-A8uQi-R2PwMvOh64YMlQDplqYpcBVM0EmbxZu8Y,18417wheel/metadata.py,sha256=SzI1MtzITZJuAJuvUVzEWi60VhgDbXSV_hapyiX0rlw,11561wheel/paths.py,sha256=OAtaJgCivlKvJKw1qC3YbJypvp2d38Eka8GQWdBWNZw,1129wheel/pep425tags.py,sha256=Lk9zYm1rrHG1X3RKlf9plcwpsoSZT8UR7fG3jhaoZrQ,5760wheel/pkginfo.py,sha256=GR76kupQzn1x9sKDaXuE6B6FsZ4OkfRtG7pndlXPvQ4,1257wheel/util.py,sha256=eJB-mrhMAaCGcoKhTLDYdpCf5N8BMLtX4usW_7qeZBg,4732wheel/wininst2wheel.py,sha256=afPAHWwa7FY0IkpG-BuuuY-dlB93VmFPrXff511NkBk,7772wheel/signatures/__init__.py,sha256=O7kZICZvXxN5YRkCYrPmAEr1LpGaZKJh5sLPWIRIoYE,3766wheel/signatures/djbec.py,sha256=jnfWxdS7dwLjiO6n0hy-4jLa_71SPrKWL0-7ocDrSHc,7035wheel/signatures/ed25519py.py,sha256=nFKDMq4LW2iJKk4IZKMxY46GyZNYPKxuWha9xYHk9lE,1669wheel/signatures/keys.py,sha256=k4j4yGZL31Dt2pa5TneIEeq6qkVIXEPExmFxiZxpE1Y,3299wheel/tool/__init__.py,sha256=rOy5VFvj-gTKgMwi_u2_iNu_Pq6aqw4rEfaciDTbmwg,13421wheel-0.30.0.dist-info/DESCRIPTION.rst,sha256=Alb3Ol--LhPgmWuBBPfzu54xzQ8J2skWNV34XCjhe0k,10549wheel-0.30.0.dist-info/LICENSE.txt,sha256=zKniDGrx_Pv2lAjzd3aShsvuvN7TNhAMm0o_NfvmNeQ,1125wheel-0.30.0.dist-info/METADATA,sha256=fYLxr6baQD-wDn4Yu8t-8fF7PJuiBTcThsl2UKBE7kg,11815wheel-0.30.0.dist-info/RECORD,,wheel-0.30.0.dist-info/WHEEL,sha256=kdsN-5OJAZIiHN-iO4Rhl82KyS0bDWf4uBwMbkNafr8,110wheel-0.30.0.dist-info/entry_points.txt,sha256=pTyeGVsucyfr_BXe5OQKuA1Bp5YKaIAWy5pejkq4Qx0,109wheel-0.30.0.dist-info/metadata.json,sha256=neXQocJnVqPTjr4zpuOVdxBGCmjrTsOs76AvP8ngyJY,1522wheel-0.30.0.dist-info/top_level.txt,sha256=HxSBIbgEstMPe4eFawhA66Mq
-QYHMopXVoAncfjb_1c,6../../../bin/wheel,sha256=QsrtKa7xSeWfGnx0ko_mlgpzl7GpF8cV5YMGIoo1nQI,286wheel-0.30.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4wheel/signatures/__init__.pyc,,wheel/egg2wheel.pyc,,wheel/util.pyc,,wheel/install.pyc,,wheel/paths.pyc,,wheel/wininst2wheel.pyc,,wheel/__init__.pyc,,wheel/signatures/djbec.pyc,,wheel/archive.pyc,,wheel/metadata.pyc,,wheel/pkginfo.pyc,,wheel/__main__.pyc,,wheel/pep425tags.pyc,,wheel/signatures/ed25519py.pyc,,wheel/signatures/keys.pyc,,wheel/decorator.pyc,,wheel/tool/__init__.pyc,,wheel/bdist_wheel.pyc,,
Metadata-Version: 2.0Name: wheelVersion: 0.30.0Summary: A built-package format for Python.Home-page: https://github.com/pypa/wheelAuthor: Alex GrönholmAuthor-email: alex.gronholm@nextday.fiLicense: MITDescription-Content-Type: UNKNOWNKeywords: wheel,packagingPlatform: UNKNOWNClassifier: Development Status :: 5 - Production/StableClassifier: Intended Audience :: DevelopersClassifier: License :: OSI Approved :: MIT LicenseClassifier: Programming Language :: PythonClassifier: Programming Language :: Python :: 2Classifier: Programming Language :: Python :: 2.7Classifier: Programming Language :: Python :: 3Classifier: Programming Language :: Python :: 3.4Classifier: Programming Language :: Python :: 3.5Classifier: Programming Language :: Python :: 3.6Provides-Extra: faster-signaturesRequires-Dist: ed25519ll; extra == 'faster-signatures'Provides-Extra: signaturesRequires-Dist: keyring; extra == 'signatures'Requires-Dist: keyrings.alt; extra == 'signatures'Provides-Extra: signaturesRequires-Dist: pyxdg; sys_platform!="win32" and extra == 'signatures'Provides-Extra: testRequires-Dist: jsonschema; extra == 'test'Requires-Dist: pytest (>=3.0.0); extra == 'test'Requires-Dist: pytest-cov; extra == 'test'Provides-Extra: toolWheel=====A built-package format for Python.A wheel is a ZIP-format archive with a specially formatted filenameand the .whl extension. It is designed to contain all the files for aPEP 376 compatible install in a way that is very close to the on-diskformat. Many packages will be properly installed with only the "Unpack"step (simply extracting the file onto sys.path), and the unpacked archivepreserves enough information to "Spread" (copy data and scripts to theirfinal locations) at any later time.The wheel project provides a `bdist_wheel` command for setuptools(requires setuptools >= 0.8.0). 
Wheel files can be installed with anewer `pip` from https://github.com/pypa/pip or with wheel's own commandline utility.The wheel documentation is at http://wheel.rtfd.org/. The file formatis documented in PEP 427 (http://www.python.org/dev/peps/pep-0427/).The reference implementation is at https://github.com/pypa/wheelWhy not egg?------------Python's egg format predates the packaging related standards we havetoday, the most important being PEP 376 "Database of Installed PythonDistributions" which specifies the .dist-info directory (instead of.egg-info) and PEP 426 "Metadata for Python Software Packages 2.0"which specifies how to express dependencies (instead of requires.txtin .egg-info).Wheel implements these things. It also provides a richer file namingconvention that communicates the Python implementation and ABI as wellas simply the language version used in a particular package.Unlike .egg, wheel will be a fully-documented standard at the binarylevel that is truly easy to install even if you do not want to use thereference implementation.Code of Conduct---------------Everyone interacting in the wheel project's codebases, issue trackers, chatrooms, and mailing lists is expected to follow the `PyPA Code of Conduct`_... _PyPA Code of Conduct: https://www.pypa.io/en/latest/code-of-conduct/0.30.0======- Added py-limited-api {cp32|cp33|cp34|...} flag to produce cpNN.abi3.{arch}tags on CPython 3.- Documented the ``license_file`` metadata key- Improved Python, abi tagging for `wheel convert`. Thanks Ales Erjavec.- Fixed `>` being prepended to lines starting with "From" in the long description- Added support for specifying a build number (as per PEP 427).Thanks Ian Cordasco.- Made the order of files in generated ZIP files deterministic.Thanks Matthias Bach.- Made the order of requirements in metadata deterministic. 
Thanks Chris Lamb.- Fixed `wheel install` clobbering existing files- Improved the error message when trying to verify an unsigned wheel file- Removed support for Python 2.6, 3.2 and 3.3.0.29.0======- Fix compression type of files in archive (Issue #155, Pull Request #62,thanks Xavier Fernandez)0.28.0======- Fix file modes in archive (Issue #154)0.27.0======- Support forcing a platform tag using `--plat-name` on pure-Python wheels, aswell as nonstandard platform tags on non-pure wheels (Pull Request #60, Issue#144, thanks Andrés Díaz)- Add SOABI tags to platform-specific wheels built for Python 2.X (Pull Request#55, Issue #63, Issue #101)- Support reproducible wheel files, wheels that can be rebuilt and will hash tothe same values as previous builds (Pull Request #52, Issue #143, thanksBarry Warsaw)- Support for changes in keyring >= 8.0 (Pull Request #61, thanks Jason R.Coombs)- Use the file context manager when checking if dependency_links.txt is empty,fixes problems building wheels under PyPy on Windows (Issue #150, thanksCosimo Lupo)- Don't attempt to (recursively) create a build directory ending with `..`(invalid on all platforms, but code was only executed on Windows) (Issue #91)- Added the PyPA Code of Conduct (Pull Request #56)0.26.0======- Fix multiple entrypoint comparison failure on Python 3 (Issue #148)0.25.0======- Add Python 3.5 to tox configuration- Deterministic (sorted) metadata- Fix tagging for Python 3.5 compatibility- Support py2-none-'arch' and py3-none-'arch' tags- Treat data-only wheels as pure- Write to temporary file and rename when using wheel install --force0.24.0======- The python tag used for pure-python packages is now .pyN (major versiononly). This change actually occurred in 0.23.0 when the --python-tagoption was added, but was not explicitly mentioned in the changelog then.- wininst2wheel and egg2wheel removed. 
Use "wheel convert [archive]"instead.- Wheel now supports setuptools style conditional requirements via theextras_require={} syntax. Separate 'extra' names from conditions usingthe : character. Wheel's own setup.py does this. (The empty-stringextra is the same as install_requires.) These conditional requirementsshould work the same whether the package is installed by wheel orby setup.py.0.23.0======- Compatibility tag flags added to the bdist_wheel command- sdist should include files necessary for tests- 'wheel convert' can now also convert unpacked eggs to wheel- Rename pydist.json to metadata.json to avoid stepping on the PEP- The --skip-scripts option has been removed, and not generating scripts is nowthe default. The option was a temporary approach until installers couldgenerate scripts themselves. That is now the case with pip 1.5 and later.Note that using pip 1.4 to install a wheel without scripts will leave theinstallation without entry-point wrappers. The "wheel install-scripts"command can be used to generate the scripts in such cases.- Thank you contributors0.22.0======- Include entry_points.txt, scripts a.k.a. commands, in experimentalpydist.json- Improved test_requires parsing- Python 2.6 fixes, "wheel version" command courtesy pombredanne0.21.0======- Pregenerated scripts are the default again.- "setup.py bdist_wheel --skip-scripts" turns them off.- setuptools is no longer a listed requirement for the 'wheel'package. It is of course still required in order for bdist_wheelto work.- "python -m wheel" avoids importing pkg_resources until it's necessary.0.20.0======- No longer include console_scripts in wheels. 
Ordinary scripts (shell files,standalone Python files) are included as usual.- Include new command "python -m wheel install-scripts [distribution[distribution ...]]" to install the console_scripts (setuptools-stylescripts using pkg_resources) for a distribution.0.19.0======- pymeta.json becomes pydist.json0.18.0======- Python 3 Unicode improvements0.17.0======- Support latest PEP-426 "pymeta.json" (json-format metadata)0.16.0======- Python 2.6 compatibility bugfix (thanks John McFarlane)- Non-prerelease version number1.0.0a2=======- Bugfix for C-extension tags for CPython 3.3 (using SOABI)1.0.0a1=======- Bugfix for bdist_wininst converter "wheel convert"- Bugfix for dists where "is pure" is None instead of True or False1.0.0a0=======- Update for version 1.0 of Wheel (PEP accepted).- Python 3 fix for moving Unicode Description to metadata body- Include rudimentary API documentation in Sphinx (thanks Kevin Horn)0.15.0======- Various improvements0.14.0======- Changed the signature format to better comply with the current JWS spec.Breaks all existing signatures.- Include ``wheel unsign`` command to remove RECORD.jws from an archive.- Put the description in the newly allowed payload section of PKG-INFO(METADATA) files.0.13.0======- Use distutils instead of sysconfig to get installation paths; can installheaders.- Improve WheelFile() sort.- Allow bootstrap installs without any pkg_resources.0.12.0======- Unit test for wheel.tool.install0.11.0======- API cleanup0.10.3======- Scripts fixer fix0.10.2======- Fix keygen0.10.1======- Preserve attributes on install.0.10.0======- Include a copy of pkg_resources. Wheel can now install into a virtualenvthat does not have distribute (though most packages still requirepkg_resources to actually work; wheel install distribute)- Define a new setup.cfg section [wheel]. universal=1 willapply the py2.py3-none-any tag for pure python wheels.0.9.7=====- Only import dirspec when needed. 
dirspec is only needed to find theconfiguration for keygen/signing operations.0.9.6=====- requires-dist from setup.cfg overwrites any requirements from setup.pyCare must be taken that the requirements are the same in both cases,or just always install from wheel.- drop dirspec requirement on win32- improved command line utility, adds 'wheel convert [egg or wininst]' toconvert legacy binary formats to wheel0.9.5=====- Wheel's own wheel file can be executed by Python, and can install itself:``python wheel-0.9.5-py27-none-any/wheel install ...``- Use argparse; basic ``wheel install`` command should run with only stdlibdependencies.- Allow requires_dist in setup.cfg's [metadata] section. In addition todependencies in setup.py, but will only be interpreted when installingfrom wheel, not from sdist. Can be qualified with environment markers.0.9.4=====- Fix wheel.signatures in sdist0.9.3=====- Integrated digital signatures support without C extensions.- Integrated "wheel install" command (single package, no dependencyresolution) including compatibility check.- Support Python 3.3- Use Metadata 1.3 (PEP 426)0.9.2=====- Automatic signing if WHEEL_TOOL points to the wheel binary- Even more Python 3 fixes0.9.1=====- 'wheel sign' uses the keys generated by 'wheel keygen' (instead of generatinga new key at random each time)- Python 2/3 encoding/decoding fixes- Run tests on Python 2.6 (without signature verification)0.9===- Updated digital signatures scheme- Python 3 support for digital signatures- Always verify RECORD hashes on extract- "wheel" command line tool to sign, verify, unpack wheel files0.8===- none/any draft pep tags update- improved wininst2wheel script- doc changes and other improvements0.7===- sort .dist-info at end of wheel archive- Windows & Python 3 fixes from Paul Moore- pep8- scripts to convert wininst & egg to wheel0.6===- require distribute >= 0.6.28- stop using verlib0.5===- working pretty well0.4.2=====- hyphenated name fix0.4===- improve test coverage- 
improve Windows compatibility- include tox.ini courtesy of Marc Abramowitz- draft hmac sha-256 signing function0.3===- prototype egg2wheel conversion script0.2===- Python 3 compatibility0.1===- Initial version
"wheel" copyright (c) 2012-2014 Daniel Holth <dholth@fastmail.fm> andcontributors.The MIT LicensePermission is hereby granted, free of charge, to any person obtaining acopy of this software and associated documentation files (the "Software"),to deal in the Software without restriction, including without limitationthe rights to use, copy, modify, merge, publish, distribute, sublicense,and/or sell copies of the Software, and to permit persons to whom theSoftware is furnished to do so, subject to the following conditions:The above copyright notice and this permission notice shall be includedin all copies or substantial portions of the Software.THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS ORIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALLTHE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OROTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OROTHER DEALINGS IN THE SOFTWARE.
pip
Wheel=====A built-package format for Python.A wheel is a ZIP-format archive with a specially formatted filenameand the .whl extension. It is designed to contain all the files for aPEP 376 compatible install in a way that is very close to the on-diskformat. Many packages will be properly installed with only the "Unpack"step (simply extracting the file onto sys.path), and the unpacked archivepreserves enough information to "Spread" (copy data and scripts to theirfinal locations) at any later time.The wheel project provides a `bdist_wheel` command for setuptools(requires setuptools >= 0.8.0). Wheel files can be installed with anewer `pip` from https://github.com/pypa/pip or with wheel's own commandline utility.The wheel documentation is at http://wheel.rtfd.org/. The file formatis documented in PEP 427 (http://www.python.org/dev/peps/pep-0427/).The reference implementation is at https://github.com/pypa/wheelWhy not egg?------------Python's egg format predates the packaging related standards we havetoday, the most important being PEP 376 "Database of Installed PythonDistributions" which specifies the .dist-info directory (instead of.egg-info) and PEP 426 "Metadata for Python Software Packages 2.0"which specifies how to express dependencies (instead of requires.txtin .egg-info).Wheel implements these things. It also provides a richer file namingconvention that communicates the Python implementation and ABI as wellas simply the language version used in a particular package.Unlike .egg, wheel will be a fully-documented standard at the binarylevel that is truly easy to install even if you do not want to use thereference implementation.Code of Conduct---------------Everyone interacting in the wheel project's codebases, issue trackers, chatrooms, and mailing lists is expected to follow the `PyPA Code of Conduct`_... 
_PyPA Code of Conduct: https://www.pypa.io/en/latest/code-of-conduct/0.30.0======- Added py-limited-api {cp32|cp33|cp34|...} flag to produce cpNN.abi3.{arch}tags on CPython 3.- Documented the ``license_file`` metadata key- Improved Python, abi tagging for `wheel convert`. Thanks Ales Erjavec.- Fixed `>` being prepended to lines starting with "From" in the long description- Added support for specifying a build number (as per PEP 427).Thanks Ian Cordasco.- Made the order of files in generated ZIP files deterministic.Thanks Matthias Bach.- Made the order of requirements in metadata deterministic. Thanks Chris Lamb.- Fixed `wheel install` clobbering existing files- Improved the error message when trying to verify an unsigned wheel file- Removed support for Python 2.6, 3.2 and 3.3.0.29.0======- Fix compression type of files in archive (Issue #155, Pull Request #62,thanks Xavier Fernandez)0.28.0======- Fix file modes in archive (Issue #154)0.27.0======- Support forcing a platform tag using `--plat-name` on pure-Python wheels, aswell as nonstandard platform tags on non-pure wheels (Pull Request #60, Issue#144, thanks Andrés Díaz)- Add SOABI tags to platform-specific wheels built for Python 2.X (Pull Request#55, Issue #63, Issue #101)- Support reproducible wheel files, wheels that can be rebuilt and will hash tothe same values as previous builds (Pull Request #52, Issue #143, thanksBarry Warsaw)- Support for changes in keyring >= 8.0 (Pull Request #61, thanks Jason R.Coombs)- Use the file context manager when checking if dependency_links.txt is empty,fixes problems building wheels under PyPy on Windows (Issue #150, thanksCosimo Lupo)- Don't attempt to (recursively) create a build directory ending with `..`(invalid on all platforms, but code was only executed on Windows) (Issue #91)- Added the PyPA Code of Conduct (Pull Request #56)0.26.0======- Fix multiple entrypoint comparison failure on Python 3 (Issue #148)0.25.0======- Add Python 3.5 to tox configuration- 
Deterministic (sorted) metadata- Fix tagging for Python 3.5 compatibility- Support py2-none-'arch' and py3-none-'arch' tags- Treat data-only wheels as pure- Write to temporary file and rename when using wheel install --force0.24.0======- The python tag used for pure-python packages is now .pyN (major versiononly). This change actually occurred in 0.23.0 when the --python-tagoption was added, but was not explicitly mentioned in the changelog then.- wininst2wheel and egg2wheel removed. Use "wheel convert [archive]"instead.- Wheel now supports setuptools style conditional requirements via theextras_require={} syntax. Separate 'extra' names from conditions usingthe : character. Wheel's own setup.py does this. (The empty-stringextra is the same as install_requires.) These conditional requirementsshould work the same whether the package is installed by wheel orby setup.py.0.23.0======- Compatibility tag flags added to the bdist_wheel command- sdist should include files necessary for tests- 'wheel convert' can now also convert unpacked eggs to wheel- Rename pydist.json to metadata.json to avoid stepping on the PEP- The --skip-scripts option has been removed, and not generating scripts is nowthe default. The option was a temporary approach until installers couldgenerate scripts themselves. That is now the case with pip 1.5 and later.Note that using pip 1.4 to install a wheel without scripts will leave theinstallation without entry-point wrappers. The "wheel install-scripts"command can be used to generate the scripts in such cases.- Thank you contributors0.22.0======- Include entry_points.txt, scripts a.k.a. commands, in experimentalpydist.json- Improved test_requires parsing- Python 2.6 fixes, "wheel version" command courtesy pombredanne0.21.0======- Pregenerated scripts are the default again.- "setup.py bdist_wheel --skip-scripts" turns them off.- setuptools is no longer a listed requirement for the 'wheel'package. 
It is of course still required in order for bdist_wheelto work.- "python -m wheel" avoids importing pkg_resources until it's necessary.0.20.0======- No longer include console_scripts in wheels. Ordinary scripts (shell files,standalone Python files) are included as usual.- Include new command "python -m wheel install-scripts [distribution[distribution ...]]" to install the console_scripts (setuptools-stylescripts using pkg_resources) for a distribution.0.19.0======- pymeta.json becomes pydist.json0.18.0======- Python 3 Unicode improvements0.17.0======- Support latest PEP-426 "pymeta.json" (json-format metadata)0.16.0======- Python 2.6 compatibility bugfix (thanks John McFarlane)- Non-prerelease version number1.0.0a2=======- Bugfix for C-extension tags for CPython 3.3 (using SOABI)1.0.0a1=======- Bugfix for bdist_wininst converter "wheel convert"- Bugfix for dists where "is pure" is None instead of True or False1.0.0a0=======- Update for version 1.0 of Wheel (PEP accepted).- Python 3 fix for moving Unicode Description to metadata body- Include rudimentary API documentation in Sphinx (thanks Kevin Horn)0.15.0======- Various improvements0.14.0======- Changed the signature format to better comply with the current JWS spec.Breaks all existing signatures.- Include ``wheel unsign`` command to remove RECORD.jws from an archive.- Put the description in the newly allowed payload section of PKG-INFO(METADATA) files.0.13.0======- Use distutils instead of sysconfig to get installation paths; can installheaders.- Improve WheelFile() sort.- Allow bootstrap installs without any pkg_resources.0.12.0======- Unit test for wheel.tool.install0.11.0======- API cleanup0.10.3======- Scripts fixer fix0.10.2======- Fix keygen0.10.1======- Preserve attributes on install.0.10.0======- Include a copy of pkg_resources. 
Wheel can now install into a virtualenvthat does not have distribute (though most packages still requirepkg_resources to actually work; wheel install distribute)- Define a new setup.cfg section [wheel]. universal=1 willapply the py2.py3-none-any tag for pure python wheels.0.9.7=====- Only import dirspec when needed. dirspec is only needed to find theconfiguration for keygen/signing operations.0.9.6=====- requires-dist from setup.cfg overwrites any requirements from setup.pyCare must be taken that the requirements are the same in both cases,or just always install from wheel.- drop dirspec requirement on win32- improved command line utility, adds 'wheel convert [egg or wininst]' toconvert legacy binary formats to wheel0.9.5=====- Wheel's own wheel file can be executed by Python, and can install itself:``python wheel-0.9.5-py27-none-any/wheel install ...``- Use argparse; basic ``wheel install`` command should run with only stdlibdependencies.- Allow requires_dist in setup.cfg's [metadata] section. In addition todependencies in setup.py, but will only be interpreted when installingfrom wheel, not from sdist. 
Can be qualified with environment markers.0.9.4=====- Fix wheel.signatures in sdist0.9.3=====- Integrated digital signatures support without C extensions.- Integrated "wheel install" command (single package, no dependencyresolution) including compatibility check.- Support Python 3.3- Use Metadata 1.3 (PEP 426)0.9.2=====- Automatic signing if WHEEL_TOOL points to the wheel binary- Even more Python 3 fixes0.9.1=====- 'wheel sign' uses the keys generated by 'wheel keygen' (instead of generatinga new key at random each time)- Python 2/3 encoding/decoding fixes- Run tests on Python 2.6 (without signature verification)0.9===- Updated digital signatures scheme- Python 3 support for digital signatures- Always verify RECORD hashes on extract- "wheel" command line tool to sign, verify, unpack wheel files0.8===- none/any draft pep tags update- improved wininst2wheel script- doc changes and other improvements0.7===- sort .dist-info at end of wheel archive- Windows & Python 3 fixes from Paul Moore- pep8- scripts to convert wininst & egg to wheel0.6===- require distribute >= 0.6.28- stop using verlib0.5===- working pretty well0.4.2=====- hyphenated name fix0.4===- improve test coverage- improve Windows compatibility- include tox.ini courtesy of Marc Abramowitz- draft hmac sha-256 signing function0.3===- prototype egg2wheel conversion script0.2===- Python 3 compatibility0.1===- Initial version
#!/usr/bin/env python
"""Convert bdist_wininst (.exe) installers into wheel files."""
import distutils.dist
import os.path
import re
import sys
import tempfile
import zipfile
from argparse import ArgumentParser
from glob import iglob
from shutil import rmtree

import wheel.bdist_wheel
from wheel.archive import archive_wheelfile

# Matches the egg-info directory name embedded in a wininst archive,
# capturing name/version (and optionally pyver/arch).
# FIX: the dot before 'egg-info' is now escaped -- previously it was a bare
# '.' and matched any character, accepting invalid names like 'fooXegg-info'.
egg_info_re = re.compile(r'''(^|/)(?P<name>[^/]+?)-(?P<ver>.+?)
    (-(?P<pyver>.+?))?(-(?P<arch>.+?))?\.egg-info(/|$)''', re.VERBOSE)


def parse_info(wininfo_name, egginfo_name):
    """Extract metadata from filenames.

    Extracts the 4 metadata items needed (name, version, pyversion, arch)
    from the installer filename and the name of the egg-info directory
    embedded in the zipfile (if any).

    The egginfo filename has the format::

        name-ver(-pyver)(-arch).egg-info

    The installer filename has the format::

        name-ver.arch(-pyver).exe

    Some things to note:

    1. The installer filename is not definitive. An installer can be renamed
       and work perfectly well as an installer. So more reliable data should
       be used whenever possible.
    2. The egg-info data should be preferred for the name and version, because
       these come straight from the distutils metadata, and are mandatory.
    3. The pyver from the egg-info data should be ignored, as it is
       constructed from the version of Python used to build the installer,
       which is irrelevant - the installer filename is correct here (even to
       the point that when it's not there, any version is implied).
    4. The architecture must be taken from the installer filename, as it is
       not included in the egg-info data.
    5. Architecture-neutral installers still have an architecture because the
       installer format itself (being executable) is architecture-specific. We
       should therefore ignore the architecture if the content is pure-python.

    :raises ValueError: if either filename cannot be parsed.
    """
    egginfo = None
    if egginfo_name:
        egginfo = egg_info_re.search(egginfo_name)
        if not egginfo:
            raise ValueError("Egg info filename %s is not valid" % (egginfo_name,))

    # Parse the wininst filename
    # 1. Distribution name (up to the first '-')
    w_name, sep, rest = wininfo_name.partition('-')
    if not sep:
        raise ValueError("Installer filename %s is not valid" % (wininfo_name,))

    # Strip '.exe'
    rest = rest[:-4]
    # 2. Python version (from the last '-', must start with 'py')
    rest2, sep, w_pyver = rest.rpartition('-')
    if sep and w_pyver.startswith('py'):
        rest = rest2
        w_pyver = w_pyver.replace('.', '')
    else:
        # Not version specific - use py2.py3. While it is possible that
        # pure-Python code is not compatible with both Python 2 and 3, there
        # is no way of knowing from the wininst format, so we assume the best
        # here (the user can always manually rename the wheel to be more
        # restrictive if needed).
        w_pyver = 'py2.py3'
    # 3. Version and architecture
    w_ver, sep, w_arch = rest.rpartition('.')
    if not sep:
        raise ValueError("Installer filename %s is not valid" % (wininfo_name,))

    # Egg-info name/version are authoritative when present (see note 2).
    if egginfo:
        w_name = egginfo.group('name')
        w_ver = egginfo.group('ver')

    return dict(name=w_name, ver=w_ver, arch=w_arch, pyver=w_pyver)


def bdist_wininst2wheel(path, dest_dir=os.path.curdir):
    """Convert the wininst installer at *path* to a wheel in *dest_dir*."""
    bdw = zipfile.ZipFile(path)

    # Search for egg-info in the archive
    egginfo_name = None
    for filename in bdw.namelist():
        if '.egg-info' in filename:
            egginfo_name = filename
            break

    info = parse_info(os.path.basename(path), egginfo_name)

    # A PLATLIB top-level directory means the content is platform-specific.
    root_is_purelib = True
    for zipinfo in bdw.infolist():
        if zipinfo.filename.startswith('PLATLIB'):
            root_is_purelib = False
            break
    if root_is_purelib:
        paths = {'purelib': ''}
    else:
        paths = {'platlib': ''}

    dist_info = "%(name)s-%(ver)s" % info
    datadir = "%s.data/" % dist_info

    # rewrite paths to trick ZipFile into extracting an egg
    # XXX grab wininst .ini - between .exe, padding, and first zip file.
    members = []
    egginfo_name = ''
    for zipinfo in bdw.infolist():
        key, basename = zipinfo.filename.split('/', 1)
        key = key.lower()
        basepath = paths.get(key, None)
        if basepath is None:
            # Non-library top-level dirs (SCRIPTS, DATA, ...) go to .data/
            basepath = datadir + key.lower() + '/'
        oldname = zipinfo.filename
        newname = basepath + basename
        zipinfo.filename = newname
        del bdw.NameToInfo[oldname]
        bdw.NameToInfo[newname] = zipinfo
        # Collect member names, but omit '' (from an entry like "PLATLIB/")
        if newname:
            members.append(newname)
        # Remember egg-info name for the egg2dist call below
        if not egginfo_name:
            if newname.endswith('.egg-info'):
                egginfo_name = newname
            elif '.egg-info/' in newname:
                egginfo_name, sep, _ = newname.rpartition('/')
    # Renamed from 'dir' to avoid shadowing the builtin.
    tmp_dir = tempfile.mkdtemp(suffix="_b2w")
    bdw.extractall(tmp_dir, members)

    # egg2wheel
    abi = 'none'
    pyver = info['pyver']
    arch = (info['arch'] or 'any').replace('.', '_').replace('-', '_')
    # Wininst installers always have arch even if they are not
    # architecture-specific (because the format itself is).
    # So, assume the content is architecture-neutral if root is purelib.
    if root_is_purelib:
        arch = 'any'
    # If the installer is architecture-specific, it's almost certainly also
    # CPython-specific.
    if arch != 'any':
        pyver = pyver.replace('py', 'cp')
    wheel_name = '-'.join((dist_info, pyver, abi, arch))

    if root_is_purelib:
        bw = wheel.bdist_wheel.bdist_wheel(distutils.dist.Distribution())
    else:
        # Platform-specific content needs the full tag override below.
        bw = _bdist_wheel_tag(distutils.dist.Distribution())

    bw.root_is_pure = root_is_purelib
    bw.python_tag = pyver
    bw.plat_name_supplied = True
    bw.plat_name = info['arch'] or 'any'

    if not root_is_purelib:
        bw.full_tag_supplied = True
        bw.full_tag = (pyver, abi, arch)

    dist_info_dir = os.path.join(tmp_dir, '%s.dist-info' % dist_info)
    bw.egg2dist(os.path.join(tmp_dir, egginfo_name), dist_info_dir)
    bw.write_wheelfile(dist_info_dir, generator='wininst2wheel')
    bw.write_record(tmp_dir, dist_info_dir)

    archive_wheelfile(os.path.join(dest_dir, wheel_name), tmp_dir)
    rmtree(tmp_dir)


class _bdist_wheel_tag(wheel.bdist_wheel.bdist_wheel):
    # allow the client to override the default generated wheel tag
    # The default bdist_wheel implementation uses python and abi tags
    # of the running python process. This is not suitable for
    # generating/repackaging prebuild binaries.
    full_tag_supplied = False
    full_tag = None  # None or a (pytag, soabitag, plattag) triple

    def get_tag(self):
        if self.full_tag_supplied and self.full_tag is not None:
            return self.full_tag
        else:
            return super(_bdist_wheel_tag, self).get_tag()


def main():
    """Command-line entry point: convert each installer matching the globs."""
    parser = ArgumentParser()
    parser.add_argument('installers', nargs='*', help="Installers to convert")
    parser.add_argument('--dest-dir', '-d', default=os.path.curdir,
                        help="Directory to store wheels (default %(default)s)")
    parser.add_argument('--verbose', '-v', action='store_true')
    args = parser.parse_args()
    for pat in args.installers:
        for installer in iglob(pat):
            if args.verbose:
                sys.stdout.write("{0}... ".format(installer))
            bdist_wininst2wheel(installer, args.dest_dir)
            if args.verbose:
                sys.stdout.write("OK\n")


if __name__ == "__main__":
    main()
"""Utility functions."""import base64import hashlibimport jsonimport osimport sysfrom collections import OrderedDict__all__ = ['urlsafe_b64encode', 'urlsafe_b64decode', 'utf8','to_json', 'from_json', 'matches_requirement']# For encoding ascii back and forth between bytestrings, as is repeatedly# necessary in JSON-based crypto under Python 3if sys.version_info[0] < 3:text_type = unicode # noqa: F821def native(s):return selse:text_type = strdef native(s):if isinstance(s, bytes):return s.decode('ascii')return sdef urlsafe_b64encode(data):"""urlsafe_b64encode without padding"""return base64.urlsafe_b64encode(data).rstrip(binary('='))def urlsafe_b64decode(data):"""urlsafe_b64decode without padding"""pad = b'=' * (4 - (len(data) & 3))return base64.urlsafe_b64decode(data + pad)def to_json(o):"""Convert given data to JSON."""return json.dumps(o, sort_keys=True)def from_json(j):"""Decode a JSON payload."""return json.loads(j)def open_for_csv(name, mode):if sys.version_info[0] < 3:nl = {}bin = 'b'else:nl = {'newline': ''}bin = ''return open(name, mode + bin, **nl)def utf8(data):"""Utf-8 encode data."""if isinstance(data, text_type):return data.encode('utf-8')return datadef binary(s):if isinstance(s, text_type):return s.encode('ascii')return sclass HashingFile(object):def __init__(self, path, mode, hashtype='sha256'):self.fd = open(path, mode)self.hashtype = hashtypeself.hash = hashlib.new(hashtype)self.length = 0def write(self, data):self.hash.update(data)self.length += len(data)self.fd.write(data)def close(self):self.fd.close()def digest(self):if self.hashtype == 'md5':return self.hash.hexdigest()digest = self.hash.digest()return self.hashtype + '=' + native(urlsafe_b64encode(digest))def __enter__(self):return selfdef __exit__(self, exc_type, exc_val, exc_tb):self.fd.close()class OrderedDefaultDict(OrderedDict):def __init__(self, *args, **kwargs):if not args:self.default_factory = Noneelse:if not (args[0] is None or callable(args[0])):raise TypeError('first argument must be 
callable or None')self.default_factory = args[0]args = args[1:]super(OrderedDefaultDict, self).__init__(*args, **kwargs)def __missing__(self, key):if self.default_factory is None:raise KeyError(key)self[key] = default = self.default_factory()return defaultif sys.platform == 'win32':import ctypes.wintypes# CSIDL_APPDATA for reference - not used here for compatibility with# dirspec, which uses LOCAL_APPDATA and COMMON_APPDATA in that ordercsidl = dict(CSIDL_APPDATA=26, CSIDL_LOCAL_APPDATA=28, CSIDL_COMMON_APPDATA=35)def get_path(name):SHGFP_TYPE_CURRENT = 0buf = ctypes.create_unicode_buffer(ctypes.wintypes.MAX_PATH)ctypes.windll.shell32.SHGetFolderPathW(0, csidl[name], 0, SHGFP_TYPE_CURRENT, buf)return buf.valuedef save_config_path(*resource):appdata = get_path("CSIDL_LOCAL_APPDATA")path = os.path.join(appdata, *resource)if not os.path.isdir(path):os.makedirs(path)return pathdef load_config_paths(*resource):ids = ["CSIDL_LOCAL_APPDATA", "CSIDL_COMMON_APPDATA"]for id in ids:base = get_path(id)path = os.path.join(base, *resource)if os.path.exists(path):yield pathelse:def save_config_path(*resource):import xdg.BaseDirectoryreturn xdg.BaseDirectory.save_config_path(*resource)def load_config_paths(*resource):import xdg.BaseDirectoryreturn xdg.BaseDirectory.load_config_paths(*resource)def matches_requirement(req, wheels):"""List of wheels matching a requirement.:param req: The requirement to satisfy:param wheels: List of wheels to search."""try:from pkg_resources import Distribution, Requirementexcept ImportError:raise RuntimeError("Cannot use requirements without pkg_resources")req = Requirement.parse(req)selected = []for wf in wheels:f = wf.parsed_filenamedist = Distribution(project_name=f.group("name"), version=f.group("ver"))if dist in req:selected.append(wf)return selected
"""Wheel command-line utility."""import argparseimport hashlibimport jsonimport osimport sysfrom glob import iglobfrom .. import signaturesfrom ..install import WheelFile, VerifyingZipFilefrom ..paths import get_install_commandfrom ..util import urlsafe_b64decode, urlsafe_b64encode, native, binary, matches_requirementdef require_pkgresources(name):try:import pkg_resources # noqa: F401except ImportError:raise RuntimeError("'{0}' needs pkg_resources (part of setuptools).".format(name))class WheelError(Exception):pass# For testabilitydef get_keyring():try:from ..signatures import keysimport keyringassert keyring.get_keyring().priorityexcept (ImportError, AssertionError):raise WheelError("Install wheel[signatures] (requires keyring, keyrings.alt, pyxdg) for signatures.")return keys.WheelKeys, keyringdef keygen(get_keyring=get_keyring):"""Generate a public/private key pair."""WheelKeys, keyring = get_keyring()ed25519ll = signatures.get_ed25519ll()wk = WheelKeys().load()keypair = ed25519ll.crypto_sign_keypair()vk = native(urlsafe_b64encode(keypair.vk))sk = native(urlsafe_b64encode(keypair.sk))kr = keyring.get_keyring()kr.set_password("wheel", vk, sk)sys.stdout.write("Created Ed25519 keypair with vk={0}\n".format(vk))sys.stdout.write("in {0!r}\n".format(kr))sk2 = kr.get_password('wheel', vk)if sk2 != sk:raise WheelError("Keyring is broken. 
Could not retrieve secret key.")sys.stdout.write("Trusting {0} to sign and verify all packages.\n".format(vk))wk.add_signer('+', vk)wk.trust('+', vk)wk.save()def sign(wheelfile, replace=False, get_keyring=get_keyring):"""Sign a wheel"""WheelKeys, keyring = get_keyring()ed25519ll = signatures.get_ed25519ll()wf = WheelFile(wheelfile, append=True)wk = WheelKeys().load()name = wf.parsed_filename.group('name')sign_with = wk.signers(name)[0]sys.stdout.write("Signing {0} with {1}\n".format(name, sign_with[1]))vk = sign_with[1]kr = keyring.get_keyring()sk = kr.get_password('wheel', vk)keypair = ed25519ll.Keypair(urlsafe_b64decode(binary(vk)),urlsafe_b64decode(binary(sk)))record_name = wf.distinfo_name + '/RECORD'sig_name = wf.distinfo_name + '/RECORD.jws'if sig_name in wf.zipfile.namelist():raise WheelError("Wheel is already signed.")record_data = wf.zipfile.read(record_name)payload = {"hash": "sha256=" + native(urlsafe_b64encode(hashlib.sha256(record_data).digest()))}sig = signatures.sign(payload, keypair)wf.zipfile.writestr(sig_name, json.dumps(sig, sort_keys=True))wf.zipfile.close()def unsign(wheelfile):"""Remove RECORD.jws from a wheel by truncating the zip file.RECORD.jws must be at the end of the archive. 
The zip file must be anordinary archive, with the compressed files and the directory in the sameorder, and without any non-zip content after the truncation point."""vzf = VerifyingZipFile(wheelfile, "a")info = vzf.infolist()if not (len(info) and info[-1].filename.endswith('/RECORD.jws')):raise WheelError('The wheel is not signed (RECORD.jws not found at end of the archive).')vzf.pop()vzf.close()def verify(wheelfile):"""Verify a wheel.The signature will be verified for internal consistency ONLY and printed.Wheel's own unpack/install commands verify the manifest against thesignature and file contents."""wf = WheelFile(wheelfile)sig_name = wf.distinfo_name + '/RECORD.jws'try:sig = json.loads(native(wf.zipfile.open(sig_name).read()))except KeyError:raise WheelError('The wheel is not signed (RECORD.jws not found at end of the archive).')verified = signatures.verify(sig)sys.stderr.write("Signatures are internally consistent.\n")sys.stdout.write(json.dumps(verified, indent=2))sys.stdout.write('\n')def unpack(wheelfile, dest='.'):"""Unpack a wheel.Wheel content will be unpacked to {dest}/{name}-{ver}, where {name}is the package name and {ver} its version.:param wheelfile: The path to the wheel.:param dest: Destination directory (default to current directory)."""wf = WheelFile(wheelfile)namever = wf.parsed_filename.group('namever')destination = os.path.join(dest, namever)sys.stderr.write("Unpacking to: %s\n" % (destination))wf.zipfile.extractall(destination)wf.zipfile.close()def install(requirements, requirements_file=None,wheel_dirs=None, force=False, list_files=False,dry_run=False):"""Install wheels.:param requirements: A list of requirements or wheel files to install.:param requirements_file: A file containing requirements to install.:param wheel_dirs: A list of directories to search for wheels.:param force: Install a wheel file even if it is not compatible.:param list_files: Only list the files to install, don't install them.:param dry_run: Do everything but the actual 
install."""# If no wheel directories specified, use the WHEELPATH environment# variable, or the current directory if that is not set.if not wheel_dirs:wheelpath = os.getenv("WHEELPATH")if wheelpath:wheel_dirs = wheelpath.split(os.pathsep)else:wheel_dirs = [os.path.curdir]# Get a list of all valid wheels in wheel_dirsall_wheels = []for d in wheel_dirs:for w in os.listdir(d):if w.endswith('.whl'):wf = WheelFile(os.path.join(d, w))if wf.compatible:all_wheels.append(wf)# If there is a requirements file, add it to the list of requirementsif requirements_file:# If the file doesn't exist, search for it in wheel_dirs# This allows standard requirements files to be stored with the# wheels.if not os.path.exists(requirements_file):for d in wheel_dirs:name = os.path.join(d, requirements_file)if os.path.exists(name):requirements_file = namebreakwith open(requirements_file) as fd:requirements.extend(fd)to_install = []for req in requirements:if req.endswith('.whl'):# Explicitly specified wheel filenameif os.path.exists(req):wf = WheelFile(req)if wf.compatible or force:to_install.append(wf)else:msg = ("{0} is not compatible with this Python. 
""--force to install anyway.".format(req))raise WheelError(msg)else:# We could search on wheel_dirs, but it's probably OK to# assume the user has made an error.raise WheelError("No such wheel file: {}".format(req))continue# We have a requirement spec# If we don't have pkg_resources, this will raise an exceptionmatches = matches_requirement(req, all_wheels)if not matches:raise WheelError("No match for requirement {}".format(req))to_install.append(max(matches))# We now have a list of wheels to installif list_files:sys.stdout.write("Installing:\n")if dry_run:returnfor wf in to_install:if list_files:sys.stdout.write(" {0}\n".format(wf.filename))continuewf.install(force=force)wf.zipfile.close()def install_scripts(distributions):"""Regenerate the entry_points console_scripts for the named distribution."""try:from setuptools.command import easy_installimport pkg_resourcesexcept ImportError:raise RuntimeError("'wheel install_scripts' needs setuptools.")for dist in distributions:pkg_resources_dist = pkg_resources.get_distribution(dist)install = get_install_command(dist)command = easy_install.easy_install(install.distribution)command.args = ['wheel'] # dummy argumentcommand.finalize_options()command.install_egg_scripts(pkg_resources_dist)def convert(installers, dest_dir, verbose):require_pkgresources('wheel convert')# Only support wheel convert if pkg_resources is presentfrom ..wininst2wheel import bdist_wininst2wheelfrom ..egg2wheel import egg2wheelfor pat in installers:for installer in iglob(pat):if os.path.splitext(installer)[1] == '.egg':conv = egg2wheelelse:conv = bdist_wininst2wheelif verbose:sys.stdout.write("{0}... 
".format(installer))sys.stdout.flush()conv(installer, dest_dir)if verbose:sys.stdout.write("OK\n")def parser():p = argparse.ArgumentParser()s = p.add_subparsers(help="commands")def keygen_f(args):keygen()keygen_parser = s.add_parser('keygen', help='Generate signing key')keygen_parser.set_defaults(func=keygen_f)def sign_f(args):sign(args.wheelfile)sign_parser = s.add_parser('sign', help='Sign wheel')sign_parser.add_argument('wheelfile', help='Wheel file')sign_parser.set_defaults(func=sign_f)def unsign_f(args):unsign(args.wheelfile)unsign_parser = s.add_parser('unsign', help=unsign.__doc__)unsign_parser.add_argument('wheelfile', help='Wheel file')unsign_parser.set_defaults(func=unsign_f)def verify_f(args):verify(args.wheelfile)verify_parser = s.add_parser('verify', help=verify.__doc__)verify_parser.add_argument('wheelfile', help='Wheel file')verify_parser.set_defaults(func=verify_f)def unpack_f(args):unpack(args.wheelfile, args.dest)unpack_parser = s.add_parser('unpack', help='Unpack wheel')unpack_parser.add_argument('--dest', '-d', help='Destination directory',default='.')unpack_parser.add_argument('wheelfile', help='Wheel file')unpack_parser.set_defaults(func=unpack_f)def install_f(args):install(args.requirements, args.requirements_file,args.wheel_dirs, args.force, args.list_files)install_parser = s.add_parser('install', help='Install wheels')install_parser.add_argument('requirements', nargs='*',help='Requirements to install.')install_parser.add_argument('--force', default=False,action='store_true',help='Install incompatible wheel files.')install_parser.add_argument('--wheel-dir', '-d', action='append',dest='wheel_dirs',help='Directories containing wheels.')install_parser.add_argument('--requirements-file', '-r',help="A file containing requirements to ""install.")install_parser.add_argument('--list', '-l', default=False,dest='list_files',action='store_true',help="List wheels which would be installed, ""but don't actually install 
anything.")install_parser.set_defaults(func=install_f)def install_scripts_f(args):install_scripts(args.distributions)install_scripts_parser = s.add_parser('install-scripts', help='Install console_scripts')install_scripts_parser.add_argument('distributions', nargs='*',help='Regenerate console_scripts for these distributions')install_scripts_parser.set_defaults(func=install_scripts_f)def convert_f(args):convert(args.installers, args.dest_dir, args.verbose)convert_parser = s.add_parser('convert', help='Convert egg or wininst to wheel')convert_parser.add_argument('installers', nargs='*', help='Installers to convert')convert_parser.add_argument('--dest-dir', '-d', default=os.path.curdir,help="Directory to store wheels (default %(default)s)")convert_parser.add_argument('--verbose', '-v', action='store_true')convert_parser.set_defaults(func=convert_f)def version_f(args):from .. import __version__sys.stdout.write("wheel %s\n" % __version__)version_parser = s.add_parser('version', help='Print version and exit')version_parser.set_defaults(func=version_f)def help_f(args):p.print_help()help_parser = s.add_parser('help', help='Show this help')help_parser.set_defaults(func=help_f)return pdef main():p = parser()args = p.parse_args()if not hasattr(args, 'func'):p.print_help()else:# XXX on Python 3.3 we get 'args has no func' rather than short help.try:args.func(args)return 0except WheelError as e:sys.stderr.write(e.message + "\n")return 1
"""Store and retrieve wheel signing / verifying keys.

Given a scope (a package name, + meaning "all packages", or - meaning
"no packages"), return a list of verifying keys that are trusted for that
scope.

Given a package name, return a list of (scope, key) suggested keys to sign
that package (only the verifying keys; the private signing key is stored
elsewhere).

Keys here are represented as urlsafe_b64encoded strings with no padding.

Tentative command line interface:

# list trusts
wheel trust
# trust a particular key for all
wheel trust + key
# trust key for beaglevote
wheel trust beaglevote key
# stop trusting a key for all
wheel untrust + key

# generate a key pair
wheel keygen

# import a signing key from a file
wheel import keyfile

# export a signing key
wheel export key
"""

import json
import os.path

from ..util import native, load_config_paths, save_config_path


class WheelKeys(object):
    """Mutable store of signing/verifying keys, persisted as wheel.json.

    All mutators return ``self`` so calls can be chained, e.g.
    ``WheelKeys().load().trust('+', vk).save()``.
    """

    SCHEMA = 1  # version of the on-disk wheel.json layout
    CONFIG_NAME = 'wheel.json'

    def __init__(self):
        # 'signers': keys we sign with; 'verifiers': keys we trust.
        self.data = {'signers': [], 'verifiers': []}

    def load(self):
        """Read the first existing wheel.json from the config path list.

        Missing 'signers'/'verifiers' entries are filled in; a mismatched
        schema raises ValueError. Returns self.
        """
        # XXX JSON is not a great database
        for path in load_config_paths('wheel'):
            conf = os.path.join(native(path), self.CONFIG_NAME)
            if os.path.exists(conf):
                with open(conf, 'r') as infile:
                    self.data = json.load(infile)
                    for x in ('signers', 'verifiers'):
                        if x not in self.data:
                            self.data[x] = []
                    if 'schema' not in self.data:
                        self.data['schema'] = self.SCHEMA
                    elif self.data['schema'] != self.SCHEMA:
                        raise ValueError(
                            "Bad wheel.json version {0}, expected {1}".format(
                                self.data['schema'], self.SCHEMA))
                # Only the first config file found is used.
                break
        return self

    def save(self):
        """Write the current key data back to the user config dir."""
        # Try not to call this a very long time after load()
        path = save_config_path('wheel')
        conf = os.path.join(native(path), self.CONFIG_NAME)
        with open(conf, 'w+') as out:
            json.dump(self.data, out, indent=2)
        return self

    def trust(self, scope, vk):
        """Start trusting a particular key for given scope."""
        self.data['verifiers'].append({'scope': scope, 'vk': vk})
        return self

    def untrust(self, scope, vk):
        """Stop trusting a particular key for given scope."""
        self.data['verifiers'].remove({'scope': scope, 'vk': vk})
        return self

    def trusted(self, scope=None):
        """Return list of [(scope, trusted key), ...] for given scope."""
        trust = [(x['scope'], x['vk']) for x in self.data['verifiers']
                 if x['scope'] in (scope, '+')]
        trust.sort(key=lambda x: x[0])
        trust.reverse()
        return trust

    def signers(self, scope):
        """Return list of signing key(s)."""
        sign = [(x['scope'], x['vk']) for x in self.data['signers']
                if x['scope'] in (scope, '+')]
        sign.sort(key=lambda x: x[0])
        sign.reverse()
        return sign

    def add_signer(self, scope, vk):
        """Remember verifying key vk as being valid for signing in scope."""
        self.data['signers'].append({'scope': scope, 'vk': vk})
        # Return self for chaining, consistent with trust()/untrust().
        return self
"""Public Ed25519 API: keypair generation, signing, and signature opening."""

import os
import warnings
from collections import namedtuple

from . import djbec

__all__ = ['crypto_sign', 'crypto_sign_open', 'crypto_sign_keypair', 'Keypair',
           'PUBLICKEYBYTES', 'SECRETKEYBYTES', 'SIGNATUREBYTES']

PUBLICKEYBYTES = 32
SECRETKEYBYTES = 64
SIGNATUREBYTES = 64

# verifying key, secret key
Keypair = namedtuple('Keypair', ('vk', 'sk'))


def crypto_sign_keypair(seed=None):
    """Return (verifying, secret) key from a given seed, or os.urandom(32)"""
    if seed is None:
        seed = os.urandom(PUBLICKEYBYTES)
    else:
        # Caller-supplied seeds defeat the point of a CSPRNG-backed keypair.
        warnings.warn("ed25519ll should choose random seed.",
                      RuntimeWarning)
    if len(seed) != 32:
        raise ValueError("seed must be 32 random bytes or None.")
    # The secret half is the seed itself; derive the verifying half from it.
    secret = seed
    verifying = djbec.publickey(secret)
    # sk is the 64-byte concatenation seed||vk, as libsodium does.
    return Keypair(verifying, secret + verifying)


def crypto_sign(msg, sk):
    """Return signature+message given message and secret key.
    The signature is the first SIGNATUREBYTES bytes of the return value.
    A copy of msg is in the remainder."""
    if len(sk) != SECRETKEYBYTES:
        raise ValueError("Bad signing key length %d" % len(sk))
    # Split the 64-byte sk back into its seed and verifying halves.
    seed_half = sk[:PUBLICKEYBYTES]
    verify_half = sk[PUBLICKEYBYTES:]
    return djbec.signature(msg, seed_half, verify_half) + msg


def crypto_sign_open(signed, vk):
    """Return message given signature+message and the verifying key."""
    if len(vk) != PUBLICKEYBYTES:
        raise ValueError("Bad verifying key length %d" % len(vk))
    ok = djbec.checkvalid(signed[:SIGNATUREBYTES],
                          signed[SIGNATUREBYTES:], vk)
    if not ok:
        raise ValueError("rc != True", ok)
    return signed[SIGNATUREBYTES:]
# Ed25519 digital signatures
# Based on http://ed25519.cr.yp.to/python/ed25519.py
# See also http://ed25519.cr.yp.to/software.html
# Adapted by Ron Garret
# Sped up considerably using coordinate transforms found on:
# http://www.hyperelliptic.org/EFD/g1p/auto-twisted-extended-1.html
# Specifically add-2008-hwcd-4 and dbl-2008-hwcd

import hashlib
import random

try:  # pragma nocover
    # Probe for Python 2 ('unicode' exists) vs Python 3 (NameError).
    unicode
    PY3 = False

    def asbytes(b):
        """Convert array of integers to byte string"""
        return ''.join(chr(x) for x in b)

    def joinbytes(b):
        """Convert array of bytes to byte string"""
        return ''.join(b)

    def bit(h, i):
        """Return i'th bit of bytestring h"""
        return (ord(h[i // 8]) >> (i % 8)) & 1

except NameError:  # pragma nocover
    PY3 = True
    asbytes = bytes
    joinbytes = bytes

    def bit(h, i):
        # On Python 3 indexing bytes already yields ints.
        return (h[i // 8] >> (i % 8)) & 1

b = 256                                    # bit length of the encoding
q = 2 ** 255 - 19                          # field prime
l = 2 ** 252 + 27742317777372353535851937790883648493  # group order


def H(m):
    """SHA-512 digest, the hash Ed25519 is defined over."""
    return hashlib.sha512(m).digest()


def expmod(b, e, m):
    """Return (b ** e) % m.

    Kept for API compatibility; the original recursive square-and-multiply
    (O(log e) Python recursion depth) is replaced by the built-in
    three-argument pow(), which does the same thing in C.
    (Only called here with m == q, where the two agree for e == 0 as well.)
    """
    return pow(b, e, m)


def inv(x):
    """Multiplicative inverse mod q via Fermat's little theorem."""
    return pow(x, q - 2, q)


d = -121665 * inv(121666)
I = expmod(2, (q - 1) // 4, q)


def xrecover(y):
    """Recover the even x coordinate for a given y on the curve."""
    xx = (y * y - 1) * inv(d * y * y + 1)
    x = expmod(xx, (q + 3) // 8, q)
    if (x * x - xx) % q != 0:
        x = (x * I) % q
    if x % 2 != 0:
        x = q - x
    return x


By = 4 * inv(5)
Bx = xrecover(By)
B = [Bx % q, By % q]  # the standard base point


# Faster version of affine edwards()/scalarmult() based on extended
# twisted coordinates:
# http://www.hyperelliptic.org/EFD/g1p/auto-twisted-extended-1.html
def xpt_add(pt1, pt2):
    """Add two points in extended (X, Y, Z, T) coordinates."""
    (X1, Y1, Z1, T1) = pt1
    (X2, Y2, Z2, T2) = pt2
    # Locals A, B, H intentionally shadow the module names inside this scope.
    A = ((Y1 - X1) * (Y2 + X2)) % q
    B = ((Y1 + X1) * (Y2 - X2)) % q
    C = (Z1 * 2 * T2) % q
    D = (T1 * 2 * Z2) % q
    E = (D + C) % q
    F = (B - A) % q
    G = (B + A) % q
    H = (D - C) % q
    X3 = (E * F) % q
    Y3 = (G * H) % q
    Z3 = (F * G) % q
    T3 = (E * H) % q
    return (X3, Y3, Z3, T3)


def xpt_double(pt):
    """Double a point in extended coordinates (dbl-2008-hwcd)."""
    (X1, Y1, Z1, _) = pt
    A = (X1 * X1)
    B = (Y1 * Y1)
    C = (2 * Z1 * Z1)
    D = (-A) % q
    J = (X1 + Y1) % q
    E = (J * J - A - B) % q
    G = (D + B) % q
    F = (G - C) % q
    H = (D - B) % q
    X3 = (E * F) % q
    Y3 = (G * H) % q
    Z3 = (F * G) % q
    T3 = (E * H) % q
    return X3, Y3, Z3, T3


def pt_xform(pt):
    """Affine (x, y) -> extended (X, Y, Z, T)."""
    (x, y) = pt
    return x, y, 1, (x * y) % q


def pt_unxform(pt):
    """Extended (X, Y, Z, T) -> affine (x, y)."""
    (x, y, z, _) = pt
    return (x * inv(z)) % q, (y * inv(z)) % q


def xpt_mult(pt, n):
    """Scalar multiplication by double-and-add in extended coordinates."""
    if n == 0:
        return pt_xform((0, 1))
    _ = xpt_double(xpt_mult(pt, n >> 1))
    return xpt_add(_, pt) if n & 1 else _


def scalarmult(pt, e):
    """Multiply affine point pt by scalar e, returning an affine point."""
    return pt_unxform(xpt_mult(pt_xform(pt), e))


def encodeint(y):
    """Encode integer y as b//8 little-endian bytes."""
    bits = [(y >> i) & 1 for i in range(b)]
    e = [(sum([bits[i * 8 + j] << j for j in range(8)]))
         for i in range(b // 8)]
    return asbytes(e)


def encodepoint(P):
    """Encode point P as its y coordinate with the sign of x in the top bit."""
    x = P[0]
    y = P[1]
    bits = [(y >> i) & 1 for i in range(b - 1)] + [x & 1]
    e = [(sum([bits[i * 8 + j] << j for j in range(8)]))
         for i in range(b // 8)]
    return asbytes(e)


def publickey(sk):
    """Derive the 32-byte public key from the 32-byte secret seed sk."""
    h = H(sk)
    a = 2 ** (b - 2) + sum(2 ** i * bit(h, i) for i in range(3, b - 2))
    A = scalarmult(B, a)
    return encodepoint(A)


def Hint(m):
    """Hash m and interpret the 64-byte digest as a little-endian integer."""
    h = H(m)
    return sum(2 ** i * bit(h, i) for i in range(2 * b))


def signature(m, sk, pk):
    """Return the 64-byte Ed25519 signature of message m."""
    h = H(sk)
    a = 2 ** (b - 2) + sum(2 ** i * bit(h, i) for i in range(3, b - 2))
    # Second half of H(sk) is the deterministic-nonce prefix.
    inter = joinbytes([h[i] for i in range(b // 8, b // 4)])
    r = Hint(inter + m)
    R = scalarmult(B, r)
    S = (r + Hint(encodepoint(R) + pk + m) * a) % l
    return encodepoint(R) + encodeint(S)


def isoncurve(P):
    """True if affine point P satisfies the twisted Edwards equation."""
    x = P[0]
    y = P[1]
    return (-x * x + y * y - 1 - d * x * x * y * y) % q == 0


def decodeint(s):
    """Decode a little-endian byte string into an integer."""
    return sum(2 ** i * bit(s, i) for i in range(0, b))


def decodepoint(s):
    """Decode a 32-byte encoding into an affine point, validating it."""
    y = sum(2 ** i * bit(s, i) for i in range(0, b - 1))
    x = xrecover(y)
    if x & 1 != bit(s, b - 1):
        x = q - x
    P = [x, y]
    if not isoncurve(P):
        raise Exception("decoding point that is not on curve")
    return P


def checkvalid(s, m, pk):
    """Return True iff s is a valid signature of m under public key pk."""
    if len(s) != b // 4:
        raise Exception("signature length is wrong")
    if len(pk) != b // 8:
        raise Exception("public-key length is wrong")
    R = decodepoint(s[0:b // 8])
    A = decodepoint(pk)
    S = decodeint(s[b // 8:b // 4])
    h = Hint(encodepoint(R) + pk + m)
    v1 = scalarmult(B, S)
    # v2 = edwards(R, scalarmult(A, h)), done in extended coordinates:
    v2 = pt_unxform(xpt_add(pt_xform(R), pt_xform(scalarmult(A, h))))
    return v1 == v2


###########################################################
# Curve25519 reference implementation by Matthew Dempsky, from:
# http://cr.yp.to/highspeed/naclcrypto-20090310.pdf

# P = 2 ** 255 - 19
P = q
A = 486662


def add(n, m, d):
    """Differential addition on the Montgomery curve (x/z coordinates)."""
    (xn, zn) = n
    (xm, zm) = m
    (xd, zd) = d
    x = 4 * (xm * xn - zm * zn) ** 2 * zd
    z = 4 * (xm * zn - zm * xn) ** 2 * xd
    return (x % P, z % P)


def double(n):
    """Point doubling on the Montgomery curve (x/z coordinates)."""
    (xn, zn) = n
    x = (xn ** 2 - zn ** 2) ** 2
    z = 4 * xn * zn * (xn ** 2 + A * xn * zn + zn ** 2)
    return (x % P, z % P)


def curve25519(n, base=9):
    """Montgomery-ladder scalar multiplication: x(n * base)."""
    one = (base, 1)
    two = double(one)

    # f(m) evaluates to a tuple
    # containing the mth multiple and the
    # (m+1)th multiple of base.
    def f(m):
        if m == 1:
            return (one, two)
        (pm, pm1) = f(m // 2)
        if m & 1:
            return (add(pm, pm1, one), double(pm1))
        return (double(pm), add(pm, pm1, one))

    ((x, z), _) = f(n)
    return (x * inv(z)) % P


def genkey(n=0):
    """Clamp n (or a random value) into a valid Curve25519 scalar.

    NOTE: uses the non-cryptographic `random` module when n is 0; fine for
    the self-tests below, not for real key generation.
    """
    n = n or random.randint(0, P)
    n &= ~7
    n &= ~(128 << 8 * 31)
    n |= 64 << 8 * 31
    return n


def dsa_test():
    """Self-test: sign a random message and verify the signature."""
    import os
    msg = str(random.randint(q, q + q)).encode('utf-8')
    sk = os.urandom(32)
    pk = publickey(sk)
    sig = signature(msg, sk, pk)
    return checkvalid(sig, msg, pk)


def dh_test():
    """Self-test: Diffie-Hellman shared secrets must agree."""
    sk1 = genkey()
    sk2 = genkey()
    return curve25519(sk1, curve25519(sk2)) == curve25519(sk2, curve25519(sk1))
"""Create and verify jws-js format Ed25519 signatures."""

import json

from ..util import urlsafe_b64decode, urlsafe_b64encode, native, binary

__all__ = ['sign', 'verify']

# Lazily-bound Ed25519 backend module; set by get_ed25519ll().
ed25519ll = None

ALG = "Ed25519"


def get_ed25519ll():
    """Lazy import-and-test of ed25519 module"""
    global ed25519ll
    if not ed25519ll:
        try:
            import ed25519ll  # fast (thousands / s)
        except (ImportError, OSError):  # pragma nocover
            from . import ed25519py as ed25519ll  # pure Python (hundreds / s)
        # Round-trip self-test so a broken backend fails loudly at first use.
        test()
    return ed25519ll


def sign(payload, keypair):
    """Return a JWS-JS format signature given a JSON-serializable payload and
    an Ed25519 keypair."""
    get_ed25519ll()
    # Build the protected header carrying the verifying key.
    header = {
        "alg": ALG,
        "jwk": {
            "kty": ALG,  # alg -> kty in jwk-08.
            "vk": native(urlsafe_b64encode(keypair.vk))
        }
    }
    # sort_keys makes the serialization (and thus the signature) deterministic.
    encoded_header = urlsafe_b64encode(binary(json.dumps(header, sort_keys=True)))
    encoded_payload = urlsafe_b64encode(binary(json.dumps(payload, sort_keys=True)))
    secured_input = b".".join((encoded_header, encoded_payload))
    sig_msg = ed25519ll.crypto_sign(secured_input, keypair.sk)
    # crypto_sign returns signature||message; keep only the signature bytes.
    signature = sig_msg[:ed25519ll.SIGNATUREBYTES]
    encoded_signature = urlsafe_b64encode(signature)

    return {"recipients":
            [{"header": native(encoded_header),
              "signature": native(encoded_signature)}],
            "payload": native(encoded_payload)}


def assertTrue(condition, message=""):
    # Small helper: raise ValueError (the contract of verify()) on failure.
    if not condition:
        raise ValueError(message)


def verify(jwsjs):
    """Return (decoded headers, payload) if all signatures in jwsjs are
    consistent, else raise ValueError.

    Caller must decide whether the keys are actually trusted."""
    get_ed25519ll()
    # XXX forbid duplicate keys in JSON input using object_pairs_hook (2.7+)
    recipients = jwsjs["recipients"]
    encoded_payload = binary(jwsjs["payload"])
    headers = []
    for recipient in recipients:
        assertTrue(len(recipient) == 2, "Unknown recipient key {0}".format(recipient))
        h = binary(recipient["header"])
        s = binary(recipient["signature"])
        header = json.loads(native(urlsafe_b64decode(h)))
        assertTrue(header["alg"] == ALG,
                   "Unexpected algorithm {0}".format(header["alg"]))
        if "alg" in header["jwk"] and "kty" not in header["jwk"]:
            header["jwk"]["kty"] = header["jwk"]["alg"]  # b/w for JWK < -08
        assertTrue(header["jwk"]["kty"] == ALG,  # true for Ed25519
                   "Unexpected key type {0}".format(header["jwk"]["kty"]))
        vk = urlsafe_b64decode(binary(header["jwk"]["vk"]))
        secured_input = b".".join((h, encoded_payload))
        sig = urlsafe_b64decode(s)
        # crypto_sign_open expects signature||message and raises on mismatch.
        sig_msg = sig + secured_input
        verified_input = native(ed25519ll.crypto_sign_open(sig_msg, vk))
        verified_header, verified_payload = verified_input.split('.')
        verified_header = binary(verified_header)
        decoded_header = native(urlsafe_b64decode(verified_header))
        headers.append(json.loads(decoded_header))

    verified_payload = binary(verified_payload)

    # only return header, payload that have passed through the crypto library.
    payload = json.loads(native(urlsafe_b64decode(verified_payload)))

    return headers, payload


def test():
    # Sign a payload, verify it, then check a tampered payload is rejected.
    kp = ed25519ll.crypto_sign_keypair()
    payload = {'test': 'onstartup'}
    jwsjs = json.loads(json.dumps(sign(payload, kp)))
    verify(jwsjs)
    jwsjs['payload'] += 'x'
    try:
        verify(jwsjs)
    except ValueError:
        pass
    else:  # pragma no cover
        raise RuntimeError("No error from bad wheel.signatures payload.")
"""Tools for reading and writing PKG-INFO / METADATA without caring
about the encoding."""

from email.parser import Parser

try:
    # Python 2 has the 'unicode' builtin; Python 3 raises NameError.
    unicode
    _PY3 = False
except NameError:
    _PY3 = True

if _PY3:
    from email.generator import BytesGenerator

    def read_pkg_info_bytes(bytestr):
        """Parse a PKG-INFO/METADATA byte string into an email Message."""
        text = bytestr.decode(encoding="ascii", errors="surrogateescape")
        return Parser().parsestr(text)

    def read_pkg_info(path):
        """Parse the PKG-INFO/METADATA file at *path* into a Message."""
        with open(path, "r",
                  encoding="ascii",
                  errors="surrogateescape") as fh:
            return Parser().parse(fh)

    def write_pkg_info(path, message):
        """Write *message* to *path* without From-mangling or header folding."""
        with open(path, "wb") as fh:
            BytesGenerator(fh, mangle_from_=False, maxheaderlen=0).flatten(message)
else:
    from email.generator import Generator

    def read_pkg_info_bytes(bytestr):
        """Parse a PKG-INFO/METADATA byte string into an email Message."""
        return Parser().parsestr(bytestr)

    def read_pkg_info(path):
        """Parse the PKG-INFO/METADATA file at *path* into a Message."""
        with open(path, "r") as fh:
            return Parser().parse(fh)

    def write_pkg_info(path, message):
        """Write *message* to *path* without From-mangling or header folding."""
        with open(path, 'w') as fh:
            Generator(fh, mangle_from_=False, maxheaderlen=0).flatten(message)
"""Generate and work with PEP 425 Compatibility Tags."""

import distutils.util
import platform
import sys
import sysconfig
import warnings


def get_config_var(var):
    """sysconfig.get_config_var, downgrading IOError to a warning + None."""
    try:
        return sysconfig.get_config_var(var)
    except IOError as e:  # pip Issue #1074
        warnings.warn("{0}".format(e), RuntimeWarning)
        return None


def get_abbr_impl():
    """Return abbreviated implementation name."""
    impl = platform.python_implementation()
    if impl == 'PyPy':
        return 'pp'
    elif impl == 'Jython':
        return 'jy'
    elif impl == 'IronPython':
        return 'ip'
    elif impl == 'CPython':
        return 'cp'

    raise LookupError('Unknown Python implementation: ' + impl)


def get_impl_ver():
    """Return implementation version."""
    impl_ver = get_config_var("py_version_nodot")
    # PyPy reports CPython's version here, so always recompute for it.
    if not impl_ver or get_abbr_impl() == 'pp':
        impl_ver = ''.join(map(str, get_impl_version_info()))
    return impl_ver


def get_impl_version_info():
    """Return sys.version_info-like tuple for use in decrementing the minor
    version."""
    if get_abbr_impl() == 'pp':
        # as per https://github.com/pypa/pip/issues/2882
        return (sys.version_info[0], sys.pypy_version_info.major,
                sys.pypy_version_info.minor)
    else:
        return sys.version_info[0], sys.version_info[1]


def get_flag(var, fallback, expected=True, warn=True):
    """Use a fallback method for determining SOABI flags if the needed config
    var is unset or unavailable."""
    val = get_config_var(var)
    if val is None:
        if warn:
            warnings.warn("Config variable '{0}' is unset, Python ABI tag may "
                          "be incorrect".format(var), RuntimeWarning, 2)
        # fallback is a zero-arg callable computing the flag another way.
        return fallback()
    return val == expected


def get_abi_tag():
    """Return the ABI tag based on SOABI (if available) or emulate SOABI
    (CPython 2, PyPy)."""
    soabi = get_config_var('SOABI')
    impl = get_abbr_impl()
    if not soabi and impl in ('cp', 'pp') and hasattr(sys, 'maxunicode'):
        # No SOABI: reconstruct the debug/pymalloc/wide-unicode suffixes.
        d = ''
        m = ''
        u = ''
        if get_flag('Py_DEBUG',
                    lambda: hasattr(sys, 'gettotalrefcount'),
                    warn=(impl == 'cp')):
            d = 'd'
        if get_flag('WITH_PYMALLOC',
                    lambda: impl == 'cp',
                    warn=(impl == 'cp')):
            m = 'm'
        # 'u' (wide unicode) only applies before Python 3.3 (PEP 393).
        if get_flag('Py_UNICODE_SIZE',
                    lambda: sys.maxunicode == 0x10ffff,
                    expected=4,
                    warn=(impl == 'cp' and
                          sys.version_info < (3, 3))) \
                and sys.version_info < (3, 3):
            u = 'u'
        abi = '%s%s%s%s%s' % (impl, get_impl_ver(), d, m, u)
    elif soabi and soabi.startswith('cpython-'):
        abi = 'cp' + soabi.split('-')[1]
    elif soabi:
        abi = soabi.replace('.', '_').replace('-', '_')
    else:
        abi = None
    return abi


def get_platform():
    """Return our platform name 'win32', 'linux_x86_64'"""
    # XXX remove distutils dependency
    result = distutils.util.get_platform().replace('.', '_').replace('-', '_')
    if result == "linux_x86_64" and sys.maxsize == 2147483647:
        # pip pull request #3497: 32-bit interpreter on a 64-bit kernel.
        result = "linux_i686"
    return result


def get_supported(versions=None, supplied_platform=None):
    """Return a list of supported tags for each version specified in
    `versions`.

    :param versions: a list of string versions, of the form ["33", "32"],
        or None. The first version will be assumed to support our ABI.
    """
    supported = []

    # Versions must be given with respect to the preference
    if versions is None:
        versions = []
        version_info = get_impl_version_info()
        major = version_info[:-1]
        # Support all previous minor Python versions.
        for minor in range(version_info[-1], -1, -1):
            versions.append(''.join(map(str, major + (minor,))))

    impl = get_abbr_impl()

    abis = []

    abi = get_abi_tag()
    if abi:
        # Our own ABI goes first in preference order.
        abis[0:0] = [abi]

    # Collect stable-ABI (abi3) suffixes advertised by the interpreter.
    abi3s = set()
    import imp
    for suffix in imp.get_suffixes():
        if suffix[0].startswith('.abi'):
            abi3s.add(suffix[0].split('.', 2)[1])

    abis.extend(sorted(list(abi3s)))

    abis.append('none')

    platforms = []
    if supplied_platform:
        platforms.append(supplied_platform)
    platforms.append(get_platform())

    # Current version, current API (built specifically for our Python):
    for abi in abis:
        for arch in platforms:
            supported.append(('%s%s' % (impl, versions[0]), abi, arch))

    # abi3 modules compatible with older version of Python
    for version in versions[1:]:
        # abi3 was introduced in Python 3.2
        if version in ('31', '30'):
            break
        for abi in abi3s:  # empty set if not Python 3
            for arch in platforms:
                supported.append(("%s%s" % (impl, version), abi, arch))

    # No abi / arch, but requires our implementation:
    for i, version in enumerate(versions):
        supported.append(('%s%s' % (impl, version), 'none', 'any'))
        if i == 0:
            # Tagged specifically as being cross-version compatible
            # (with just the major version specified)
            supported.append(('%s%s' % (impl, versions[0][0]), 'none', 'any'))

    # Major Python version + platform; e.g. binaries not using the Python API
    # NOTE(review): `arch` here is whatever value was left over from the
    # loops above (the last platform) — confirm this reuse is intentional.
    supported.append(('py%s' % (versions[0][0]), 'none', arch))

    # No abi / arch, generic Python
    for i, version in enumerate(versions):
        supported.append(('py%s' % (version,), 'none', 'any'))
        if i == 0:
            supported.append(('py%s' % (version[0]), 'none', 'any'))

    return supported
"""Installation paths.

Map the .data/ subdirectory names to install paths.
"""

import distutils.command.install as install
import distutils.dist as dist
import os.path
import sys


def get_install_command(name):
    """Return a finalized distutils install command for a dist called *name*."""
    # late binding due to potential monkeypatching
    distribution = dist.Distribution({'name': name})
    cmd = install.install(distribution)
    cmd.finalize_options()
    return cmd


def get_install_paths(name):
    """Return the (distutils) install paths for the named dist.

    A dict with ('purelib', 'platlib', 'headers', 'scripts', 'data') keys.
    """
    cmd = get_install_command(name)
    paths = {key: getattr(cmd, 'install_' + key)
             for key in install.SCHEME_KEYS}

    # pip uses a similar path as an alternative to the system's (read-only)
    # include directory:
    if hasattr(sys, 'real_prefix'):  # virtualenv
        paths['headers'] = os.path.join(sys.prefix,
                                        'include',
                                        'site',
                                        'python' + sys.version[:3],
                                        name)

    return paths
"""Tools for converting old- to new-style metadata."""import email.parserimport os.pathimport reimport textwrapfrom collections import namedtuple, OrderedDictimport pkg_resourcesfrom . import __version__ as wheel_versionfrom .pkginfo import read_pkg_infofrom .util import OrderedDefaultDictMETADATA_VERSION = "2.0"PLURAL_FIELDS = {"classifier": "classifiers","provides_dist": "provides","provides_extra": "extras"}SKIP_FIELDS = set()CONTACT_FIELDS = (({"email": "author_email", "name": "author"},"author"),({"email": "maintainer_email", "name": "maintainer"},"maintainer"))# commonly filled out as "UNKNOWN" by distutils:UNKNOWN_FIELDS = {"author", "author_email", "platform", "home_page", "license"}# Wheel itself is probably the only program that uses non-extras markers# in METADATA/PKG-INFO. Support its syntax with the extra at the end only.EXTRA_RE = re.compile("""^(?P<package>.*?)(;\s*(?P<condition>.*?)(extra == '(?P<extra>.*?)')?)$""")KEYWORDS_RE = re.compile("[\0-,]+")MayRequiresKey = namedtuple('MayRequiresKey', ('condition', 'extra'))def unique(iterable):"""Yield unique values in iterable, preserving order."""seen = set()for value in iterable:if value not in seen:seen.add(value)yield valuedef handle_requires(metadata, pkg_info, key):"""Place the runtime requirements from pkg_info into metadata."""may_requires = OrderedDefaultDict(list)for value in sorted(pkg_info.get_all(key)):extra_match = EXTRA_RE.search(value)if extra_match:groupdict = extra_match.groupdict()condition = groupdict['condition']extra = groupdict['extra']package = groupdict['package']if condition.endswith(' and '):condition = condition[:-5]else:condition, extra = None, Nonepackage = valuekey = MayRequiresKey(condition, extra)may_requires[key].append(package)if may_requires:metadata['run_requires'] = []def sort_key(item):# Both condition and extra could be None, which can't be compared# against strings in Python 3.key, value = itemif key.condition is None:return ''return key.conditionfor key, value in 
sorted(may_requires.items(), key=sort_key):
        # (continuation of handle_requires — the `for key, value in` prefix
        # lies before this chunk)
        may_requirement = OrderedDict((('requires', value),))
        if key.extra:
            may_requirement['extra'] = key.extra
        if key.condition:
            may_requirement['environment'] = key.condition
        metadata['run_requires'].append(may_requirement)

    if 'extras' not in metadata:
        metadata['extras'] = []
    metadata['extras'].extend([key.extra for key in may_requires.keys() if key.extra])


def pkginfo_to_dict(path, distribution=None):
    """Convert PKG-INFO to a prototype Metadata 2.0 (PEP 426) dict.

    The description is included under the key ['description'] rather than
    being written to a separate file.

    path: path to PKG-INFO file
    distribution: optional distutils Distribution()
    """
    metadata = OrderedDefaultDict(
        lambda: OrderedDefaultDict(lambda: OrderedDefaultDict(OrderedDict)))
    metadata["generator"] = "bdist_wheel (" + wheel_version + ")"
    try:
        # On Python 2 `unicode` exists and read_pkg_info() is used; on
        # Python 3 the bare name raises NameError and we parse manually.
        unicode
        pkg_info = read_pkg_info(path)
    except NameError:
        with open(path, 'rb') as pkg_info_file:
            pkg_info = email.parser.Parser().parsestr(
                pkg_info_file.read().decode('utf-8'))
    description = None

    if pkg_info['Summary']:
        metadata['summary'] = pkginfo_unicode(pkg_info, 'Summary')
        del pkg_info['Summary']

    if pkg_info['Description']:
        description = dedent_description(pkg_info)
        del pkg_info['Description']
    else:
        # Fall back to the message payload as the long description.
        payload = pkg_info.get_payload()
        if isinstance(payload, bytes):
            # Avoid a Python 2 Unicode error.
            # We still suffer ? glyphs on Python 3.
            payload = payload.decode('utf-8')
        if payload:
            description = payload

    if description:
        pkg_info['description'] = description

    # Walk every unique (case-folded) header and map it to the PEP 426 dict.
    for key in sorted(unique(k.lower() for k in pkg_info.keys())):
        low_key = key.replace('-', '_')

        if low_key in SKIP_FIELDS:
            continue

        if low_key in UNKNOWN_FIELDS and pkg_info.get(key) == 'UNKNOWN':
            continue

        if low_key in sorted(PLURAL_FIELDS):
            metadata[PLURAL_FIELDS[low_key]] = pkg_info.get_all(key)
        elif low_key == "requires_dist":
            handle_requires(metadata, pkg_info, key)
        elif low_key == 'provides_extra':
            if 'extras' not in metadata:
                metadata['extras'] = []
            metadata['extras'].extend(pkg_info.get_all(key))
        elif low_key == 'home_page':
            metadata['extensions']['python.details']['project_urls'] = {
                'Home': pkg_info[key]}
        elif low_key == 'keywords':
            metadata['keywords'] = KEYWORDS_RE.split(pkg_info[key])
        else:
            metadata[low_key] = pkg_info[key]

    metadata['metadata_version'] = METADATA_VERSION

    if 'extras' in metadata:
        metadata['extras'] = sorted(set(metadata['extras']))

    # include more information if distribution is available
    if distribution:
        for requires, attr in (('test_requires', 'tests_require'),):
            try:
                requirements = getattr(distribution, attr)
                if isinstance(requirements, list):
                    new_requirements = sorted(convert_requirements(requirements))
                    metadata[requires] = [{'requires': new_requirements}]
            except AttributeError:
                pass

    # handle contacts: pop flat author/maintainer fields into structured
    # contact records under extensions['python.details'].
    contacts = []
    for contact_type, role in CONTACT_FIELDS:
        contact = OrderedDict()
        for key in sorted(contact_type):
            if contact_type[key] in metadata:
                contact[key] = metadata.pop(contact_type[key])
        if contact:
            contact['role'] = role
            contacts.append(contact)
    if contacts:
        metadata['extensions']['python.details']['contacts'] = contacts

    # convert entry points to exports
    try:
        with open(os.path.join(os.path.dirname(path),
                               "entry_points.txt"), "r") as ep_file:
            ep_map = pkg_resources.EntryPoint.parse_map(ep_file.read())
        exports = OrderedDict()
        for group, items in sorted(ep_map.items()):
            exports[group] = OrderedDict()
            for item in sorted(map(str, items.values())):
                name, export = item.split(' = ', 1)
                exports[group][name] = export
        if exports:
            metadata['extensions']['python.exports'] = exports
    except IOError:
        # No entry_points.txt next to PKG-INFO — nothing to export.
        pass

    # copy console_scripts entry points to commands
    if 'python.exports' in metadata['extensions']:
        for (ep_script, wrap_script) in (('console_scripts', 'wrap_console'),
                                         ('gui_scripts', 'wrap_gui')):
            if ep_script in metadata['extensions']['python.exports']:
                metadata['extensions']['python.commands'][wrap_script] = \
                    metadata['extensions']['python.exports'][ep_script]

    return metadata


def requires_to_requires_dist(requirement):
    """Compose the version predicates for requirement in PEP 345 fashion."""
    requires_dist = []
    for op, ver in requirement.specs:
        requires_dist.append(op + ver)
    if not requires_dist:
        return ''
    return " (%s)" % ','.join(sorted(requires_dist))


def convert_requirements(requirements):
    """Yield Requires-Dist: strings for parsed requirements strings."""
    for req in requirements:
        parsed_requirement = pkg_resources.Requirement.parse(req)
        spec = requires_to_requires_dist(parsed_requirement)
        extras = ",".join(parsed_requirement.extras)
        if extras:
            extras = "[%s]" % extras
        yield (parsed_requirement.project_name + extras + spec)


def generate_requirements(extras_require):
    """Convert requirements from a setup()-style dictionary to
    ('Requires-Dist', 'requirement') and ('Provides-Extra', 'extra') tuples.

    extras_require is a dictionary of {extra: [requirements]} as passed to
    setup(), using the empty extra {'': [requirements]} to hold
    install_requires.
    """
    for extra, depends in extras_require.items():
        condition = ''
        if extra and ':' in extra:  # setuptools extra:condition syntax
            extra, condition = extra.split(':', 1)
        extra = pkg_resources.safe_extra(extra)
        if extra:
            yield ('Provides-Extra', extra)
            if condition:
                condition += " and "
            condition += "extra == '%s'" % extra
        if condition:
            condition = '; ' + condition
        for new_req in convert_requirements(depends):
            yield ('Requires-Dist', new_req + condition)


def pkginfo_to_metadata(egg_info_path, pkginfo_path):
    """Convert .egg-info directory with PKG-INFO to the Metadata 1.3 aka
    old-draft Metadata 2.0 format."""
    pkg_info = read_pkg_info(pkginfo_path)
    pkg_info.replace_header('Metadata-Version', '2.0')
    requires_path = os.path.join(egg_info_path, 'requires.txt')
    if os.path.exists(requires_path):
        with open(requires_path) as requires_file:
            requires = requires_file.read()
        # Sections of requires.txt map to extras; sort with the unnamed
        # (install_requires) section first.
        for extra, reqs in sorted(pkg_resources.split_sections(requires),
                                  key=lambda x: x[0] or ''):
            for item in generate_requirements({extra: reqs}):
                pkg_info[item[0]] = item[1]

    description = pkg_info['Description']
    if description:
        pkg_info.set_payload(dedent_description(pkg_info))
        del pkg_info['Description']

    return pkg_info


def pkginfo_unicode(pkg_info, field):
    """Hack to coax Unicode out of an email Message() - Python 3.3+"""
    text = pkg_info[field]
    field = field.lower()
    if not isinstance(text, str):
        if not hasattr(pkg_info, 'raw_items'):  # Python 3.2
            return str(text)
        for item in pkg_info.raw_items():
            if item[0].lower() == field:
                # Undo the email module's surrogateescape round-trip.
                text = item[1].encode('ascii', 'surrogateescape') \
                    .decode('utf-8')
                break
    return text


def dedent_description(pkg_info):
    """Dedent and convert pkg_info['Description'] to Unicode."""
    description = pkg_info['Description']

    # Python 3 Unicode handling, sorta.
    surrogates = False
    if not isinstance(description, str):
        surrogates = True
        description = pkginfo_unicode(pkg_info, 'Description')

    description_lines = description.splitlines()
    description_dedent = '\n'.join(
        # if the first line of long_description is blank,
        # the first line here will be indented.
        (description_lines[0].lstrip(),
         textwrap.dedent('\n'.join(description_lines[1:])),
         '\n'))

    if surrogates:
        # Re-encode so surrogate escapes survive being written back out.
        description_dedent = description_dedent \
            .encode("utf8") \
            .decode("ascii", "surrogateescape")

    return description_dedent


if __name__ == "__main__":
    import sys
    import pprint
    pprint.pprint(pkginfo_to_dict(sys.argv[1]))
"""Operations on existing wheel files, including basic installation."""# XXX see patched pip to installimport csvimport hashlibimport os.pathimport reimport shutilimport sysimport warningsimport zipfilefrom . import signaturesfrom .decorator import reifyfrom .paths import get_install_pathsfrom .pep425tags import get_supportedfrom .pkginfo import read_pkg_info_bytesfrom .util import (urlsafe_b64encode, from_json, urlsafe_b64decode, native, binary, HashingFile,open_for_csv)try:_big_number = sys.maxsizeexcept NameError:_big_number = sys.maxint# The next major version after this version of the 'wheel' tool:VERSION_TOO_HIGH = (1, 0)# Non-greedy matching of an optional build number may be too clever (more# invalid wheel filenames will match). Separate regex for .dist-info?WHEEL_INFO_RE = re.compile(r"""^(?P<namever>(?P<name>.+?)(-(?P<ver>\d.+?))?)((-(?P<build>\d.*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?)\.whl|\.dist-info)$""",re.VERBOSE).matchdef parse_version(version):"""Use parse_version from pkg_resources or distutils as available."""global parse_versiontry:from pkg_resources import parse_versionexcept ImportError:from distutils.version import LooseVersion as parse_versionreturn parse_version(version)class BadWheelFile(ValueError):passclass WheelFile(object):"""Parse wheel-specific attributes from a wheel (.whl) file and offerbasic installation and verification support.WheelFile can be used to simply parse a wheel filename by avoiding themethods that require the actual file contents."""WHEEL_INFO = "WHEEL"RECORD = "RECORD"def __init__(self,filename,fp=None,append=False,context=get_supported):""":param fp: A seekable file-like object or None to open(filename).:param append: Open archive in append mode.:param context: Function returning list of supported tags. 
Wheelsmust have the same context to be sortable."""self.filename = filenameself.fp = fpself.append = appendself.context = contextbasename = os.path.basename(filename)self.parsed_filename = WHEEL_INFO_RE(basename)if not basename.endswith('.whl') or self.parsed_filename is None:raise BadWheelFile("Bad filename '%s'" % filename)def __repr__(self):return self.filename@propertydef distinfo_name(self):return "%s.dist-info" % self.parsed_filename.group('namever')@propertydef datadir_name(self):return "%s.data" % self.parsed_filename.group('namever')@propertydef record_name(self):return "%s/%s" % (self.distinfo_name, self.RECORD)@propertydef wheelinfo_name(self):return "%s/%s" % (self.distinfo_name, self.WHEEL_INFO)@propertydef tags(self):"""A wheel file is compatible with the Cartesian product of theperiod-delimited tags in its filename.To choose a wheel file among several candidates having the samedistribution version 'ver', an installer ranks each triple of(pyver, abi, plat) that its Python installation can run, sortingthe wheels by the best-ranked tag it supports and then by theirarity which is just len(list(compatibility_tags))."""tags = self.parsed_filename.groupdict()for pyver in tags['pyver'].split('.'):for abi in tags['abi'].split('.'):for plat in tags['plat'].split('.'):yield (pyver, abi, plat)compatibility_tags = tags@propertydef arity(self):"""The number of compatibility tags the wheel declares."""return len(list(self.compatibility_tags))@propertydef rank(self):"""Lowest index of any of this wheel's tags in self.context(), and thearity e.g. (0, 1)"""return self.compatibility_rank(self.context())@propertydef compatible(self):return self.rank[0] != _big_number # bad API!# deprecated:def compatibility_rank(self, supported):"""Rank the wheel against the supported tags. 
Smaller ranks are morecompatible!:param supported: A list of compatibility tags that the currentPython implemenation can run."""preferences = []for tag in self.compatibility_tags:try:preferences.append(supported.index(tag))# Tag not presentexcept ValueError:passif len(preferences):return (min(preferences), self.arity)return (_big_number, 0)# deprecateddef supports_current_python(self, x):assert self.context == x, 'context mismatch'return self.compatible# Comparability.# Wheels are equal if they refer to the same file.# If two wheels are not equal, compare based on (in this order):# 1. Name# 2. Version# 3. Compatibility rank# 4. Filename (as a tiebreaker)@propertydef _sort_key(self):return (self.parsed_filename.group('name'),parse_version(self.parsed_filename.group('ver')),tuple(-x for x in self.rank),self.filename)def __eq__(self, other):return self.filename == other.filenamedef __ne__(self, other):return self.filename != other.filenamedef __lt__(self, other):if self.context != other.context:raise TypeError("{0}.context != {1}.context".format(self, other))return self._sort_key < other._sort_key# XXX prunesn = self.parsed_filename.group('name')on = other.parsed_filename.group('name')if sn != on:return sn < onsv = parse_version(self.parsed_filename.group('ver'))ov = parse_version(other.parsed_filename.group('ver'))if sv != ov:return sv < ov# Compatibilityif self.context != other.context:raise TypeError("{0}.context != {1}.context".format(self, other))sc = self.rankoc = other.rankif sc is not None and oc is not None and sc != oc:# Smaller compatibility ranks are "better" than larger ones,# so we have to reverse the sense of the comparison here!return sc > ocelif sc is None and oc is not None:return Falsereturn self.filename < other.filenamedef __gt__(self, other):return other < selfdef __le__(self, other):return self == other or self < otherdef __ge__(self, other):return self == other or other < self## Methods using the file's contents:#@reifydef zipfile(self):mode = 
"r"if self.append:mode = "a"vzf = VerifyingZipFile(self.fp if self.fp else self.filename, mode)if not self.append:self.verify(vzf)return vzf@reifydef parsed_wheel_info(self):"""Parse wheel metadata (the .data/WHEEL file)"""return read_pkg_info_bytes(self.zipfile.read(self.wheelinfo_name))def check_version(self):version = self.parsed_wheel_info['Wheel-Version']if tuple(map(int, version.split('.'))) >= VERSION_TOO_HIGH:raise ValueError("Wheel version is too high")@reifydef install_paths(self):"""Consult distutils to get the install paths for our dist. A dict with('purelib', 'platlib', 'headers', 'scripts', 'data').We use the name from our filename as the dist name, which means headerscould be installed in the wrong place if the filesystem-escaped nameis different than the Name. Who cares?"""name = self.parsed_filename.group('name')return get_install_paths(name)def install(self, force=False, overrides={}):"""Install the wheel into site-packages."""# Utility to get the target directory for a particular keydef get_path(key):return overrides.get(key) or self.install_paths[key]# The base target location is either purelib or platlibif self.parsed_wheel_info['Root-Is-Purelib'] == 'true':root = get_path('purelib')else:root = get_path('platlib')# Parse all the names in the archivename_trans = {}for info in self.zipfile.infolist():name = info.filename# Zip files can contain entries representing directories.# These end in a '/'.# We ignore these, as we create directories on demand.if name.endswith('/'):continue# Pathnames in a zipfile namelist are always /-separated.# In theory, paths could start with ./ or have other oddities# but this won't happen in practical cases of well-formed wheels.# We'll cover the simple case of an initial './' as it's both easy# to do and more common than most other oddities.if name.startswith('./'):name = name[2:]# Split off the base directory to identify files that are to be# installed in non-root locationsbasedir, sep, filename = 
name.partition('/')if sep and basedir == self.datadir_name:# Data file. Target destination is elsewherekey, sep, filename = filename.partition('/')if not sep:raise ValueError("Invalid filename in wheel: {0}".format(name))target = get_path(key)else:# Normal file. Target destination is rootkey = ''target = rootfilename = name# Map the actual filename from the zipfile to its intended target# directory and the pathname relative to that directory.dest = os.path.normpath(os.path.join(target, filename))name_trans[info] = (key, target, filename, dest)# We're now ready to start processing the actual install. The process# is as follows:# 1. Prechecks - is the wheel valid, is its declared architecture# OK, etc. [[Responsibility of the caller]]# 2. Overwrite check - do any of the files to be installed already# exist?# 3. Actual install - put the files in their target locations.# 4. Update RECORD - write a suitably modified RECORD file to# reflect the actual installed paths.if not force:for info, v in name_trans.items():k = info.filenamekey, target, filename, dest = vif os.path.exists(dest):raise ValueError("Wheel file {0} would overwrite {1}. Use force if this is intended".format(k, dest))# Get the name of our executable, for use when replacing script# wrapper hashbang lines.# We encode it using getfilesystemencoding, as that is "the name of# the encoding used to convert Unicode filenames into system file# names".exename = sys.executable.encode(sys.getfilesystemencoding())record_data = []record_name = self.distinfo_name + '/RECORD'for info, (key, target, filename, dest) in name_trans.items():name = info.filenamesource = self.zipfile.open(info)# Skip the RECORD fileif name == record_name:continueddir = os.path.dirname(dest)if not os.path.isdir(ddir):os.makedirs(ddir)temp_filename = dest + '.part'try:with HashingFile(temp_filename, 'wb') as destination:if key == 'scripts':hashbang = source.readline()if hashbang.startswith(b'#!python'):hashbang = b'#!' 
+ exename + binary(os.linesep)destination.write(hashbang)shutil.copyfileobj(source, destination)except:if os.path.exists(temp_filename):os.unlink(temp_filename)raiseos.rename(temp_filename, dest)reldest = os.path.relpath(dest, root)reldest.replace(os.sep, '/')record_data.append((reldest, destination.digest(), destination.length))destination.close()source.close()# preserve attributes (especially +x bit for scripts)attrs = info.external_attr >> 16if attrs: # tends to be 0 if Windows.os.chmod(dest, info.external_attr >> 16)record_name = os.path.join(root, self.record_name)with open_for_csv(record_name, 'w+') as record_file:writer = csv.writer(record_file)for reldest, digest, length in sorted(record_data):writer.writerow((reldest, digest, length))writer.writerow((self.record_name, '', ''))def verify(self, zipfile=None):"""Configure the VerifyingZipFile `zipfile` by verifying its signatureand setting expected hashes for every hash in RECORD.Caller must complete the verification process by completely readingevery file in the archive (e.g. 
with extractall)."""sig = Noneif zipfile is None:zipfile = self.zipfilezipfile.strict = Truerecord_name = '/'.join((self.distinfo_name, 'RECORD'))sig_name = '/'.join((self.distinfo_name, 'RECORD.jws'))# tolerate s/mime signatures:smime_sig_name = '/'.join((self.distinfo_name, 'RECORD.p7s'))zipfile.set_expected_hash(record_name, None)zipfile.set_expected_hash(sig_name, None)zipfile.set_expected_hash(smime_sig_name, None)record = zipfile.read(record_name)record_digest = urlsafe_b64encode(hashlib.sha256(record).digest())try:sig = from_json(native(zipfile.read(sig_name)))except KeyError: # no signaturepassif sig:headers, payload = signatures.verify(sig)if payload['hash'] != "sha256=" + native(record_digest):msg = "RECORD.sig claimed RECORD hash {0} != computed hash {1}."raise BadWheelFile(msg.format(payload['hash'],native(record_digest)))reader = csv.reader((native(r) for r in record.splitlines()))for row in reader:filename = row[0]hash = row[1]if not hash:if filename not in (record_name, sig_name):sys.stderr.write("%s has no hash!\n" % filename)continuealgo, data = row[1].split('=', 1)assert algo == "sha256", "Unsupported hash algorithm"zipfile.set_expected_hash(filename, urlsafe_b64decode(binary(data)))class VerifyingZipFile(zipfile.ZipFile):"""ZipFile that can assert that each of its extracted contents matchesan expected sha256 hash. 
Note that each file must be completly read inorder for its hash to be checked."""def __init__(self, file, mode="r",compression=zipfile.ZIP_STORED,allowZip64=False):zipfile.ZipFile.__init__(self, file, mode, compression, allowZip64)self.strict = Falseself._expected_hashes = {}self._hash_algorithm = hashlib.sha256def set_expected_hash(self, name, hash):""":param name: name of zip entry:param hash: bytes of hash (or None for "don't care")"""self._expected_hashes[name] = hashdef open(self, name_or_info, mode="r", pwd=None):"""Return file-like object for 'name'."""# A non-monkey-patched version would contain most of zipfile.pyef = zipfile.ZipFile.open(self, name_or_info, mode, pwd)if isinstance(name_or_info, zipfile.ZipInfo):name = name_or_info.filenameelse:name = name_or_infoif name in self._expected_hashes and self._expected_hashes[name] is not None:expected_hash = self._expected_hashes[name]try:_update_crc_orig = ef._update_crcexcept AttributeError:warnings.warn('Need ZipExtFile._update_crc to implement ''file hash verification (in Python >= 2.7)')return efrunning_hash = self._hash_algorithm()if hasattr(ef, '_eof'): # py33def _update_crc(data):_update_crc_orig(data)running_hash.update(data)if ef._eof and running_hash.digest() != expected_hash:raise BadWheelFile("Bad hash for file %r" % ef.name)else:def _update_crc(data, eof=None):_update_crc_orig(data, eof=eof)running_hash.update(data)if eof and running_hash.digest() != expected_hash:raise BadWheelFile("Bad hash for file %r" % ef.name)ef._update_crc = _update_crcelif self.strict and name not in self._expected_hashes:raise BadWheelFile("No expected hash for file %r" % ef.name)return efdef pop(self):"""Truncate the last file off this zipfile.Assumes infolist() is in the same order as the files (true forordinary zip files created by Python)"""if not self.fp:raise RuntimeError("Attempt to pop from ZIP archive that was already closed")last = self.infolist().pop()del 
self.NameToInfo[last.filename]self.fp.seek(last.header_offset, os.SEEK_SET)self.fp.truncate()self._didModify = True
#!/usr/bin/env python
# Convert a bdist_egg (.egg file or installed egg directory) into a wheel.
import distutils.dist
import os.path
import re
import shutil
import sys
import tempfile
import zipfile
from argparse import ArgumentParser
from distutils.archive_util import make_archive
from glob import iglob

import wheel.bdist_wheel
from wheel.wininst2wheel import _bdist_wheel_tag

# Parses egg basenames like "name-1.0-py2.7-linux-x86_64.egg"; pyver and
# arch are optional.
egg_info_re = re.compile(
    r'''(?P<name>.+?)-(?P<ver>.+?)(-(?P<pyver>.+?))?(-(?P<arch>.+?))?.egg''',
    re.VERBOSE)


def egg2wheel(egg_path, dest_dir):
    # Build a wheel in dest_dir from the egg at egg_path.
    egg_info = egg_info_re.match(os.path.basename(egg_path)).groupdict()
    dir = tempfile.mkdtemp(suffix="_e2w")
    if os.path.isfile(egg_path):
        # assume we have a bdist_egg otherwise
        egg = zipfile.ZipFile(egg_path)
        egg.extractall(dir)
    else:
        # support buildout-style installed eggs directories
        for pth in os.listdir(egg_path):
            src = os.path.join(egg_path, pth)
            if os.path.isfile(src):
                shutil.copy2(src, dir)
            else:
                shutil.copytree(src, os.path.join(dir, pth))

    dist_info = "%s-%s" % (egg_info['name'], egg_info['ver'])
    abi = 'none'
    pyver = egg_info['pyver'].replace('.', '')
    arch = (egg_info['arch'] or 'any').replace('.', '_').replace('-', '_')
    if arch != 'any':
        # assume all binary eggs are for CPython
        pyver = 'cp' + pyver[2:]
    wheel_name = '-'.join((dist_info,
                           pyver,
                           abi,
                           arch))
    root_is_purelib = egg_info['arch'] is None
    if root_is_purelib:
        bw = wheel.bdist_wheel.bdist_wheel(distutils.dist.Distribution())
    else:
        bw = _bdist_wheel_tag(distutils.dist.Distribution())

    bw.root_is_pure = root_is_purelib
    bw.python_tag = pyver
    bw.plat_name_supplied = True
    bw.plat_name = egg_info['arch'] or 'any'
    if not root_is_purelib:
        bw.full_tag_supplied = True
        bw.full_tag = (pyver, abi, arch)

    dist_info_dir = os.path.join(dir, '%s.dist-info' % dist_info)
    bw.egg2dist(os.path.join(dir, 'EGG-INFO'),
                dist_info_dir)
    bw.write_wheelfile(dist_info_dir, generator='egg2wheel')
    bw.write_record(dir, dist_info_dir)
    # make_archive produces a .zip; rename it to the final .whl name.
    filename = make_archive(os.path.join(dest_dir, wheel_name), 'zip',
                            root_dir=dir)
    os.rename(filename, filename[:-3] + 'whl')
    shutil.rmtree(dir)


def main():
    # CLI entry point: expand glob patterns and convert each matching egg.
    parser = ArgumentParser()
    parser.add_argument('eggs', nargs='*', help="Eggs to convert")
    parser.add_argument('--dest-dir', '-d', default=os.path.curdir,
                        help="Directory to store wheels (default %(default)s)")
    parser.add_argument('--verbose', '-v', action='store_true')
    args = parser.parse_args()
    for pat in args.eggs:
        for egg in iglob(pat):
            if args.verbose:
                sys.stdout.write("{0}... ".format(egg))
            egg2wheel(egg, args.dest_dir)
            if args.verbose:
                sys.stdout.write("OK\n")


if __name__ == "__main__":
    main()
# from Pyramid
class reify(object):
    """Cache the result of a method as an instance attribute.

    A non-data descriptor decorator: the first attribute access invokes the
    wrapped method once and stores its return value in the instance dict
    under the method's name, so every later lookup finds the plain value
    and bypasses this descriptor entirely.
    """

    def __init__(self, wrapped):
        self.wrapped = wrapped
        self.__doc__ = wrapped.__doc__

    def __get__(self, inst, objtype=None):
        # Class-level access returns the descriptor itself.
        if inst is None:
            return self
        value = self.wrapped(inst)
        # Shadow the descriptor with a plain attribute of the same name.
        setattr(inst, self.wrapped.__name__, value)
        return value
"""Create a wheel (.whl) distribution.A wheel is a built archive format."""import csvimport hashlibimport osimport subprocessimport warningsimport shutilimport jsonimport sysimport refrom email.generator import Generatorfrom distutils.core import Commandfrom distutils.sysconfig import get_python_versionfrom distutils import log as loggerfrom shutil import rmtreeimport pkg_resourcesfrom .pep425tags import get_abbr_impl, get_impl_ver, get_abi_tag, get_platformfrom .util import native, open_for_csvfrom .archive import archive_wheelfilefrom .pkginfo import read_pkg_info, write_pkg_infofrom .metadata import pkginfo_to_dictfrom . import pep425tags, metadatafrom . import __version__ as wheel_versionsafe_name = pkg_resources.safe_namesafe_version = pkg_resources.safe_versionPY_LIMITED_API_PATTERN = r'cp3\d'def safer_name(name):return safe_name(name).replace('-', '_')def safer_version(version):return safe_version(version).replace('-', '_')class bdist_wheel(Command):description = 'create a wheel distribution'user_options = [('bdist-dir=', 'b',"temporary directory for creating the distribution"),('plat-name=', 'p',"platform name to embed in generated filenames ""(default: %s)" % get_platform()),('keep-temp', 'k',"keep the pseudo-installation tree around after " +"creating the distribution archive"),('dist-dir=', 'd',"directory to put final built distributions in"),('skip-build', None,"skip rebuilding everything (for testing/debugging)"),('relative', None,"build the archive using relative paths""(default: false)"),('owner=', 'u',"Owner name used when creating a tar file"" [default: current user]"),('group=', 'g',"Group name used when creating a tar file"" [default: current group]"),('universal', None,"make a universal wheel"" (default: false)"),('python-tag=', None,"Python implementation compatibility tag"" (default: py%s)" % get_impl_ver()[0]),('build-number=', None,"Build number for this particular version. ""As specified in PEP-0427, this must start with a digit. 
""[default: None]"),('py-limited-api=', None,"Python tag (cp32|cp33|cpNN) for abi3 wheel tag"" (default: false)"),]boolean_options = ['keep-temp', 'skip-build', 'relative', 'universal']def initialize_options(self):self.bdist_dir = Noneself.data_dir = Noneself.plat_name = Noneself.plat_tag = Noneself.format = 'zip'self.keep_temp = Falseself.dist_dir = Noneself.distinfo_dir = Noneself.egginfo_dir = Noneself.root_is_pure = Noneself.skip_build = Noneself.relative = Falseself.owner = Noneself.group = Noneself.universal = Falseself.python_tag = 'py' + get_impl_ver()[0]self.build_number = Noneself.py_limited_api = Falseself.plat_name_supplied = Falsedef finalize_options(self):if self.bdist_dir is None:bdist_base = self.get_finalized_command('bdist').bdist_baseself.bdist_dir = os.path.join(bdist_base, 'wheel')self.data_dir = self.wheel_dist_name + '.data'self.plat_name_supplied = self.plat_name is not Noneneed_options = ('dist_dir', 'plat_name', 'skip_build')self.set_undefined_options('bdist',*zip(need_options, need_options))self.root_is_pure = not (self.distribution.has_ext_modules()or self.distribution.has_c_libraries())if self.py_limited_api and not re.match(PY_LIMITED_API_PATTERN, self.py_limited_api):raise ValueError("py-limited-api must match '%s'" % PY_LIMITED_API_PATTERN)# Support legacy [wheel] section for setting universalwheel = self.distribution.get_option_dict('wheel')if 'universal' in wheel:# please don't define this in your global configsval = wheel['universal'][1].strip()if val.lower() in ('1', 'true', 'yes'):self.universal = Trueif self.build_number is not None and not self.build_number[:1].isdigit():raise ValueError("Build tag (build-number) must start with a digit.")@propertydef wheel_dist_name(self):"""Return distribution full name with - replaced with _"""components = (safer_name(self.distribution.get_name()),safer_version(self.distribution.get_version()))if self.build_number:components += (self.build_number,)return '-'.join(components)def 
get_tag(self):# bdist sets self.plat_name if unset, we should only use it for purepy# wheels if the user supplied it.if self.plat_name_supplied:plat_name = self.plat_nameelif self.root_is_pure:plat_name = 'any'else:plat_name = self.plat_name or get_platform()if plat_name in ('linux-x86_64', 'linux_x86_64') and sys.maxsize == 2147483647:plat_name = 'linux_i686'plat_name = plat_name.replace('-', '_').replace('.', '_')if self.root_is_pure:if self.universal:impl = 'py2.py3'else:impl = self.python_tagtag = (impl, 'none', plat_name)else:impl_name = get_abbr_impl()impl_ver = get_impl_ver()impl = impl_name + impl_ver# We don't work on CPython 3.1, 3.0.if self.py_limited_api and (impl_name + impl_ver).startswith('cp3'):impl = self.py_limited_apiabi_tag = 'abi3'else:abi_tag = str(get_abi_tag()).lower()tag = (impl, abi_tag, plat_name)supported_tags = pep425tags.get_supported(supplied_platform=plat_name if self.plat_name_supplied else None)# XXX switch to this alternate implementation for non-pure:if not self.py_limited_api:assert tag == supported_tags[0], "%s != %s" % (tag, supported_tags[0])assert tag in supported_tags, "would build wheel with unsupported tag {}".format(tag)return tagdef get_archive_basename(self):"""Return archive name without extension"""impl_tag, abi_tag, plat_tag = self.get_tag()archive_basename = "%s-%s-%s-%s" % (self.wheel_dist_name,impl_tag,abi_tag,plat_tag)return archive_basenamedef run(self):build_scripts = self.reinitialize_command('build_scripts')build_scripts.executable = 'python'if not self.skip_build:self.run_command('build')install = self.reinitialize_command('install',reinit_subcommands=True)install.root = self.bdist_dirinstall.compile = Falseinstall.skip_build = self.skip_buildinstall.warn_dir = False# A wheel without setuptools scripts is more cross-platform.# Use the (undocumented) `no_ep` option to setuptools'# install_scripts command to avoid creating entry point scripts.install_scripts = 
self.reinitialize_command('install_scripts')install_scripts.no_ep = True# Use a custom scheme for the archive, because we have to decide# at installation time which scheme to use.for key in ('headers', 'scripts', 'data', 'purelib', 'platlib'):setattr(install,'install_' + key,os.path.join(self.data_dir, key))basedir_observed = ''if os.name == 'nt':# win32 barfs if any of these are ''; could be '.'?# (distutils.command.install:change_roots bug)basedir_observed = os.path.normpath(os.path.join(self.data_dir, '..'))self.install_libbase = self.install_lib = basedir_observedsetattr(install,'install_purelib' if self.root_is_pure else 'install_platlib',basedir_observed)logger.info("installing to %s", self.bdist_dir)self.run_command('install')archive_basename = self.get_archive_basename()pseudoinstall_root = os.path.join(self.dist_dir, archive_basename)if not self.relative:archive_root = self.bdist_direlse:archive_root = os.path.join(self.bdist_dir,self._ensure_relative(install.install_base))self.set_undefined_options('install_egg_info', ('target', 'egginfo_dir'))self.distinfo_dir = os.path.join(self.bdist_dir,'%s.dist-info' % self.wheel_dist_name)self.egg2dist(self.egginfo_dir,self.distinfo_dir)self.write_wheelfile(self.distinfo_dir)self.write_record(self.bdist_dir, self.distinfo_dir)# Make the archiveif not os.path.exists(self.dist_dir):os.makedirs(self.dist_dir)wheel_name = archive_wheelfile(pseudoinstall_root, archive_root)# Sign the archiveif 'WHEEL_TOOL' in os.environ:subprocess.call([os.environ['WHEEL_TOOL'], 'sign', wheel_name])# Add to 'Distribution.dist_files' so that the "upload" command worksgetattr(self.distribution, 'dist_files', []).append(('bdist_wheel', get_python_version(), wheel_name))if not self.keep_temp:if self.dry_run:logger.info('removing %s', self.bdist_dir)else:rmtree(self.bdist_dir)def write_wheelfile(self, wheelfile_base, generator='bdist_wheel (' + wheel_version + ')'):from email.message import Messagemsg = Message()msg['Wheel-Version'] = '1.0' # 
of the specmsg['Generator'] = generatormsg['Root-Is-Purelib'] = str(self.root_is_pure).lower()if self.build_number is not None:msg['Build'] = self.build_number# Doesn't work for bdist_wininstimpl_tag, abi_tag, plat_tag = self.get_tag()for impl in impl_tag.split('.'):for abi in abi_tag.split('.'):for plat in plat_tag.split('.'):msg['Tag'] = '-'.join((impl, abi, plat))wheelfile_path = os.path.join(wheelfile_base, 'WHEEL')logger.info('creating %s', wheelfile_path)with open(wheelfile_path, 'w') as f:Generator(f, maxheaderlen=0).flatten(msg)def _ensure_relative(self, path):# copied from dir_util, deleteddrive, path = os.path.splitdrive(path)if path[0:1] == os.sep:path = drive + path[1:]return pathdef _pkginfo_to_metadata(self, egg_info_path, pkginfo_path):return metadata.pkginfo_to_metadata(egg_info_path, pkginfo_path)def license_file(self):"""Return license filename from a license-file key in setup.cfg, or None."""metadata = self.distribution.get_option_dict('metadata')if 'license_file' not in metadata:return Nonereturn metadata['license_file'][1]def setupcfg_requirements(self):"""Generate requirements from setup.cfg as('Requires-Dist', 'requirement; qualifier') tuples. 
From a metadatasection in setup.cfg:[metadata]provides-extra = extra1extra2requires-dist = requirement; qualifieranother; qualifier2unqualifiedYields('Provides-Extra', 'extra1'),('Provides-Extra', 'extra2'),('Requires-Dist', 'requirement; qualifier'),('Requires-Dist', 'another; qualifier2'),('Requires-Dist', 'unqualified')"""metadata = self.distribution.get_option_dict('metadata')# our .ini parser folds - to _ in key names:for key, title in (('provides_extra', 'Provides-Extra'),('requires_dist', 'Requires-Dist')):if key not in metadata:continuefield = metadata[key]for line in field[1].splitlines():line = line.strip()if not line:continueyield (title, line)def add_requirements(self, metadata_path):"""Add additional requirements from setup.cfg to file metadata_path"""additional = list(self.setupcfg_requirements())if not additional:returnpkg_info = read_pkg_info(metadata_path)if 'Provides-Extra' in pkg_info or 'Requires-Dist' in pkg_info:warnings.warn('setup.cfg requirements overwrite values from setup.py')del pkg_info['Provides-Extra']del pkg_info['Requires-Dist']for k, v in additional:pkg_info[k] = vwrite_pkg_info(metadata_path, pkg_info)def egg2dist(self, egginfo_path, distinfo_path):"""Convert an .egg-info directory into a .dist-info directory"""def adios(p):"""Appropriately delete directory, file or link."""if os.path.exists(p) and not os.path.islink(p) and os.path.isdir(p):shutil.rmtree(p)elif os.path.exists(p):os.unlink(p)adios(distinfo_path)if not os.path.exists(egginfo_path):# There is no egg-info. This is probably because the egg-info# file/directory is not named matching the distribution name used# to name the archive file. 
Check for this case and report# accordingly.import globpat = os.path.join(os.path.dirname(egginfo_path), '*.egg-info')possible = glob.glob(pat)err = "Egg metadata expected at %s but not found" % (egginfo_path,)if possible:alt = os.path.basename(possible[0])err += " (%s found - possible misnamed archive file?)" % (alt,)raise ValueError(err)if os.path.isfile(egginfo_path):# .egg-info is a single filepkginfo_path = egginfo_pathpkg_info = self._pkginfo_to_metadata(egginfo_path, egginfo_path)os.mkdir(distinfo_path)else:# .egg-info is a directorypkginfo_path = os.path.join(egginfo_path, 'PKG-INFO')pkg_info = self._pkginfo_to_metadata(egginfo_path, pkginfo_path)# ignore common egg metadata that is useless to wheelshutil.copytree(egginfo_path, distinfo_path,ignore=lambda x, y: {'PKG-INFO', 'requires.txt', 'SOURCES.txt','not-zip-safe'})# delete dependency_links if it is only whitespacedependency_links_path = os.path.join(distinfo_path, 'dependency_links.txt')with open(dependency_links_path, 'r') as dependency_links_file:dependency_links = dependency_links_file.read().strip()if not dependency_links:adios(dependency_links_path)write_pkg_info(os.path.join(distinfo_path, 'METADATA'), pkg_info)# XXX deprecated. 
Still useful for current distribute/setuptools.metadata_path = os.path.join(distinfo_path, 'METADATA')self.add_requirements(metadata_path)# XXX intentionally a different path than the PEP.metadata_json_path = os.path.join(distinfo_path, 'metadata.json')pymeta = pkginfo_to_dict(metadata_path,distribution=self.distribution)if 'description' in pymeta:description_filename = 'DESCRIPTION.rst'description_text = pymeta.pop('description')description_path = os.path.join(distinfo_path,description_filename)with open(description_path, "wb") as description_file:description_file.write(description_text.encode('utf-8'))pymeta['extensions']['python.details']['document_names']['description'] = \description_filename# XXX heuristically copy any LICENSE/LICENSE.txt?license = self.license_file()if license:license_filename = 'LICENSE.txt'shutil.copy(license, os.path.join(self.distinfo_dir, license_filename))pymeta['extensions']['python.details']['document_names']['license'] = license_filenamewith open(metadata_json_path, "w") as metadata_json:json.dump(pymeta, metadata_json, sort_keys=True)adios(egginfo_path)def write_record(self, bdist_dir, distinfo_dir):from .util import urlsafe_b64encoderecord_path = os.path.join(distinfo_dir, 'RECORD')record_relpath = os.path.relpath(record_path, bdist_dir)def walk():for dir, dirs, files in os.walk(bdist_dir):dirs.sort()for f in sorted(files):yield os.path.join(dir, f)def skip(path):"""Wheel hashes every possible file."""return (path == record_relpath)with open_for_csv(record_path, 'w+') as record_file:writer = csv.writer(record_file)for path in walk():relpath = os.path.relpath(path, bdist_dir)if skip(relpath):hash = ''size = ''else:with open(path, 'rb') as f:data = f.read()digest = hashlib.sha256(data).digest()hash = 'sha256=' + native(urlsafe_b64encode(digest))size = len(data)record_path = os.path.relpath(path, bdist_dir).replace(os.path.sep, '/')writer.writerow((record_path, hash, size))
"""Archive tools for wheel."""

import os
import os.path
import time
import zipfile

from distutils import log


def archive_wheelfile(base_name, base_dir):
    """Archive all files under `base_dir` in a whl file and name it like
    `base_name`.

    Returns the path of the created archive. The current working directory
    is temporarily changed to `base_dir` (and always restored) so that
    archive member names are relative to it.
    """
    olddir = os.path.abspath(os.curdir)
    base_name = os.path.abspath(base_name)
    try:
        os.chdir(base_dir)
        return make_wheelfile_inner(base_name)
    finally:
        os.chdir(olddir)


def make_wheelfile_inner(base_name, base_dir='.'):
    """Create a whl file from all the files under 'base_dir'.

    Places .dist-info at the end of the archive, with WHEEL, METADATA and
    RECORD (in that order) as the very last members.
    """
    zip_filename = base_name + ".whl"
    log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir)

    # Some applications need reproducible .whl files, but they can't do this
    # without forcing the timestamp of the individual ZipInfo objects. See
    # issue #143.
    timestamp = os.environ.get('SOURCE_DATE_EPOCH')
    if timestamp is None:
        date_time = None
    else:
        date_time = time.gmtime(int(timestamp))[0:6]

    # Relative ordering of the special .dist-info members; anything else in
    # .dist-info sorts first (score 0), RECORD always comes last.
    score = {'WHEEL': 1, 'METADATA': 2, 'RECORD': 3}
    deferred = []

    # XXX support bz2, xz when available
    # Use a context manager so the archive is closed (and the central
    # directory written) even if adding a file raises; the previous version
    # leaked the handle on error and shadowed the builtin `zip`.
    with zipfile.ZipFile(zip_filename, "w",
                         compression=zipfile.ZIP_DEFLATED) as archive:

        def writefile(path, date_time):
            """Add one file, preserving its mode bits and forcing the
            timestamp when SOURCE_DATE_EPOCH is set."""
            st = os.stat(path)
            if date_time is None:
                mtime = time.gmtime(st.st_mtime)
                date_time = mtime[0:6]
            zinfo = zipfile.ZipInfo(path, date_time)
            # Stash the POSIX mode in the upper bits of external_attr.
            zinfo.external_attr = st.st_mode << 16
            zinfo.compress_type = zipfile.ZIP_DEFLATED
            with open(path, 'rb') as fp:
                archive.writestr(zinfo, fp.read())
            log.info("adding '%s'", path)

        for dirpath, dirnames, filenames in os.walk(base_dir):
            # Sort the directory names so that `os.walk` will walk them in a
            # defined order on the next iteration.
            dirnames.sort()
            for name in sorted(filenames):
                path = os.path.normpath(os.path.join(dirpath, name))
                if os.path.isfile(path):
                    if dirpath.endswith('.dist-info'):
                        deferred.append((score.get(name, 0), path))
                    else:
                        writefile(path, date_time)

        deferred.sort()
        for _, path in deferred:
            writefile(path, date_time)

    return zip_filename
"""Wheel command line tool (enable python -m wheel syntax)"""

import sys


def main():  # needed for console script
    # When executed directly out of a wheel archive, __package__ is the
    # empty string and the package root is not yet importable.
    if __package__ == '':
        # To be able to run 'python wheel-0.9.whl/wheel':
        import os.path
        archive_root = os.path.dirname(os.path.dirname(__file__))
        sys.path.insert(0, archive_root)
    import wheel.tool
    sys.exit(wheel.tool.main())


if __name__ == "__main__":
    sys.exit(main())
# __variables__ with double-quoted values will be available in setup.py:__version__ = "0.30.0"
# Copyright (c) 2010-2017 Benjamin Peterson## Permission is hereby granted, free of charge, to any person obtaining a copy# of this software and associated documentation files (the "Software"), to deal# in the Software without restriction, including without limitation the rights# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell# copies of the Software, and to permit persons to whom the Software is# furnished to do so, subject to the following conditions:## The above copyright notice and this permission notice shall be included in all# copies or substantial portions of the Software.## THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE# SOFTWARE."""Utilities for writing code that runs on Python 2 and 3"""from __future__ import absolute_importimport functoolsimport itertoolsimport operatorimport sysimport types__author__ = "Benjamin Peterson <benjamin@python.org>"__version__ = "1.11.0"# Useful for very coarse version differentiation.PY2 = sys.version_info[0] == 2PY3 = sys.version_info[0] == 3PY34 = sys.version_info[0:2] >= (3, 4)if PY3:string_types = str,integer_types = int,class_types = type,text_type = strbinary_type = bytesMAXSIZE = sys.maxsizeelse:string_types = basestring,integer_types = (int, long)class_types = (type, types.ClassType)text_type = unicodebinary_type = strif sys.platform.startswith("java"):# Jython always uses 32 bits.MAXSIZE = int((1 << 31) - 1)else:# It's possible to have sizeof(long) != sizeof(Py_ssize_t).class X(object):def __len__(self):return 1 << 31try:len(X())except OverflowError:# 32-bitMAXSIZE = int((1 << 31) - 
1)else:# 64-bitMAXSIZE = int((1 << 63) - 1)del Xdef _add_doc(func, doc):"""Add documentation to a function."""func.__doc__ = docdef _import_module(name):"""Import module, returning the module after the last dot."""__import__(name)return sys.modules[name]class _LazyDescr(object):def __init__(self, name):self.name = namedef __get__(self, obj, tp):result = self._resolve()setattr(obj, self.name, result) # Invokes __set__.try:# This is a bit ugly, but it avoids running this again by# removing this descriptor.delattr(obj.__class__, self.name)except AttributeError:passreturn resultclass MovedModule(_LazyDescr):def __init__(self, name, old, new=None):super(MovedModule, self).__init__(name)if PY3:if new is None:new = nameself.mod = newelse:self.mod = olddef _resolve(self):return _import_module(self.mod)def __getattr__(self, attr):_module = self._resolve()value = getattr(_module, attr)setattr(self, attr, value)return valueclass _LazyModule(types.ModuleType):def __init__(self, name):super(_LazyModule, self).__init__(name)self.__doc__ = self.__class__.__doc__def __dir__(self):attrs = ["__doc__", "__name__"]attrs += [attr.name for attr in self._moved_attributes]return attrs# Subclasses should override this_moved_attributes = []class MovedAttribute(_LazyDescr):def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):super(MovedAttribute, self).__init__(name)if PY3:if new_mod is None:new_mod = nameself.mod = new_modif new_attr is None:if old_attr is None:new_attr = nameelse:new_attr = old_attrself.attr = new_attrelse:self.mod = old_modif old_attr is None:old_attr = nameself.attr = old_attrdef _resolve(self):module = _import_module(self.mod)return getattr(module, self.attr)class _SixMetaPathImporter(object):"""A meta path importer to import six.moves and its submodules.This class implements a PEP302 finder and loader. 
It should be compatiblewith Python 2.5 and all existing versions of Python3"""def __init__(self, six_module_name):self.name = six_module_nameself.known_modules = {}def _add_module(self, mod, *fullnames):for fullname in fullnames:self.known_modules[self.name + "." + fullname] = moddef _get_module(self, fullname):return self.known_modules[self.name + "." + fullname]def find_module(self, fullname, path=None):if fullname in self.known_modules:return selfreturn Nonedef __get_module(self, fullname):try:return self.known_modules[fullname]except KeyError:raise ImportError("This loader does not know module " + fullname)def load_module(self, fullname):try:# in case of a reloadreturn sys.modules[fullname]except KeyError:passmod = self.__get_module(fullname)if isinstance(mod, MovedModule):mod = mod._resolve()else:mod.__loader__ = selfsys.modules[fullname] = modreturn moddef is_package(self, fullname):"""Return true, if the named module is a package.We need this method to get correct spec objects withPython 3.4 (see PEP451)"""return hasattr(self.__get_module(fullname), "__path__")def get_code(self, fullname):"""Return NoneRequired, if is_package is implemented"""self.__get_module(fullname) # eventually raises ImportErrorreturn Noneget_source = get_code # same as get_code_importer = _SixMetaPathImporter(__name__)class _MovedItems(_LazyModule):"""Lazy loading of moved objects"""__path__ = [] # mark as package_moved_attributes = [MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),MovedAttribute("intern", "__builtin__", "sys"),MovedAttribute("map", "itertools", "builtins", "imap", "map"),MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),MovedAttribute("getoutput", "commands", 
"subprocess"),MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),MovedAttribute("reduce", "__builtin__", "functools"),MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),MovedAttribute("StringIO", "StringIO", "io"),MovedAttribute("UserDict", "UserDict", "collections"),MovedAttribute("UserList", "UserList", "collections"),MovedAttribute("UserString", "UserString", "collections"),MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),MovedModule("builtins", "__builtin__"),MovedModule("configparser", "ConfigParser"),MovedModule("copyreg", "copy_reg"),MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),MovedModule("http_cookies", "Cookie", "http.cookies"),MovedModule("html_entities", "htmlentitydefs", "html.entities"),MovedModule("html_parser", "HTMLParser", "html.parser"),MovedModule("http_client", "httplib", "http.client"),MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"),MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),MovedModule("cPickle", "cPickle", "pickle"),MovedModule("queue", "Queue"),MovedModule("reprlib", "repr"),MovedModule("socketserver", "SocketServer"),MovedModule("_thread", "thread", 
"_thread"),MovedModule("tkinter", "Tkinter"),MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),MovedModule("tkinter_tix", "Tix", "tkinter.tix"),MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),MovedModule("tkinter_colorchooser", "tkColorChooser","tkinter.colorchooser"),MovedModule("tkinter_commondialog", "tkCommonDialog","tkinter.commondialog"),MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),MovedModule("tkinter_font", "tkFont", "tkinter.font"),MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),MovedModule("tkinter_tksimpledialog", "tkSimpleDialog","tkinter.simpledialog"),MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),]# Add windows specific modules.if sys.platform == "win32":_moved_attributes += [MovedModule("winreg", "_winreg"),]for attr in _moved_attributes:setattr(_MovedItems, attr.name, attr)if isinstance(attr, MovedModule):_importer._add_module(attr, "moves." 
+ attr.name)del attr_MovedItems._moved_attributes = _moved_attributesmoves = _MovedItems(__name__ + ".moves")_importer._add_module(moves, "moves")class Module_six_moves_urllib_parse(_LazyModule):"""Lazy loading of moved objects in six.moves.urllib_parse"""_urllib_parse_moved_attributes = [MovedAttribute("ParseResult", "urlparse", "urllib.parse"),MovedAttribute("SplitResult", "urlparse", "urllib.parse"),MovedAttribute("parse_qs", "urlparse", "urllib.parse"),MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),MovedAttribute("urldefrag", "urlparse", "urllib.parse"),MovedAttribute("urljoin", "urlparse", "urllib.parse"),MovedAttribute("urlparse", "urlparse", "urllib.parse"),MovedAttribute("urlsplit", "urlparse", "urllib.parse"),MovedAttribute("urlunparse", "urlparse", "urllib.parse"),MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),MovedAttribute("quote", "urllib", "urllib.parse"),MovedAttribute("quote_plus", "urllib", "urllib.parse"),MovedAttribute("unquote", "urllib", "urllib.parse"),MovedAttribute("unquote_plus", "urllib", "urllib.parse"),MovedAttribute("unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"),MovedAttribute("urlencode", "urllib", "urllib.parse"),MovedAttribute("splitquery", "urllib", "urllib.parse"),MovedAttribute("splittag", "urllib", "urllib.parse"),MovedAttribute("splituser", "urllib", "urllib.parse"),MovedAttribute("splitvalue", "urllib", "urllib.parse"),MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),MovedAttribute("uses_params", "urlparse", "urllib.parse"),MovedAttribute("uses_query", "urlparse", "urllib.parse"),MovedAttribute("uses_relative", "urlparse", "urllib.parse"),]for attr in _urllib_parse_moved_attributes:setattr(Module_six_moves_urllib_parse, attr.name, attr)del attrModule_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes_importer._add_module(Module_six_moves_urllib_parse(__name__ + 
".moves.urllib_parse"),"moves.urllib_parse", "moves.urllib.parse")class Module_six_moves_urllib_error(_LazyModule):"""Lazy loading of moved objects in six.moves.urllib_error"""_urllib_error_moved_attributes = [MovedAttribute("URLError", "urllib2", "urllib.error"),MovedAttribute("HTTPError", "urllib2", "urllib.error"),MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),]for attr in _urllib_error_moved_attributes:setattr(Module_six_moves_urllib_error, attr.name, attr)del attrModule_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),"moves.urllib_error", "moves.urllib.error")class Module_six_moves_urllib_request(_LazyModule):"""Lazy loading of moved objects in six.moves.urllib_request"""_urllib_request_moved_attributes = [MovedAttribute("urlopen", "urllib2", "urllib.request"),MovedAttribute("install_opener", "urllib2", "urllib.request"),MovedAttribute("build_opener", "urllib2", "urllib.request"),MovedAttribute("pathname2url", "urllib", "urllib.request"),MovedAttribute("url2pathname", "urllib", "urllib.request"),MovedAttribute("getproxies", "urllib", "urllib.request"),MovedAttribute("Request", "urllib2", "urllib.request"),MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),MovedAttribute("BaseHandler", "urllib2", "urllib.request"),MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),MovedAttribute("ProxyBasicAuthHandler", "urllib2", 
"urllib.request"),MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),MovedAttribute("FileHandler", "urllib2", "urllib.request"),MovedAttribute("FTPHandler", "urllib2", "urllib.request"),MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),MovedAttribute("urlretrieve", "urllib", "urllib.request"),MovedAttribute("urlcleanup", "urllib", "urllib.request"),MovedAttribute("URLopener", "urllib", "urllib.request"),MovedAttribute("FancyURLopener", "urllib", "urllib.request"),MovedAttribute("proxy_bypass", "urllib", "urllib.request"),MovedAttribute("parse_http_list", "urllib2", "urllib.request"),MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"),]for attr in _urllib_request_moved_attributes:setattr(Module_six_moves_urllib_request, attr.name, attr)del attrModule_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),"moves.urllib_request", "moves.urllib.request")class Module_six_moves_urllib_response(_LazyModule):"""Lazy loading of moved objects in six.moves.urllib_response"""_urllib_response_moved_attributes = [MovedAttribute("addbase", "urllib", "urllib.response"),MovedAttribute("addclosehook", "urllib", "urllib.response"),MovedAttribute("addinfo", "urllib", "urllib.response"),MovedAttribute("addinfourl", "urllib", "urllib.response"),]for attr in _urllib_response_moved_attributes:setattr(Module_six_moves_urllib_response, attr.name, attr)del attrModule_six_moves_urllib_response._moved_attributes = 
_urllib_response_moved_attributes_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),"moves.urllib_response", "moves.urllib.response")class Module_six_moves_urllib_robotparser(_LazyModule):"""Lazy loading of moved objects in six.moves.urllib_robotparser"""_urllib_robotparser_moved_attributes = [MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),]for attr in _urllib_robotparser_moved_attributes:setattr(Module_six_moves_urllib_robotparser, attr.name, attr)del attrModule_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),"moves.urllib_robotparser", "moves.urllib.robotparser")class Module_six_moves_urllib(types.ModuleType):"""Create a six.moves.urllib namespace that resembles the Python 3 namespace"""__path__ = [] # mark as packageparse = _importer._get_module("moves.urllib_parse")error = _importer._get_module("moves.urllib_error")request = _importer._get_module("moves.urllib_request")response = _importer._get_module("moves.urllib_response")robotparser = _importer._get_module("moves.urllib_robotparser")def __dir__(self):return ['parse', 'error', 'request', 'response', 'robotparser']_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),"moves.urllib")def add_move(move):"""Add an item to six.moves."""setattr(_MovedItems, move.name, move)def remove_move(name):"""Remove item from six.moves."""try:delattr(_MovedItems, name)except AttributeError:try:del moves.__dict__[name]except KeyError:raise AttributeError("no such move, %r" % (name,))if PY3:_meth_func = "__func__"_meth_self = "__self__"_func_closure = "__closure__"_func_code = "__code__"_func_defaults = "__defaults__"_func_globals = "__globals__"else:_meth_func = "im_func"_meth_self = "im_self"_func_closure = "func_closure"_func_code = "func_code"_func_defaults = "func_defaults"_func_globals = 
"func_globals"try:advance_iterator = nextexcept NameError:def advance_iterator(it):return it.next()next = advance_iteratortry:callable = callableexcept NameError:def callable(obj):return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)if PY3:def get_unbound_function(unbound):return unboundcreate_bound_method = types.MethodTypedef create_unbound_method(func, cls):return funcIterator = objectelse:def get_unbound_function(unbound):return unbound.im_funcdef create_bound_method(func, obj):return types.MethodType(func, obj, obj.__class__)def create_unbound_method(func, cls):return types.MethodType(func, None, cls)class Iterator(object):def next(self):return type(self).__next__(self)callable = callable_add_doc(get_unbound_function,"""Get the function out of a possibly unbound function""")get_method_function = operator.attrgetter(_meth_func)get_method_self = operator.attrgetter(_meth_self)get_function_closure = operator.attrgetter(_func_closure)get_function_code = operator.attrgetter(_func_code)get_function_defaults = operator.attrgetter(_func_defaults)get_function_globals = operator.attrgetter(_func_globals)if PY3:def iterkeys(d, **kw):return iter(d.keys(**kw))def itervalues(d, **kw):return iter(d.values(**kw))def iteritems(d, **kw):return iter(d.items(**kw))def iterlists(d, **kw):return iter(d.lists(**kw))viewkeys = operator.methodcaller("keys")viewvalues = operator.methodcaller("values")viewitems = operator.methodcaller("items")else:def iterkeys(d, **kw):return d.iterkeys(**kw)def itervalues(d, **kw):return d.itervalues(**kw)def iteritems(d, **kw):return d.iteritems(**kw)def iterlists(d, **kw):return d.iterlists(**kw)viewkeys = operator.methodcaller("viewkeys")viewvalues = operator.methodcaller("viewvalues")viewitems = operator.methodcaller("viewitems")_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")_add_doc(itervalues, "Return an iterator over the values of a dictionary.")_add_doc(iteritems,"Return an iterator over the (key, 
value) pairs of a dictionary.")_add_doc(iterlists,"Return an iterator over the (key, [values]) pairs of a dictionary.")if PY3:def b(s):return s.encode("latin-1")def u(s):return sunichr = chrimport structint2byte = struct.Struct(">B").packdel structbyte2int = operator.itemgetter(0)indexbytes = operator.getitemiterbytes = iterimport ioStringIO = io.StringIOBytesIO = io.BytesIO_assertCountEqual = "assertCountEqual"if sys.version_info[1] <= 1:_assertRaisesRegex = "assertRaisesRegexp"_assertRegex = "assertRegexpMatches"else:_assertRaisesRegex = "assertRaisesRegex"_assertRegex = "assertRegex"else:def b(s):return s# Workaround for standalone backslashdef u(s):return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")unichr = unichrint2byte = chrdef byte2int(bs):return ord(bs[0])def indexbytes(buf, i):return ord(buf[i])iterbytes = functools.partial(itertools.imap, ord)import StringIOStringIO = BytesIO = StringIO.StringIO_assertCountEqual = "assertItemsEqual"_assertRaisesRegex = "assertRaisesRegexp"_assertRegex = "assertRegexpMatches"_add_doc(b, """Byte literal""")_add_doc(u, """Text literal""")def assertCountEqual(self, *args, **kwargs):return getattr(self, _assertCountEqual)(*args, **kwargs)def assertRaisesRegex(self, *args, **kwargs):return getattr(self, _assertRaisesRegex)(*args, **kwargs)def assertRegex(self, *args, **kwargs):return getattr(self, _assertRegex)(*args, **kwargs)if PY3:exec_ = getattr(moves.builtins, "exec")def reraise(tp, value, tb=None):try:if value is None:value = tp()if value.__traceback__ is not tb:raise value.with_traceback(tb)raise valuefinally:value = Nonetb = Noneelse:def exec_(_code_, _globs_=None, _locs_=None):"""Execute code in a namespace."""if _globs_ is None:frame = sys._getframe(1)_globs_ = frame.f_globalsif _locs_ is None:_locs_ = frame.f_localsdel frameelif _locs_ is None:_locs_ = _globs_exec("""exec _code_ in _globs_, _locs_""")exec_("""def reraise(tp, value, tb=None):try:raise tp, value, tbfinally:tb = None""")if sys.version_info[:2] 
== (3, 2):exec_("""def raise_from(value, from_value):try:if from_value is None:raise valueraise value from from_valuefinally:value = None""")elif sys.version_info[:2] > (3, 2):exec_("""def raise_from(value, from_value):try:raise value from from_valuefinally:value = None""")else:def raise_from(value, from_value):raise valueprint_ = getattr(moves.builtins, "print", None)if print_ is None:def print_(*args, **kwargs):"""The new-style print function for Python 2.4 and 2.5."""fp = kwargs.pop("file", sys.stdout)if fp is None:returndef write(data):if not isinstance(data, basestring):data = str(data)# If the file has an encoding, encode unicode with it.if (isinstance(fp, file) andisinstance(data, unicode) andfp.encoding is not None):errors = getattr(fp, "errors", None)if errors is None:errors = "strict"data = data.encode(fp.encoding, errors)fp.write(data)want_unicode = Falsesep = kwargs.pop("sep", None)if sep is not None:if isinstance(sep, unicode):want_unicode = Trueelif not isinstance(sep, str):raise TypeError("sep must be None or a string")end = kwargs.pop("end", None)if end is not None:if isinstance(end, unicode):want_unicode = Trueelif not isinstance(end, str):raise TypeError("end must be None or a string")if kwargs:raise TypeError("invalid keyword arguments to print()")if not want_unicode:for arg in args:if isinstance(arg, unicode):want_unicode = Truebreakif want_unicode:newline = unicode("\n")space = unicode(" ")else:newline = "\n"space = " "if sep is None:sep = spaceif end is None:end = newlinefor i, arg in enumerate(args):if i:write(sep)write(arg)write(end)if sys.version_info[:2] < (3, 3):_print = print_def print_(*args, **kwargs):fp = kwargs.get("file", sys.stdout)flush = kwargs.pop("flush", False)_print(*args, **kwargs)if flush and fp is not None:fp.flush()_add_doc(reraise, """Reraise an exception.""")if sys.version_info[0:2] < (3, 4):def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,updated=functools.WRAPPER_UPDATES):def wrapper(f):f = 
functools.wraps(wrapped, assigned, updated)(f)f.__wrapped__ = wrappedreturn freturn wrapperelse:wraps = functools.wrapsdef with_metaclass(meta, *bases):"""Create a base class with a metaclass."""# This requires a bit of explanation: the basic idea is to make a dummy# metaclass for one level of class instantiation that replaces itself with# the actual metaclass.class metaclass(type):def __new__(cls, name, this_bases, d):return meta(name, bases, d)@classmethoddef __prepare__(cls, name, this_bases):return meta.__prepare__(name, bases)return type.__new__(metaclass, 'temporary_class', (), {})def add_metaclass(metaclass):"""Class decorator for creating a class with a metaclass."""def wrapper(cls):orig_vars = cls.__dict__.copy()slots = orig_vars.get('__slots__')if slots is not None:if isinstance(slots, str):slots = [slots]for slots_var in slots:orig_vars.pop(slots_var)orig_vars.pop('__dict__', None)orig_vars.pop('__weakref__', None)return metaclass(cls.__name__, cls.__bases__, orig_vars)return wrapperdef python_2_unicode_compatible(klass):"""A decorator that defines __unicode__ and __str__ methods under Python 2.Under Python 3 it does nothing.To support Python 2 and 3 with a single code base, define a __str__ methodreturning text and apply this decorator to the class."""if PY2:if '__str__' not in klass.__dict__:raise ValueError("@python_2_unicode_compatible cannot be applied ""to %s because it doesn't define __str__()." %klass.__name__)klass.__unicode__ = klass.__str__klass.__str__ = lambda self: self.__unicode__().encode('utf-8')return klass# Complete the moves implementation.# This code is at the end of this module to speed up module loading.# Turn this module into a package.__path__ = [] # required for PEP 302 and PEP 451__package__ = __name__ # see PEP 366 @ReservedAssignmentif globals().get("__spec__") is not None:__spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable# Remove other six meta path importers, since they cause problems. 
This can# happen if six is removed from sys.modules and then reloaded. (Setuptools does# this for some reason.)if sys.meta_path:for i, importer in enumerate(sys.meta_path):# Here's some real nastiness: Another "instance" of the six module might# be floating around. Therefore, we can't use isinstance() to check for# the six meta path importer, since the other six instance will have# inserted an importer with different class.if (type(importer).__name__ == "_SixMetaPathImporter" andimporter.name == __name__):del sys.meta_path[i]breakdel i, importer# Finally, add the importer to the meta path import hook.sys.meta_path.append(_importer)
six
{"classifiers": ["Programming Language :: Python :: 2", "Programming Language :: Python :: 3", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Topic :: Software Development :: Libraries", "Topic :: Utilities"], "extensions": {"python.details": {"contacts": [{"email": "benjamin@python.org", "name": "Benjamin Peterson", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "http://pypi.python.org/pypi/six/"}}}, "generator": "bdist_wheel (0.29.0)", "license": "MIT", "metadata_version": "2.0", "name": "six", "summary": "Python 2 and 3 compatibility utilities", "test_requires": [{"requires": ["pytest"]}], "version": "1.11.0"}
Wheel-Version: 1.0Generator: bdist_wheel (0.29.0)Root-Is-Purelib: trueTag: py2-none-anyTag: py3-none-any
six.py,sha256=A08MPb-Gi9FfInI3IW7HimXFmEH2T2IPzHgDvdhZPRA,30888six-1.11.0.dist-info/DESCRIPTION.rst,sha256=gPBoq1Ruc1QDWyLeXPlieL3F-XZz1_WXB-5gctCfg-A,1098six-1.11.0.dist-info/METADATA,sha256=06nZXaDYN3vnC-pmUjhkECYFH_a--ywvcPIpUdNeH1o,1607six-1.11.0.dist-info/RECORD,,six-1.11.0.dist-info/WHEEL,sha256=o2k-Qa-RMNIJmUdIc7KU6VWR_ErNRbWNlxDIpl7lm34,110six-1.11.0.dist-info/metadata.json,sha256=ac3f4f7MpSHSnZ1SqhHCwsL7FGWMG0gBEb0hhS2eSSM,703six-1.11.0.dist-info/top_level.txt,sha256=_iVH_iYEtEXnD8nYGQYpYFUvkUW9sEO1GYbkeKSAais,4six-1.11.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4six.pyc,,
Metadata-Version: 2.0Name: sixVersion: 1.11.0Summary: Python 2 and 3 compatibility utilitiesHome-page: http://pypi.python.org/pypi/six/Author: Benjamin PetersonAuthor-email: benjamin@python.orgLicense: MITPlatform: UNKNOWNClassifier: Programming Language :: Python :: 2Classifier: Programming Language :: Python :: 3Classifier: Intended Audience :: DevelopersClassifier: License :: OSI Approved :: MIT LicenseClassifier: Topic :: Software Development :: LibrariesClassifier: Topic :: Utilities.. image:: http://img.shields.io/pypi/v/six.svg:target: https://pypi.python.org/pypi/six.. image:: https://travis-ci.org/benjaminp/six.svg?branch=master:target: https://travis-ci.org/benjaminp/six.. image:: http://img.shields.io/badge/license-MIT-green.svg:target: https://github.com/benjaminp/six/blob/master/LICENSESix is a Python 2 and 3 compatibility library. It provides utility functionsfor smoothing over the differences between the Python versions with the goal ofwriting Python code that is compatible on both Python versions. See thedocumentation for more information on what is provided.Six supports every Python version since 2.6. It is contained in only one Pythonfile, so it can be easily copied into your project. (The copyright and licensenotice must be retained.)Online documentation is at http://six.rtfd.org.Bugs can be reported to https://github.com/benjaminp/six. The code can alsobe found there.For questions about six or porting in general, email the python-porting mailinglist: https://mail.python.org/mailman/listinfo/python-porting
pip
.. image:: http://img.shields.io/pypi/v/six.svg:target: https://pypi.python.org/pypi/six.. image:: https://travis-ci.org/benjaminp/six.svg?branch=master:target: https://travis-ci.org/benjaminp/six.. image:: http://img.shields.io/badge/license-MIT-green.svg:target: https://github.com/benjaminp/six/blob/master/LICENSESix is a Python 2 and 3 compatibility library. It provides utility functionsfor smoothing over the differences between the Python versions with the goal ofwriting Python code that is compatible on both Python versions. See thedocumentation for more information on what is provided.Six supports every Python version since 2.6. It is contained in only one Pythonfile, so it can be easily copied into your project. (The copyright and licensenotice must be retained.)Online documentation is at http://six.rtfd.org.Bugs can be reported to https://github.com/benjaminp/six. The code can alsobe found there.For questions about six or porting in general, email the python-porting mailinglist: https://mail.python.org/mailman/listinfo/python-porting
easy_installpkg_resourcessetuptools
{"classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Topic :: Software Development :: Libraries :: Python Modules", "Topic :: System :: Archiving :: Packaging", "Topic :: System :: Systems Administration", "Topic :: Utilities"], "description_content_type": "text/x-rst; charset=UTF-8", "extensions": {"python.commands": {"wrap_console": {"easy_install": "setuptools.command.easy_install:main", "easy_install-3.6": "setuptools.command.easy_install:main"}}, "python.details": {"contacts": [{"email": "distutils-sig@python.org", "name": "Python Packaging Authority", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst", "license": "LICENSE.txt"}, "project_urls": {"Home": "https://github.com/pypa/setuptools"}}, "python.exports": {"console_scripts": {"easy_install": "setuptools.command.easy_install:main", "easy_install-3.6": "setuptools.command.easy_install:main"}, "distutils.commands": {"alias": "setuptools.command.alias:alias", "bdist_egg": "setuptools.command.bdist_egg:bdist_egg", "bdist_rpm": "setuptools.command.bdist_rpm:bdist_rpm", "bdist_wininst": "setuptools.command.bdist_wininst:bdist_wininst", "build_clib": "setuptools.command.build_clib:build_clib", "build_ext": "setuptools.command.build_ext:build_ext", "build_py": "setuptools.command.build_py:build_py", "develop": "setuptools.command.develop:develop", "dist_info": "setuptools.command.dist_info:dist_info", "easy_install": "setuptools.command.easy_install:easy_install", "egg_info": "setuptools.command.egg_info:egg_info", "install": "setuptools.command.install:install", "install_egg_info": 
"setuptools.command.install_egg_info:install_egg_info", "install_lib": "setuptools.command.install_lib:install_lib", "install_scripts": "setuptools.command.install_scripts:install_scripts", "register": "setuptools.command.register:register", "rotate": "setuptools.command.rotate:rotate", "saveopts": "setuptools.command.saveopts:saveopts", "sdist": "setuptools.command.sdist:sdist", "setopt": "setuptools.command.setopt:setopt", "test": "setuptools.command.test:test", "upload": "setuptools.command.upload:upload", "upload_docs": "setuptools.command.upload_docs:upload_docs"}, "distutils.setup_keywords": {"convert_2to3_doctests": "setuptools.dist:assert_string_list", "dependency_links": "setuptools.dist:assert_string_list", "eager_resources": "setuptools.dist:assert_string_list", "entry_points": "setuptools.dist:check_entry_points", "exclude_package_data": "setuptools.dist:check_package_data", "extras_require": "setuptools.dist:check_extras", "include_package_data": "setuptools.dist:assert_bool", "install_requires": "setuptools.dist:check_requirements", "namespace_packages": "setuptools.dist:check_nsp", "package_data": "setuptools.dist:check_package_data", "packages": "setuptools.dist:check_packages", "python_requires": "setuptools.dist:check_specifier", "setup_requires": "setuptools.dist:check_requirements", "test_loader": "setuptools.dist:check_importable", "test_runner": "setuptools.dist:check_importable", "test_suite": "setuptools.dist:check_test_suite", "tests_require": "setuptools.dist:check_requirements", "use_2to3": "setuptools.dist:assert_bool", "use_2to3_exclude_fixers": "setuptools.dist:assert_string_list", "use_2to3_fixers": "setuptools.dist:assert_string_list", "zip_safe": "setuptools.dist:assert_bool"}, "egg_info.writers": {"PKG-INFO": "setuptools.command.egg_info:write_pkg_info", "dependency_links.txt": "setuptools.command.egg_info:overwrite_arg", "depends.txt": "setuptools.command.egg_info:warn_depends_obsolete", "eager_resources.txt": 
"setuptools.command.egg_info:overwrite_arg", "entry_points.txt": "setuptools.command.egg_info:write_entries", "namespace_packages.txt": "setuptools.command.egg_info:overwrite_arg", "requires.txt": "setuptools.command.egg_info:write_requirements", "top_level.txt": "setuptools.command.egg_info:write_toplevel_names"}, "setuptools.installation": {"eggsecutable": "setuptools.command.easy_install:bootstrap"}}}, "extras": ["certs", "ssl"], "generator": "bdist_wheel (0.30.0)", "keywords": ["CPAN", "PyPI", "distutils", "eggs", "package", "management"], "metadata_version": "2.0", "name": "setuptools", "requires_python": ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*", "run_requires": [{"extra": "certs", "requires": ["certifi (==2016.9.26)"]}, {"environment": "sys_platform=='win32'", "extra": "ssl", "requires": ["wincertstore (==0.2)"]}], "summary": "Easily download, build, install, upgrade, and uninstall Python packages", "version": "38.2.3"}
[console_scripts]easy_install = setuptools.command.easy_install:maineasy_install-3.6 = setuptools.command.easy_install:main[distutils.commands]alias = setuptools.command.alias:aliasbdist_egg = setuptools.command.bdist_egg:bdist_eggbdist_rpm = setuptools.command.bdist_rpm:bdist_rpmbdist_wininst = setuptools.command.bdist_wininst:bdist_wininstbuild_clib = setuptools.command.build_clib:build_clibbuild_ext = setuptools.command.build_ext:build_extbuild_py = setuptools.command.build_py:build_pydevelop = setuptools.command.develop:developdist_info = setuptools.command.dist_info:dist_infoeasy_install = setuptools.command.easy_install:easy_installegg_info = setuptools.command.egg_info:egg_infoinstall = setuptools.command.install:installinstall_egg_info = setuptools.command.install_egg_info:install_egg_infoinstall_lib = setuptools.command.install_lib:install_libinstall_scripts = setuptools.command.install_scripts:install_scriptsregister = setuptools.command.register:registerrotate = setuptools.command.rotate:rotatesaveopts = setuptools.command.saveopts:saveoptssdist = setuptools.command.sdist:sdistsetopt = setuptools.command.setopt:setopttest = setuptools.command.test:testupload = setuptools.command.upload:uploadupload_docs = setuptools.command.upload_docs:upload_docs[distutils.setup_keywords]convert_2to3_doctests = setuptools.dist:assert_string_listdependency_links = setuptools.dist:assert_string_listeager_resources = setuptools.dist:assert_string_listentry_points = setuptools.dist:check_entry_pointsexclude_package_data = setuptools.dist:check_package_dataextras_require = setuptools.dist:check_extrasinclude_package_data = setuptools.dist:assert_boolinstall_requires = setuptools.dist:check_requirementsnamespace_packages = setuptools.dist:check_nsppackage_data = setuptools.dist:check_package_datapackages = setuptools.dist:check_packagespython_requires = setuptools.dist:check_specifiersetup_requires = setuptools.dist:check_requirementstest_loader = 
setuptools.dist:check_importabletest_runner = setuptools.dist:check_importabletest_suite = setuptools.dist:check_test_suitetests_require = setuptools.dist:check_requirementsuse_2to3 = setuptools.dist:assert_booluse_2to3_exclude_fixers = setuptools.dist:assert_string_listuse_2to3_fixers = setuptools.dist:assert_string_listzip_safe = setuptools.dist:assert_bool[egg_info.writers]PKG-INFO = setuptools.command.egg_info:write_pkg_infodependency_links.txt = setuptools.command.egg_info:overwrite_argdepends.txt = setuptools.command.egg_info:warn_depends_obsoleteeager_resources.txt = setuptools.command.egg_info:overwrite_argentry_points.txt = setuptools.command.egg_info:write_entriesnamespace_packages.txt = setuptools.command.egg_info:overwrite_argrequires.txt = setuptools.command.egg_info:write_requirementstop_level.txt = setuptools.command.egg_info:write_toplevel_names[setuptools.installation]eggsecutable = setuptools.command.easy_install:bootstrap
https://files.pythonhosted.org/packages/source/c/certifi/certifi-2016.9.26.tar.gz#md5=baa81e951a29958563689d868ef1064dhttps://files.pythonhosted.org/packages/source/w/wincertstore/wincertstore-0.2.zip#md5=ae728f2f007185648d0c7a8679b361e2
Wheel-Version: 1.0Generator: bdist_wheel (0.30.0)Root-Is-Purelib: trueTag: py2-none-anyTag: py3-none-any
easy_install.py,sha256=MDC9vt5AxDsXX5qcKlBz2TnW6Tpuv_AobnfhCJ9X3PM,126pkg_resources/__init__.py,sha256=4QY15P8cS_OAzbXIytmbvGknvX7vdiiomG4YhbQRZCI,105991pkg_resources/py31compat.py,sha256=-ysVqoxLetAnL94uM0kHkomKQTC1JZLN2ZUjqUhMeKE,600pkg_resources/_vendor/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0pkg_resources/_vendor/appdirs.py,sha256=tgGaL0m4Jo2VeuGfoOOifLv7a7oUEJu2n1vRkqoPw-0,22374pkg_resources/_vendor/pyparsing.py,sha256=PifeLY3-WhIcBVzLtv0U4T_pwDtPruBhBCkg5vLqa28,229867pkg_resources/_vendor/six.py,sha256=A6hdJZVjI3t_geebZ9BzUvwRrIXo0lfwzQlM2LcKyas,30098pkg_resources/_vendor/packaging/__about__.py,sha256=zkcCPTN_6TcLW0Nrlg0176-R1QQ_WVPTm8sz1R4-HjM,720pkg_resources/_vendor/packaging/__init__.py,sha256=_vNac5TrzwsrzbOFIbF-5cHqc_Y2aPT2D7zrIR06BOo,513pkg_resources/_vendor/packaging/_compat.py,sha256=Vi_A0rAQeHbU-a9X0tt1yQm9RqkgQbDSxzRw8WlU9kA,860pkg_resources/_vendor/packaging/_structures.py,sha256=RImECJ4c_wTlaTYYwZYLHEiebDMaAJmK1oPARhw1T5o,1416pkg_resources/_vendor/packaging/markers.py,sha256=uEcBBtGvzqltgnArqb9c4RrcInXezDLos14zbBHhWJo,8248pkg_resources/_vendor/packaging/requirements.py,sha256=SikL2UynbsT0qtY9ltqngndha_sfo0w6XGFhAhoSoaQ,4355pkg_resources/_vendor/packaging/specifiers.py,sha256=SAMRerzO3fK2IkFZCaZkuwZaL_EGqHNOz4pni4vhnN0,28025pkg_resources/_vendor/packaging/utils.py,sha256=3m6WvPm6NNxE8rkTGmn0r75B_GZSGg7ikafxHsBN1WA,421pkg_resources/_vendor/packaging/version.py,sha256=OwGnxYfr2ghNzYx59qWIBkrK3SnB6n-Zfd1XaLpnnM0,11556pkg_resources/extern/__init__.py,sha256=JUtlHHvlxHSNuB4pWqNjcx7n6kG-fwXg7qmJ2zNJlIY,2487setuptools/__init__.py,sha256=WWIdCbFJnZ9fZoaWDN_x1vDA_Rkm-Sc15iKvPtIYKFs,5700setuptools/archive_util.py,sha256=kw8Ib_lKjCcnPKNbS7h8HztRVK0d5RacU3r_KRdVnmM,6592setuptools/build_meta.py,sha256=FllaKTr1vSJyiUeRjVJEZmeEaRzhYueNlimtcwaJba8,5671setuptools/cli-32.exe,sha256=dfEuovMNnA2HLa3jRfMPVi5tk4R7alCbpTvuxtCyw0Y,65536setuptools/cli-64.exe,sha256=KLABu5pyrnokJCv6skjXZ6GsXeyYHGcqOUT3oHI3Xpo,74752setuptools/cli.exe,sha256=dfEuovMNnA
2HLa3jRfMPVi5tk4R7alCbpTvuxtCyw0Y,65536setuptools/config.py,sha256=ZTCgAC18m4xXxufllfGxi-XHvFg6gj_jGpisRjp0KCw,16305setuptools/dep_util.py,sha256=fgixvC1R7sH3r13ktyf7N0FALoqEXL1cBarmNpSEoWg,935setuptools/depends.py,sha256=hC8QIDcM3VDpRXvRVA6OfL9AaQfxvhxHcN_w6sAyNq8,5837setuptools/dist.py,sha256=lx5Errn7nal__wuH-9YIpGFFZGNYnshcEMDZtHBPprk,40336setuptools/extension.py,sha256=uc6nHI-MxwmNCNPbUiBnybSyqhpJqjbhvOQ-emdvt_E,1729setuptools/glibc.py,sha256=X64VvGPL2AbURKwYRsWJOXXGAYOiF_v2qixeTkAULuU,3146setuptools/glob.py,sha256=Y-fpv8wdHZzv9DPCaGACpMSBWJ6amq_1e0R_i8_el4w,5207setuptools/gui-32.exe,sha256=XBr0bHMA6Hpz2s9s9Bzjl-PwXfa9nH4ie0rFn4V2kWA,65536setuptools/gui-64.exe,sha256=aYKMhX1IJLn4ULHgWX0sE0yREUt6B3TEHf_jOw6yNyE,75264setuptools/gui.exe,sha256=XBr0bHMA6Hpz2s9s9Bzjl-PwXfa9nH4ie0rFn4V2kWA,65536setuptools/launch.py,sha256=sd7ejwhBocCDx_wG9rIs0OaZ8HtmmFU8ZC6IR_S0Lvg,787setuptools/lib2to3_ex.py,sha256=t5e12hbR2pi9V4ezWDTB4JM-AISUnGOkmcnYHek3xjg,2013setuptools/monkey.py,sha256=zZGTH7p0xeXQKLmEwJTPIE4m5m7fJeHoAsxyv5M8e_E,5789setuptools/msvc.py,sha256=AEbWNLJ0pTuHJSkQuBZET6wr_d2-yGGPkdHCMdIKWB4,40884setuptools/namespaces.py,sha256=F0Nrbv8KCT2OrO7rwa03om4N4GZKAlnce-rr-cgDQa8,3199setuptools/package_index.py,sha256=B7g7NpCYk_bEjjeqBm3NbqBkbJhh-mnres3l1V2hdlw,40507setuptools/pep425tags.py,sha256=NuGMx1gGif7x6iYemh0LfgBr_FZF5GFORIbgmMdU8J4,10882setuptools/py27compat.py,sha256=3mwxRMDk5Q5O1rSXOERbQDXhFqwDJhhUitfMW_qpUCo,536setuptools/py31compat.py,sha256=XuU1HCsGE_3zGvBRIhYw2iB-IhCFK4-Pxw_jMiqdNVk,1192setuptools/py33compat.py,sha256=W8_JFZr8WQbJT_7-JFWjc_6lHGtoMK-4pCrHIwk5JN0,998setuptools/py36compat.py,sha256=VUDWxmu5rt4QHlGTRtAFu6W5jvfL6WBjeDAzeoBy0OM,2891setuptools/sandbox.py,sha256=9UbwfEL5QY436oMI1LtFWohhoZ-UzwHvGyZjUH_qhkw,14276setuptools/script 
(dev).tmpl,sha256=f7MR17dTkzaqkCMSVseyOCMVrPVSMdmTQsaB8cZzfuI,201setuptools/script.tmpl,sha256=WGTt5piezO27c-Dbx6l5Q4T3Ff20A5z7872hv3aAhYY,138setuptools/site-patch.py,sha256=BVt6yIrDMXJoflA5J6DJIcsJUfW_XEeVhOzelTTFDP4,2307setuptools/ssl_support.py,sha256=YBDJsCZjSp62CWjxmSkke9kn9rhHHj25Cus6zhJRW3c,8492setuptools/unicode_utils.py,sha256=NOiZ_5hD72A6w-4wVj8awHFM3n51Kmw1Ic_vx15XFqw,996setuptools/version.py,sha256=og_cuZQb0QI6ukKZFfZWPlr1HgJBPPn2vO2m_bI9ZTE,144setuptools/wheel.py,sha256=5JtH1AEmxOHRxCGYbuacsciTZdQGZwlR-SukKzMy8kQ,6498setuptools/windows_support.py,sha256=5GrfqSP2-dLGJoZTq2g6dCKkyQxxa2n5IQiXlJCoYEE,714setuptools/command/__init__.py,sha256=NWzJ0A1BEengZpVeqUyWLNm2bk4P3F4iL5QUErHy7kA,594setuptools/command/alias.py,sha256=KjpE0sz_SDIHv3fpZcIQK-sCkJz-SrC6Gmug6b9Nkc8,2426setuptools/command/bdist_egg.py,sha256=dh88W8pMHUJPHlLT-XnPvyyDcmMfEbI2jK1E3w1EHKc,18011setuptools/command/bdist_rpm.py,sha256=B7l0TnzCGb-0nLlm6rS00jWLkojASwVmdhW2w5Qz_Ak,1508setuptools/command/bdist_wininst.py,sha256=_6dz3lpB1tY200LxKPLM7qgwTCceOMgaWFF-jW2-pm0,637setuptools/command/build_clib.py,sha256=bQ9aBr-5ZSO-9fGsGsDLz0mnnFteHUZnftVLkhvHDq0,4484setuptools/command/build_ext.py,sha256=dO89j-IC0dAjSty1sSZxvi0LSdkPGR_ZPXFuAAFDZj4,13049setuptools/command/build_py.py,sha256=yWyYaaS9F3o9JbIczn064A5g1C5_UiKRDxGaTqYbtLE,9596setuptools/command/develop.py,sha256=wKbOw2_qUvcDti2lZmtxbDmYb54yAAibExzXIvToz-A,8046setuptools/command/dist_info.py,sha256=5t6kOfrdgALT-P3ogss6PF9k-Leyesueycuk3dUyZnI,960setuptools/command/easy_install.py,sha256=YAIsUhzmw_shvvoDP8EesDS2xbFH7tD4MuYE8X3x6Yc,87125setuptools/command/egg_info.py,sha256=DLR5_Xr-gRqxQwJcyVCkUrfVZZriMuQSWoQGJ3XL7T8,24944setuptools/command/install.py,sha256=a0EZpL_A866KEdhicTGbuyD_TYl1sykfzdrri-zazT4,4683setuptools/command/install_egg_info.py,sha256=bMgeIeRiXzQ4DAGPV1328kcjwQjHjOWU4FngAWLV78Q,2203setuptools/command/install_lib.py,sha256=11mxf0Ch12NsuYwS8PHwXBRvyh671QAM4cTRh7epzG0,3840setuptools/command/install_scripts.py,sha256=UD0rEZ6861mTYhIdzcsqKnU
l8PozocXWl9VBQ1VTWnc,2439setuptools/command/launcher manifest.xml,sha256=xlLbjWrB01tKC0-hlVkOKkiSPbzMml2eOPtJ_ucCnbE,628setuptools/command/py36compat.py,sha256=SzjZcOxF7zdFUT47Zv2n7AM3H8koDys_0OpS-n9gIfc,4986setuptools/command/register.py,sha256=bHlMm1qmBbSdahTOT8w6UhA-EgeQIz7p6cD-qOauaiI,270setuptools/command/rotate.py,sha256=co5C1EkI7P0GGT6Tqz-T2SIj2LBJTZXYELpmao6d4KQ,2164setuptools/command/saveopts.py,sha256=za7QCBcQimKKriWcoCcbhxPjUz30gSB74zuTL47xpP4,658setuptools/command/sdist.py,sha256=obDTe2BmWt2PlnFPZZh7e0LWvemEsbCCO9MzhrTZjm8,6711setuptools/command/setopt.py,sha256=NTWDyx-gjDF-txf4dO577s7LOzHVoKR0Mq33rFxaRr8,5085setuptools/command/test.py,sha256=MeBAcXUePGjPKqjz4zvTrHatLvNsjlPFcagt3XnFYdk,9214setuptools/command/upload.py,sha256=i1gfItZ3nQOn5FKXb8tLC2Kd7eKC8lWO4bdE6NqGpE4,1172setuptools/command/upload_docs.py,sha256=oXiGplM_cUKLwE4CWWw98RzCufAu8tBhMC97GegFcms,7311setuptools/extern/__init__.py,sha256=ZtCLYQ8JTtOtm7SYoxekZw-UzY3TR50SRIUaeqr2ROk,131setuptools-38.2.3.dist-info/DESCRIPTION.rst,sha256=It3a3GRjT5701mqhrpMcLyW_YS2Dokv-X8zWoTaMRe0,1422setuptools-38.2.3.dist-info/LICENSE.txt,sha256=wyo6w5WvYyHv0ovnPQagDw22q4h9HCHU_sRhKNIFbVo,1078setuptools-38.2.3.dist-info/METADATA,sha256=qXybnHfSR9Odp8qE472M7yQLnfdYYO0Ojw2lv7LXM2w,2798setuptools-38.2.3.dist-info/RECORD,,setuptools-38.2.3.dist-info/WHEEL,sha256=kdsN-5OJAZIiHN-iO4Rhl82KyS0bDWf4uBwMbkNafr8,110setuptools-38.2.3.dist-info/dependency_links.txt,sha256=HlkCFkoK5TbZ5EMLbLKYhLcY_E31kBWD8TqW2EgmatQ,239setuptools-38.2.3.dist-info/entry_points.txt,sha256=jBqCYDlVjl__sjYFGXo1JQGIMAYFJE-prYWUtnMZEew,2990setuptools-38.2.3.dist-info/metadata.json,sha256=AUsqayPgFKG0QIiwMMAYXfpXnSsbnh2OKzGq_KCh9oU,4901setuptools-38.2.3.dist-info/top_level.txt,sha256=2HUXVVwA4Pff1xgTFr3GsTXXKaPaO6vlG6oNJ_4u4Tg,38setuptools-38.2.3.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1../../../bin/easy_install,sha256=5J3C-ybFYhOj3Zp5ZtmNy-6jtR9DjqDlWc1v6dF70zc,307../../../bin/easy_install-2.7,sha256=5J3C-ybFYhOj3Zp5ZtmNy
-6jtR9DjqDlWc1v6dF70zc,307setuptools-38.2.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4setuptools/ssl_support.pyc,,setuptools/extern/__init__.pyc,,setuptools/command/install_scripts.pyc,,setuptools/extension.pyc,,pkg_resources/_vendor/packaging/_compat.pyc,,setuptools/msvc.pyc,,setuptools/command/dist_info.pyc,,setuptools/command/bdist_wininst.pyc,,setuptools/command/upload.pyc,,pkg_resources/_vendor/appdirs.pyc,,pkg_resources/py31compat.pyc,,pkg_resources/_vendor/packaging/_structures.pyc,,setuptools/wheel.pyc,,setuptools/namespaces.pyc,,pkg_resources/_vendor/six.pyc,,setuptools/depends.pyc,,setuptools/command/install_egg_info.pyc,,setuptools/site-patch.pyc,,setuptools/build_meta.pyc,,setuptools/windows_support.pyc,,setuptools/command/setopt.pyc,,setuptools/unicode_utils.pyc,,setuptools/command/bdist_egg.pyc,,setuptools/py31compat.pyc,,setuptools/dep_util.pyc,,setuptools/command/sdist.pyc,,setuptools/command/saveopts.pyc,,pkg_resources/_vendor/packaging/version.pyc,,setuptools/command/egg_info.pyc,,pkg_resources/_vendor/packaging/__init__.pyc,,setuptools/pep425tags.pyc,,setuptools/command/install.pyc,,setuptools/command/alias.pyc,,setuptools/__init__.pyc,,setuptools/command/easy_install.pyc,,setuptools/py27compat.pyc,,pkg_resources/extern/__init__.pyc,,setuptools/command/build_py.pyc,,setuptools/command/test.pyc,,setuptools/command/build_ext.pyc,,setuptools/version.pyc,,setuptools/command/py36compat.pyc,,setuptools/glibc.pyc,,setuptools/dist.pyc,,setuptools/command/bdist_rpm.pyc,,pkg_resources/__init__.pyc,,setuptools/command/__init__.pyc,,setuptools/py33compat.pyc,,setuptools/archive_util.pyc,,pkg_resources/_vendor/packaging/__about__.pyc,,setuptools/command/upload_docs.pyc,,setuptools/py36compat.pyc,,pkg_resources/_vendor/packaging/markers.pyc,,setuptools/command/install_lib.pyc,,pkg_resources/_vendor/packaging/specifiers.pyc,,setuptools/lib2to3_ex.pyc,,setuptools/sandbox.pyc,,setuptools/command/develop.pyc,,pkg_resources/_vendor/pack
aging/requirements.pyc,,pkg_resources/_vendor/pyparsing.pyc,,setuptools/glob.pyc,,setuptools/command/rotate.pyc,,setuptools/config.pyc,,setuptools/command/build_clib.pyc,,easy_install.pyc,,setuptools/package_index.pyc,,pkg_resources/_vendor/packaging/utils.pyc,,pkg_resources/_vendor/__init__.pyc,,setuptools/monkey.pyc,,setuptools/launch.pyc,,setuptools/command/register.pyc,,
Metadata-Version: 2.0Name: setuptoolsVersion: 38.2.3Summary: Easily download, build, install, upgrade, and uninstall Python packagesHome-page: https://github.com/pypa/setuptoolsAuthor: Python Packaging AuthorityAuthor-email: distutils-sig@python.orgLicense: UNKNOWNDescription-Content-Type: text/x-rst; charset=UTF-8Keywords: CPAN PyPI distutils eggs package managementPlatform: UNKNOWNClassifier: Development Status :: 5 - Production/StableClassifier: Intended Audience :: DevelopersClassifier: License :: OSI Approved :: MIT LicenseClassifier: Operating System :: OS IndependentClassifier: Programming Language :: Python :: 2Classifier: Programming Language :: Python :: 2.7Classifier: Programming Language :: Python :: 3Classifier: Programming Language :: Python :: 3.3Classifier: Programming Language :: Python :: 3.4Classifier: Programming Language :: Python :: 3.5Classifier: Programming Language :: Python :: 3.6Classifier: Topic :: Software Development :: Libraries :: Python ModulesClassifier: Topic :: System :: Archiving :: PackagingClassifier: Topic :: System :: Systems AdministrationClassifier: Topic :: UtilitiesRequires-Python: >=2.7,!=3.0.*,!=3.1.*,!=3.2.*Provides-Extra: certsRequires-Dist: certifi (==2016.9.26); extra == 'certs'Provides-Extra: sslRequires-Dist: wincertstore (==0.2); sys_platform=='win32' and extra == 'ssl'.. image:: https://img.shields.io/pypi/v/setuptools.svg:target: https://pypi.org/project/setuptools.. image:: https://readthedocs.org/projects/setuptools/badge/?version=latest:target: https://setuptools.readthedocs.io.. image:: https://img.shields.io/travis/pypa/setuptools/master.svg?label=Linux%20build%20%40%20Travis%20CI:target: https://travis-ci.org/pypa/setuptools.. image:: https://img.shields.io/appveyor/ci/jaraco/setuptools/master.svg?label=Windows%20build%20%40%20Appveyor:target: https://ci.appveyor.com/project/jaraco/setuptools/branch/master.. 
image:: https://img.shields.io/pypi/pyversions/setuptools.svgSee the `Installation Instructions<https://packaging.python.org/installing/>`_ in the Python PackagingUser's Guide for instructions on installing, upgrading, and uninstallingSetuptools.The project is `maintained at GitHub <https://github.com/pypa/setuptools>`_.Questions and comments should be directed to the `distutils-sigmailing list <http://mail.python.org/pipermail/distutils-sig/>`_.Bug reports and especially tested patches may besubmitted directly to the `bug tracker<https://github.com/pypa/setuptools/issues>`_.Code of Conduct---------------Everyone interacting in the setuptools project's codebases, issue trackers,chat rooms, and mailing lists is expected to follow the`PyPA Code of Conduct <https://www.pypa.io/en/latest/code-of-conduct/>`_.
Copyright (C) 2016 Jason R Coombs <jaraco@jaraco.com>Permission is hereby granted, free of charge, to any person obtaining a copy ofthis software and associated documentation files (the "Software"), to deal inthe Software without restriction, including without limitation the rights touse, copy, modify, merge, publish, distribute, sublicense, and/or sell copiesof the Software, and to permit persons to whom the Software is furnished to doso, subject to the following conditions:The above copyright notice and this permission notice shall be included in allcopies or substantial portions of the Software.THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS ORIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THEAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHERLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THESOFTWARE.
pip
.. image:: https://img.shields.io/pypi/v/setuptools.svg:target: https://pypi.org/project/setuptools.. image:: https://readthedocs.org/projects/setuptools/badge/?version=latest:target: https://setuptools.readthedocs.io.. image:: https://img.shields.io/travis/pypa/setuptools/master.svg?label=Linux%20build%20%40%20Travis%20CI:target: https://travis-ci.org/pypa/setuptools.. image:: https://img.shields.io/appveyor/ci/jaraco/setuptools/master.svg?label=Windows%20build%20%40%20Appveyor:target: https://ci.appveyor.com/project/jaraco/setuptools/branch/master.. image:: https://img.shields.io/pypi/pyversions/setuptools.svgSee the `Installation Instructions<https://packaging.python.org/installing/>`_ in the Python PackagingUser's Guide for instructions on installing, upgrading, and uninstallingSetuptools.The project is `maintained at GitHub <https://github.com/pypa/setuptools>`_.Questions and comments should be directed to the `distutils-sigmailing list <http://mail.python.org/pipermail/distutils-sig/>`_.Bug reports and especially tested patches may besubmitted directly to the `bug tracker<https://github.com/pypa/setuptools/issues>`_.Code of Conduct---------------Everyone interacting in the setuptools project's codebases, issue trackers,chat rooms, and mailing lists is expected to follow the`PyPA Code of Conduct <https://www.pypa.io/en/latest/code-of-conduct/>`_.
import platform
import ctypes


def windows_only(func):
    """Return *func* unchanged on Windows; elsewhere, a do-nothing stand-in."""
    if platform.system() == 'Windows':
        return func

    # On any other OS the operation is meaningless: calls succeed
    # silently and yield None.
    return lambda *args, **kwargs: None


@windows_only
def hide_file(path):
    """
    Set the hidden attribute on a file or directory.

    From http://stackoverflow.com/questions/19622133/

    `path` must be text.
    """
    # Importing the submodule populates ctypes.wintypes for the
    # prototype declarations below.
    __import__('ctypes.wintypes')
    set_attrs = ctypes.windll.kernel32.SetFileAttributesW
    set_attrs.argtypes = ctypes.wintypes.LPWSTR, ctypes.wintypes.DWORD
    set_attrs.restype = ctypes.wintypes.BOOL
    FILE_ATTRIBUTE_HIDDEN = 0x02
    if not set_attrs(path, FILE_ATTRIBUTE_HIDDEN):
        raise ctypes.WinError()
'''Wheels support.'''

from distutils.util import get_platform
import email
import itertools
import os
import re
import zipfile

from pkg_resources import Distribution, PathMetadata, parse_version
from pkg_resources.extern.six import PY3
from setuptools import Distribution as SetuptoolsDistribution
from setuptools import pep425tags
from setuptools.command.egg_info import write_requirements

# Parses wheel filenames per PEP 427:
# {project}-{version}(-{build})?-{py}-{abi}-{platform}.whl
WHEEL_NAME = re.compile(
    r"""^(?P<project_name>.+?)-(?P<version>\d.*?)((-(?P<build>\d.*?))?-(?P<py_version>.+?)-(?P<abi>.+?)-(?P<platform>.+?))\.whl$""",
    re.VERBOSE).match

# Contents written to a namespace package's __init__.py when the wheel
# declares namespace packages but ships no __init__ for them.
NAMESPACE_PACKAGE_INIT = '''\
try:
    __import__('pkg_resources').declare_namespace(__name__)
except ImportError:
    __path__ = __import__('pkgutil').extend_path(__path__, __name__)
'''


class Wheel(object):
    """A .whl file on disk, identified and unpacked via its filename tags."""

    def __init__(self, filename):
        """Parse *filename* (a path to a .whl); raise ValueError if it
        does not match the wheel naming convention."""
        match = WHEEL_NAME(os.path.basename(filename))
        if match is None:
            raise ValueError('invalid wheel name: %r' % filename)
        self.filename = filename
        # Exposes project_name, version, build, py_version, abi, platform
        # as instance attributes taken from the filename.
        for k, v in match.groupdict().items():
            setattr(self, k, v)

    def tags(self):
        '''List tags (py_version, abi, platform) supported by this wheel.'''
        # Each tag field may be a '.'-separated compound (e.g. "py2.py3");
        # the cartesian product enumerates every concrete combination.
        return itertools.product(
            self.py_version.split('.'),
            self.abi.split('.'),
            self.platform.split('.'),
        )

    def is_compatible(self):
        '''Is the wheel compatible with the current platform?'''
        supported_tags = pep425tags.get_supported()
        # True as soon as any wheel tag is in the interpreter's supported
        # set; False when none match.
        return next((True for t in self.tags() if t in supported_tags), False)

    def egg_name(self):
        """Return the egg file name this wheel maps to (ends in '.egg')."""
        return Distribution(
            project_name=self.project_name, version=self.version,
            # 'any' wheels are platform-independent eggs; otherwise tag
            # with the build platform.
            platform=(None if self.platform == 'any' else get_platform()),
        ).egg_name() + '.egg'

    def install_as_egg(self, destination_eggdir):
        '''Install wheel as an egg directory.

        *destination_eggdir* must not yet exist; it is created here.
        The wheel's dist-info metadata is converted in place to
        EGG-INFO layout, and any .data entries are relocated.
        '''
        with zipfile.ZipFile(self.filename) as zf:
            dist_basename = '%s-%s' % (self.project_name, self.version)
            dist_info = '%s.dist-info' % dist_basename
            dist_data = '%s.data' % dist_basename

            def get_metadata(name):
                # Read a metadata file out of the zip and parse it as an
                # RFC 822 message (the wheel metadata format).
                with zf.open('%s/%s' % (dist_info, name)) as fp:
                    value = fp.read().decode('utf-8') if PY3 else fp.read()
                    return email.parser.Parser().parsestr(value)

            wheel_metadata = get_metadata('WHEEL')
            # NOTE(review): dist_metadata is read but never used below.
            dist_metadata = get_metadata('METADATA')
            # Check wheel format version is supported.
            wheel_version = parse_version(wheel_metadata.get('Wheel-Version'))
            if not parse_version('1.0') <= wheel_version < parse_version('2.0dev0'):
                raise ValueError('unsupported wheel format version: %s' % wheel_version)
            # Extract to target directory.
            os.mkdir(destination_eggdir)
            zf.extractall(destination_eggdir)
            # Convert metadata.
            dist_info = os.path.join(destination_eggdir, dist_info)
            dist = Distribution.from_location(
                destination_eggdir, dist_info,
                metadata=PathMetadata(destination_eggdir, dist_info))
            # Note: we need to evaluate and strip markers now,
            # as we can't easily convert back from the syntax:
            # foobar; "linux" in sys_platform and extra == 'test'

            def raw_req(req):
                req.marker = None
                return str(req)
            install_requires = list(sorted(map(raw_req, dist.requires())))
            # Per-extra requirements, minus anything already covered by the
            # unconditional install_requires list.
            extras_require = {
                extra: list(sorted(
                    req
                    for req in map(raw_req, dist.requires((extra,)))
                    if req not in install_requires
                ))
                for extra in dist.extras
            }
            egg_info = os.path.join(destination_eggdir, 'EGG-INFO')
            os.rename(dist_info, egg_info)
            os.rename(os.path.join(egg_info, 'METADATA'),
                      os.path.join(egg_info, 'PKG-INFO'))
            setup_dist = SetuptoolsDistribution(attrs=dict(
                install_requires=install_requires,
                extras_require=extras_require,
            ))
            write_requirements(
                setup_dist.get_command_obj('egg_info'),
                None, os.path.join(egg_info, 'requires.txt'))
            # Move data entries to their correct location.
            dist_data = os.path.join(destination_eggdir, dist_data)
            dist_data_scripts = os.path.join(dist_data, 'scripts')
            if os.path.exists(dist_data_scripts):
                egg_info_scripts = os.path.join(destination_eggdir,
                                                'EGG-INFO', 'scripts')
                os.mkdir(egg_info_scripts)
                for entry in os.listdir(dist_data_scripts):
                    # Remove bytecode, as it's not properly handled
                    # during easy_install scripts install phase.
                    if entry.endswith('.pyc'):
                        os.unlink(os.path.join(dist_data_scripts, entry))
                    else:
                        os.rename(os.path.join(dist_data_scripts, entry),
                                  os.path.join(egg_info_scripts, entry))
                os.rmdir(dist_data_scripts)
            # Flatten the remaining .data subdirectories into the egg root.
            for subdir in filter(os.path.exists, (
                    os.path.join(dist_data, d)
                    for d in ('data', 'headers', 'purelib', 'platlib'))):
                for entry in os.listdir(subdir):
                    os.rename(os.path.join(subdir, entry),
                              os.path.join(destination_eggdir, entry))
                os.rmdir(subdir)
            if os.path.exists(dist_data):
                os.rmdir(dist_data)
            # Fix namespace packages.
            namespace_packages = os.path.join(egg_info, 'namespace_packages.txt')
            if os.path.exists(namespace_packages):
                with open(namespace_packages) as fp:
                    namespace_packages = fp.read().split()
                for mod in namespace_packages:
                    mod_dir = os.path.join(destination_eggdir, *mod.split('.'))
                    mod_init = os.path.join(mod_dir, '__init__.py')
                    # Only synthesize __init__.py when the package directory
                    # exists but ships without one.
                    if os.path.exists(mod_dir) and not os.path.exists(mod_init):
                        with open(mod_init, 'w') as fp:
                            fp.write(NAMESPACE_PACKAGE_INIT)
import pkg_resources

# The installed distribution metadata is the single source of truth for
# the version; fall back to a placeholder when it cannot be queried
# (e.g. running from a raw source checkout without metadata).


def _read_version():
    """Return the installed setuptools version string, or 'unknown'."""
    try:
        return pkg_resources.get_distribution('setuptools').version
    except Exception:
        return 'unknown'


__version__ = _read_version()
import unicodedata
import sys

from setuptools.extern import six


# HFS Plus uses decomposed UTF-8
def decompose(path):
    """Return *path* normalized to NFD; non-UTF-8 byte paths pass through."""
    if isinstance(path, six.text_type):
        return unicodedata.normalize('NFD', path)
    try:
        path = path.decode('utf-8')
        path = unicodedata.normalize('NFD', path)
        path = path.encode('utf-8')
    except UnicodeError:
        pass  # Not UTF-8
    return path


def filesys_decode(path):
    """
    Ensure that the given path is decoded; implicitly returns None
    when no expected encoding works.
    """
    if isinstance(path, six.text_type):
        return path

    # Try the filesystem encoding first, then UTF-8 as a fallback.
    fs_enc = sys.getfilesystemencoding() or 'utf-8'
    for enc in (fs_enc, 'utf-8'):
        try:
            return path.decode(enc)
        except UnicodeDecodeError:
            pass


def try_encode(string, enc):
    """Encode *string* with *enc*, returning None when it cannot encode."""
    try:
        return string.encode(enc)
    except UnicodeEncodeError:
        return None
"""HTTPS support with certificate verification for setuptools downloads.

Provides a verifying HTTPS handler/connection pair for urllib, a
backported ``match_hostname`` (RFC 6125) for old Pythons, and helpers to
locate a CA bundle on the local system.
"""
import os
import socket
import atexit
import re
import functools

from setuptools.extern.six.moves import urllib, http_client, map, filter

from pkg_resources import ResolutionError, ExtractionError

try:
    import ssl
except ImportError:
    # ssl may be missing from minimal builds; is_available reflects this.
    ssl = None

__all__ = [
    'VerifyingHTTPSHandler', 'find_ca_bundle', 'is_available', 'cert_paths',
    'opener_for'
]

# Well-known CA bundle locations across Linux/BSD/macOS distributions.
cert_paths = """
/etc/pki/tls/certs/ca-bundle.crt
/etc/ssl/certs/ca-certificates.crt
/usr/share/ssl/certs/ca-bundle.crt
/usr/local/share/certs/ca-root.crt
/etc/ssl/cert.pem
/System/Library/OpenSSL/certs/cert.pem
/usr/local/share/certs/ca-root-nss.crt
/etc/ssl/ca-bundle.pem
""".strip().split()

try:
    HTTPSHandler = urllib.request.HTTPSHandler
    HTTPSConnection = http_client.HTTPSConnection
except AttributeError:
    # Placeholder bases when HTTPS support is absent from the stdlib.
    HTTPSHandler = HTTPSConnection = object

is_available = ssl is not None and object not in (
    HTTPSHandler, HTTPSConnection)

# Prefer the stdlib implementation, then the backports package, then the
# local fallback defined below.
try:
    from ssl import CertificateError, match_hostname
except ImportError:
    try:
        from backports.ssl_match_hostname import CertificateError
        from backports.ssl_match_hostname import match_hostname
    except ImportError:
        CertificateError = None
        match_hostname = None

if not CertificateError:
    class CertificateError(ValueError):
        pass


if not match_hostname:
    def _dnsname_match(dn, hostname, max_wildcards=1):
        """Matching according to RFC 6125, section 6.4.3

        http://tools.ietf.org/html/rfc6125#section-6.4.3
        """
        pats = []
        if not dn:
            return False

        # Ported from python3-syntax:
        # leftmost, *remainder = dn.split(r'.')
        parts = dn.split(r'.')
        leftmost = parts[0]
        remainder = parts[1:]

        wildcards = leftmost.count('*')
        if wildcards > max_wildcards:
            # Issue #17980: avoid denials of service by refusing more
            # than one wildcard per fragment.  A survey of established
            # policy among SSL implementations showed it to be a
            # reasonable choice.
            raise CertificateError(
                "too many wildcards in certificate DNS name: " + repr(dn))

        # speed up common case w/o wildcards
        if not wildcards:
            return dn.lower() == hostname.lower()

        # RFC 6125, section 6.4.3, subitem 1.
        # The client SHOULD NOT attempt to match a presented identifier in
        # which the wildcard character comprises a label other than the
        # left-most label.
        if leftmost == '*':
            # When '*' is a fragment by itself, it matches a non-empty
            # dotless fragment.
            pats.append('[^.]+')
        elif leftmost.startswith('xn--') or hostname.startswith('xn--'):
            # RFC 6125, section 6.4.3, subitem 3.
            # The client SHOULD NOT attempt to match a presented identifier
            # where the wildcard character is embedded within an A-label or
            # U-label of an internationalized domain name.
            pats.append(re.escape(leftmost))
        else:
            # Otherwise, '*' matches any dotless string, e.g. www*
            pats.append(re.escape(leftmost).replace(r'\*', '[^.]*'))

        # add the remaining fragments, ignore any wildcards
        for frag in remainder:
            pats.append(re.escape(frag))

        pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
        return pat.match(hostname)

    def match_hostname(cert, hostname):
        """Verify that *cert* (in decoded format as returned by
        SSLSocket.getpeercert()) matches the *hostname*.  RFC 2818 and
        RFC 6125 rules are followed, but IP addresses are not accepted
        for *hostname*.

        CertificateError is raised on failure.  On success, the function
        returns nothing.
        """
        if not cert:
            raise ValueError("empty or no certificate")
        dnsnames = []
        san = cert.get('subjectAltName', ())
        for key, value in san:
            if key == 'DNS':
                if _dnsname_match(value, hostname):
                    return
                dnsnames.append(value)
        if not dnsnames:
            # The subject is only checked when there is no dNSName entry
            # in subjectAltName
            for sub in cert.get('subject', ()):
                for key, value in sub:
                    # XXX according to RFC 2818, the most specific
                    # Common Name must be used.
                    if key == 'commonName':
                        if _dnsname_match(value, hostname):
                            return
                        dnsnames.append(value)
        if len(dnsnames) > 1:
            raise CertificateError(
                "hostname %r "
                "doesn't match either of %s"
                % (hostname, ', '.join(map(repr, dnsnames))))
        elif len(dnsnames) == 1:
            raise CertificateError(
                "hostname %r "
                "doesn't match %r"
                % (hostname, dnsnames[0]))
        else:
            raise CertificateError(
                "no appropriate commonName or "
                "subjectAltName fields were found")


class VerifyingHTTPSHandler(HTTPSHandler):
    """Simple verifying handler: no auth, subclasses, timeouts, etc."""

    def __init__(self, ca_bundle):
        self.ca_bundle = ca_bundle
        HTTPSHandler.__init__(self)

    def https_open(self, req):
        # Every connection is created with this handler's CA bundle.
        return self.do_open(
            lambda host, **kw: VerifyingHTTPSConn(
                host, self.ca_bundle, **kw), req
        )


class VerifyingHTTPSConn(HTTPSConnection):
    """Simple verifying connection: no auth, subclasses, timeouts, etc."""

    def __init__(self, host, ca_bundle, **kw):
        HTTPSConnection.__init__(self, host, **kw)
        self.ca_bundle = ca_bundle

    def connect(self):
        sock = socket.create_connection(
            (self.host, self.port), getattr(self, 'source_address', None))

        # Handle the socket if a (proxy) tunnel is present
        if hasattr(self, '_tunnel') and getattr(self, '_tunnel_host', None):
            self.sock = sock
            self._tunnel()
            # http://bugs.python.org/issue7776: Python>=3.4.1 and >=2.7.7
            # change self.host to mean the proxy server host when tunneling is
            # being used.  Adapt, since we are interested in the destination
            # host for the match_hostname() comparison.
            actual_host = self._tunnel_host
        else:
            actual_host = self.host

        if hasattr(ssl, 'create_default_context'):
            ctx = ssl.create_default_context(cafile=self.ca_bundle)
            self.sock = ctx.wrap_socket(sock, server_hostname=actual_host)
        else:
            # This is for python < 2.7.9 and < 3.4?
            self.sock = ssl.wrap_socket(
                sock, cert_reqs=ssl.CERT_REQUIRED, ca_certs=self.ca_bundle
            )
        try:
            match_hostname(self.sock.getpeercert(), actual_host)
        except CertificateError:
            # Tear the connection down before propagating the failure.
            self.sock.shutdown(socket.SHUT_RDWR)
            self.sock.close()
            raise


def opener_for(ca_bundle=None):
    """Get a urlopen() replacement that uses ca_bundle for verification"""
    return urllib.request.build_opener(
        VerifyingHTTPSHandler(ca_bundle or find_ca_bundle())
    ).open


# from jaraco.functools
def once(func):
    # Caches the first result on the function object itself; subsequent
    # calls return the cached value without re-invoking func.
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        if not hasattr(func, 'always_returns'):
            func.always_returns = func(*args, **kwargs)
        return func.always_returns
    return wrapper


@once
def get_win_certfile():
    # Returns the path of a temp file holding the Windows cert stores,
    # or None when wincertstore is not installed.
    try:
        import wincertstore
    except ImportError:
        return None

    class CertFile(wincertstore.CertFile):
        def __init__(self):
            super(CertFile, self).__init__()
            atexit.register(self.close)

        def close(self):
            try:
                super(CertFile, self).close()
            except OSError:
                pass

    _wincerts = CertFile()
    _wincerts.addstore('CA')
    _wincerts.addstore('ROOT')
    return _wincerts.name


def find_ca_bundle():
    """Return an existing CA bundle path, or None"""
    extant_cert_paths = filter(os.path.isfile, cert_paths)
    return (
        get_win_certfile()
        or next(extant_cert_paths, None)
        or _certifi_where()
    )


def _certifi_where():
    # Best effort: returns None when certifi is unavailable.
    try:
        return __import__('certifi').where()
    except (ImportError, ResolutionError, ExtractionError):
        pass
def __boot():
    """Locate and load the real 'site' module, then re-insert PYTHONPATH
    entries (processed with addsitedir) ahead of the system paths.

    NOTE(review): makepath and addsitedir are not defined here; they are
    expected to appear in this namespace once the genuine 'site' module
    is loaded over this one below.
    """
    import sys
    import os
    PYTHONPATH = os.environ.get('PYTHONPATH')
    if PYTHONPATH is None or (sys.platform == 'win32' and not PYTHONPATH):
        PYTHONPATH = []
    else:
        PYTHONPATH = PYTHONPATH.split(os.pathsep)

    pic = getattr(sys, 'path_importer_cache', {})
    # Entries after the PYTHONPATH prefix are the stdlib/system paths.
    stdpath = sys.path[len(PYTHONPATH):]
    mydir = os.path.dirname(__file__)

    for item in stdpath:
        if item == mydir or not item:
            continue  # skip if current dir. on Windows, or my own directory
        importer = pic.get(item)
        if importer is not None:
            loader = importer.find_module('site')
            if loader is not None:
                # This should actually reload the current module
                loader.load_module('site')
                break
        else:
            try:
                import imp  # Avoid import loop in Python >= 3.3
                stream, path, descr = imp.find_module('site', [item])
            except ImportError:
                continue
            if stream is None:
                continue
            try:
                # This should actually reload the current module
                imp.load_module('site', stream, path, descr)
            finally:
                stream.close()
            break
    else:
        raise ImportError("Couldn't find the real 'site' module")

    # 2.2 comp
    known_paths = dict([(makepath(item)[1], 1) for item in sys.path])

    oldpos = getattr(sys, '__egginsert', 0)  # save old insertion position
    sys.__egginsert = 0  # and reset the current one

    for item in PYTHONPATH:
        addsitedir(item)

    sys.__egginsert += oldpos  # restore effective old position

    d, nd = makepath(stdpath[0])
    insert_at = None
    new_path = []

    for item in sys.path:
        p, np = makepath(item)

        if np == nd and insert_at is None:
            # We've hit the first 'system' path entry, so added entries go
            # here
            insert_at = len(new_path)

        if np in known_paths or insert_at is None:
            new_path.append(item)
        else:
            # new path after the insert point, back-insert it
            new_path.insert(insert_at, item)
            insert_at += 1

    sys.path[:] = new_path


if __name__ == 'site':
    __boot()
    del __boot
# EASY-INSTALL-SCRIPT: %(spec)r,%(script_name)r__requires__ = %(spec)r__import__('pkg_resources').run_script(%(spec)r, %(script_name)r)
# EASY-INSTALL-DEV-SCRIPT: %(spec)r,%(script_name)r__requires__ = %(spec)r__import__('pkg_resources').require(%(spec)r)__file__ = %(dev_path)rexec(compile(open(__file__).read(), __file__, 'exec'))
"""Sandboxing support for running third-party setup scripts.

run_setup() executes a distutils setup script with sys.argv/sys.path/
modules/cwd/tempdir saved and restored, and with filesystem writes
restricted to the setup directory via DirectorySandbox.
"""
import os
import sys
import tempfile
import operator
import functools
import itertools
import re
import contextlib
import pickle
import textwrap

from setuptools.extern import six
from setuptools.extern.six.moves import builtins, map

import pkg_resources.py31compat

# _os is the platform os implementation whose functions get wrapped.
if sys.platform.startswith('java'):
    import org.python.modules.posix.PosixModule as _os
else:
    _os = sys.modules[os.name]
try:
    _file = file  # Python 2 only; None on Python 3.
except NameError:
    _file = None
_open = open
from distutils.errors import DistutilsError
from pkg_resources import working_set


__all__ = [
    "AbstractSandbox", "DirectorySandbox", "SandboxViolation", "run_setup",
]


def _execfile(filename, globals, locals=None):
    """
    Python 3 implementation of execfile.
    """
    mode = 'rb'
    with open(filename, mode) as stream:
        script = stream.read()
    if locals is None:
        locals = globals
    code = compile(script, filename, 'exec')
    exec(code, globals, locals)


@contextlib.contextmanager
def save_argv(repl=None):
    # Snapshot sys.argv, optionally replacing it for the duration.
    saved = sys.argv[:]
    if repl is not None:
        sys.argv[:] = repl
    try:
        yield saved
    finally:
        sys.argv[:] = saved


@contextlib.contextmanager
def save_path():
    # Snapshot and restore sys.path around the context.
    saved = sys.path[:]
    try:
        yield saved
    finally:
        sys.path[:] = saved


@contextlib.contextmanager
def override_temp(replacement):
    """
    Monkey-patch tempfile.tempdir with replacement, ensuring it exists
    """
    pkg_resources.py31compat.makedirs(replacement, exist_ok=True)

    saved = tempfile.tempdir

    tempfile.tempdir = replacement

    try:
        yield
    finally:
        tempfile.tempdir = saved


@contextlib.contextmanager
def pushd(target):
    # chdir into target for the duration; yields the previous cwd.
    saved = os.getcwd()
    os.chdir(target)
    try:
        yield saved
    finally:
        os.chdir(saved)


class UnpickleableException(Exception):
    """
    An exception representing another Exception that could not be pickled.
    """

    @staticmethod
    def dump(type, exc):
        """
        Always return a dumped (pickled) type and exc.  If exc can't be
        pickled, wrap it in UnpickleableException first.
        """
        try:
            return pickle.dumps(type), pickle.dumps(exc)
        except Exception:
            # get UnpickleableException inside the sandbox
            from setuptools.sandbox import UnpickleableException as cls
            return cls.dump(cls, cls(repr(exc)))


class ExceptionSaver:
    """
    A Context Manager that will save an exception, serialized, and restore it
    later.
    """

    def __enter__(self):
        return self

    def __exit__(self, type, exc, tb):
        if not exc:
            return

        # dump the exception
        self._saved = UnpickleableException.dump(type, exc)
        self._tb = tb

        # suppress the exception
        return True

    def resume(self):
        "restore and re-raise any exception"

        if '_saved' not in vars(self):
            return

        type, exc = map(pickle.loads, self._saved)
        six.reraise(type, exc, self._tb)


@contextlib.contextmanager
def save_modules():
    """
    Context in which imported modules are saved.

    Translates exceptions internal to the context into the equivalent
    exception outside the context.
    """
    saved = sys.modules.copy()
    with ExceptionSaver() as saved_exc:
        yield saved

    sys.modules.update(saved)
    # remove any modules imported since
    del_modules = (
        mod_name for mod_name in sys.modules
        if mod_name not in saved
        # exclude any encodings modules.  See #285
        and not mod_name.startswith('encodings.')
    )
    _clear_modules(del_modules)

    saved_exc.resume()


def _clear_modules(module_names):
    # list() first: we mutate sys.modules while names may come from it.
    for mod_name in list(module_names):
        del sys.modules[mod_name]


@contextlib.contextmanager
def save_pkg_resources_state():
    saved = pkg_resources.__getstate__()
    try:
        yield saved
    finally:
        pkg_resources.__setstate__(saved)


@contextlib.contextmanager
def setup_context(setup_dir):
    # Stack of all save/restore contexts needed to run a setup script
    # without leaking state into this process.
    temp_dir = os.path.join(setup_dir, 'temp')
    with save_pkg_resources_state():
        with save_modules():
            hide_setuptools()
            with save_path():
                with save_argv():
                    with override_temp(temp_dir):
                        with pushd(setup_dir):
                            # ensure setuptools commands are available
                            __import__('setuptools')
                            yield


def _needs_hiding(mod_name):
    """
    >>> _needs_hiding('setuptools')
    True
    >>> _needs_hiding('pkg_resources')
    True
    >>> _needs_hiding('setuptools_plugin')
    False
    >>> _needs_hiding('setuptools.__init__')
    True
    >>> _needs_hiding('distutils')
    True
    >>> _needs_hiding('os')
    False
    >>> _needs_hiding('Cython')
    True
    """
    pattern = re.compile(r'(setuptools|pkg_resources|distutils|Cython)(\.|$)')
    return bool(pattern.match(mod_name))


def hide_setuptools():
    """
    Remove references to setuptools' modules from sys.modules to allow the
    invocation to import the most appropriate setuptools.  This technique is
    necessary to avoid issues such as #315 where setuptools upgrading itself
    would fail to find a function declared in the metadata.
    """
    modules = filter(_needs_hiding, sys.modules)
    _clear_modules(modules)


def run_setup(setup_script, args):
    """Run a distutils setup script, sandboxed in its directory"""
    setup_dir = os.path.abspath(os.path.dirname(setup_script))
    with setup_context(setup_dir):
        try:
            sys.argv[:] = [setup_script] + list(args)
            sys.path.insert(0, setup_dir)
            # reset to include setup dir, w/clean callback list
            working_set.__init__()
            working_set.callbacks.append(lambda dist: dist.activate())

            # __file__ should be a byte string on Python 2 (#712)
            dunder_file = (
                setup_script
                if isinstance(setup_script, str) else
                setup_script.encode(sys.getfilesystemencoding())
            )

            with DirectorySandbox(setup_dir):
                ns = dict(__file__=dunder_file, __name__='__main__')
                _execfile(setup_script, ns)
        except SystemExit as v:
            if v.args and v.args[0]:
                raise
            # Normal exit, just return


class AbstractSandbox:
    """Wrap 'os' module and 'open()' builtin for virtualizing setup scripts"""

    _active = False

    def __init__(self):
        # Only wrap the os functions this (sub)class actually defines.
        self._attrs = [
            name for name in dir(_os)
            if not name.startswith('_') and hasattr(self, name)
        ]

    def _copy(self, source):
        for name in self._attrs:
            setattr(os, name, getattr(source, name))

    def __enter__(self):
        # Patch os/builtins to route through this sandbox instance.
        self._copy(self)
        if _file:
            builtins.file = self._file
        builtins.open = self._open
        self._active = True

    def __exit__(self, exc_type, exc_value, traceback):
        # Restore the original os functions and builtins.
        self._active = False
        if _file:
            builtins.file = _file
        builtins.open = _open
        self._copy(_os)

    def run(self, func):
        """Run 'func' under os sandboxing"""
        with self:
            return func()

    # The following factories run at class-definition time and install
    # wrapped os functions into the class namespace via locals().

    def _mk_dual_path_wrapper(name):
        original = getattr(_os, name)

        def wrap(self, src, dst, *args, **kw):
            if self._active:
                src, dst = self._remap_pair(name, src, dst, *args, **kw)
            return original(src, dst, *args, **kw)

        return wrap

    for name in ["rename", "link", "symlink"]:
        if hasattr(_os, name):
            locals()[name] = _mk_dual_path_wrapper(name)

    def _mk_single_path_wrapper(name, original=None):
        original = original or getattr(_os, name)

        def wrap(self, path, *args, **kw):
            if self._active:
                path = self._remap_input(name, path, *args, **kw)
            return original(path, *args, **kw)

        return wrap

    if _file:
        _file = _mk_single_path_wrapper('file', _file)
    _open = _mk_single_path_wrapper('open', _open)
    for name in [
        "stat", "listdir", "chdir", "open", "chmod", "chown", "mkdir",
        "remove", "unlink", "rmdir", "utime", "lchown", "chroot", "lstat",
        "startfile", "mkfifo", "mknod", "pathconf", "access"
    ]:
        if hasattr(_os, name):
            locals()[name] = _mk_single_path_wrapper(name)

    def _mk_single_with_return(name):
        original = getattr(_os, name)

        def wrap(self, path, *args, **kw):
            if self._active:
                path = self._remap_input(name, path, *args, **kw)
                return self._remap_output(name, original(path, *args, **kw))
            return original(path, *args, **kw)

        return wrap

    for name in ['readlink', 'tempnam']:
        if hasattr(_os, name):
            locals()[name] = _mk_single_with_return(name)

    def _mk_query(name):
        original = getattr(_os, name)

        def wrap(self, *args, **kw):
            retval = original(*args, **kw)
            if self._active:
                return self._remap_output(name, retval)
            return retval

        return wrap

    for name in ['getcwd', 'tmpnam']:
        if hasattr(_os, name):
            locals()[name] = _mk_query(name)

    def _validate_path(self, path):
        """Called to remap or validate any path, whether input or output"""
        return path

    def _remap_input(self, operation, path, *args, **kw):
        """Called for path inputs"""
        return self._validate_path(path)

    def _remap_output(self, operation, path):
        """Called for path outputs"""
        return self._validate_path(path)

    def _remap_pair(self, operation, src, dst, *args, **kw):
        """Called for path pairs like rename, link, and symlink operations"""
        return (
            self._remap_input(operation + '-from', src, *args, **kw),
            self._remap_input(operation + '-to', dst, *args, **kw),
        )


if hasattr(os, 'devnull'):
    _EXCEPTIONS = [os.devnull,]
else:
    _EXCEPTIONS = []


class DirectorySandbox(AbstractSandbox):
    """Restrict operations to a single subdirectory - pseudo-chroot"""

    # os operations that are considered writes and must stay in-sandbox.
    write_ops = dict.fromkeys([
        "open", "chmod", "chown", "mkdir", "remove", "unlink", "rmdir",
        "utime", "lchown", "chroot", "mkfifo", "mknod", "tempnam",
    ])

    _exception_patterns = [
        # Allow lib2to3 to attempt to save a pickled grammar object (#121)
        r'.*lib2to3.*\.pickle$',
    ]
    "exempt writing to paths that match the pattern"

    def __init__(self, sandbox, exceptions=_EXCEPTIONS):
        self._sandbox = os.path.normcase(os.path.realpath(sandbox))
        self._prefix = os.path.join(self._sandbox, '')
        self._exceptions = [
            os.path.normcase(os.path.realpath(path))
            for path in exceptions
        ]
        AbstractSandbox.__init__(self)

    def _violation(self, operation, *args, **kw):
        from setuptools.sandbox import SandboxViolation
        raise SandboxViolation(operation, args, kw)

    if _file:
        def _file(self, path, mode='r', *args, **kw):
            if mode not in ('r', 'rt', 'rb', 'rU', 'U') \
                    and not self._ok(path):
                self._violation("file", path, mode, *args, **kw)
            return _file(path, mode, *args, **kw)

    def _open(self, path, mode='r', *args, **kw):
        # Read-only modes are always allowed; writes must pass _ok().
        if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path):
            self._violation("open", path, mode, *args, **kw)
        return _open(path, mode, *args, **kw)

    def tmpnam(self):
        self._violation("tmpnam")

    def _ok(self, path):
        # Temporarily deactivate so realpath() doesn't recurse through
        # our own wrappers.
        active = self._active
        try:
            self._active = False
            realpath = os.path.normcase(os.path.realpath(path))
            return (
                self._exempted(realpath)
                or realpath == self._sandbox
                or realpath.startswith(self._prefix)
            )
        finally:
            self._active = active

    def _exempted(self, filepath):
        start_matches = (
            filepath.startswith(exception)
            for exception in self._exceptions
        )
        pattern_matches = (
            re.match(pattern, filepath)
            for pattern in self._exception_patterns
        )
        candidates = itertools.chain(start_matches, pattern_matches)
        return any(candidates)

    def _remap_input(self, operation, path, *args, **kw):
        """Called for path inputs"""
        if operation in self.write_ops and not self._ok(path):
            self._violation(operation, os.path.realpath(path), *args, **kw)
        return path

    def _remap_pair(self, operation, src, dst, *args, **kw):
        """Called for path pairs like rename, link, and symlink operations"""
        if not self._ok(src) or not self._ok(dst):
            self._violation(operation, src, dst, *args, **kw)
        return (src, dst)

    def open(self, file, flags, mode=0o777, *args, **kw):
        """Called for low-level os.open()"""
        if flags & WRITE_FLAGS and not self._ok(file):
            self._violation("os.open", file, flags, mode, *args, **kw)
        return _os.open(file, flags, mode, *args, **kw)


# Bitmask of all os.open flags that imply a write.
WRITE_FLAGS = functools.reduce(
    operator.or_, [
        getattr(_os, a, 0) for a in
        "O_WRONLY O_RDWR O_APPEND O_CREAT O_TRUNC O_TEMPORARY".split()]
)


class SandboxViolation(DistutilsError):
    """A setup script attempted to modify the filesystem outside the sandbox"""

    tmpl = textwrap.dedent("""
        SandboxViolation: {cmd}{args!r} {kwargs}

        The package setup script has attempted to modify files on your system
        that are not within the EasyInstall build area, and has been aborted.

        This package cannot be safely installed by EasyInstall, and may not
        support alternate installation locations even if you run its setup
        script by hand.  Please inform the package's author and the EasyInstall
        maintainers to find out if a fix or workaround is available.
        """).lstrip()

    def __str__(self):
        cmd, args, kwargs = self.args
        return self.tmpl.format(**locals())
import sys

from distutils.errors import DistutilsOptionError
from distutils.util import strtobool
from distutils.debug import DEBUG


class Distribution_parse_config_files:
    """
    Mix-in providing forward-compatibility for functionality to be included
    by default on Python 3.7.

    Do not edit the code in this class except to update functionality
    as implemented in distutils.
    """

    def parse_config_files(self, filenames=None):
        """Read setup config files and populate command option dicts.

        Expects the distutils Distribution interface on self
        (find_config_files, announce, get_option_dict, command_options,
        negative_opt).
        """
        from configparser import ConfigParser

        # Ignore install directory options if we have a venv
        if sys.prefix != sys.base_prefix:
            ignore_options = [
                'install-base', 'install-platbase', 'install-lib',
                'install-platlib', 'install-purelib', 'install-headers',
                'install-scripts', 'install-data', 'prefix', 'exec-prefix',
                'home', 'user', 'root']
        else:
            ignore_options = []

        ignore_options = frozenset(ignore_options)

        if filenames is None:
            filenames = self.find_config_files()

        if DEBUG:
            self.announce("Distribution.parse_config_files():")

        parser = ConfigParser(interpolation=None)
        for filename in filenames:
            if DEBUG:
                self.announce(" reading %s" % filename)
            parser.read(filename)
            for section in parser.sections():
                options = parser.options(section)
                opt_dict = self.get_option_dict(section)

                for opt in options:
                    if opt != '__name__' and opt not in ignore_options:
                        val = parser.get(section, opt)
                        opt = opt.replace('-', '_')
                        opt_dict[opt] = (filename, val)

            # Make the ConfigParser forget everything (so we retain
            # the original filenames that options come from)
            parser.__init__()

        # If there was a "global" section in the config file, use it
        # to set Distribution options.
        if 'global' in self.command_options:
            for (opt, (src, val)) in self.command_options['global'].items():
                alias = self.negative_opt.get(opt)
                try:
                    if alias:
                        setattr(self, alias, not strtobool(val))
                    elif opt in ('verbose', 'dry_run'):  # ugh!
                        setattr(self, opt, strtobool(val))
                    else:
                        setattr(self, opt, val)
                except ValueError as msg:
                    raise DistutilsOptionError(msg)


if sys.version_info < (3,):
    # Python 2 behavior is sufficient
    class Distribution_parse_config_files:
        pass


if False:
    # When updated behavior is available upstream,
    # disable override here.
    class Distribution_parse_config_files:
        pass
import dis
import array
import collections


# A single decoded bytecode operation: opcode number and its numeric
# argument (None for argument-less opcodes).
OpArg = collections.namedtuple('OpArg', 'opcode arg')


class Bytecode_compat(object):
    """Minimal substitute for ``dis.Bytecode`` on Pythons that lack it
    (< 3.4), decoding the pre-3.6 "1 or 3 byte" instruction format.
    """

    def __init__(self, code):
        self.code = code

    def __iter__(self):
        """Yield '(op,arg)' pair for each operation in code object 'code'"""

        # Use an unsigned view ('B') of the raw bytecode.  A signed view
        # ('b') would map opcodes >= 128 -- including EXTENDED_ARG -- to
        # negative values, misclassifying them as argument-less and
        # desynchronizing the instruction stream.
        bytes = array.array('B', self.code.co_code)
        eof = len(self.code.co_code)

        ptr = 0
        extended_arg = 0

        while ptr < eof:
            op = bytes[ptr]

            if op >= dis.HAVE_ARGUMENT:
                # Little-endian 16-bit argument, plus any pending
                # EXTENDED_ARG prefix.
                arg = bytes[ptr + 1] + bytes[ptr + 2] * 256 + extended_arg
                ptr += 3

                if op == dis.EXTENDED_ARG:
                    # Imported lazily: only the Python 2 path ever takes
                    # this branch (dis.Bytecode exists on Python 3.4+),
                    # and six.integer_types[-1] is `long` there, which
                    # avoids overflow when widening the argument.
                    from setuptools.extern import six
                    long_type = six.integer_types[-1]
                    extended_arg = arg * long_type(65536)
                    continue

            else:
                arg = None
                ptr += 1

            yield OpArg(op, arg)


# Prefer the stdlib implementation when available.
Bytecode = getattr(dis, 'Bytecode', Bytecode_compat)
__all__ = ['get_config_vars', 'get_path']try:# Python 2.7 or >=3.2from sysconfig import get_config_vars, get_pathexcept ImportError:from distutils.sysconfig import get_config_vars, get_python_libdef get_path(name):if name not in ('platlib', 'purelib'):raise ValueError("Name must be purelib or platlib")return get_python_lib(name == 'platlib')try:# Python >=3.2from tempfile import TemporaryDirectoryexcept ImportError:import shutilimport tempfileclass TemporaryDirectory(object):"""Very simple temporary directory context manager.Will try to delete afterward, but will also ignore OS and similarerrors on deletion."""def __init__(self):self.name = None # Handle mkdtemp raising an exceptionself.name = tempfile.mkdtemp()def __enter__(self):return self.namedef __exit__(self, exctype, excvalue, exctrace):try:shutil.rmtree(self.name, True)except OSError: # removal errors are not the only possiblepassself.name = None
"""Compatibility Support for Python 2.7 and earlier"""import platformfrom setuptools.extern import sixdef get_all_headers(message, key):"""Given an HTTPMessage, return all headers matching a given key."""return message.get_all(key)if six.PY2:def get_all_headers(message, key):return message.getheaders(key)linux_py2_ascii = (platform.system() == 'Linux' andsix.PY2)rmtree_safe = str if linux_py2_ascii else lambda x: x"""Workaround for http://bugs.python.org/issue24672"""
# This file originally from pip:
# https://github.com/pypa/pip/blob/8f4f15a5a95d7d5b511ceaee9ed261176c181970/src/pip/_internal/pep425tags.py
"""Generate and work with PEP 425 Compatibility Tags."""
from __future__ import absolute_import

import distutils.util
import platform
import re
import sys
import sysconfig
import warnings
from collections import OrderedDict

from . import glibc

# Parses e.g. 'macosx_10_9_x86_64' into (name, major, minor, arch).
_osx_arch_pat = re.compile(r'(.+)_(\d+)_(\d+)_(.+)')


def get_config_var(var):
    # sysconfig lookup that degrades to None on I/O failure.
    try:
        return sysconfig.get_config_var(var)
    except IOError as e:  # Issue #1074
        warnings.warn("{}".format(e), RuntimeWarning)
        return None


def get_abbr_impl():
    """Return abbreviated implementation name."""
    if hasattr(sys, 'pypy_version_info'):
        pyimpl = 'pp'
    elif sys.platform.startswith('java'):
        pyimpl = 'jy'
    elif sys.platform == 'cli':
        pyimpl = 'ip'
    else:
        pyimpl = 'cp'
    return pyimpl


def get_impl_ver():
    """Return implementation version."""
    impl_ver = get_config_var("py_version_nodot")
    if not impl_ver or get_abbr_impl() == 'pp':
        impl_ver = ''.join(map(str, get_impl_version_info()))
    return impl_ver


def get_impl_version_info():
    """Return sys.version_info-like tuple for use in decrementing the minor
    version."""
    if get_abbr_impl() == 'pp':
        # as per https://github.com/pypa/pip/issues/2882
        return (sys.version_info[0], sys.pypy_version_info.major,
                sys.pypy_version_info.minor)
    else:
        return sys.version_info[0], sys.version_info[1]


def get_impl_tag():
    """Returns the Tag for this specific implementation."""
    return "{}{}".format(get_abbr_impl(), get_impl_ver())


def get_flag(var, fallback, expected=True, warn=True):
    """Use a fallback method for determining SOABI flags if the needed config
    var is unset or unavailable."""
    val = get_config_var(var)
    if val is None:
        if warn:
            warnings.warn("Config variable '{0}' is unset, Python ABI tag may "
                          "be incorrect".format(var), RuntimeWarning, 2)
        return fallback()
    return val == expected


def get_abi_tag():
    """Return the ABI tag based on SOABI (if available) or emulate SOABI
    (CPython 2, PyPy)."""
    soabi = get_config_var('SOABI')
    impl = get_abbr_impl()
    if not soabi and impl in {'cp', 'pp'} and hasattr(sys, 'maxunicode'):
        # No SOABI config var: reconstruct the d/m/u ABI suffixes by hand.
        d = ''
        m = ''
        u = ''
        if get_flag('Py_DEBUG',
                    lambda: hasattr(sys, 'gettotalrefcount'),
                    warn=(impl == 'cp')):
            d = 'd'
        if get_flag('WITH_PYMALLOC',
                    lambda: impl == 'cp',
                    warn=(impl == 'cp')):
            m = 'm'
        if get_flag('Py_UNICODE_SIZE',
                    lambda: sys.maxunicode == 0x10ffff,
                    expected=4,
                    warn=(impl == 'cp' and
                          sys.version_info < (3, 3))) \
                and sys.version_info < (3, 3):
            u = 'u'
        abi = '%s%s%s%s%s' % (impl, get_impl_ver(), d, m, u)
    elif soabi and soabi.startswith('cpython-'):
        abi = 'cp' + soabi.split('-')[1]
    elif soabi:
        abi = soabi.replace('.', '_').replace('-', '_')
    else:
        abi = None
    return abi


def _is_running_32bit():
    # True when this interpreter uses 32-bit pointers/ints.
    return sys.maxsize == 2147483647


def get_platform():
    """Return our platform name 'win32', 'linux_x86_64'"""
    if sys.platform == 'darwin':
        # distutils.util.get_platform() returns the release based on the value
        # of MACOSX_DEPLOYMENT_TARGET on which Python was built, which may
        # be significantly older than the user's current machine.
        release, _, machine = platform.mac_ver()
        split_ver = release.split('.')

        if machine == "x86_64" and _is_running_32bit():
            machine = "i386"
        elif machine == "ppc64" and _is_running_32bit():
            machine = "ppc"

        return 'macosx_{}_{}_{}'.format(split_ver[0], split_ver[1], machine)

    # XXX remove distutils dependency
    result = distutils.util.get_platform().replace('.', '_').replace('-', '_')
    if result == "linux_x86_64" and _is_running_32bit():
        # 32 bit Python program (running on a 64 bit Linux): pip should only
        # install and run 32 bit compiled extensions in that case.
        result = "linux_i686"

    return result


def is_manylinux1_compatible():
    # Only Linux, and only x86-64 / i686
    if get_platform() not in {"linux_x86_64", "linux_i686"}:
        return False

    # Check for presence of _manylinux module
    try:
        import _manylinux
        return bool(_manylinux.manylinux1_compatible)
    except (ImportError, AttributeError):
        # Fall through to heuristic check below
        pass

    # Check glibc version.  CentOS 5 uses glibc 2.5.
    return glibc.have_compatible_glibc(2, 5)


def get_darwin_arches(major, minor, machine):
    """Return a list of supported arches (including group arches) for
    the given major, minor and machine architecture of an macOS machine.
    """
    arches = []

    def _supports_arch(major, minor, arch):
        # Looking at the application support for macOS versions in the chart
        # provided by https://en.wikipedia.org/wiki/OS_X#Versions it appears
        # our timeline looks roughly like:
        #
        # 10.0 - Introduces ppc support.
        # 10.4 - Introduces ppc64, i386, and x86_64 support, however the ppc64
        #        and x86_64 support is CLI only, and cannot be used for GUI
        #        applications.
        # 10.5 - Extends ppc64 and x86_64 support to cover GUI applications.
        # 10.6 - Drops support for ppc64
        # 10.7 - Drops support for ppc
        #
        # Given that we do not know if we're installing a CLI or a GUI
        # application, we must be conservative and assume it might be a GUI
        # application and behave as if ppc64 and x86_64 support did not occur
        # until 10.5.
        #
        # Note: The above information is taken from the "Application support"
        #       column in the chart not the "Processor support" since I
        #       believe that we care about what instruction sets an
        #       application can use not which processors the OS supports.
        if arch == 'ppc':
            return (major, minor) <= (10, 5)
        if arch == 'ppc64':
            return (major, minor) == (10, 5)
        if arch == 'i386':
            return (major, minor) >= (10, 4)
        if arch == 'x86_64':
            return (major, minor) >= (10, 5)
        if arch in groups:
            for garch in groups[arch]:
                if _supports_arch(major, minor, garch):
                    return True
        return False

    groups = OrderedDict([
        ("fat", ("i386", "ppc")),
        ("intel", ("x86_64", "i386")),
        ("fat64", ("x86_64", "ppc64")),
        ("fat32", ("x86_64", "i386", "ppc")),
    ])

    if _supports_arch(major, minor, machine):
        arches.append(machine)

    for garch in groups:
        if machine in groups[garch] and _supports_arch(major, minor, garch):
            arches.append(garch)

    arches.append('universal')

    return arches


def get_supported(versions=None, noarch=False, platform=None,
                  impl=None, abi=None):
    """Return a list of supported tags for each version specified in
    `versions`.

    :param versions: a list of string versions, of the form ["33", "32"],
        or None. The first version will be assumed to support our ABI.
    :param platform: specify the exact platform you want valid
        tags for, or None. If None, use the local system platform.
    :param impl: specify the exact implementation you want valid
        tags for, or None. If None, use the local interpreter impl.
    :param abi: specify the exact abi you want valid
        tags for, or None. If None, use the local interpreter abi.
    """
    supported = []

    # Versions must be given with respect to the preference
    if versions is None:
        versions = []
        version_info = get_impl_version_info()
        major = version_info[:-1]
        # Support all previous minor Python versions.
        for minor in range(version_info[-1], -1, -1):
            versions.append(''.join(map(str, major + (minor,))))

    impl = impl or get_abbr_impl()

    abis = []

    abi = abi or get_abi_tag()
    if abi:
        abis[0:0] = [abi]

    # Collect stable-ABI suffixes (e.g. 'abi3') advertised by this build.
    abi3s = set()
    import imp
    for suffix in imp.get_suffixes():
        if suffix[0].startswith('.abi'):
            abi3s.add(suffix[0].split('.', 2)[1])

    abis.extend(sorted(list(abi3s)))

    abis.append('none')

    if not noarch:
        arch = platform or get_platform()
        if arch.startswith('macosx'):
            # support macosx-10.6-intel on macosx-10.9-x86_64
            match = _osx_arch_pat.match(arch)
            if match:
                name, major, minor, actual_arch = match.groups()
                tpl = '{}_{}_%i_%s'.format(name, major)
                arches = []
                for m in reversed(range(int(minor) + 1)):
                    for a in get_darwin_arches(int(major), m, actual_arch):
                        arches.append(tpl % (m, a))
            else:
                # arch pattern didn't match (?!)
                arches = [arch]
        elif platform is None and is_manylinux1_compatible():
            arches = [arch.replace('linux', 'manylinux1'), arch]
        else:
            arches = [arch]

        # Current version, current API (built specifically for our Python):
        for abi in abis:
            for arch in arches:
                supported.append(('%s%s' % (impl, versions[0]), abi, arch))

        # abi3 modules compatible with older version of Python
        for version in versions[1:]:
            # abi3 was introduced in Python 3.2
            if version in {'31', '30'}:
                break
            for abi in abi3s:  # empty set if not Python 3
                for arch in arches:
                    supported.append(("%s%s" % (impl, version), abi, arch))

        # Has binaries, does not use the Python API:
        for arch in arches:
            supported.append(('py%s' % (versions[0][0]), 'none', arch))

    # No abi / arch, but requires our implementation:
    supported.append(('%s%s' % (impl, versions[0]), 'none', 'any'))
    # Tagged specifically as being cross-version compatible
    # (with just the major version specified)
    supported.append(('%s%s' % (impl, versions[0][0]), 'none', 'any'))

    # No abi / arch, generic Python
    for i, version in enumerate(versions):
        supported.append(('py%s' % (version,), 'none', 'any'))
        if i == 0:
            supported.append(('py%s' % (version[0]), 'none', 'any'))

    return supported


implementation_tag = get_impl_tag()
"""PyPI and direct package downloading"""import sysimport osimport reimport shutilimport socketimport base64import hashlibimport itertoolsfrom functools import wrapstry:from urllib.parse import splituserexcept ImportError:from urllib2 import splituserfrom setuptools.extern import sixfrom setuptools.extern.six.moves import urllib, http_client, configparser, mapimport setuptoolsfrom pkg_resources import (CHECKOUT_DIST, Distribution, BINARY_DIST, normalize_path, SOURCE_DIST,Environment, find_distributions, safe_name, safe_version,to_filename, Requirement, DEVELOP_DIST, EGG_DIST,)from setuptools import ssl_supportfrom distutils import logfrom distutils.errors import DistutilsErrorfrom fnmatch import translatefrom setuptools.py27compat import get_all_headersfrom setuptools.wheel import WheelEGG_FRAGMENT = re.compile(r'^egg=([-A-Za-z0-9_.+!]+)$')HREF = re.compile("""href\\s*=\\s*['"]?([^'"> ]+)""", re.I)# this is here to fix emacs' cruddy broken syntax highlightingPYPI_MD5 = re.compile('<a href="([^"#]+)">([^<]+)</a>\n\\s+\\(<a (?:title="MD5 hash"\n\\s+)''href="[^?]+\\?:action=show_md5&digest=([0-9a-f]{32})">md5</a>\\)')URL_SCHEME = re.compile('([-+.a-z0-9]{2,}):', re.I).matchEXTENSIONS = ".tar.gz .tar.bz2 .tar .zip .tgz".split()__all__ = ['PackageIndex', 'distros_for_url', 'parse_bdist_wininst','interpret_distro_name',]_SOCKET_TIMEOUT = 15_tmpl = "setuptools/{setuptools.__version__} Python-urllib/{py_major}"user_agent = _tmpl.format(py_major=sys.version[:3], setuptools=setuptools)def parse_requirement_arg(spec):try:return Requirement.parse(spec)except ValueError:raise DistutilsError("Not a URL, existing file, or requirement spec: %r" % (spec,))def parse_bdist_wininst(name):"""Return (base,pyversion) or (None,None) for possible .exe name"""lower = name.lower()base, py_ver, plat = None, None, Noneif lower.endswith('.exe'):if lower.endswith('.win32.exe'):base = name[:-10]plat = 'win32'elif lower.startswith('.win32-py', -16):py_ver = name[-7:-4]base = name[:-16]plat = 
'win32'elif lower.endswith('.win-amd64.exe'):base = name[:-14]plat = 'win-amd64'elif lower.startswith('.win-amd64-py', -20):py_ver = name[-7:-4]base = name[:-20]plat = 'win-amd64'return base, py_ver, platdef egg_info_for_url(url):parts = urllib.parse.urlparse(url)scheme, server, path, parameters, query, fragment = partsbase = urllib.parse.unquote(path.split('/')[-1])if server == 'sourceforge.net' and base == 'download': # XXX Yuckbase = urllib.parse.unquote(path.split('/')[-2])if '#' in base:base, fragment = base.split('#', 1)return base, fragmentdef distros_for_url(url, metadata=None):"""Yield egg or source distribution objects that might be found at a URL"""base, fragment = egg_info_for_url(url)for dist in distros_for_location(url, base, metadata):yield distif fragment:match = EGG_FRAGMENT.match(fragment)if match:for dist in interpret_distro_name(url, match.group(1), metadata, precedence=CHECKOUT_DIST):yield distdef distros_for_location(location, basename, metadata=None):"""Yield egg or source distribution objects based on basename"""if basename.endswith('.egg.zip'):basename = basename[:-4] # strip the .zipif basename.endswith('.egg') and '-' in basename:# only one, unambiguous interpretationreturn [Distribution.from_location(location, basename, metadata)]if basename.endswith('.whl') and '-' in basename:wheel = Wheel(basename)if not wheel.is_compatible():return []return [Distribution(location=location,project_name=wheel.project_name,version=wheel.version,# Increase priority over eggs.precedence=EGG_DIST + 1,)]if basename.endswith('.exe'):win_base, py_ver, platform = parse_bdist_wininst(basename)if win_base is not None:return interpret_distro_name(location, win_base, metadata, py_ver, BINARY_DIST, platform)# Try source distro extensions (.zip, .tgz, etc.)#for ext in EXTENSIONS:if basename.endswith(ext):basename = basename[:-len(ext)]return interpret_distro_name(location, basename, metadata)return [] # no extension matcheddef distros_for_filename(filename, 
metadata=None):"""Yield possible egg or source distribution objects based on a filename"""return distros_for_location(normalize_path(filename), os.path.basename(filename), metadata)def interpret_distro_name(location, basename, metadata, py_version=None, precedence=SOURCE_DIST,platform=None):"""Generate alternative interpretations of a source distro nameNote: if `location` is a filesystem filename, you should call``pkg_resources.normalize_path()`` on it before passing it to thisroutine!"""# Generate alternative interpretations of a source distro name# Because some packages are ambiguous as to name/versions split# e.g. "adns-python-1.1.0", "egenix-mx-commercial", etc.# So, we generate each possible interepretation (e.g. "adns, python-1.1.0"# "adns-python, 1.1.0", and "adns-python-1.1.0, no version"). In practice,# the spurious interpretations should be ignored, because in the event# there's also an "adns" package, the spurious "python-1.1.0" version will# compare lower than any numeric version number, and is therefore unlikely# to match a request for it. It's still a potential problem, though, and# in the long run PyPI and the distutils should go for "safe" names and# versions in distribution archive names (sdist and bdist).parts = basename.split('-')if not py_version and any(re.match(r'py\d\.\d$', p) for p in parts[2:]):# it is a bdist_dumb, not an sdist -- bail outreturnfor p in range(1, len(parts) + 1):yield Distribution(location, metadata, '-'.join(parts[:p]), '-'.join(parts[p:]),py_version=py_version, precedence=precedence,platform=platform)# From Python 2.7 docsdef unique_everseen(iterable, key=None):"List unique elements, preserving order. 
Remember all elements ever seen."# unique_everseen('AAAABBBCCDAABBB') --> A B C D# unique_everseen('ABBCcAD', str.lower) --> A B C Dseen = set()seen_add = seen.addif key is None:for element in six.moves.filterfalse(seen.__contains__, iterable):seen_add(element)yield elementelse:for element in iterable:k = key(element)if k not in seen:seen_add(k)yield elementdef unique_values(func):"""Wrap a function returning an iterable such that the resulting iterableonly ever yields unique items."""@wraps(func)def wrapper(*args, **kwargs):return unique_everseen(func(*args, **kwargs))return wrapperREL = re.compile(r"""<([^>]*\srel\s*=\s*['"]?([^'">]+)[^>]*)>""", re.I)# this line is here to fix emacs' cruddy broken syntax highlighting@unique_valuesdef find_external_links(url, page):"""Find rel="homepage" and rel="download" links in `page`, yielding URLs"""for match in REL.finditer(page):tag, rel = match.groups()rels = set(map(str.strip, rel.lower().split(',')))if 'homepage' in rels or 'download' in rels:for match in HREF.finditer(tag):yield urllib.parse.urljoin(url, htmldecode(match.group(1)))for tag in ("<th>Home Page", "<th>Download URL"):pos = page.find(tag)if pos != -1:match = HREF.search(page, pos)if match:yield urllib.parse.urljoin(url, htmldecode(match.group(1)))class ContentChecker(object):"""A null content checker that defines the interface for checking content"""def feed(self, block):"""Feed a block of data to the hash."""returndef is_valid(self):"""Check the hash. 
Return False if validation fails."""return Truedef report(self, reporter, template):"""Call reporter with information about the checker (hash name)substituted into the template."""returnclass HashChecker(ContentChecker):pattern = re.compile(r'(?P<hash_name>sha1|sha224|sha384|sha256|sha512|md5)='r'(?P<expected>[a-f0-9]+)')def __init__(self, hash_name, expected):self.hash_name = hash_nameself.hash = hashlib.new(hash_name)self.expected = expected@classmethoddef from_url(cls, url):"Construct a (possibly null) ContentChecker from a URL"fragment = urllib.parse.urlparse(url)[-1]if not fragment:return ContentChecker()match = cls.pattern.search(fragment)if not match:return ContentChecker()return cls(**match.groupdict())def feed(self, block):self.hash.update(block)def is_valid(self):return self.hash.hexdigest() == self.expecteddef report(self, reporter, template):msg = template % self.hash_namereturn reporter(msg)class PackageIndex(Environment):"""A distribution index that scans web pages for download URLs"""def __init__(self, index_url="https://pypi.python.org/simple", hosts=('*',),ca_bundle=None, verify_ssl=True, *args, **kw):Environment.__init__(self, *args, **kw)self.index_url = index_url + "/" [:not index_url.endswith('/')]self.scanned_urls = {}self.fetched_urls = {}self.package_pages = {}self.allows = re.compile('|'.join(map(translate, hosts))).matchself.to_scan = []use_ssl = (verify_ssland ssl_support.is_availableand (ca_bundle or ssl_support.find_ca_bundle()))if use_ssl:self.opener = ssl_support.opener_for(ca_bundle)else:self.opener = urllib.request.urlopendef process_url(self, url, retrieve=False):"""Evaluate a URL as a possible download, and maybe retrieve it"""if url in self.scanned_urls and not retrieve:returnself.scanned_urls[url] = Trueif not URL_SCHEME(url):self.process_filename(url)returnelse:dists = list(distros_for_url(url))if dists:if not self.url_ok(url):returnself.debug("Found link: %s", url)if dists or not retrieve or url in 
self.fetched_urls:list(map(self.add, dists))return # don't need the actual pageif not self.url_ok(url):self.fetched_urls[url] = Truereturnself.info("Reading %s", url)self.fetched_urls[url] = True # prevent multiple fetch attemptstmpl = "Download error on %s: %%s -- Some packages may not be found!"f = self.open_url(url, tmpl % url)if f is None:returnself.fetched_urls[f.url] = Trueif 'html' not in f.headers.get('content-type', '').lower():f.close() # not html, we can't process itreturnbase = f.url # handle redirectspage = f.read()if not isinstance(page, str):# In Python 3 and got bytes but want str.if isinstance(f, urllib.error.HTTPError):# Errors have no charset, assume latin1:charset = 'latin-1'else:charset = f.headers.get_param('charset') or 'latin-1'page = page.decode(charset, "ignore")f.close()for match in HREF.finditer(page):link = urllib.parse.urljoin(base, htmldecode(match.group(1)))self.process_url(link)if url.startswith(self.index_url) and getattr(f, 'code', None) != 404:page = self.process_index(url, page)def process_filename(self, fn, nested=False):# process filenames or directoriesif not os.path.exists(fn):self.warn("Not found: %s", fn)returnif os.path.isdir(fn) and not nested:path = os.path.realpath(fn)for item in os.listdir(path):self.process_filename(os.path.join(path, item), True)dists = distros_for_filename(fn)if dists:self.debug("Found: %s", fn)list(map(self.add, dists))def url_ok(self, url, fatal=False):s = URL_SCHEME(url)is_file = s and s.group(1).lower() == 'file'if is_file or self.allows(urllib.parse.urlparse(url)[1]):return Truemsg = ("\nNote: Bypassing %s (disallowed host; see ""http://bit.ly/2hrImnY for details).\n")if fatal:raise DistutilsError(msg % url)else:self.warn(msg, url)def scan_egg_links(self, search_path):dirs = filter(os.path.isdir, search_path)egg_links = ((path, entry)for path in dirsfor entry in os.listdir(path)if entry.endswith('.egg-link'))list(itertools.starmap(self.scan_egg_link, egg_links))def scan_egg_link(self, path, 
entry):with open(os.path.join(path, entry)) as raw_lines:# filter non-empty lineslines = list(filter(None, map(str.strip, raw_lines)))if len(lines) != 2:# format is not recognized; puntreturnegg_path, setup_path = linesfor dist in find_distributions(os.path.join(path, egg_path)):dist.location = os.path.join(path, *lines)dist.precedence = SOURCE_DISTself.add(dist)def process_index(self, url, page):"""Process the contents of a PyPI page"""def scan(link):# Process a URL to see if it's for a package pageif link.startswith(self.index_url):parts = list(map(urllib.parse.unquote, link[len(self.index_url):].split('/')))if len(parts) == 2 and '#' not in parts[1]:# it's a package page, sanitize and index itpkg = safe_name(parts[0])ver = safe_version(parts[1])self.package_pages.setdefault(pkg.lower(), {})[link] = Truereturn to_filename(pkg), to_filename(ver)return None, None# process an index page into the package-page indexfor match in HREF.finditer(page):try:scan(urllib.parse.urljoin(url, htmldecode(match.group(1))))except ValueError:passpkg, ver = scan(url) # ensure this page is in the page indexif pkg:# process individual package pagefor new_url in find_external_links(url, page):# Process the found URLbase, frag = egg_info_for_url(new_url)if base.endswith('.py') and not frag:if ver:new_url += '#egg=%s-%s' % (pkg, ver)else:self.need_version_info(url)self.scan_url(new_url)return PYPI_MD5.sub(lambda m: '<a href="%s#md5=%s">%s</a>' % m.group(1, 3, 2), page)else:return "" # no sense double-scanning non-package pagesdef need_version_info(self, url):self.scan_all("Page at %s links to .py file(s) without version info; an index ""scan is required.", url)def scan_all(self, msg=None, *args):if self.index_url not in self.fetched_urls:if msg:self.warn(msg, *args)self.info("Scanning index of all packages (this may take a while)")self.scan_url(self.index_url)def find_packages(self, requirement):self.scan_url(self.index_url + requirement.unsafe_name + '/')if not 
self.package_pages.get(requirement.key):# Fall back to safe version of the nameself.scan_url(self.index_url + requirement.project_name + '/')if not self.package_pages.get(requirement.key):# We couldn't find the target package, so search the index page tooself.not_found_in_index(requirement)for url in list(self.package_pages.get(requirement.key, ())):# scan each page that might be related to the desired packageself.scan_url(url)def obtain(self, requirement, installer=None):self.prescan()self.find_packages(requirement)for dist in self[requirement.key]:if dist in requirement:return distself.debug("%s does not match %s", requirement, dist)return super(PackageIndex, self).obtain(requirement, installer)def check_hash(self, checker, filename, tfp):"""checker is a ContentChecker"""checker.report(self.debug,"Validating %%s checksum for %s" % filename)if not checker.is_valid():tfp.close()os.unlink(filename)raise DistutilsError("%s validation failed for %s; ""possible download problem?"% (checker.hash.name, os.path.basename(filename)))def add_find_links(self, urls):"""Add `urls` to the list that will be prescanned for searches"""for url in urls:if (self.to_scan is None # if we have already "gone online"or not URL_SCHEME(url) # or it's a local file/directoryor url.startswith('file:')or list(distros_for_url(url)) # or a direct package link):# then go ahead and process it nowself.scan_url(url)else:# otherwise, defer retrieval till laterself.to_scan.append(url)def prescan(self):"""Scan urls scheduled for prescanning (e.g. 
--find-links)"""if self.to_scan:list(map(self.scan_url, self.to_scan))self.to_scan = None # from now on, go ahead and process immediatelydef not_found_in_index(self, requirement):if self[requirement.key]: # we've seen at least one distrometh, msg = self.info, "Couldn't retrieve index page for %r"else: # no distros seen for this name, might be misspelledmeth, msg = (self.warn,"Couldn't find index page for %r (maybe misspelled?)")meth(msg, requirement.unsafe_name)self.scan_all()def download(self, spec, tmpdir):"""Locate and/or download `spec` to `tmpdir`, returning a local path`spec` may be a ``Requirement`` object, or a string containing a URL,an existing local filename, or a project/version requirement spec(i.e. the string form of a ``Requirement`` object). If it is the URLof a .py file with an unambiguous ``#egg=name-version`` tag (i.e., onethat escapes ``-`` as ``_`` throughout), a trivial ``setup.py`` isautomatically created alongside the downloaded file.If `spec` is a ``Requirement`` object or a string containing aproject/version requirement spec, this method returns the location ofa matching distribution (possibly after downloading it to `tmpdir`).If `spec` is a locally existing file or directory name, it is simplyreturned unchanged. If `spec` is a URL, it is downloaded to a subpathof `tmpdir`, and the local filename is returned. 
Various errors may beraised if a problem occurs during downloading."""if not isinstance(spec, Requirement):scheme = URL_SCHEME(spec)if scheme:# It's a url, download it to tmpdirfound = self._download_url(scheme.group(1), spec, tmpdir)base, fragment = egg_info_for_url(spec)if base.endswith('.py'):found = self.gen_setup(found, fragment, tmpdir)return foundelif os.path.exists(spec):# Existing file or directory, just return itreturn specelse:spec = parse_requirement_arg(spec)return getattr(self.fetch_distribution(spec, tmpdir), 'location', None)def fetch_distribution(self, requirement, tmpdir, force_scan=False, source=False,develop_ok=False, local_index=None):"""Obtain a distribution suitable for fulfilling `requirement``requirement` must be a ``pkg_resources.Requirement`` instance.If necessary, or if the `force_scan` flag is set, the requirement issearched for in the (online) package index as well as the locallyinstalled packages. If a distribution matching `requirement` is found,the returned distribution's ``location`` is the value you would havegotten from calling the ``download()`` method with the matchingdistribution's URL or filename. If no matching distribution is found,``None`` is returned.If the `source` flag is set, only source distributions and sourcecheckout links will be considered. 
Unless the `develop_ok` flag isset, development and system eggs (i.e., those using the ``.egg-info``format) will be ignored."""# process a Requirementself.info("Searching for %s", requirement)skipped = {}dist = Nonedef find(req, env=None):if env is None:env = self# Find a matching distribution; may be called more than oncefor dist in env[req.key]:if dist.precedence == DEVELOP_DIST and not develop_ok:if dist not in skipped:self.warn("Skipping development or system egg: %s", dist,)skipped[dist] = 1continuetest = (dist in reqand (dist.precedence <= SOURCE_DIST or not source))if test:loc = self.download(dist.location, tmpdir)dist.download_location = locif os.path.exists(dist.download_location):return distif force_scan:self.prescan()self.find_packages(requirement)dist = find(requirement)if not dist and local_index is not None:dist = find(requirement, local_index)if dist is None:if self.to_scan is not None:self.prescan()dist = find(requirement)if dist is None and not force_scan:self.find_packages(requirement)dist = find(requirement)if dist is None:self.warn("No local packages or working download links found for %s%s",(source and "a source distribution of " or ""),requirement,)else:self.info("Best match: %s", dist)return dist.clone(location=dist.download_location)def fetch(self, requirement, tmpdir, force_scan=False, source=False):"""Obtain a file suitable for fulfilling `requirement`DEPRECATED; use the ``fetch_distribution()`` method now instead. 
Forbackward compatibility, this routine is identical but returns the``location`` of the downloaded distribution instead of a distributionobject."""dist = self.fetch_distribution(requirement, tmpdir, force_scan, source)if dist is not None:return dist.locationreturn Nonedef gen_setup(self, filename, fragment, tmpdir):match = EGG_FRAGMENT.match(fragment)dists = match and [d for d ininterpret_distro_name(filename, match.group(1), None) if d.version] or []if len(dists) == 1: # unambiguous ``#egg`` fragmentbasename = os.path.basename(filename)# Make sure the file has been downloaded to the temp dir.if os.path.dirname(filename) != tmpdir:dst = os.path.join(tmpdir, basename)from setuptools.command.easy_install import samefileif not samefile(filename, dst):shutil.copy2(filename, dst)filename = dstwith open(os.path.join(tmpdir, 'setup.py'), 'w') as file:file.write("from setuptools import setup\n""setup(name=%r, version=%r, py_modules=[%r])\n"% (dists[0].project_name, dists[0].version,os.path.splitext(basename)[0]))return filenameelif match:raise DistutilsError("Can't unambiguously interpret project/version identifier %r; ""any dashes in the name or version should be escaped using ""underscores. 
%r" % (fragment, dists))else:raise DistutilsError("Can't process plain .py files without an '#egg=name-version'"" suffix to enable automatic setup script generation.")dl_blocksize = 8192def _download_to(self, url, filename):self.info("Downloading %s", url)# Download the filefp = Nonetry:checker = HashChecker.from_url(url)fp = self.open_url(url)if isinstance(fp, urllib.error.HTTPError):raise DistutilsError("Can't download %s: %s %s" % (url, fp.code, fp.msg))headers = fp.info()blocknum = 0bs = self.dl_blocksizesize = -1if "content-length" in headers:# Some servers return multiple Content-Length headers :(sizes = get_all_headers(headers, 'Content-Length')size = max(map(int, sizes))self.reporthook(url, filename, blocknum, bs, size)with open(filename, 'wb') as tfp:while True:block = fp.read(bs)if block:checker.feed(block)tfp.write(block)blocknum += 1self.reporthook(url, filename, blocknum, bs, size)else:breakself.check_hash(checker, filename, tfp)return headersfinally:if fp:fp.close()def reporthook(self, url, filename, blocknum, blksize, size):pass # no-opdef open_url(self, url, warning=None):if url.startswith('file:'):return local_open(url)try:return open_with_auth(url, self.opener)except (ValueError, http_client.InvalidURL) as v:msg = ' '.join([str(arg) for arg in v.args])if warning:self.warn(warning, msg)else:raise DistutilsError('%s %s' % (url, msg))except urllib.error.HTTPError as v:return vexcept urllib.error.URLError as v:if warning:self.warn(warning, v.reason)else:raise DistutilsError("Download error for %s: %s"% (url, v.reason))except http_client.BadStatusLine as v:if warning:self.warn(warning, v.line)else:raise DistutilsError('%s returned a bad status line. 
The server might be ''down, %s' %(url, v.line))except (http_client.HTTPException, socket.error) as v:if warning:self.warn(warning, v)else:raise DistutilsError("Download error for %s: %s"% (url, v))def _download_url(self, scheme, url, tmpdir):# Determine download filename#name, fragment = egg_info_for_url(url)if name:while '..' in name:name = name.replace('..', '.').replace('\\', '_')else:name = "__downloaded__" # default if URL has no path contentsif name.endswith('.egg.zip'):name = name[:-4] # strip the extra .zip before downloadfilename = os.path.join(tmpdir, name)# Download the file#if scheme == 'svn' or scheme.startswith('svn+'):return self._download_svn(url, filename)elif scheme == 'git' or scheme.startswith('git+'):return self._download_git(url, filename)elif scheme.startswith('hg+'):return self._download_hg(url, filename)elif scheme == 'file':return urllib.request.url2pathname(urllib.parse.urlparse(url)[2])else:self.url_ok(url, True) # raises error if not allowedreturn self._attempt_download(url, filename)def scan_url(self, url):self.process_url(url, True)def _attempt_download(self, url, filename):headers = self._download_to(url, filename)if 'html' in headers.get('content-type', '').lower():return self._download_html(url, headers, filename)else:return filenamedef _download_html(self, url, headers, filename):file = open(filename)for line in file:if line.strip():# Check for a subversion index pageif re.search(r'<title>([^- ]+ - )?Revision \d+:', line):# it's a subversion index page:file.close()os.unlink(filename)return self._download_svn(url, filename)break # not an index pagefile.close()os.unlink(filename)raise DistutilsError("Unexpected HTML page found at " + url)def _download_svn(self, url, filename):url = url.split('#', 1)[0] # remove any fragment for svn's sakecreds = ''if url.lower().startswith('svn:') and '@' in url:scheme, netloc, path, p, q, f = urllib.parse.urlparse(url)if not netloc and path.startswith('//') and '/' in path[2:]:netloc, path = 
path[2:].split('/', 1)auth, host = splituser(netloc)if auth:if ':' in auth:user, pw = auth.split(':', 1)creds = " --username=%s --password=%s" % (user, pw)else:creds = " --username=" + authnetloc = hostparts = scheme, netloc, url, p, q, furl = urllib.parse.urlunparse(parts)self.info("Doing subversion checkout from %s to %s", url, filename)os.system("svn checkout%s -q %s %s" % (creds, url, filename))return filename@staticmethoddef _vcs_split_rev_from_url(url, pop_prefix=False):scheme, netloc, path, query, frag = urllib.parse.urlsplit(url)scheme = scheme.split('+', 1)[-1]# Some fragment identification failspath = path.split('#', 1)[0]rev = Noneif '@' in path:path, rev = path.rsplit('@', 1)# Also, discard fragmenturl = urllib.parse.urlunsplit((scheme, netloc, path, query, ''))return url, revdef _download_git(self, url, filename):filename = filename.split('#', 1)[0]url, rev = self._vcs_split_rev_from_url(url, pop_prefix=True)self.info("Doing git clone from %s to %s", url, filename)os.system("git clone --quiet %s %s" % (url, filename))if rev is not None:self.info("Checking out %s", rev)os.system("(cd %s && git checkout --quiet %s)" % (filename,rev,))return filenamedef _download_hg(self, url, filename):filename = filename.split('#', 1)[0]url, rev = self._vcs_split_rev_from_url(url, pop_prefix=True)self.info("Doing hg clone from %s to %s", url, filename)os.system("hg clone --quiet %s %s" % (url, filename))if rev is not None:self.info("Updating to %s", rev)os.system("(cd %s && hg up -C -r %s -q)" % (filename,rev,))return filenamedef debug(self, msg, *args):log.debug(msg, *args)def info(self, msg, *args):log.info(msg, *args)def warn(self, msg, *args):log.warn(msg, *args)# This pattern matches a character entity reference (a decimal numeric# references, a hexadecimal numeric reference, or a named reference).entity_sub = re.compile(r'&(#(\d+|x[\da-fA-F]+)|[\w.:-]+);?').subdef uchr(c):if not isinstance(c, int):return cif c > 255:return six.unichr(c)return chr(c)def 
decode_entity(match):what = match.group(1)if what.startswith('#x'):what = int(what[2:], 16)elif what.startswith('#'):what = int(what[1:])else:what = six.moves.html_entities.name2codepoint.get(what, match.group(0))return uchr(what)def htmldecode(text):"""Decode HTML entities in the given text."""return entity_sub(decode_entity, text)def socket_timeout(timeout=15):def _socket_timeout(func):def _socket_timeout(*args, **kwargs):old_timeout = socket.getdefaulttimeout()socket.setdefaulttimeout(timeout)try:return func(*args, **kwargs)finally:socket.setdefaulttimeout(old_timeout)return _socket_timeoutreturn _socket_timeoutdef _encode_auth(auth):"""A function compatible with Python 2.3-3.3 that will encodeauth from a URL suitable for an HTTP header.>>> str(_encode_auth('username%3Apassword'))'dXNlcm5hbWU6cGFzc3dvcmQ='Long auth strings should not cause a newline to be inserted.>>> long_auth = 'username:' + 'password'*10>>> chr(10) in str(_encode_auth(long_auth))False"""auth_s = urllib.parse.unquote(auth)# convert to bytesauth_bytes = auth_s.encode()# use the legacy interface for Python 2.3 supportencoded_bytes = base64.encodestring(auth_bytes)# convert back to a stringencoded = encoded_bytes.decode()# strip the trailing carriage returnreturn encoded.replace('\n', '')class Credential(object):"""A username/password pair. 
Use like a namedtuple."""def __init__(self, username, password):self.username = usernameself.password = passworddef __iter__(self):yield self.usernameyield self.passworddef __str__(self):return '%(username)s:%(password)s' % vars(self)class PyPIConfig(configparser.RawConfigParser):def __init__(self):"""Load from ~/.pypirc"""defaults = dict.fromkeys(['username', 'password', 'repository'], '')configparser.RawConfigParser.__init__(self, defaults)rc = os.path.join(os.path.expanduser('~'), '.pypirc')if os.path.exists(rc):self.read(rc)@propertydef creds_by_repository(self):sections_with_repositories = [section for section in self.sections()if self.get(section, 'repository').strip()]return dict(map(self._get_repo_cred, sections_with_repositories))def _get_repo_cred(self, section):repo = self.get(section, 'repository').strip()return repo, Credential(self.get(section, 'username').strip(),self.get(section, 'password').strip(),)def find_credential(self, url):"""If the URL indicated appears to be a repository defined in thisconfig, return the credential for that repository."""for repository, cred in self.creds_by_repository.items():if url.startswith(repository):return creddef open_with_auth(url, opener=urllib.request.urlopen):"""Open a urllib2 request, handling HTTP authentication"""scheme, netloc, path, params, query, frag = urllib.parse.urlparse(url)# Double scheme does not raise on Mac OS X as revealed by a# failing test. We would expect "nonnumeric port". 
Refs #20.if netloc.endswith(':'):raise http_client.InvalidURL("nonnumeric port: ''")if scheme in ('http', 'https'):auth, host = splituser(netloc)else:auth = Noneif not auth:cred = PyPIConfig().find_credential(url)if cred:auth = str(cred)info = cred.username, urllog.info('Authenticating as %s for %s (from .pypirc)', *info)if auth:auth = "Basic " + _encode_auth(auth)parts = scheme, host, path, params, query, fragnew_url = urllib.parse.urlunparse(parts)request = urllib.request.Request(new_url)request.add_header("Authorization", auth)else:request = urllib.request.Request(url)request.add_header('User-Agent', user_agent)fp = opener(request)if auth:# Put authentication info back into request URL if same host,# so that links found on the page will works2, h2, path2, param2, query2, frag2 = urllib.parse.urlparse(fp.url)if s2 == scheme and h2 == host:parts = s2, netloc, path2, param2, query2, frag2fp.url = urllib.parse.urlunparse(parts)return fp# adding a timeout to avoid freezing package_indexopen_with_auth = socket_timeout(_SOCKET_TIMEOUT)(open_with_auth)def fix_sf_url(url):return url # backward compatibilitydef local_open(url):"""Read a local path, with special support for directories"""scheme, server, path, param, query, frag = urllib.parse.urlparse(url)filename = urllib.request.url2pathname(path)if os.path.isfile(filename):return urllib.request.urlopen(url)elif path.endswith('/') and os.path.isdir(filename):files = []for f in os.listdir(filename):filepath = os.path.join(filename, f)if f == 'index.html':with open(filepath, 'r') as fp:body = fp.read()breakelif os.path.isdir(filepath):f += '/'files.append('<a href="{name}">{name}</a>'.format(name=f))else:tmpl = ("<html><head><title>{url}</title>""</head><body>{files}</body></html>")body = tmpl.format(url=url, files='\n'.join(files))status, message = 200, "OK"else:status, message, body = 404, "Path not found", "Not found"headers = {'content-type': 'text/html'}body_stream = six.StringIO(body)return 
urllib.error.HTTPError(url, status, message, headers, body_stream)
"""
Support for legacy namespace packages: generate and remove the
``<target>-nspkg.pth`` file whose lines (each beginning with ``import``,
so ``site.py`` executes them) register stub namespace packages at
interpreter startup.
"""
import os
from distutils import log
import itertools

from setuptools.extern.six.moves import map

# Shorthand: flatten one level of nesting in an iterable of iterables.
flatten = itertools.chain.from_iterable


class Installer:
    """Mixin that writes/removes the -nspkg.pth file for a distribution.

    Expects the host class to provide ``self.distribution``, ``self.target``,
    ``self.outputs`` and ``self.dry_run`` (standard install-command
    attributes -- supplied by the command classes that mix this in).
    """

    # Suffix appended to the install target's base name to form the
    # .pth filename.
    nspkg_ext = '-nspkg.pth'

    def install_namespaces(self):
        """Write the -nspkg.pth file listing every namespace package of
        the distribution; no-op when there are none."""
        nsp = self._get_all_ns_packages()
        if not nsp:
            return
        filename, ext = os.path.splitext(self._get_target())
        filename += self.nspkg_ext
        self.outputs.append(filename)
        log.info("Installing %s", filename)
        # One generated .pth line per namespace package.
        lines = map(self._gen_nspkg_line, nsp)

        if self.dry_run:
            # always generate the lines, even in dry run
            list(lines)
            return

        with open(filename, 'wt') as f:
            f.writelines(lines)

    def uninstall_namespaces(self):
        """Remove the previously installed -nspkg.pth file, if present."""
        filename, ext = os.path.splitext(self._get_target())
        filename += self.nspkg_ext
        if not os.path.exists(filename):
            return
        log.info("Removing %s", filename)
        os.remove(filename)

    def _get_target(self):
        # Base path (without extension) from which the .pth name is derived.
        return self.target

    _nspkg_tmpl = (
        "import sys, types, os",
        "has_mfs = sys.version_info > (3, 5)",
        "p = os.path.join(%(root)s, *%(pth)r)",
        "importlib = has_mfs and __import__('importlib.util')",
        "has_mfs and __import__('importlib.machinery')",
        "m = has_mfs and "
        "sys.modules.setdefault(%(pkg)r, "
        "importlib.util.module_from_spec("
        "importlib.machinery.PathFinder.find_spec(%(pkg)r, "
        "[os.path.dirname(p)])))",
        "m = m or "
        "sys.modules.setdefault(%(pkg)r, types.ModuleType(%(pkg)r))",
        "mp = (m or []) and m.__dict__.setdefault('__path__',[])",
        "(p not in mp) and mp.append(p)",
    )
    "lines for the namespace installer"

    _nspkg_tmpl_multi = (
        'm and setattr(sys.modules[%(parent)r], %(child)r, m)',
    )
    "additional line(s) when a parent package is indicated"

    def _get_root(self):
        # Expression (as a string, evaluated inside the .pth line) giving
        # the directory the .pth file lives in.
        return "sys._getframe(1).f_locals['sitedir']"

    def _gen_nspkg_line(self, pkg):
        """Render one semicolon-joined .pth line for namespace package *pkg*.

        The templates are %-interpolated against locals(), so the local
        names ``root``, ``pth``, ``pkg``, ``parent`` and ``child`` below
        are all referenced by the template strings.
        """
        # ensure pkg is not a unicode string under Python 2.7
        pkg = str(pkg)
        pth = tuple(pkg.split('.'))
        root = self._get_root()
        tmpl_lines = self._nspkg_tmpl
        parent, sep, child = pkg.rpartition('.')
        if parent:
            # Dotted package: also bind the child module onto its parent.
            tmpl_lines += self._nspkg_tmpl_multi
        return ';'.join(tmpl_lines) % locals() + '\n'

    def _get_all_ns_packages(self):
        """Return sorted list of all package namespaces"""
        pkgs = self.distribution.namespace_packages or []
        return sorted(flatten(map(self._pkg_names, pkgs)))

    @staticmethod
    def _pkg_names(pkg):
        """
        Given a namespace package, yield the components of that
        package.

        >>> names = Installer._pkg_names('a.b.c')
        >>> set(names) == set(['a', 'a.b', 'a.b.c'])
        True
        """
        parts = pkg.split('.')
        while parts:
            yield '.'.join(parts)
            parts.pop()


class DevelopInstaller(Installer):
    """Variant used by the develop (editable-install) command: the root is
    the literal egg path and the target is the egg-link file."""

    def _get_root(self):
        return repr(str(self.egg_path))

    def _get_target(self):
        return self.egg_link
"""Improved support for Microsoft Visual C++ compilers.Known supported compilers:--------------------------Microsoft Visual C++ 9.0:Microsoft Visual C++ Compiler for Python 2.7 (x86, amd64)Microsoft Windows SDK 6.1 (x86, x64, ia64)Microsoft Windows SDK 7.0 (x86, x64, ia64)Microsoft Visual C++ 10.0:Microsoft Windows SDK 7.1 (x86, x64, ia64)Microsoft Visual C++ 14.0:Microsoft Visual C++ Build Tools 2015 (x86, x64, arm)Microsoft Visual Studio 2017 (x86, x64, arm, arm64)Microsoft Visual Studio Build Tools 2017 (x86, x64, arm, arm64)"""import osimport sysimport platformimport itertoolsimport distutils.errorsfrom pkg_resources.extern.packaging.version import LegacyVersionfrom setuptools.extern.six.moves import filterfalsefrom .monkey import get_unpatchedif platform.system() == 'Windows':from setuptools.extern.six.moves import winregsafe_env = os.environelse:"""Mock winreg and environ so the module can be importedon this platform."""class winreg:HKEY_USERS = NoneHKEY_CURRENT_USER = NoneHKEY_LOCAL_MACHINE = NoneHKEY_CLASSES_ROOT = Nonesafe_env = dict()_msvc9_suppress_errors = (# msvc9compiler isn't available on some platformsImportError,# msvc9compiler raises DistutilsPlatformError in some# environments. See #1118.distutils.errors.DistutilsPlatformError,)try:from distutils.msvc9compiler import Regexcept _msvc9_suppress_errors:passdef msvc9_find_vcvarsall(version):"""Patched "distutils.msvc9compiler.find_vcvarsall" to use the standalonecompiler build for Python (VCForPython). 
Fall back to original behaviorwhen the standalone compiler is not available.Redirect the path of "vcvarsall.bat".Known supported compilers-------------------------Microsoft Visual C++ 9.0:Microsoft Visual C++ Compiler for Python 2.7 (x86, amd64)Parameters----------version: floatRequired Microsoft Visual C++ version.Return------vcvarsall.bat path: str"""VC_BASE = r'Software\%sMicrosoft\DevDiv\VCForPython\%0.1f'key = VC_BASE % ('', version)try:# Per-user installs register the compiler path hereproductdir = Reg.get_value(key, "installdir")except KeyError:try:# All-user installs on a 64-bit system register herekey = VC_BASE % ('Wow6432Node\\', version)productdir = Reg.get_value(key, "installdir")except KeyError:productdir = Noneif productdir:vcvarsall = os.path.os.path.join(productdir, "vcvarsall.bat")if os.path.isfile(vcvarsall):return vcvarsallreturn get_unpatched(msvc9_find_vcvarsall)(version)def msvc9_query_vcvarsall(ver, arch='x86', *args, **kwargs):"""Patched "distutils.msvc9compiler.query_vcvarsall" for support extracompilers.Set environment without use of "vcvarsall.bat".Known supported compilers-------------------------Microsoft Visual C++ 9.0:Microsoft Visual C++ Compiler for Python 2.7 (x86, amd64)Microsoft Windows SDK 6.1 (x86, x64, ia64)Microsoft Windows SDK 7.0 (x86, x64, ia64)Microsoft Visual C++ 10.0:Microsoft Windows SDK 7.1 (x86, x64, ia64)Parameters----------ver: floatRequired Microsoft Visual C++ version.arch: strTarget architecture.Return------environment: dict"""# Try to get environement from vcvarsall.bat (Classical way)try:orig = get_unpatched(msvc9_query_vcvarsall)return orig(ver, arch, *args, **kwargs)except distutils.errors.DistutilsPlatformError:# Pass error if Vcvarsall.bat is missingpassexcept ValueError:# Pass error if environment not set after executing vcvarsall.batpass# If error, try to set environment directlytry:return EnvironmentInfo(arch, ver).return_env()except distutils.errors.DistutilsPlatformError as exc:_augment_exception(exc, 
ver, arch)raisedef msvc14_get_vc_env(plat_spec):"""Patched "distutils._msvccompiler._get_vc_env" for support extracompilers.Set environment without use of "vcvarsall.bat".Known supported compilers-------------------------Microsoft Visual C++ 14.0:Microsoft Visual C++ Build Tools 2015 (x86, x64, arm)Microsoft Visual Studio 2017 (x86, x64, arm, arm64)Microsoft Visual Studio Build Tools 2017 (x86, x64, arm, arm64)Parameters----------plat_spec: strTarget architecture.Return------environment: dict"""# Try to get environment from vcvarsall.bat (Classical way)try:return get_unpatched(msvc14_get_vc_env)(plat_spec)except distutils.errors.DistutilsPlatformError:# Pass error Vcvarsall.bat is missingpass# If error, try to set environment directlytry:return EnvironmentInfo(plat_spec, vc_min_ver=14.0).return_env()except distutils.errors.DistutilsPlatformError as exc:_augment_exception(exc, 14.0)raisedef msvc14_gen_lib_options(*args, **kwargs):"""Patched "distutils._msvccompiler.gen_lib_options" for fixcompatibility between "numpy.distutils" and "distutils._msvccompiler"(for Numpy < 1.11.2)"""if "numpy.distutils" in sys.modules:import numpy as npif LegacyVersion(np.__version__) < LegacyVersion('1.11.2'):return np.distutils.ccompiler.gen_lib_options(*args, **kwargs)return get_unpatched(msvc14_gen_lib_options)(*args, **kwargs)def _augment_exception(exc, version, arch=''):"""Add details to the exception message to help guide the useras to what action will resolve it."""# Error if MSVC++ directory not found or environment not setmessage = exc.args[0]if "vcvarsall" in message.lower() or "visual c" in message.lower():# Special error message if MSVC++ not installedtmpl = 'Microsoft Visual C++ {version:0.1f} is required.'message = tmpl.format(**locals())msdownload = 'www.microsoft.com/download/details.aspx?id=%d'if version == 9.0:if arch.lower().find('ia64') > -1:# For VC++ 9.0, if IA64 support is needed, redirect user# to Windows SDK 7.0message += ' Get it with "Microsoft Windows SDK 
7.0": 'message += msdownload % 3138else:# For VC++ 9.0 redirect user to Vc++ for Python 2.7 :# This redirection link is maintained by Microsoft.# Contact vspython@microsoft.com if it needs updating.message += ' Get it from http://aka.ms/vcpython27'elif version == 10.0:# For VC++ 10.0 Redirect user to Windows SDK 7.1message += ' Get it with "Microsoft Windows SDK 7.1": 'message += msdownload % 8279elif version >= 14.0:# For VC++ 14.0 Redirect user to Visual C++ Build Toolsmessage += (' Get it with "Microsoft Visual C++ Build Tools": 'r'http://landinghub.visualstudio.com/''visual-cpp-build-tools')exc.args = (message, )class PlatformInfo:"""Current and Target Architectures informations.Parameters----------arch: strTarget architecture."""current_cpu = safe_env.get('processor_architecture', '').lower()def __init__(self, arch):self.arch = arch.lower().replace('x64', 'amd64')@propertydef target_cpu(self):return self.arch[self.arch.find('_') + 1:]def target_is_x86(self):return self.target_cpu == 'x86'def current_is_x86(self):return self.current_cpu == 'x86'def current_dir(self, hidex86=False, x64=False):"""Current platform specific subfolder.Parameters----------hidex86: boolreturn '' and not '\x86' if architecture is x86.x64: boolreturn '\x64' and not '\amd64' if architecture is amd64.Return------subfolder: str'\target', or '' (see hidex86 parameter)"""return ('' if (self.current_cpu == 'x86' and hidex86) elser'\x64' if (self.current_cpu == 'amd64' and x64) elser'\%s' % self.current_cpu)def target_dir(self, hidex86=False, x64=False):r"""Target platform specific subfolder.Parameters----------hidex86: boolreturn '' and not '\x86' if architecture is x86.x64: boolreturn '\x64' and not '\amd64' if architecture is amd64.Return------subfolder: str'\current', or '' (see hidex86 parameter)"""return ('' if (self.target_cpu == 'x86' and hidex86) elser'\x64' if (self.target_cpu == 'amd64' and x64) elser'\%s' % self.target_cpu)def cross_dir(self, forcex86=False):r"""Cross platform 
specific subfolder.Parameters----------forcex86: boolUse 'x86' as current architecture even if current acritecture isnot x86.Return------subfolder: str'' if target architecture is current architecture,'\current_target' if not."""current = 'x86' if forcex86 else self.current_cpureturn ('' if self.target_cpu == current elseself.target_dir().replace('\\', '\\%s_' % current))class RegistryInfo:"""Microsoft Visual Studio related registry informations.Parameters----------platform_info: PlatformInfo"PlatformInfo" instance."""HKEYS = (winreg.HKEY_USERS,winreg.HKEY_CURRENT_USER,winreg.HKEY_LOCAL_MACHINE,winreg.HKEY_CLASSES_ROOT)def __init__(self, platform_info):self.pi = platform_info@propertydef visualstudio(self):"""Microsoft Visual Studio root registry key."""return 'VisualStudio'@propertydef sxs(self):"""Microsoft Visual Studio SxS registry key."""return os.path.join(self.visualstudio, 'SxS')@propertydef vc(self):"""Microsoft Visual C++ VC7 registry key."""return os.path.join(self.sxs, 'VC7')@propertydef vs(self):"""Microsoft Visual Studio VS7 registry key."""return os.path.join(self.sxs, 'VS7')@propertydef vc_for_python(self):"""Microsoft Visual C++ for Python registry key."""return r'DevDiv\VCForPython'@propertydef microsoft_sdk(self):"""Microsoft SDK registry key."""return 'Microsoft SDKs'@propertydef windows_sdk(self):"""Microsoft Windows/Platform SDK registry key."""return os.path.join(self.microsoft_sdk, 'Windows')@propertydef netfx_sdk(self):"""Microsoft .NET Framework SDK registry key."""return os.path.join(self.microsoft_sdk, 'NETFXSDK')@propertydef windows_kits_roots(self):"""Microsoft Windows Kits Roots registry key."""return r'Windows Kits\Installed Roots'def microsoft(self, key, x86=False):"""Return key in Microsoft software registry.Parameters----------key: strRegistry key path where look.x86: strForce x86 software registry.Return------str: value"""node64 = '' if self.pi.current_is_x86() or x86 else 'Wow6432Node'return os.path.join('Software', node64, 
'Microsoft', key)def lookup(self, key, name):"""Look for values in registry in Microsoft software registry.Parameters----------key: strRegistry key path where look.name: strValue name to find.Return------str: value"""KEY_READ = winreg.KEY_READopenkey = winreg.OpenKeyms = self.microsoftfor hkey in self.HKEYS:try:bkey = openkey(hkey, ms(key), 0, KEY_READ)except (OSError, IOError):if not self.pi.current_is_x86():try:bkey = openkey(hkey, ms(key, True), 0, KEY_READ)except (OSError, IOError):continueelse:continuetry:return winreg.QueryValueEx(bkey, name)[0]except (OSError, IOError):passclass SystemInfo:"""Microsoft Windows and Visual Studio related system inormations.Parameters----------registry_info: RegistryInfo"RegistryInfo" instance.vc_ver: floatRequired Microsoft Visual C++ version."""# Variables and properties in this class use originals CamelCase variables# names from Microsoft source files for more easy comparaison.WinDir = safe_env.get('WinDir', '')ProgramFiles = safe_env.get('ProgramFiles', '')ProgramFilesx86 = safe_env.get('ProgramFiles(x86)', ProgramFiles)def __init__(self, registry_info, vc_ver=None):self.ri = registry_infoself.pi = self.ri.piself.vc_ver = vc_ver or self._find_latest_available_vc_ver()def _find_latest_available_vc_ver(self):try:return self.find_available_vc_vers()[-1]except IndexError:err = 'No Microsoft Visual C++ version found'raise distutils.errors.DistutilsPlatformError(err)def find_available_vc_vers(self):"""Find all available Microsoft Visual C++ versions."""ms = self.ri.microsoftvckeys = (self.ri.vc, self.ri.vc_for_python, self.ri.vs)vc_vers = []for hkey in self.ri.HKEYS:for key in vckeys:try:bkey = winreg.OpenKey(hkey, ms(key), 0, winreg.KEY_READ)except (OSError, IOError):continuesubkeys, values, _ = winreg.QueryInfoKey(bkey)for i in range(values):try:ver = float(winreg.EnumValue(bkey, i)[0])if ver not in vc_vers:vc_vers.append(ver)except ValueError:passfor i in range(subkeys):try:ver = float(winreg.EnumKey(bkey, i))if ver not in 
vc_vers:vc_vers.append(ver)except ValueError:passreturn sorted(vc_vers)@propertydef VSInstallDir(self):"""Microsoft Visual Studio directory."""# Default pathname = 'Microsoft Visual Studio %0.1f' % self.vc_verdefault = os.path.join(self.ProgramFilesx86, name)# Try to get path from registry, if fail use default pathreturn self.ri.lookup(self.ri.vs, '%0.1f' % self.vc_ver) or default@propertydef VCInstallDir(self):"""Microsoft Visual C++ directory."""self.VSInstallDirguess_vc = self._guess_vc() or self._guess_vc_legacy()# Try to get "VC++ for Python" path from registry as default pathreg_path = os.path.join(self.ri.vc_for_python, '%0.1f' % self.vc_ver)python_vc = self.ri.lookup(reg_path, 'installdir')default_vc = os.path.join(python_vc, 'VC') if python_vc else guess_vc# Try to get path from registry, if fail use default pathpath = self.ri.lookup(self.ri.vc, '%0.1f' % self.vc_ver) or default_vcif not os.path.isdir(path):msg = 'Microsoft Visual C++ directory not found'raise distutils.errors.DistutilsPlatformError(msg)return pathdef _guess_vc(self):"""Locate Visual C for 2017"""if self.vc_ver <= 14.0:returndefault = r'VC\Tools\MSVC'guess_vc = os.path.join(self.VSInstallDir, default)# Subdir with VC exact version as nametry:vc_exact_ver = os.listdir(guess_vc)[-1]return os.path.join(guess_vc, vc_exact_ver)except (OSError, IOError, IndexError):passdef _guess_vc_legacy(self):"""Locate Visual C for versions prior to 2017"""default = r'Microsoft Visual Studio %0.1f\VC' % self.vc_verreturn os.path.join(self.ProgramFilesx86, default)@propertydef WindowsSdkVersion(self):"""Microsoft Windows SDK versions for specified MSVC++ version."""if self.vc_ver <= 9.0:return ('7.0', '6.1', '6.0a')elif self.vc_ver == 10.0:return ('7.1', '7.0a')elif self.vc_ver == 11.0:return ('8.0', '8.0a')elif self.vc_ver == 12.0:return ('8.1', '8.1a')elif self.vc_ver >= 14.0:return ('10.0', '8.1')@propertydef WindowsSdkLastVersion(self):"""Microsoft Windows SDK last version"""return 
self._use_last_dir_name(os.path.join(self.WindowsSdkDir, 'lib'))@propertydef WindowsSdkDir(self):"""Microsoft Windows SDK directory."""sdkdir = ''for ver in self.WindowsSdkVersion:# Try to get it from registryloc = os.path.join(self.ri.windows_sdk, 'v%s' % ver)sdkdir = self.ri.lookup(loc, 'installationfolder')if sdkdir:breakif not sdkdir or not os.path.isdir(sdkdir):# Try to get "VC++ for Python" version from registrypath = os.path.join(self.ri.vc_for_python, '%0.1f' % self.vc_ver)install_base = self.ri.lookup(path, 'installdir')if install_base:sdkdir = os.path.join(install_base, 'WinSDK')if not sdkdir or not os.path.isdir(sdkdir):# If fail, use default new pathfor ver in self.WindowsSdkVersion:intver = ver[:ver.rfind('.')]path = r'Microsoft SDKs\Windows Kits\%s' % (intver)d = os.path.join(self.ProgramFiles, path)if os.path.isdir(d):sdkdir = dif not sdkdir or not os.path.isdir(sdkdir):# If fail, use default old pathfor ver in self.WindowsSdkVersion:path = r'Microsoft SDKs\Windows\v%s' % verd = os.path.join(self.ProgramFiles, path)if os.path.isdir(d):sdkdir = dif not sdkdir:# If fail, use Platform SDKsdkdir = os.path.join(self.VCInstallDir, 'PlatformSDK')return sdkdir@propertydef WindowsSDKExecutablePath(self):"""Microsoft Windows SDK executable directory."""# Find WinSDK NetFx Tools registry dir nameif self.vc_ver <= 11.0:netfxver = 35arch = ''else:netfxver = 40hidex86 = True if self.vc_ver <= 12.0 else Falsearch = self.pi.current_dir(x64=True, hidex86=hidex86)fx = 'WinSDK-NetFx%dTools%s' % (netfxver, arch.replace('\\', '-'))# liste all possibles registry pathsregpaths = []if self.vc_ver >= 14.0:for ver in self.NetFxSdkVersion:regpaths += [os.path.join(self.ri.netfx_sdk, ver, fx)]for ver in self.WindowsSdkVersion:regpaths += [os.path.join(self.ri.windows_sdk, 'v%sA' % ver, fx)]# Return installation folder from the more recent pathfor path in regpaths:execpath = self.ri.lookup(path, 'installationfolder')if execpath:breakreturn execpath@propertydef 
FSharpInstallDir(self):"""Microsoft Visual F# directory."""path = r'%0.1f\Setup\F#' % self.vc_verpath = os.path.join(self.ri.visualstudio, path)return self.ri.lookup(path, 'productdir') or ''@propertydef UniversalCRTSdkDir(self):"""Microsoft Universal CRT SDK directory."""# Set Kit Roots versions for specified MSVC++ versionif self.vc_ver >= 14.0:vers = ('10', '81')else:vers = ()# Find path of the more recent Kitfor ver in vers:sdkdir = self.ri.lookup(self.ri.windows_kits_roots,'kitsroot%s' % ver)if sdkdir:breakreturn sdkdir or ''@propertydef UniversalCRTSdkLastVersion(self):"""Microsoft Universal C Runtime SDK last version"""return self._use_last_dir_name(os.path.join(self.UniversalCRTSdkDir, 'lib'))@propertydef NetFxSdkVersion(self):"""Microsoft .NET Framework SDK versions."""# Set FxSdk versions for specified MSVC++ versionif self.vc_ver >= 14.0:return ('4.6.1', '4.6')else:return ()@propertydef NetFxSdkDir(self):"""Microsoft .NET Framework SDK directory."""for ver in self.NetFxSdkVersion:loc = os.path.join(self.ri.netfx_sdk, ver)sdkdir = self.ri.lookup(loc, 'kitsinstallationfolder')if sdkdir:breakreturn sdkdir or ''@propertydef FrameworkDir32(self):"""Microsoft .NET Framework 32bit directory."""# Default pathguess_fw = os.path.join(self.WinDir, r'Microsoft.NET\Framework')# Try to get path from registry, if fail use default pathreturn self.ri.lookup(self.ri.vc, 'frameworkdir32') or guess_fw@propertydef FrameworkDir64(self):"""Microsoft .NET Framework 64bit directory."""# Default pathguess_fw = os.path.join(self.WinDir, r'Microsoft.NET\Framework64')# Try to get path from registry, if fail use default pathreturn self.ri.lookup(self.ri.vc, 'frameworkdir64') or guess_fw@propertydef FrameworkVersion32(self):"""Microsoft .NET Framework 32bit versions."""return self._find_dot_net_versions(32)@propertydef FrameworkVersion64(self):"""Microsoft .NET Framework 64bit versions."""return self._find_dot_net_versions(64)def _find_dot_net_versions(self, bits):"""Find Microsoft 
.NET Framework versions.Parameters----------bits: intPlatform number of bits: 32 or 64."""# Find actual .NET version in registryreg_ver = self.ri.lookup(self.ri.vc, 'frameworkver%d' % bits)dot_net_dir = getattr(self, 'FrameworkDir%d' % bits)ver = reg_ver or self._use_last_dir_name(dot_net_dir, 'v') or ''# Set .NET versions for specified MSVC++ versionif self.vc_ver >= 12.0:frameworkver = (ver, 'v4.0')elif self.vc_ver >= 10.0:frameworkver = ('v4.0.30319' if ver.lower()[:2] != 'v4' else ver,'v3.5')elif self.vc_ver == 9.0:frameworkver = ('v3.5', 'v2.0.50727')if self.vc_ver == 8.0:frameworkver = ('v3.0', 'v2.0.50727')return frameworkverdef _use_last_dir_name(self, path, prefix=''):"""Return name of the last dir in path or '' if no dir found.Parameters----------path: strUse dirs in this pathprefix: strUse only dirs startings by this prefix"""matching_dirs = (dir_namefor dir_name in reversed(os.listdir(path))if os.path.isdir(os.path.join(path, dir_name)) anddir_name.startswith(prefix))return next(matching_dirs, None) or ''class EnvironmentInfo:"""Return environment variables for specified Microsoft Visual C++ versionand platform : Lib, Include, Path and libpath.This function is compatible with Microsoft Visual C++ 9.0 to 14.0.Script created by analysing Microsoft environment configuration files like"vcvars[...].bat", "SetEnv.Cmd", "vcbuildtools.bat", ...Parameters----------arch: strTarget architecture.vc_ver: floatRequired Microsoft Visual C++ version. 
If not set, autodetect the lastversion.vc_min_ver: floatMinimum Microsoft Visual C++ version."""# Variables and properties in this class use originals CamelCase variables# names from Microsoft source files for more easy comparaison.def __init__(self, arch, vc_ver=None, vc_min_ver=0):self.pi = PlatformInfo(arch)self.ri = RegistryInfo(self.pi)self.si = SystemInfo(self.ri, vc_ver)if self.vc_ver < vc_min_ver:err = 'No suitable Microsoft Visual C++ version found'raise distutils.errors.DistutilsPlatformError(err)@propertydef vc_ver(self):"""Microsoft Visual C++ version."""return self.si.vc_ver@propertydef VSTools(self):"""Microsoft Visual Studio Tools"""paths = [r'Common7\IDE', r'Common7\Tools']if self.vc_ver >= 14.0:arch_subdir = self.pi.current_dir(hidex86=True, x64=True)paths += [r'Common7\IDE\CommonExtensions\Microsoft\TestWindow']paths += [r'Team Tools\Performance Tools']paths += [r'Team Tools\Performance Tools%s' % arch_subdir]return [os.path.join(self.si.VSInstallDir, path) for path in paths]@propertydef VCIncludes(self):"""Microsoft Visual C++ & Microsoft Foundation Class Includes"""return [os.path.join(self.si.VCInstallDir, 'Include'),os.path.join(self.si.VCInstallDir, r'ATLMFC\Include')]@propertydef VCLibraries(self):"""Microsoft Visual C++ & Microsoft Foundation Class Libraries"""if self.vc_ver >= 15.0:arch_subdir = self.pi.target_dir(x64=True)else:arch_subdir = self.pi.target_dir(hidex86=True)paths = ['Lib%s' % arch_subdir, r'ATLMFC\Lib%s' % arch_subdir]if self.vc_ver >= 14.0:paths += [r'Lib\store%s' % arch_subdir]return [os.path.join(self.si.VCInstallDir, path) for path in paths]@propertydef VCStoreRefs(self):"""Microsoft Visual C++ store references Libraries"""if self.vc_ver < 14.0:return []return [os.path.join(self.si.VCInstallDir, r'Lib\store\references')]@propertydef VCTools(self):"""Microsoft Visual C++ Tools"""si = self.sitools = [os.path.join(si.VCInstallDir, 'VCPackages')]forcex86 = True if self.vc_ver <= 10.0 else Falsearch_subdir = 
self.pi.cross_dir(forcex86)if arch_subdir:tools += [os.path.join(si.VCInstallDir, 'Bin%s' % arch_subdir)]if self.vc_ver == 14.0:path = 'Bin%s' % self.pi.current_dir(hidex86=True)tools += [os.path.join(si.VCInstallDir, path)]elif self.vc_ver >= 15.0:host_dir = (r'bin\HostX86%s' if self.pi.current_is_x86() elser'bin\HostX64%s')tools += [os.path.join(si.VCInstallDir, host_dir % self.pi.target_dir(x64=True))]if self.pi.current_cpu != self.pi.target_cpu:tools += [os.path.join(si.VCInstallDir, host_dir % self.pi.current_dir(x64=True))]else:tools += [os.path.join(si.VCInstallDir, 'Bin')]return tools@propertydef OSLibraries(self):"""Microsoft Windows SDK Libraries"""if self.vc_ver <= 10.0:arch_subdir = self.pi.target_dir(hidex86=True, x64=True)return [os.path.join(self.si.WindowsSdkDir, 'Lib%s' % arch_subdir)]else:arch_subdir = self.pi.target_dir(x64=True)lib = os.path.join(self.si.WindowsSdkDir, 'lib')libver = self._sdk_subdirreturn [os.path.join(lib, '%sum%s' % (libver , arch_subdir))]@propertydef OSIncludes(self):"""Microsoft Windows SDK Include"""include = os.path.join(self.si.WindowsSdkDir, 'include')if self.vc_ver <= 10.0:return [include, os.path.join(include, 'gl')]else:if self.vc_ver >= 14.0:sdkver = self._sdk_subdirelse:sdkver = ''return [os.path.join(include, '%sshared' % sdkver),os.path.join(include, '%sum' % sdkver),os.path.join(include, '%swinrt' % sdkver)]@propertydef OSLibpath(self):"""Microsoft Windows SDK Libraries Paths"""ref = os.path.join(self.si.WindowsSdkDir, 'References')libpath = []if self.vc_ver <= 9.0:libpath += self.OSLibrariesif self.vc_ver >= 11.0:libpath += [os.path.join(ref, r'CommonConfiguration\Neutral')]if self.vc_ver >= 14.0:libpath += [ref,os.path.join(self.si.WindowsSdkDir, 
'UnionMetadata'),os.path.join(ref,'Windows.Foundation.UniversalApiContract','1.0.0.0',),os.path.join(ref,'Windows.Foundation.FoundationContract','1.0.0.0',),os.path.join(ref,'Windows.Networking.Connectivity.WwanContract','1.0.0.0',),os.path.join(self.si.WindowsSdkDir,'ExtensionSDKs','Microsoft.VCLibs','%0.1f' % self.vc_ver,'References','CommonConfiguration','neutral',),]return libpath@propertydef SdkTools(self):"""Microsoft Windows SDK Tools"""return list(self._sdk_tools())def _sdk_tools(self):"""Microsoft Windows SDK Tools paths generator"""if self.vc_ver < 15.0:bin_dir = 'Bin' if self.vc_ver <= 11.0 else r'Bin\x86'yield os.path.join(self.si.WindowsSdkDir, bin_dir)if not self.pi.current_is_x86():arch_subdir = self.pi.current_dir(x64=True)path = 'Bin%s' % arch_subdiryield os.path.join(self.si.WindowsSdkDir, path)if self.vc_ver == 10.0 or self.vc_ver == 11.0:if self.pi.target_is_x86():arch_subdir = ''else:arch_subdir = self.pi.current_dir(hidex86=True, x64=True)path = r'Bin\NETFX 4.0 Tools%s' % arch_subdiryield os.path.join(self.si.WindowsSdkDir, path)elif self.vc_ver >= 15.0:path = os.path.join(self.si.WindowsSdkDir, 'Bin')arch_subdir = self.pi.current_dir(x64=True)sdkver = self.si.WindowsSdkLastVersionyield os.path.join(path, '%s%s' % (sdkver, arch_subdir))if self.si.WindowsSDKExecutablePath:yield self.si.WindowsSDKExecutablePath@propertydef _sdk_subdir(self):"""Microsoft Windows SDK version subdir"""ucrtver = self.si.WindowsSdkLastVersionreturn ('%s\\' % ucrtver) if ucrtver else ''@propertydef SdkSetup(self):"""Microsoft Windows SDK Setup"""if self.vc_ver > 9.0:return []return [os.path.join(self.si.WindowsSdkDir, 'Setup')]@propertydef FxTools(self):"""Microsoft .NET Framework Tools"""pi = self.pisi = self.siif self.vc_ver <= 10.0:include32 = Trueinclude64 = not pi.target_is_x86() and not pi.current_is_x86()else:include32 = pi.target_is_x86() or pi.current_is_x86()include64 = pi.current_cpu == 'amd64' or pi.target_cpu == 'amd64'tools = []if include32:tools += 
[os.path.join(si.FrameworkDir32, ver)for ver in si.FrameworkVersion32]if include64:tools += [os.path.join(si.FrameworkDir64, ver)for ver in si.FrameworkVersion64]return tools@propertydef NetFxSDKLibraries(self):"""Microsoft .Net Framework SDK Libraries"""if self.vc_ver < 14.0 or not self.si.NetFxSdkDir:return []arch_subdir = self.pi.target_dir(x64=True)return [os.path.join(self.si.NetFxSdkDir, r'lib\um%s' % arch_subdir)]@propertydef NetFxSDKIncludes(self):"""Microsoft .Net Framework SDK Includes"""if self.vc_ver < 14.0 or not self.si.NetFxSdkDir:return []return [os.path.join(self.si.NetFxSdkDir, r'include\um')]@propertydef VsTDb(self):"""Microsoft Visual Studio Team System Database"""return [os.path.join(self.si.VSInstallDir, r'VSTSDB\Deploy')]@propertydef MSBuild(self):"""Microsoft Build Engine"""if self.vc_ver < 12.0:return []elif self.vc_ver < 15.0:base_path = self.si.ProgramFilesx86arch_subdir = self.pi.current_dir(hidex86=True)else:base_path = self.si.VSInstallDirarch_subdir = ''path = r'MSBuild\%0.1f\bin%s' % (self.vc_ver, arch_subdir)build = [os.path.join(base_path, path)]if self.vc_ver >= 15.0:# Add Roslyn C# & Visual Basic Compilerbuild += [os.path.join(base_path, path, 'Roslyn')]return build@propertydef HTMLHelpWorkshop(self):"""Microsoft HTML Help Workshop"""if self.vc_ver < 11.0:return []return [os.path.join(self.si.ProgramFilesx86, 'HTML Help Workshop')]@propertydef UCRTLibraries(self):"""Microsoft Universal C Runtime SDK Libraries"""if self.vc_ver < 14.0:return []arch_subdir = self.pi.target_dir(x64=True)lib = os.path.join(self.si.UniversalCRTSdkDir, 'lib')ucrtver = self._ucrt_subdirreturn [os.path.join(lib, '%sucrt%s' % (ucrtver, arch_subdir))]@propertydef UCRTIncludes(self):"""Microsoft Universal C Runtime SDK Include"""if self.vc_ver < 14.0:return []include = os.path.join(self.si.UniversalCRTSdkDir, 'include')return [os.path.join(include, '%sucrt' % self._ucrt_subdir)]@propertydef _ucrt_subdir(self):"""Microsoft Universal C Runtime SDK version 
subdir"""ucrtver = self.si.UniversalCRTSdkLastVersionreturn ('%s\\' % ucrtver) if ucrtver else ''@propertydef FSharp(self):"""Microsoft Visual F#"""if self.vc_ver < 11.0 and self.vc_ver > 12.0:return []return self.si.FSharpInstallDir@propertydef VCRuntimeRedist(self):"""Microsoft Visual C++ runtime redistribuable dll"""arch_subdir = self.pi.target_dir(x64=True)if self.vc_ver < 15:redist_path = self.si.VCInstallDirvcruntime = 'redist%s\\Microsoft.VC%d0.CRT\\vcruntime%d0.dll'else:redist_path = self.si.VCInstallDir.replace('\\Tools', '\\Redist')vcruntime = 'onecore%s\\Microsoft.VC%d0.CRT\\vcruntime%d0.dll'# Visual Studio 2017 is still Visual C++ 14.0dll_ver = 14.0 if self.vc_ver == 15 else self.vc_vervcruntime = vcruntime % (arch_subdir, self.vc_ver, dll_ver)return os.path.join(redist_path, vcruntime)def return_env(self, exists=True):"""Return environment dict.Parameters----------exists: boolIt True, only return existing paths."""env = dict(include=self._build_paths('include',[self.VCIncludes,self.OSIncludes,self.UCRTIncludes,self.NetFxSDKIncludes],exists),lib=self._build_paths('lib',[self.VCLibraries,self.OSLibraries,self.FxTools,self.UCRTLibraries,self.NetFxSDKLibraries],exists),libpath=self._build_paths('libpath',[self.VCLibraries,self.FxTools,self.VCStoreRefs,self.OSLibpath],exists),path=self._build_paths('path',[self.VCTools,self.VSTools,self.VsTDb,self.SdkTools,self.SdkSetup,self.FxTools,self.MSBuild,self.HTMLHelpWorkshop,self.FSharp],exists),)if self.vc_ver >= 14 and os.path.isfile(self.VCRuntimeRedist):env['py_vcruntime_redist'] = self.VCRuntimeRedistreturn envdef _build_paths(self, name, spec_path_lists, exists):"""Given an environment variable name and specified paths,return a pathsep-separated string of paths containingunique, extant, directories from those paths and fromthe environment variable. 
Raise an error if no pathsare resolved."""# flatten spec_path_listsspec_paths = itertools.chain.from_iterable(spec_path_lists)env_paths = safe_env.get(name, '').split(os.pathsep)paths = itertools.chain(spec_paths, env_paths)extant_paths = list(filter(os.path.isdir, paths)) if exists else pathsif not extant_paths:msg = "%s environment variable is empty" % name.upper()raise distutils.errors.DistutilsPlatformError(msg)unique_paths = self._unique_everseen(extant_paths)return os.pathsep.join(unique_paths)# from Python docsdef _unique_everseen(self, iterable, key=None):"""List unique elements, preserving order.Remember all elements ever seen._unique_everseen('AAAABBBCCDAABBB') --> A B C D_unique_everseen('ABBCcAD', str.lower) --> A B C D"""seen = set()seen_add = seen.addif key is None:for element in filterfalse(seen.__contains__, iterable):seen_add(element)yield elementelse:for element in iterable:k = key(element)if k not in seen:seen_add(k)yield element
"""Monkey patching of distutils."""import sysimport distutils.filelistimport platformimport typesimport functoolsfrom importlib import import_moduleimport inspectfrom setuptools.extern import siximport setuptools__all__ = []"""Everything is private. Contact the project teamif you think you need this functionality."""def _get_mro(cls):"""Returns the bases classes for cls sorted by the MRO.Works around an issue on Jython where inspect.getmro will not return allbase classes if multiple classes share the same name. Instead, thisfunction will return a tuple containing the class itself, and the contentsof cls.__bases__. See https://github.com/pypa/setuptools/issues/1024."""if platform.python_implementation() == "Jython":return (cls,) + cls.__bases__return inspect.getmro(cls)def get_unpatched(item):lookup = (get_unpatched_class if isinstance(item, six.class_types) elseget_unpatched_function if isinstance(item, types.FunctionType) elselambda item: None)return lookup(item)def get_unpatched_class(cls):"""Protect against re-patching the distutils if reloadedAlso ensures that no other distutils extension monkeypatched the distutilsfirst."""external_bases = (clsfor cls in _get_mro(cls)if not cls.__module__.startswith('setuptools'))base = next(external_bases)if not base.__module__.startswith('distutils'):msg = "distutils has already been patched by %r" % clsraise AssertionError(msg)return basedef patch_all():# we can't patch distutils.cmd, alasdistutils.core.Command = setuptools.Commandhas_issue_12885 = sys.version_info <= (3, 5, 3)if has_issue_12885:# fix findall bug in distutils (http://bugs.python.org/issue12885)distutils.filelist.findall = setuptools.findallneeds_warehouse = (sys.version_info < (2, 7, 13)or(3, 0) < sys.version_info < (3, 3, 7)or(3, 4) < sys.version_info < (3, 4, 6)or(3, 5) < sys.version_info <= (3, 5, 3))if needs_warehouse:warehouse = 'https://upload.pypi.org/legacy/'distutils.config.PyPIRCCommand.DEFAULT_REPOSITORY = 
warehouse_patch_distribution_metadata_write_pkg_file()_patch_distribution_metadata_write_pkg_info()# Install Distribution throughout the distutilsfor module in distutils.dist, distutils.core, distutils.cmd:module.Distribution = setuptools.dist.Distribution# Install the patched Extensiondistutils.core.Extension = setuptools.extension.Extensiondistutils.extension.Extension = setuptools.extension.Extensionif 'distutils.command.build_ext' in sys.modules:sys.modules['distutils.command.build_ext'].Extension = (setuptools.extension.Extension)patch_for_msvc_specialized_compiler()def _patch_distribution_metadata_write_pkg_file():"""Patch write_pkg_file to also write Requires-Python/Requires-External"""distutils.dist.DistributionMetadata.write_pkg_file = (setuptools.dist.write_pkg_file)def _patch_distribution_metadata_write_pkg_info():"""Workaround issue #197 - Python 3 prior to 3.2.2 uses an environment-localencoding to save the pkg_info. Monkey-patch its write_pkg_info method tocorrect this undesirable behavior."""environment_local = (3,) <= sys.version_info[:3] < (3, 2, 2)if not environment_local:returndistutils.dist.DistributionMetadata.write_pkg_info = (setuptools.dist.write_pkg_info)def patch_func(replacement, target_mod, func_name):"""Patch func_name in target_mod with replacementImportant - original must be resolved by name to avoidpatching an already patched function."""original = getattr(target_mod, func_name)# set the 'unpatched' attribute on the replacement to# point to the original.vars(replacement).setdefault('unpatched', original)# replace the function in the original modulesetattr(target_mod, func_name, replacement)def get_unpatched_function(candidate):return getattr(candidate, 'unpatched')def patch_for_msvc_specialized_compiler():"""Patch functions in distutils to use standalone Microsoft Visual C++compilers."""# import late to avoid circular imports on Python < 3.5msvc = import_module('setuptools.msvc')if platform.system() != 'Windows':# Compilers only 
availables on Microsoft Windowsreturndef patch_params(mod_name, func_name):"""Prepare the parameters for patch_func to patch indicated function."""repl_prefix = 'msvc9_' if 'msvc9' in mod_name else 'msvc14_'repl_name = repl_prefix + func_name.lstrip('_')repl = getattr(msvc, repl_name)mod = import_module(mod_name)if not hasattr(mod, func_name):raise ImportError(func_name)return repl, mod, func_name# Python 2.7 to 3.4msvc9 = functools.partial(patch_params, 'distutils.msvc9compiler')# Python 3.5+msvc14 = functools.partial(patch_params, 'distutils._msvccompiler')try:# Patch distutils.msvc9compilerpatch_func(*msvc9('find_vcvarsall'))patch_func(*msvc9('query_vcvarsall'))except ImportError:passtry:# Patch distutils._msvccompiler._get_vc_envpatch_func(*msvc14('_get_vc_env'))except ImportError:passtry:# Patch distutils._msvccompiler.gen_lib_options for Numpypatch_func(*msvc14('gen_lib_options'))except ImportError:pass
"""Customized Mixin2to3 support:- adds support for converting doctestsThis module raises an ImportError on Python 2."""from distutils.util import Mixin2to3 as _Mixin2to3from distutils import logfrom lib2to3.refactor import RefactoringTool, get_fixers_from_packageimport setuptoolsclass DistutilsRefactoringTool(RefactoringTool):def log_error(self, msg, *args, **kw):log.error(msg, *args)def log_message(self, msg, *args):log.info(msg, *args)def log_debug(self, msg, *args):log.debug(msg, *args)class Mixin2to3(_Mixin2to3):def run_2to3(self, files, doctests=False):# See of the distribution option has been set, otherwise check the# setuptools default.if self.distribution.use_2to3 is not True:returnif not files:returnlog.info("Fixing " + " ".join(files))self.__build_fixer_names()self.__exclude_fixers()if doctests:if setuptools.run_2to3_on_doctests:r = DistutilsRefactoringTool(self.fixer_names)r.refactor(files, write=True, doctests_only=True)else:_Mixin2to3.run_2to3(self, files)def __build_fixer_names(self):if self.fixer_names:returnself.fixer_names = []for p in setuptools.lib2to3_fixer_packages:self.fixer_names.extend(get_fixers_from_package(p))if self.distribution.use_2to3_fixers is not None:for p in self.distribution.use_2to3_fixers:self.fixer_names.extend(get_fixers_from_package(p))def __exclude_fixers(self):excluded_fixers = getattr(self, 'exclude_fixers', [])if self.distribution.use_2to3_exclude_fixers is not None:excluded_fixers.extend(self.distribution.use_2to3_exclude_fixers)for fixer_name in excluded_fixers:if fixer_name in self.fixer_names:self.fixer_names.remove(fixer_name)
"""Launch the Python script on the command line aftersetuptools is bootstrapped via import."""# Note that setuptools gets imported implicitly by the# invocation of this script using python -m setuptools.launchimport tokenizeimport sysdef run():"""Run the script in sys.argv[1] as if it hadbeen invoked naturally."""__builtins__script_name = sys.argv[1]namespace = dict(__file__=script_name,__name__='__main__',__doc__=None,)sys.argv[:] = sys.argv[1:]open_ = getattr(tokenize, 'open', open)script = open_(script_name).read()norm_script = script.replace('\\r\\n', '\\n')code = compile(norm_script, script_name, 'exec')exec(code, namespace)if __name__ == '__main__':run()
"""Filename globbing utility. Mostly a copy of `glob` from Python 3.5.Changes include:* `yield from` and PEP3102 `*` removed.* `bytes` changed to `six.binary_type`.* Hidden files are not ignored."""import osimport reimport fnmatchfrom setuptools.extern.six import binary_type__all__ = ["glob", "iglob", "escape"]def glob(pathname, recursive=False):"""Return a list of paths matching a pathname pattern.The pattern may contain simple shell-style wildcards a lafnmatch. However, unlike fnmatch, filenames starting with adot are special cases that are not matched by '*' and '?'patterns.If recursive is true, the pattern '**' will match any files andzero or more directories and subdirectories."""return list(iglob(pathname, recursive=recursive))def iglob(pathname, recursive=False):"""Return an iterator which yields the paths matching a pathname pattern.The pattern may contain simple shell-style wildcards a lafnmatch. However, unlike fnmatch, filenames starting with adot are special cases that are not matched by '*' and '?'patterns.If recursive is true, the pattern '**' will match any files andzero or more directories and subdirectories."""it = _iglob(pathname, recursive)if recursive and _isrecursive(pathname):s = next(it) # skip empty stringassert not sreturn itdef _iglob(pathname, recursive):dirname, basename = os.path.split(pathname)if not has_magic(pathname):if basename:if os.path.lexists(pathname):yield pathnameelse:# Patterns ending with a slash should match only directoriesif os.path.isdir(dirname):yield pathnamereturnif not dirname:if recursive and _isrecursive(basename):for x in glob2(dirname, basename):yield xelse:for x in glob1(dirname, basename):yield xreturn# `os.path.split()` returns the argument itself as a dirname if it is a# drive or UNC path. Prevent an infinite recursion if a drive or UNC path# contains magic characters (i.e. 
r'\\?\C:').if dirname != pathname and has_magic(dirname):dirs = _iglob(dirname, recursive)else:dirs = [dirname]if has_magic(basename):if recursive and _isrecursive(basename):glob_in_dir = glob2else:glob_in_dir = glob1else:glob_in_dir = glob0for dirname in dirs:for name in glob_in_dir(dirname, basename):yield os.path.join(dirname, name)# These 2 helper functions non-recursively glob inside a literal directory.# They return a list of basenames. `glob1` accepts a pattern while `glob0`# takes a literal basename (so it only has to check for its existence).def glob1(dirname, pattern):if not dirname:if isinstance(pattern, binary_type):dirname = os.curdir.encode('ASCII')else:dirname = os.curdirtry:names = os.listdir(dirname)except OSError:return []return fnmatch.filter(names, pattern)def glob0(dirname, basename):if not basename:# `os.path.split()` returns an empty basename for paths ending with a# directory separator. 'q*x/' should match only directories.if os.path.isdir(dirname):return [basename]else:if os.path.lexists(os.path.join(dirname, basename)):return [basename]return []# This helper function recursively yields relative pathnames inside a literal# directory.def glob2(dirname, pattern):assert _isrecursive(pattern)yield pattern[:0]for x in _rlistdir(dirname):yield x# Recursively yields relative pathnames inside a literal directory.def _rlistdir(dirname):if not dirname:if isinstance(dirname, binary_type):dirname = binary_type(os.curdir, 'ASCII')else:dirname = os.curdirtry:names = os.listdir(dirname)except os.error:returnfor x in names:yield xpath = os.path.join(dirname, x) if dirname else xfor y in _rlistdir(path):yield os.path.join(x, y)magic_check = re.compile('([*?[])')magic_check_bytes = re.compile(b'([*?[])')def has_magic(s):if isinstance(s, binary_type):match = magic_check_bytes.search(s)else:match = magic_check.search(s)return match is not Nonedef _isrecursive(pattern):if isinstance(pattern, binary_type):return pattern == b'**'else:return pattern == '**'def 
escape(pathname):"""Escape all special characters."""# Escaping is done by wrapping any of "*?[" between square brackets.# Metacharacters do not work in the drive part and shouldn't be escaped.drive, pathname = os.path.splitdrive(pathname)if isinstance(pathname, binary_type):pathname = magic_check_bytes.sub(br'[\1]', pathname)else:pathname = magic_check.sub(r'[\1]', pathname)return drive + pathname
# This file originally from pip:
# https://github.com/pypa/pip/blob/8f4f15a5a95d7d5b511ceaee9ed261176c181970/src/pip/_internal/utils/glibc.py
from __future__ import absolute_import

import ctypes
import re
import warnings


def glibc_version_string():
    "Returns glibc version string, or None if not using glibc."
    # ctypes.CDLL(None) is dlopen(NULL): per the dlopen manpage, a NULL
    # filename returns a handle for the main program, so the lookup below
    # resolves against whichever libc this process is actually linked to.
    libc_handle = ctypes.CDLL(None)
    try:
        version_func = libc_handle.gnu_get_libc_version
    except AttributeError:
        # No gnu_get_libc_version symbol means we are not on glibc.
        return None
    # gnu_get_libc_version returns a C string such as "2.5".
    version_func.restype = ctypes.c_char_p
    raw = version_func()
    # py2 / py3 compatibility: normalize bytes to str.
    return raw if isinstance(raw, str) else raw.decode("ascii")


# Separated out from have_compatible_glibc for easier unit testing
def check_glibc_version(version_str, required_major, minimum_minor):
    """Return True if `version_str` names glibc `required_major` with a
    minor version of at least `minimum_minor`."""
    # A regexp instead of str.split so trailing junk after the minor
    # version is discarded -- patched/forked glibcs (e.g. Linaro's) use
    # strings like "2.20-2014.11". See gh-3588.
    parsed = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
    if parsed is None:
        warnings.warn("Expected glibc version with 2 components major.minor,"
                      " got: %s" % version_str, RuntimeWarning)
        return False
    major_ok = int(parsed.group("major")) == required_major
    minor_ok = int(parsed.group("minor")) >= minimum_minor
    return major_ok and minor_ok


def have_compatible_glibc(required_major, minimum_minor):
    """Return True if the running process is linked against a compatible
    glibc (same major, minor >= minimum)."""
    observed = glibc_version_string()
    return (observed is not None
            and check_glibc_version(observed, required_major, minimum_minor))


# platform.libc_ver regularly returns completely nonsensical glibc
# versions (e.g. reporting '2.7' or '2.9' where ldd says 2.22), which made
# the linehaul data generated by pip 8.1.2 and earlier useless and
# misleading. Solution: instead of using platform, use our code that
# actually works.
def libc_ver():
    """Try to determine the glibc version

    Returns a tuple of strings (lib, version) which default to empty strings
    in case the lookup fails.
    """
    observed = glibc_version_string()
    if observed is None:
        return ("", "")
    return ("glibc", observed)
"""Expose vendored packages under this namespace via pkg_resources'
VendorImporter."""

from pkg_resources.extern import VendorImporter

# Top-level vendored packages made importable through this module.
names = ('six',)

importer = VendorImporter(__name__, names, 'pkg_resources._vendor')
importer.install()
import re
import functools
import distutils.core
import distutils.errors
import distutils.extension

from setuptools.extern.six.moves import map

from .monkey import get_unpatched


def _have_cython():
    """
    Return True if Cython can be imported.
    """
    cython_impl = 'Cython.Distutils.build_ext'
    try:
        # from (cython_impl) import build_ext
        __import__(cython_impl, fromlist=['build_ext']).build_ext
        return True
    except Exception:
        pass
    return False


# for compatibility
have_pyrex = _have_cython

# Original distutils Extension, before setuptools monkey-patching.
_Extension = get_unpatched(distutils.core.Extension)


class Extension(_Extension):
    """Extension that uses '.c' files in place of '.pyx' files"""

    def __init__(self, name, sources, *args, **kw):
        # The *args is needed for compatibility as calls may use positional
        # arguments. py_limited_api may be set only via keyword.
        self.py_limited_api = kw.pop("py_limited_api", False)
        _Extension.__init__(self, name, sources, *args, **kw)

    def _convert_pyx_sources_to_lang(self):
        """
        Replace sources with .pyx extensions to sources with the target
        language extension. This mechanism allows language authors to supply
        pre-converted sources but to prefer the .pyx sources.
        """
        if _have_cython():
            # the build has Cython, so allow it to compile the .pyx files
            return
        lang = self.language or ''
        target_ext = '.cpp' if lang.lower() == 'c++' else '.c'
        # Fix: escape the dot — the previous pattern '.pyx$' treated it as
        # a regex wildcard, so e.g. 'foo.apyx' was also rewritten.
        sub = functools.partial(re.sub, r'\.pyx$', target_ext)
        self.sources = list(map(sub, self.sources))


class Library(Extension):
    """Just like a regular Extension, but built as a library instead"""
__all__ = ['Distribution']

import re
import os
import warnings
import numbers
import distutils.log
import distutils.core
import distutils.cmd
import distutils.dist
import itertools
from collections import defaultdict
from distutils.errors import (
    DistutilsOptionError, DistutilsPlatformError, DistutilsSetupError,
)
from distutils.util import rfc822_escape

from setuptools.extern import six
from setuptools.extern.six.moves import map, filter, filterfalse
from pkg_resources.extern import packaging

from setuptools.depends import Require
from setuptools import windows_support
from setuptools.monkey import get_unpatched
from setuptools.config import parse_configuration
import pkg_resources
from .py36compat import Distribution_parse_config_files

# Force the packaging submodules to be importable as attributes.
__import__('pkg_resources.extern.packaging.specifiers')
__import__('pkg_resources.extern.packaging.version')


def _get_unpatched(cls):
    """Deprecated shim; use setuptools.monkey.get_unpatched instead."""
    warnings.warn("Do not call this function", DeprecationWarning)
    return get_unpatched(cls)


# Based on Python 3.5 version
def write_pkg_file(self, file):
    """Write the PKG-INFO format data to a file object.
    """
    # Metadata-Version is bumped according to which optional fields are set.
    version = '1.0'
    if (self.provides or self.requires or self.obsoletes or
            self.classifiers or self.download_url):
        version = '1.1'
    # Setuptools specific for PEP 345
    if hasattr(self, 'python_requires'):
        version = '1.2'

    file.write('Metadata-Version: %s\n' % version)
    file.write('Name: %s\n' % self.get_name())
    file.write('Version: %s\n' % self.get_version())
    file.write('Summary: %s\n' % self.get_description())
    file.write('Home-page: %s\n' % self.get_url())
    file.write('Author: %s\n' % self.get_contact())
    file.write('Author-email: %s\n' % self.get_contact_email())
    file.write('License: %s\n' % self.get_license())
    if self.download_url:
        file.write('Download-URL: %s\n' % self.download_url)

    long_desc_content_type = getattr(
        self,
        'long_description_content_type',
        None
    ) or 'UNKNOWN'
    file.write('Description-Content-Type: %s\n' % long_desc_content_type)

    # rfc822_escape folds the long description into a header-safe value.
    long_desc = rfc822_escape(self.get_long_description())
    file.write('Description: %s\n' % long_desc)

    keywords = ','.join(self.get_keywords())
    if keywords:
        file.write('Keywords: %s\n' % keywords)

    self._write_list(file, 'Platform', self.get_platforms())
    self._write_list(file, 'Classifier', self.get_classifiers())

    # PEP 314
    self._write_list(file, 'Requires', self.get_requires())
    self._write_list(file, 'Provides', self.get_provides())
    self._write_list(file, 'Obsoletes', self.get_obsoletes())

    # Setuptools specific for PEP 345
    if hasattr(self, 'python_requires'):
        file.write('Requires-Python: %s\n' % self.python_requires)


# from Python 3.4
def write_pkg_info(self, base_dir):
    """Write the PKG-INFO file into the release tree.
    """
    with open(os.path.join(base_dir, 'PKG-INFO'), 'w',
              encoding='UTF-8') as pkg_info:
        self.write_pkg_file(pkg_info)


# Types accepted wherever a "sequence" setting is required.
sequence = tuple, list


def check_importable(dist, attr, value):
    """Verify that value parses as an importable 'module:attrs' string."""
    try:
        ep = pkg_resources.EntryPoint.parse('x=' + value)
        assert not ep.extras
    except (TypeError, ValueError, AttributeError, AssertionError):
        raise DistutilsSetupError(
            "%r must be importable 'module:attrs' string (got %r)"
            % (attr, value)
        )


def assert_string_list(dist, attr, value):
    """Verify that value is a string list or None"""
    try:
        # A plain string would join to itself; a real list of strings won't.
        assert ''.join(value) != value
    except (TypeError, ValueError, AttributeError, AssertionError):
        raise DistutilsSetupError(
            "%r must be a list of strings (got %r)" % (attr, value)
        )


def check_nsp(dist, attr, value):
    """Verify that namespace packages are valid"""
    ns_packages = value
    assert_string_list(dist, attr, ns_packages)
    for nsp in ns_packages:
        if not dist.has_contents_for(nsp):
            raise DistutilsSetupError(
                "Distribution contains no modules or packages for " +
                "namespace package %r" % nsp
            )
        parent, sep, child = nsp.rpartition('.')
        if parent and parent not in ns_packages:
            distutils.log.warn(
                "WARNING: %r is declared as a package namespace, but %r"
                " is not: please correct this in setup.py", nsp, parent
            )


def check_extras(dist, attr, value):
    """Verify that extras_require mapping is valid"""
    try:
        list(itertools.starmap(_check_extra, value.items()))
    except (TypeError, ValueError, AttributeError):
        raise DistutilsSetupError(
            "'extras_require' must be a dictionary whose values are "
            "strings or lists of strings containing valid project/version "
            "requirement specifiers."
        )


def _check_extra(extra, reqs):
    """Validate one extras_require item; the key may carry an
    ':marker' environment-marker suffix."""
    name, sep, marker = extra.partition(':')
    if marker and pkg_resources.invalid_marker(marker):
        raise DistutilsSetupError("Invalid environment marker: " + marker)
    list(pkg_resources.parse_requirements(reqs))


def assert_bool(dist, attr, value):
    """Verify that value is True, False, 0, or 1"""
    if bool(value) != value:
        tmpl = "{attr!r} must be a boolean value (got {value!r})"
        raise DistutilsSetupError(tmpl.format(attr=attr, value=value))


def check_requirements(dist, attr, value):
    """Verify that install_requires is a valid requirements list"""
    try:
        list(pkg_resources.parse_requirements(value))
        if isinstance(value, (dict, set)):
            raise TypeError("Unordered types are not allowed")
    except (TypeError, ValueError) as error:
        tmpl = (
            "{attr!r} must be a string or list of strings "
            "containing valid project/version requirement specifiers; {error}"
        )
        raise DistutilsSetupError(tmpl.format(attr=attr, error=error))


def check_specifier(dist, attr, value):
    """Verify that value is a valid version specifier"""
    try:
        packaging.specifiers.SpecifierSet(value)
    except packaging.specifiers.InvalidSpecifier as error:
        tmpl = (
            "{attr!r} must be a string "
            "containing valid version specifiers; {error}"
        )
        raise DistutilsSetupError(tmpl.format(attr=attr, error=error))


def check_entry_points(dist, attr, value):
    """Verify that entry_points map is parseable"""
    try:
        pkg_resources.EntryPoint.parse_map(value)
    except ValueError as e:
        raise DistutilsSetupError(e)


def check_test_suite(dist, attr, value):
    """Verify that test_suite is a string (dotted name of a suite)."""
    if not isinstance(value, six.string_types):
        raise DistutilsSetupError("test_suite must be a string")


def check_package_data(dist, attr, value):
    """Verify that value is a dictionary of package names to glob lists"""
    if isinstance(value, dict):
        for k, v in value.items():
            if not isinstance(k, str):
                break
            try:
                iter(v)
            except TypeError:
                break
        else:
            # Every key was a str and every value iterable: valid mapping.
            return
    raise 
DistutilsSetupError(attr + " must be a dictionary mapping package names to lists of ""wildcard patterns")def check_packages(dist, attr, value):for pkgname in value:if not re.match(r'\w+(\.\w+)*', pkgname):distutils.log.warn("WARNING: %r not a valid package name; please use only "".-separated package names in setup.py", pkgname)_Distribution = get_unpatched(distutils.core.Distribution)class Distribution(Distribution_parse_config_files, _Distribution):"""Distribution with support for features, tests, and package dataThis is an enhanced version of 'distutils.dist.Distribution' thateffectively adds the following new optional keyword arguments to 'setup()':'install_requires' -- a string or sequence of strings specifying projectversions that the distribution requires when installed, in the formatused by 'pkg_resources.require()'. They will be installedautomatically when the package is installed. If you wish to usepackages that are not available in PyPI, or want to give your users analternate download location, you can add a 'find_links' option to the'[easy_install]' section of your project's 'setup.cfg' file, and thensetuptools will scan the listed web pages for links that satisfy therequirements.'extras_require' -- a dictionary mapping names of optional "extras" to theadditional requirement(s) that using those extras incurs. For example,this::extras_require = dict(reST = ["docutils>=0.3", "reSTedit"])indicates that the distribution can optionally provide an extracapability called "reST", but it can only be used if docutils andreSTedit are installed. If the user installs your package usingEasyInstall and requests one of your extras, the correspondingadditional requirements will be installed if needed.'features' **deprecated** -- a dictionary mapping option names to'setuptools.Feature'objects. Features are a portion of the distribution that can beincluded or excluded based on user options, inter-feature dependencies,and availability on the current system. 
Excluded features are omittedfrom all setup commands, including source and binary distributions, soyou can create multiple distributions from the same source tree.Feature names should be valid Python identifiers, except that they maycontain the '-' (minus) sign. Features can be included or excludedvia the command line options '--with-X' and '--without-X', where 'X' isthe name of the feature. Whether a feature is included by default, andwhether you are allowed to control this from the command line, isdetermined by the Feature object. See the 'Feature' class for moreinformation.'test_suite' -- the name of a test suite to run for the 'test' command.If the user runs 'python setup.py test', the package will be installed,and the named test suite will be run. The format is the same aswould be used on a 'unittest.py' command line. That is, it is thedotted name of an object to import and call to generate a test suite.'package_data' -- a dictionary mapping package names to lists of filenamesor globs to use to find data files contained in the named packages.If the dictionary has filenames or globs listed under '""' (the emptystring), those names will be searched for in every package, in additionto any names for the specific package. Data files found using thesenames/globs will be installed along with the package, in the samelocation as the package. Note that globs are allowed to referencethe contents of non-package subdirectories, as long as you use '/' asa path separator. (Globs are automatically converted toplatform-specific paths at runtime.)In addition to these new keywords, this class also has several new methodsfor manipulating the distribution's contents. For example, the 'include()'and 'exclude()' methods can be thought of as in-place add and subtractcommands that add or remove packages, modules, extensions, and so on fromthe distribution. 
They are used by the feature subsystem to configure thedistribution for the included and excluded features."""_patched_dist = Nonedef patch_missing_pkg_info(self, attrs):# Fake up a replacement for the data that would normally come from# PKG-INFO, but which might not yet be built if this is a fresh# checkout.#if not attrs or 'name' not in attrs or 'version' not in attrs:returnkey = pkg_resources.safe_name(str(attrs['name'])).lower()dist = pkg_resources.working_set.by_key.get(key)if dist is not None and not dist.has_metadata('PKG-INFO'):dist._version = pkg_resources.safe_version(str(attrs['version']))self._patched_dist = distdef __init__(self, attrs=None):have_package_data = hasattr(self, "package_data")if not have_package_data:self.package_data = {}attrs = attrs or {}if 'features' in attrs or 'require_features' in attrs:Feature.warn_deprecated()self.require_features = []self.features = {}self.dist_files = []self.src_root = attrs.pop("src_root", None)self.patch_missing_pkg_info(attrs)self.long_description_content_type = attrs.get('long_description_content_type')self.dependency_links = attrs.pop('dependency_links', [])self.setup_requires = attrs.pop('setup_requires', [])for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'):vars(self).setdefault(ep.name, None)_Distribution.__init__(self, attrs)if isinstance(self.metadata.version, numbers.Number):# Some people apparently take "version number" too literally :)self.metadata.version = str(self.metadata.version)if self.metadata.version is not None:try:ver = packaging.version.Version(self.metadata.version)normalized_version = str(ver)if self.metadata.version != normalized_version:warnings.warn("Normalizing '%s' to '%s'" % (self.metadata.version,normalized_version,))self.metadata.version = normalized_versionexcept (packaging.version.InvalidVersion, TypeError):warnings.warn("The version specified (%r) is an invalid version, this ""may not work as expected with newer versions of ""setuptools, pip, and PyPI. 
Please see PEP 440 for more ""details." % self.metadata.version)self._finalize_requires()def _finalize_requires(self):"""Set `metadata.python_requires` and fix environment markersin `install_requires` and `extras_require`."""if getattr(self, 'python_requires', None):self.metadata.python_requires = self.python_requiresself._convert_extras_requirements()self._move_install_requirements_markers()def _convert_extras_requirements(self):"""Convert requirements in `extras_require` of the form`"extra": ["barbazquux; {marker}"]` to`"extra:{marker}": ["barbazquux"]`."""spec_ext_reqs = getattr(self, 'extras_require', None) or {}self._tmp_extras_require = defaultdict(list)for section, v in spec_ext_reqs.items():# Do not strip empty sections.self._tmp_extras_require[section]for r in pkg_resources.parse_requirements(v):suffix = self._suffix_for(r)self._tmp_extras_require[section + suffix].append(r)@staticmethoddef _suffix_for(req):"""For a requirement, return the 'extras_require' suffix forthat requirement."""return ':' + str(req.marker) if req.marker else ''def _move_install_requirements_markers(self):"""Move requirements in `install_requires` that are using environmentmarkers `extras_require`."""# divide the install_requires into two sets, simple ones still# handled by install_requires and more complex ones handled# by extras_require.def is_simple_req(req):return not req.markerspec_inst_reqs = getattr(self, 'install_requires', None) or ()inst_reqs = list(pkg_resources.parse_requirements(spec_inst_reqs))simple_reqs = filter(is_simple_req, inst_reqs)complex_reqs = filterfalse(is_simple_req, inst_reqs)self.install_requires = list(map(str, simple_reqs))for r in complex_reqs:self._tmp_extras_require[':' + str(r.marker)].append(r)self.extras_require = dict((k, [str(r) for r in map(self._clean_req, v)])for k, v in self._tmp_extras_require.items())def _clean_req(self, req):"""Given a Requirement, remove environment markers and return it."""req.marker = Nonereturn reqdef 
parse_config_files(self, filenames=None, ignore_option_errors=False):"""Parses configuration files from various levelsand loads configuration."""_Distribution.parse_config_files(self, filenames=filenames)parse_configuration(self, self.command_options,ignore_option_errors=ignore_option_errors)self._finalize_requires()def parse_command_line(self):"""Process features after parsing command line options"""result = _Distribution.parse_command_line(self)if self.features:self._finalize_features()return resultdef _feature_attrname(self, name):"""Convert feature name to corresponding option attribute name"""return 'with_' + name.replace('-', '_')def fetch_build_eggs(self, requires):"""Resolve pre-setup requirements"""resolved_dists = pkg_resources.working_set.resolve(pkg_resources.parse_requirements(requires),installer=self.fetch_build_egg,replace_conflicting=True,)for dist in resolved_dists:pkg_resources.working_set.add(dist, replace=True)return resolved_distsdef finalize_options(self):_Distribution.finalize_options(self)if self.features:self._set_global_opts_from_features()for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'):value = getattr(self, ep.name, None)if value is not None:ep.require(installer=self.fetch_build_egg)ep.load()(self, ep.name, value)if getattr(self, 'convert_2to3_doctests', None):# XXX may convert to set here when we can rely on set being builtinself.convert_2to3_doctests = [os.path.abspath(p)for p in self.convert_2to3_doctests]else:self.convert_2to3_doctests = []def get_egg_cache_dir(self):egg_cache_dir = os.path.join(os.curdir, '.eggs')if not os.path.exists(egg_cache_dir):os.mkdir(egg_cache_dir)windows_support.hide_file(egg_cache_dir)readme_txt_filename = os.path.join(egg_cache_dir, 'README.txt')with open(readme_txt_filename, 'w') as f:f.write('This directory contains eggs that were downloaded ''by setuptools to build, test, and run plug-ins.\n\n')f.write('This directory caches those eggs to prevent ''repeated 
downloads.\n\n')f.write('However, it is safe to delete this directory.\n\n')return egg_cache_dirdef fetch_build_egg(self, req):"""Fetch an egg needed for building"""from setuptools.command.easy_install import easy_installdist = self.__class__({'script_args': ['easy_install']})opts = dist.get_option_dict('easy_install')opts.clear()opts.update((k, v)for k, v in self.get_option_dict('easy_install').items()if k in (# don't use any other settings'find_links', 'site_dirs', 'index_url','optimize', 'site_dirs', 'allow_hosts',))if self.dependency_links:links = self.dependency_links[:]if 'find_links' in opts:links = opts['find_links'][1] + linksopts['find_links'] = ('setup', links)install_dir = self.get_egg_cache_dir()cmd = easy_install(dist, args=["x"], install_dir=install_dir,exclude_scripts=True,always_copy=False, build_directory=None, editable=False,upgrade=False, multi_version=True, no_report=True, user=False)cmd.ensure_finalized()return cmd.easy_install(req)def _set_global_opts_from_features(self):"""Add --with-X/--without-X options based on optional features"""go = []no = self.negative_opt.copy()for name, feature in self.features.items():self._set_feature(name, None)feature.validate(self)if feature.optional:descr = feature.descriptionincdef = ' (default)'excdef = ''if not feature.include_by_default():excdef, incdef = incdef, excdefnew = (('with-' + name, None, 'include ' + descr + incdef),('without-' + name, None, 'exclude ' + descr + excdef),)go.extend(new)no['without-' + name] = 'with-' + nameself.global_options = self.feature_options = go + self.global_optionsself.negative_opt = self.feature_negopt = nodef _finalize_features(self):"""Add/remove features and resolve dependencies between them"""# First, flag all the enabled items (and thus their dependencies)for name, feature in self.features.items():enabled = self.feature_is_included(name)if enabled or (enabled is None and feature.include_by_default()):feature.include_in(self)self._set_feature(name, 1)# Then disable 
the rest, so that off-by-default features don't# get flagged as errors when they're required by an enabled featurefor name, feature in self.features.items():if not self.feature_is_included(name):feature.exclude_from(self)self._set_feature(name, 0)def get_command_class(self, command):"""Pluggable version of get_command_class()"""if command in self.cmdclass:return self.cmdclass[command]eps = pkg_resources.iter_entry_points('distutils.commands', command)for ep in eps:ep.require(installer=self.fetch_build_egg)self.cmdclass[command] = cmdclass = ep.load()return cmdclasselse:return _Distribution.get_command_class(self, command)def print_commands(self):for ep in pkg_resources.iter_entry_points('distutils.commands'):if ep.name not in self.cmdclass:# don't require extras as the commands won't be invokedcmdclass = ep.resolve()self.cmdclass[ep.name] = cmdclassreturn _Distribution.print_commands(self)def get_command_list(self):for ep in pkg_resources.iter_entry_points('distutils.commands'):if ep.name not in self.cmdclass:# don't require extras as the commands won't be invokedcmdclass = ep.resolve()self.cmdclass[ep.name] = cmdclassreturn _Distribution.get_command_list(self)def _set_feature(self, name, status):"""Set feature's inclusion status"""setattr(self, self._feature_attrname(name), status)def feature_is_included(self, name):"""Return 1 if feature is included, 0 if excluded, 'None' if unknown"""return getattr(self, self._feature_attrname(name))def include_feature(self, name):"""Request inclusion of feature named 'name'"""if self.feature_is_included(name) == 0:descr = self.features[name].descriptionraise DistutilsOptionError(descr + " is required, but was excluded or is not available")self.features[name].include_in(self)self._set_feature(name, 1)def include(self, **attrs):"""Add items to distribution that are named in keyword argumentsFor example, 'dist.exclude(py_modules=["x"])' would add 'x' tothe distribution's 'py_modules' attribute, if it was not 
alreadythere.Currently, this method only supports inclusion for attributes that arelists or tuples. If you need to add support for adding to otherattributes in this or a subclass, you can add an '_include_X' method,where 'X' is the name of the attribute. The method will be called withthe value passed to 'include()'. So, 'dist.include(foo={"bar":"baz"})'will try to call 'dist._include_foo({"bar":"baz"})', which can thenhandle whatever special inclusion logic is needed."""for k, v in attrs.items():include = getattr(self, '_include_' + k, None)if include:include(v)else:self._include_misc(k, v)def exclude_package(self, package):"""Remove packages, modules, and extensions in named package"""pfx = package + '.'if self.packages:self.packages = [p for p in self.packagesif p != package and not p.startswith(pfx)]if self.py_modules:self.py_modules = [p for p in self.py_modulesif p != package and not p.startswith(pfx)]if self.ext_modules:self.ext_modules = [p for p in self.ext_modulesif p.name != package and not p.name.startswith(pfx)]def has_contents_for(self, package):"""Return true if 'exclude_package(package)' would do something"""pfx = package + '.'for p in self.iter_distribution_names():if p == package or p.startswith(pfx):return Truedef _exclude_misc(self, name, value):"""Handle 'exclude()' for list/tuple attrs without a special handler"""if not isinstance(value, sequence):raise DistutilsSetupError("%s: setting must be a list or tuple (%r)" % (name, value))try:old = getattr(self, name)except AttributeError:raise DistutilsSetupError("%s: No such distribution setting" % name)if old is not None and not isinstance(old, sequence):raise DistutilsSetupError(name + ": this setting cannot be changed via include/exclude")elif old:setattr(self, name, [item for item in old if item not in value])def _include_misc(self, name, value):"""Handle 'include()' for list/tuple attrs without a special handler"""if not isinstance(value, sequence):raise DistutilsSetupError("%s: setting must be 
a list (%r)" % (name, value))try:old = getattr(self, name)except AttributeError:raise DistutilsSetupError("%s: No such distribution setting" % name)if old is None:setattr(self, name, value)elif not isinstance(old, sequence):raise DistutilsSetupError(name + ": this setting cannot be changed via include/exclude")else:new = [item for item in value if item not in old]setattr(self, name, old + new)def exclude(self, **attrs):"""Remove items from distribution that are named in keyword argumentsFor example, 'dist.exclude(py_modules=["x"])' would remove 'x' fromthe distribution's 'py_modules' attribute. Excluding packages usesthe 'exclude_package()' method, so all of the package's containedpackages, modules, and extensions are also excluded.Currently, this method only supports exclusion from attributes that arelists or tuples. If you need to add support for excluding from otherattributes in this or a subclass, you can add an '_exclude_X' method,where 'X' is the name of the attribute. The method will be called withthe value passed to 'exclude()'. 
So, 'dist.exclude(foo={"bar":"baz"})'will try to call 'dist._exclude_foo({"bar":"baz"})', which can thenhandle whatever special exclusion logic is needed."""for k, v in attrs.items():exclude = getattr(self, '_exclude_' + k, None)if exclude:exclude(v)else:self._exclude_misc(k, v)def _exclude_packages(self, packages):if not isinstance(packages, sequence):raise DistutilsSetupError("packages: setting must be a list or tuple (%r)" % (packages,))list(map(self.exclude_package, packages))def _parse_command_opts(self, parser, args):# Remove --with-X/--without-X options when processing command argsself.global_options = self.__class__.global_optionsself.negative_opt = self.__class__.negative_opt# First, expand any aliasescommand = args[0]aliases = self.get_option_dict('aliases')while command in aliases:src, alias = aliases[command]del aliases[command] # ensure each alias can expand only once!import shlexargs[:1] = shlex.split(alias, True)command = args[0]nargs = _Distribution._parse_command_opts(self, parser, args)# Handle commands that want to consume all remaining argumentscmd_class = self.get_command_class(command)if getattr(cmd_class, 'command_consumes_arguments', None):self.get_option_dict(command)['args'] = ("command line", nargs)if nargs is not None:return []return nargsdef get_cmdline_options(self):"""Return a '{cmd: {opt:val}}' map of all command-line optionsOption names are all long, but do not include the leading '--', andcontain dashes rather than underscores. If the option doesn't takean argument (e.g. 
'--quiet'), the 'val' is 'None'.Note that options provided by config files are intentionally excluded."""d = {}for cmd, opts in self.command_options.items():for opt, (src, val) in opts.items():if src != "command line":continueopt = opt.replace('_', '-')if val == 0:cmdobj = self.get_command_obj(cmd)neg_opt = self.negative_opt.copy()neg_opt.update(getattr(cmdobj, 'negative_opt', {}))for neg, pos in neg_opt.items():if pos == opt:opt = negval = Nonebreakelse:raise AssertionError("Shouldn't be able to get here")elif val == 1:val = Noned.setdefault(cmd, {})[opt] = valreturn ddef iter_distribution_names(self):"""Yield all packages, modules, and extension names in distribution"""for pkg in self.packages or ():yield pkgfor module in self.py_modules or ():yield modulefor ext in self.ext_modules or ():if isinstance(ext, tuple):name, buildinfo = extelse:name = ext.nameif name.endswith('module'):name = name[:-6]yield namedef handle_display_options(self, option_order):"""If there were any non-global "display-only" options(--help-commands or the metadata display options) on the commandline, display the requested info and return true; else returnfalse."""import sysif six.PY2 or self.help_commands:return _Distribution.handle_display_options(self, option_order)# Stdout may be StringIO (e.g. in tests)import ioif not isinstance(sys.stdout, io.TextIOWrapper):return _Distribution.handle_display_options(self, option_order)# Don't wrap stdout if utf-8 is already the encoding. 
Provides# workaround for #334.if sys.stdout.encoding.lower() in ('utf-8', 'utf8'):return _Distribution.handle_display_options(self, option_order)# Print metadata in UTF-8 no matter the platformencoding = sys.stdout.encodingerrors = sys.stdout.errorsnewline = sys.platform != 'win32' and '\n' or Noneline_buffering = sys.stdout.line_bufferingsys.stdout = io.TextIOWrapper(sys.stdout.detach(), 'utf-8', errors, newline, line_buffering)try:return _Distribution.handle_display_options(self, option_order)finally:sys.stdout = io.TextIOWrapper(sys.stdout.detach(), encoding, errors, newline, line_buffering)class Feature:"""**deprecated** -- The `Feature` facility was never completely implementedor supported, `has reported issues<https://github.com/pypa/setuptools/issues/58>`_ and will be removed ina future version.A subset of the distribution that can be excluded if unneeded/wantedFeatures are created using these keyword arguments:'description' -- a short, human readable description of the feature, tobe used in error messages, and option help messages.'standard' -- if true, the feature is included by default if it isavailable on the current system. Otherwise, the feature is onlyincluded if requested via a command line '--with-X' option, or ifanother included feature requires it. The default setting is 'False'.'available' -- if true, the feature is available for installation on thecurrent system. The default setting is 'True'.'optional' -- if true, the feature's inclusion can be controlled from thecommand line, using the '--with-X' or '--without-X' options. Iffalse, the feature's inclusion status is determined automatically,based on 'availabile', 'standard', and whether any other featurerequires it. The default setting is 'True'.'require_features' -- a string or sequence of strings naming featuresthat should also be included if this feature is included. Defaults toempty list. 
May also contain 'Require' objects that should beadded/removed from the distribution.'remove' -- a string or list of strings naming packages to be removedfrom the distribution if this feature is *not* included. If thefeature *is* included, this argument is ignored. This argument existsto support removing features that "crosscut" a distribution, such asdefining a 'tests' feature that removes all the 'tests' subpackagesprovided by other features. The default for this argument is an emptylist. (Note: the named package(s) or modules must exist in the basedistribution when the 'setup()' function is initially called.)other keywords -- any other keyword arguments are saved, and passed tothe distribution's 'include()' and 'exclude()' methods when thefeature is included or excluded, respectively. So, for example, youcould pass 'packages=["a","b"]' to cause packages 'a' and 'b' to beadded or removed from the distribution as appropriate.A feature must include at least one 'requires', 'remove', or otherkeyword argument. Otherwise, it can't affect the distribution in any way.Note also that you can subclass 'Feature' to create your own specializedfeature types that modify the distribution in other ways when included orexcluded. See the docstrings for the various methods here for more detail.Aside from the methods, the only feature attributes that distributions lookat are 'description' and 'optional'."""@staticmethoddef warn_deprecated():msg = ("Features are deprecated and will be removed in a future ""version. 
See https://github.com/pypa/setuptools/issues/65.")warnings.warn(msg, DeprecationWarning, stacklevel=3)def __init__(self, description, standard=False, available=True,optional=True, require_features=(), remove=(), **extras):self.warn_deprecated()self.description = descriptionself.standard = standardself.available = availableself.optional = optionalif isinstance(require_features, (str, Require)):require_features = require_features,self.require_features = [r for r in require_features if isinstance(r, str)]er = [r for r in require_features if not isinstance(r, str)]if er:extras['require_features'] = erif isinstance(remove, str):remove = remove,self.remove = removeself.extras = extrasif not remove and not require_features and not extras:raise DistutilsSetupError("Feature %s: must define 'require_features', 'remove', or ""at least one of 'packages', 'py_modules', etc.")def include_by_default(self):"""Should this feature be included by default?"""return self.available and self.standarddef include_in(self, dist):"""Ensure feature and its requirements are included in distributionYou may override this in a subclass to perform additional operations onthe distribution. Note that this method may be called more than onceper feature, and so should be idempotent."""if not self.available:raise DistutilsPlatformError(self.description + " is required, ""but is not available on this platform")dist.include(**self.extras)for f in self.require_features:dist.include_feature(f)def exclude_from(self, dist):"""Ensure feature is excluded from distributionYou may override this in a subclass to perform additional operations onthe distribution. 
This method will be called at most once perfeature, and only after all included features have been asked toinclude themselves."""dist.exclude(**self.extras)if self.remove:for item in self.remove:dist.exclude_package(item)def validate(self, dist):"""Verify that feature makes sense in context of distributionThis method is called by the distribution just before it parses itscommand line. It checks to ensure that the 'remove' attribute, if any,contains only valid package/module names that are present in the basedistribution when 'setup()' is called. You may override it in asubclass to perform any other required validation of the featureagainst a target distribution."""for item in self.remove:if not dist.has_contents_for(item):raise DistutilsSetupError("%s wants to be able to remove %s, but the distribution"" doesn't contain any packages or modules under %s"% (self.description, item, item))
"""Support for declaring and probing build-time dependencies.

Provides the ``Require`` class (a prerequisite to building/installing a
distribution) plus helpers that locate a module on a search path and pull a
version constant out of it *without importing it* whenever possible.
"""
import sys
import imp
import marshal
from distutils.version import StrictVersion
from imp import PKG_DIRECTORY, PY_COMPILED, PY_SOURCE, PY_FROZEN

from .py33compat import Bytecode


__all__ = ['Require', 'find_module', 'get_module_constant', 'extract_constant']


class Require:
    """A prerequisite to building or installing a distribution"""

    def __init__(self, name, requested_version, module, homepage='',
                 attribute=None, format=None):

        # Default to StrictVersion semantics whenever a version was requested
        # but no explicit format was supplied.
        if format is None and requested_version is not None:
            format = StrictVersion

        if format is not None:
            requested_version = format(requested_version)
            # Only assume a '__version__' attribute when we actually have a
            # version format to compare against.
            if attribute is None:
                attribute = '__version__'

        # Bulk-assign all constructor arguments as instance attributes;
        # 'self' itself leaks in via locals() and is removed right after.
        self.__dict__.update(locals())
        del self.self

    def full_name(self):
        """Return full package/distribution name, w/version"""
        if self.requested_version is not None:
            return '%s-%s' % (self.name, self.requested_version)
        return self.name

    def version_ok(self, version):
        """Is 'version' sufficiently up-to-date?"""
        # NOTE: 'and' binds tighter than 'or' here — the comparison is only
        # performed when the version string is known; requirements with no
        # attribute or no format are always considered satisfied.
        return self.attribute is None or self.format is None or \
            str(version) != "unknown" and version >= self.requested_version

    def get_version(self, paths=None, default="unknown"):
        """Get version number of installed module, 'None', or 'default'

        Search 'paths' for module.  If not found, return 'None'.  If found,
        return the extracted version attribute, or 'default' if no version
        attribute was specified, or the value cannot be determined without
        importing the module.  The version is formatted according to the
        requirement's version format (if any), unless it is 'None' or the
        supplied 'default'.
        """

        if self.attribute is None:
            # No version attribute to read: mere presence is enough.
            try:
                f, p, i = find_module(self.module, paths)
                if f:
                    f.close()
                return default
            except ImportError:
                return None

        v = get_module_constant(self.module, self.attribute, default, paths)

        if v is not None and v is not default and self.format is not None:
            return self.format(v)

        return v

    def is_present(self, paths=None):
        """Return true if dependency is present on 'paths'"""
        return self.get_version(paths) is not None

    def is_current(self, paths=None):
        """Return true if dependency is present and up-to-date on 'paths'"""
        version = self.get_version(paths)
        if version is None:
            return False
        return self.version_ok(version)


def find_module(module, paths=None):
    """Just like 'imp.find_module()', but with package support"""
    parts = module.split('.')
    while parts:
        part = parts.pop(0)
        f, path, (suffix, mode, kind) = info = imp.find_module(part, paths)

        if kind == PKG_DIRECTORY:
            # Descend into the package; if this was the last dotted part,
            # resolve the package to its __init__ module.
            parts = parts or ['__init__']
            paths = [path]

        elif parts:
            # Found a non-package with dotted parts still pending.
            raise ImportError("Can't find %r in %s" % (parts, module))

    return info


def get_module_constant(module, symbol, default=-1, paths=None):
    """Find 'module' by searching 'paths', and extract 'symbol'

    Return 'None' if 'module' does not exist on 'paths', or it does not define
    'symbol'.  If the module defines 'symbol' as a constant, return the
    constant.  Otherwise, return 'default'.
    """

    try:
        f, path, (suffix, mode, kind) = find_module(module, paths)
    except ImportError:
        # Module doesn't exist
        return None

    try:
        if kind == PY_COMPILED:
            f.read(8)  # skip magic & date
            code = marshal.load(f)
        elif kind == PY_FROZEN:
            code = imp.get_frozen_object(module)
        elif kind == PY_SOURCE:
            code = compile(f.read(), path, 'exec')
        else:
            # Not something we can parse; we'll have to import it.  :(
            if module not in sys.modules:
                imp.load_module(module, f, path, (suffix, mode, kind))
            return getattr(sys.modules[module], symbol, None)

    finally:
        if f:
            f.close()

    # Scan the code object's bytecode rather than executing it.
    return extract_constant(code, symbol, default)


def extract_constant(code, symbol, default=-1):
    """Extract the constant value of 'symbol' from 'code'

    If the name 'symbol' is bound to a constant value by the Python code
    object 'code', return that value.  If 'symbol' is bound to an expression,
    return 'default'.  Otherwise, return 'None'.

    Return value is based on the first assignment to 'symbol'.  'symbol' must
    be a global, or at least a non-"fast" local in the code block.  That is,
    only 'STORE_NAME' and 'STORE_GLOBAL' opcodes are checked, and 'symbol'
    must be present in 'code.co_names'.
    """
    if symbol not in code.co_names:
        # name's not there, can't possibly be an assignment
        return None

    name_idx = list(code.co_names).index(symbol)

    # CPython opcode numbers used by the mini state machine below.
    STORE_NAME = 90
    STORE_GLOBAL = 97
    LOAD_CONST = 100

    const = default

    for byte_code in Bytecode(code):
        op = byte_code.opcode
        arg = byte_code.arg

        if op == LOAD_CONST:
            # Remember the most recently loaded constant...
            const = code.co_consts[arg]
        elif arg == name_idx and (op == STORE_NAME or op == STORE_GLOBAL):
            # ...and report it if the very next store targets our symbol.
            return const
        else:
            # Any other opcode means the pending value is not a constant.
            const = default


def _update_globals():
    """
    Patch the globals to remove the objects not available on some platforms.

    XXX it'd be better to test assertions about bytecode instead.
    """

    # Jython/IronPython do not expose CPython bytecode, so the
    # bytecode-scanning helpers cannot work there.
    if not sys.platform.startswith('java') and sys.platform != 'cli':
        return
    incompatible = 'extract_constant', 'get_module_constant'
    for name in incompatible:
        del globals()[name]
        __all__.remove(name)


_update_globals()
from distutils.dep_util import newer_group# yes, this is was almost entirely copy-pasted from# 'newer_pairwise()', this is just another convenience# function.def newer_pairwise_group(sources_groups, targets):"""Walk both arguments in parallel, testing if each source group is newerthan its corresponding target. Returns a pair of lists (sources_groups,targets) where sources is newer than target, according to the semanticsof 'newer_group()'."""if len(sources_groups) != len(targets):raise ValueError("'sources_group' and 'targets' must be the same length")# build a pair of lists (sources_groups, targets) where source is newern_sources = []n_targets = []for i in range(len(sources_groups)):if newer_group(sources_groups[i], targets[i]):n_sources.append(sources_groups[i])n_targets.append(targets[i])return n_sources, n_targets
from __future__ import absolute_import, unicode_literals
import io
import os
import sys
from collections import defaultdict
from functools import partial
from importlib import import_module

from distutils.errors import DistutilsOptionError, DistutilsFileError
from setuptools.extern.six import string_types


def read_configuration(
        filepath, find_others=False, ignore_option_errors=False):
    """Read given configuration file and returns options from it as a dict.

    :param str|unicode filepath: Path to configuration file
        to get options from.

    :param bool find_others: Whether to search for other configuration files
        which could be on in various places.

    :param bool ignore_option_errors: Whether to silently ignore
        options, values of which could not be resolved (e.g. due to exceptions
        in directives such as file:, attr:, etc.).
        If False exceptions are propagated as expected.

    :rtype: dict
    """
    # Imported lazily to avoid a circular import (dist imports this module).
    from setuptools.dist import Distribution, _Distribution

    filepath = os.path.abspath(filepath)

    if not os.path.isfile(filepath):
        raise DistutilsFileError(
            'Configuration file %s does not exist.' % filepath)

    # Parse relative to the config file's directory; restored in 'finally'.
    current_directory = os.getcwd()
    os.chdir(os.path.dirname(filepath))

    try:
        dist = Distribution()

        filenames = dist.find_config_files() if find_others else []
        if filepath not in filenames:
            filenames.append(filepath)

        # Call the base-class parser directly to skip setuptools' overrides.
        _Distribution.parse_config_files(dist, filenames=filenames)

        handlers = parse_configuration(
            dist, dist.command_options,
            ignore_option_errors=ignore_option_errors)

    finally:
        os.chdir(current_directory)

    return configuration_to_dict(handlers)


def configuration_to_dict(handlers):
    """Returns configuration data gathered by given handlers as a dict.

    :param list[ConfigHandler] handlers: Handlers list,
        usually from parse_configuration()

    :rtype: dict
    """
    config_dict = defaultdict(dict)

    for handler in handlers:

        obj_alias = handler.section_prefix
        target_obj = handler.target_obj

        # Only options the handler actually set are reported back.
        for option in handler.set_options:
            # Prefer an accessor ('get_<option>') when the target defines one.
            getter = getattr(target_obj, 'get_%s' % option, None)

            if getter is None:
                value = getattr(target_obj, option)

            else:
                value = getter()

            config_dict[obj_alias][option] = value

    return config_dict


def parse_configuration(
        distribution, command_options, ignore_option_errors=False):
    """Performs additional parsing of configuration options
    for a distribution.

    Returns a list of used option handlers.

    :param Distribution distribution:
    :param dict command_options:
    :param bool ignore_option_errors: Whether to silently ignore
        options, values of which could not be resolved (e.g. due to exceptions
        in directives such as file:, attr:, etc.).
        If False exceptions are propagated as expected.
    :rtype: list
    """
    meta = ConfigMetadataHandler(
        distribution.metadata, command_options, ignore_option_errors)
    meta.parse()

    options = ConfigOptionsHandler(
        distribution, command_options, ignore_option_errors)
    options.parse()

    return [meta, options]


class ConfigHandler(object):
    """Handles metadata supplied in configuration files."""

    section_prefix = None
    """Prefix for config sections handled by this handler.
    Must be provided by class heirs.

    """

    aliases = {}
    """Options aliases.
    For compatibility with various packages. E.g.: d2to1 and pbr.
    Note: `-` in keys is replaced with `_` by config parser.

    """

    def __init__(self, target_obj, options, ignore_option_errors=False):
        sections = {}

        # Keep only the sections this handler is responsible for, keyed by
        # the part after the prefix (e.g. 'options.extras_require' ->
        # 'extras_require').
        section_prefix = self.section_prefix
        for section_name, section_options in options.items():
            if not section_name.startswith(section_prefix):
                continue

            section_name = section_name.replace(section_prefix, '').strip('.')
            sections[section_name] = section_options

        self.ignore_option_errors = ignore_option_errors
        self.target_obj = target_obj
        self.sections = sections
        # Names of options successfully written to target_obj (see
        # __setitem__); consumed by configuration_to_dict().
        self.set_options = []

    @property
    def parsers(self):
        """Metadata item name to parser function mapping."""
        raise NotImplementedError(
            '%s must provide .parsers property' % self.__class__.__name__)

    def __setitem__(self, option_name, value):
        # Sentinel distinguishing "attribute missing" from any real value.
        unknown = tuple()
        target_obj = self.target_obj

        # Translate alias into real name.
        option_name = self.aliases.get(option_name, option_name)

        current_value = getattr(target_obj, option_name, unknown)

        if current_value is unknown:
            raise KeyError(option_name)

        if current_value:
            # Already inhabited. Skipping.
            return

        skip_option = False
        parser = self.parsers.get(option_name)
        if parser:
            try:
                value = parser(value)

            except Exception:
                # Swallow parse errors only when explicitly asked to.
                skip_option = True
                if not self.ignore_option_errors:
                    raise

        if skip_option:
            return

        # Prefer a mutator ('set_<option>') when the target defines one.
        setter = getattr(target_obj, 'set_%s' % option_name, None)
        if setter is None:
            setattr(target_obj, option_name, value)
        else:
            setter(value)

        self.set_options.append(option_name)

    @classmethod
    def _parse_list(cls, value, separator=','):
        """Represents value as a list.

        Value is split either by separator (defaults to comma) or by lines.

        :param value:
        :param separator: List items separator character.
        :rtype: list
        """
        if isinstance(value, list):  # _get_parser_compound case
            return value

        if '\n' in value:
            value = value.splitlines()
        else:
            value = value.split(separator)

        return [chunk.strip() for chunk in value if chunk.strip()]

    @classmethod
    def _parse_dict(cls, value):
        """Represents value as a dict.

        :param value:
        :rtype: dict
        """
        separator = '='
        result = {}
        for line in cls._parse_list(value):
            key, sep, val = line.partition(separator)
            if sep != separator:
                raise DistutilsOptionError(
                    'Unable to parse option value to dict: %s' % value)
            result[key.strip()] = val.strip()

        return result

    @classmethod
    def _parse_bool(cls, value):
        """Represents value as boolean.

        :param value:
        :rtype: bool
        """
        value = value.lower()
        return value in ('1', 'true', 'yes')

    @classmethod
    def _parse_file(cls, value):
        """Represents value as a string, allowing including text
        from nearest files using `file:` directive.

        Directive is sandboxed and won't reach anything outside
        directory with setup.py.

        Examples:
            file: LICENSE
            file: README.rst, CHANGELOG.md, src/file.txt

        :param str value:
        :rtype: str
        """
        include_directive = 'file:'

        if not isinstance(value, string_types):
            return value

        if not value.startswith(include_directive):
            return value

        spec = value[len(include_directive):]
        filepaths = (os.path.abspath(path.strip()) for path in spec.split(','))
        # '_assert_local(path) or True' runs the sandbox check for its
        # exception side effect while keeping the filter condition truthy.
        return '\n'.join(
            cls._read_file(path)
            for path in filepaths
            if (cls._assert_local(path) or True)
            and os.path.isfile(path)
        )

    @staticmethod
    def _assert_local(filepath):
        # Sandbox: refuse paths outside the current working directory.
        if not filepath.startswith(os.getcwd()):
            raise DistutilsOptionError(
                '`file:` directive can not access %s' % filepath)

    @staticmethod
    def _read_file(filepath):
        with io.open(filepath, encoding='utf-8') as f:
            return f.read()

    @classmethod
    def _parse_attr(cls, value):
        """Represents value as a module attribute.

        Examples:
            attr: package.attr
            attr: package.module.attr

        :param str value:
        :rtype: str
        """
        attr_directive = 'attr:'
        if not value.startswith(attr_directive):
            return value

        attrs_path = value.replace(attr_directive, '').strip().split('.')
        attr_name = attrs_path.pop()

        module_name = '.'.join(attrs_path)
        module_name = module_name or '__init__'

        # Make the project importable from the current directory; the
        # inserted entry is removed again in 'finally'.
        sys.path.insert(0, os.getcwd())
        try:
            module = import_module(module_name)
            value = getattr(module, attr_name)

        finally:
            sys.path = sys.path[1:]

        return value

    @classmethod
    def _get_parser_compound(cls, *parse_methods):
        """Returns parser function to represents value as a list.

        Parses a value applying given methods one after another.

        :param parse_methods:
        :rtype: callable
        """
        def parse(value):
            parsed = value

            for method in parse_methods:
                parsed = method(parsed)

            return parsed

        return parse

    @classmethod
    def _parse_section_to_dict(cls, section_options, values_parser=None):
        """Parses section options into a dictionary.

        Optionally applies a given parser to values.

        :param dict section_options:
        :param callable values_parser:
        :rtype: dict
        """
        value = {}
        values_parser = values_parser or (lambda val: val)
        # Each section option maps to a (source, value) pair; the source
        # (where the option came from) is deliberately dropped here.
        for key, (_, val) in section_options.items():
            value[key] = values_parser(val)
        return value

    def parse_section(self, section_options):
        """Parses configuration file section.

        :param dict section_options:
        """
        for (name, (_, value)) in section_options.items():
            try:
                self[name] = value

            except KeyError:
                pass  # Keep silent for a new option may appear anytime.

    def parse(self):
        """Parses configuration file items from one
        or more related sections.

        """
        for section_name, section_options in self.sections.items():

            method_postfix = ''
            if section_name:  # [section.option] variant
                method_postfix = '_%s' % section_name

            section_parser_method = getattr(
                self,
                # Dots in section names are translated into dunderscores.
                ('parse_section%s' % method_postfix).replace('.', '__'),
                None)

            if section_parser_method is None:
                raise DistutilsOptionError(
                    'Unsupported distribution option section: [%s.%s]' % (
                        self.section_prefix, section_name))

            section_parser_method(section_options)


class ConfigMetadataHandler(ConfigHandler):

    section_prefix = 'metadata'

    aliases = {
        'home_page': 'url',
        'summary': 'description',
        'classifier': 'classifiers',
        'platform': 'platforms',
    }

    strict_mode = False
    """We need to keep it loose, to be partially compatible with
    `pbr` and `d2to1` packages which also uses `metadata` section.

    """

    @property
    def parsers(self):
        """Metadata item name to parser function mapping."""
        parse_list = self._parse_list
        parse_file = self._parse_file

        return {
            'platforms': parse_list,
            'keywords': parse_list,
            'provides': parse_list,
            'requires': parse_list,
            'obsoletes': parse_list,
            'classifiers': self._get_parser_compound(parse_file, parse_list),
            'license': parse_file,
            'description': parse_file,
            'long_description': parse_file,
            'version': self._parse_version,
        }

    def _parse_version(self, value):
        """Parses `version` option value.

        :param value:
        :rtype: str
        """
        version = self._parse_attr(value)

        if callable(version):
            version = version()

        if not isinstance(version, string_types):
            # Accept iterables of version components, e.g. (1, 2, 3).
            if hasattr(version, '__iter__'):
                version = '.'.join(map(str, version))
            else:
                version = '%s' % version

        return version


class ConfigOptionsHandler(ConfigHandler):

    section_prefix = 'options'

    @property
    def parsers(self):
        """Metadata item name to parser function mapping."""
        parse_list = self._parse_list
        parse_list_semicolon = partial(self._parse_list, separator=';')
        parse_bool = self._parse_bool
        parse_dict = self._parse_dict

        return {
            'zip_safe': parse_bool,
            'use_2to3': parse_bool,
            'include_package_data': parse_bool,
            'package_dir': parse_dict,
            'use_2to3_fixers': parse_list,
            'use_2to3_exclude_fixers': parse_list,
            'convert_2to3_doctests': parse_list,
            'scripts': parse_list,
            'eager_resources': parse_list,
            'dependency_links': parse_list,
            'namespace_packages': parse_list,
            'install_requires': parse_list_semicolon,
            'setup_requires': parse_list_semicolon,
            'tests_require': parse_list_semicolon,
            'packages': self._parse_packages,
            'entry_points': self._parse_file,
            'py_modules': parse_list,
        }

    def _parse_packages(self, value):
        """Parses `packages` option value.

        :param value:
        :rtype: list
        """
        find_directive = 'find:'

        if not value.startswith(find_directive):
            return self._parse_list(value)

        # Read function arguments from a dedicated section.
        find_kwargs = self.parse_section_packages__find(
            self.sections.get('packages.find', {}))

        from setuptools import find_packages

        return find_packages(**find_kwargs)

    def parse_section_packages__find(self, section_options):
        """Parses `packages.find` configuration file section.

        To be used in conjunction with _parse_packages().

        :param dict section_options:
        """
        section_data = self._parse_section_to_dict(
            section_options, self._parse_list)

        valid_keys = ['where', 'include', 'exclude']

        # Keep only the keyword arguments find_packages() understands,
        # and only when they carry a non-empty value.
        find_kwargs = dict(
            [(k, v) for k, v in section_data.items() if k in valid_keys and v])

        where = find_kwargs.get('where')

        if where is not None:
            find_kwargs['where'] = where[0]  # cast list to single val

        return find_kwargs

    def parse_section_entry_points(self, section_options):
        """Parses `entry_points` configuration file section.

        :param dict section_options:
        """
        parsed = self._parse_section_to_dict(section_options, self._parse_list)
        self['entry_points'] = parsed

    def _parse_package_data(self, section_options):
        parsed = self._parse_section_to_dict(section_options, self._parse_list)

        # setup.cfg uses '*' for the catch-all package; setup() expects ''.
        root = parsed.get('*')

        if root:
            parsed[''] = root
            del parsed['*']

        return parsed

    def parse_section_package_data(self, section_options):
        """Parses `package_data` configuration file section.

        :param dict section_options:
        """
        self['package_data'] = self._parse_package_data(section_options)

    def parse_section_exclude_package_data(self, section_options):
        """Parses `exclude_package_data` configuration file section.

        :param dict section_options:
        """
        self['exclude_package_data'] = \
            self._parse_package_data(section_options)

    def parse_section_extras_require(self, section_options):
        """Parses `extras_require` configuration file section.

        :param dict section_options:
        """
        parse_list = partial(self._parse_list, separator=';')
        self['extras_require'] = self._parse_section_to_dict(
            section_options, parse_list)
# -*- coding: utf-8 -*-
"""upload_docs

Implements a Distutils 'upload_docs' subcommand (upload documentation to
PyPI's pythonhosted.org).
"""

from base64 import standard_b64encode
from distutils import log
from distutils.errors import DistutilsOptionError
import os
import socket
import zipfile
import tempfile
import shutil
import itertools
import functools

from setuptools.extern import six
from setuptools.extern.six.moves import http_client, urllib

from pkg_resources import iter_entry_points
from .upload import upload


def _encode(s):
    # On Python 3 preserve un-decodable bytes round-tripped into the string
    # (PEP 383); Python 2 byte strings get the strict default.
    errors = 'surrogateescape' if six.PY3 else 'strict'
    return s.encode('utf-8', errors)


class upload_docs(upload):
    # override the default repository as upload_docs isn't
    # supported by Warehouse (and won't be).
    DEFAULT_REPOSITORY = 'https://pypi.python.org/pypi/'

    description = 'Upload documentation to PyPI'

    user_options = [
        ('repository=', 'r',
         "url of repository [default: %s]" % upload.DEFAULT_REPOSITORY),
        ('show-response', None,
         'display full response text from server'),
        ('upload-dir=', None, 'directory to upload'),
    ]
    boolean_options = upload.boolean_options

    def has_sphinx(self):
        # True (well, truthy) only when no explicit upload dir was given AND
        # a 'build_sphinx' command is available via entry points; returns
        # None otherwise.
        if self.upload_dir is None:
            for ep in iter_entry_points('distutils.commands', 'build_sphinx'):
                return True

    sub_commands = [('build_sphinx', has_sphinx)]

    def initialize_options(self):
        upload.initialize_options(self)
        self.upload_dir = None
        self.target_dir = None

    def finalize_options(self):
        upload.finalize_options(self)
        if self.upload_dir is None:
            # No explicit dir: prefer Sphinx's output, else <build>/docs.
            if self.has_sphinx():
                build_sphinx = self.get_finalized_command('build_sphinx')
                self.target_dir = build_sphinx.builder_target_dir
            else:
                build = self.get_finalized_command('build')
                self.target_dir = os.path.join(build.build_base, 'docs')
        else:
            self.ensure_dirname('upload_dir')
            self.target_dir = self.upload_dir
        if 'pypi.python.org' in self.repository:
            log.warn("Upload_docs command is deprecated. Use RTD instead.")
        self.announce('Using upload directory %s' % self.target_dir)

    def create_zipfile(self, filename):
        """Zip the contents of target_dir into 'filename'.

        Raises DistutilsOptionError when the top level of target_dir
        contains no files at all.
        """
        zip_file = zipfile.ZipFile(filename, "w")
        try:
            self.mkpath(self.target_dir)  # just in case
            for root, dirs, files in os.walk(self.target_dir):
                if root == self.target_dir and not files:
                    tmpl = "no files found in upload directory '%s'"
                    raise DistutilsOptionError(tmpl % self.target_dir)
                for name in files:
                    full = os.path.join(root, name)
                    # Archive paths are relative to target_dir.
                    relative = root[len(self.target_dir):].lstrip(os.path.sep)
                    dest = os.path.join(relative, name)
                    zip_file.write(full, dest)
        finally:
            zip_file.close()

    def run(self):
        # Run sub commands
        for cmd_name in self.get_sub_commands():
            self.run_command(cmd_name)

        tmp_dir = tempfile.mkdtemp()
        name = self.distribution.metadata.get_name()
        zip_file = os.path.join(tmp_dir, "%s.zip" % name)
        try:
            self.create_zipfile(zip_file)
            self.upload_file(zip_file)
        finally:
            # Always clean up the temporary zip, even on upload failure.
            shutil.rmtree(tmp_dir)

    @staticmethod
    def _build_part(item, sep_boundary):
        """Yield the byte chunks of one multipart/form-data part."""
        key, values = item
        title = '\nContent-Disposition: form-data; name="%s"' % key
        # handle multiple entries for the same name
        if not isinstance(values, list):
            values = [values]
        for value in values:
            if isinstance(value, tuple):
                # (filename, content) pair — file upload field.
                title += '; filename="%s"' % value[0]
                value = value[1]
            else:
                value = _encode(value)
            yield sep_boundary
            yield _encode(title)
            yield b"\n\n"
            yield value
            if value and value[-1:] == b'\r':
                yield b'\n'  # write an extra newline (lurve Macs)

    @classmethod
    def _build_multipart(cls, data):
        """
        Build up the MIME payload for the POST data
        """
        boundary = b'--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
        sep_boundary = b'\n--' + boundary
        end_boundary = sep_boundary + b'--'
        end_items = end_boundary, b"\n",
        builder = functools.partial(
            cls._build_part,
            sep_boundary=sep_boundary,
        )
        # Lazily chain every field's chunks, then the closing boundary.
        part_groups = map(builder, data.items())
        parts = itertools.chain.from_iterable(part_groups)
        body_items = itertools.chain(parts, end_items)
        content_type = 'multipart/form-data; boundary=%s' % boundary.decode('ascii')
        return b''.join(body_items), content_type

    def upload_file(self, filename):
        """POST 'filename' to the configured repository as a doc upload."""
        with open(filename, 'rb') as f:
            content = f.read()
        meta = self.distribution.metadata
        data = {
            ':action': 'doc_upload',
            'name': meta.get_name(),
            'content': (os.path.basename(filename), content),
        }
        # set up the authentication
        credentials = _encode(self.username + ':' + self.password)
        credentials = standard_b64encode(credentials)
        if six.PY3:
            credentials = credentials.decode('ascii')
        auth = "Basic " + credentials

        body, ct = self._build_multipart(data)

        msg = "Submitting documentation to %s" % (self.repository)
        self.announce(msg, log.INFO)

        # build the Request
        # We can't use urllib2 since we need to send the Basic
        # auth right with the first request
        schema, netloc, url, params, query, fragments = \
            urllib.parse.urlparse(self.repository)
        assert not params and not query and not fragments

        if schema == 'http':
            conn = http_client.HTTPConnection(netloc)
        elif schema == 'https':
            conn = http_client.HTTPSConnection(netloc)
        else:
            raise AssertionError("unsupported schema " + schema)

        data = ''
        try:
            conn.connect()
            conn.putrequest("POST", url)
            content_type = ct
            conn.putheader('Content-type', content_type)
            conn.putheader('Content-length', str(len(body)))
            conn.putheader('Authorization', auth)
            conn.endheaders()
            conn.send(body)
        except socket.error as e:
            # Network failure: report and bail without reading a response.
            self.announce(str(e), log.ERROR)
            return

        r = conn.getresponse()
        if r.status == 200:
            msg = 'Server response (%s): %s' % (r.status, r.reason)
            self.announce(msg, log.INFO)
        elif r.status == 301:
            # Redirect counts as success; derive the docs URL if absent.
            location = r.getheader('Location')
            if location is None:
                location = 'https://pythonhosted.org/%s/' % meta.get_name()
            msg = 'Upload successful. Visit %s' % location
            self.announce(msg, log.INFO)
        else:
            msg = 'Upload failed (%s): %s' % (r.status, r.reason)
            self.announce(msg, log.ERROR)
        if self.show_response:
            print('-' * 75, r.read(), '-' * 75)
import getpassfrom distutils.command import upload as origclass upload(orig.upload):"""Override default upload behavior to obtain passwordin a variety of different ways."""def finalize_options(self):orig.upload.finalize_options(self)self.username = (self.username orgetpass.getuser())# Attempt to obtain password. Short circuit evaluation at the first# sign of success.self.password = (self.password orself._load_password_from_keyring() orself._prompt_for_password())def _load_password_from_keyring(self):"""Attempt to load password from keyring. Suppress Exceptions."""try:keyring = __import__('keyring')return keyring.get_password(self.repository, self.username)except Exception:passdef _prompt_for_password(self):"""Prompt for a password on the tty. Suppress Exceptions."""try:return getpass.getpass()except (Exception, KeyboardInterrupt):pass
import os
import operator
import sys
import contextlib
import itertools
import unittest
from distutils.errors import DistutilsError, DistutilsOptionError
from distutils import log
from unittest import TestLoader

from setuptools.extern import six
from setuptools.extern.six.moves import map, filter

from pkg_resources import (resource_listdir, resource_exists, normalize_path,
                           working_set, _namespace_packages, evaluate_marker,
                           add_activation_listener, require, EntryPoint)
from setuptools import Command


class ScanningLoader(TestLoader):
    """TestLoader that recurses into packages and de-duplicates modules."""

    def __init__(self):
        TestLoader.__init__(self)
        # modules already loaded; prevents infinite recursion on re-visits
        self._visited = set()

    def loadTestsFromModule(self, module, pattern=None):
        """Return a suite of all tests cases contained in the given module

        If the module is a package, load tests from all the modules in it.
        If the module has an ``additional_tests`` function, call it and add
        the return value to the tests.
        """
        if module in self._visited:
            return None
        self._visited.add(module)

        tests = []
        tests.append(TestLoader.loadTestsFromModule(self, module))

        if hasattr(module, "additional_tests"):
            tests.append(module.additional_tests())

        if hasattr(module, '__path__'):
            for file in resource_listdir(module.__name__, ''):
                if file.endswith('.py') and file != '__init__.py':
                    # plain submodule: strip the '.py'
                    submodule = module.__name__ + '.' + file[:-3]
                else:
                    if resource_exists(module.__name__, file + '/__init__.py'):
                        # subpackage
                        submodule = module.__name__ + '.' + file
                    else:
                        continue
                tests.append(self.loadTestsFromName(submodule))

        if len(tests) != 1:
            return self.suiteClass(tests)
        else:
            return tests[0]  # don't create a nested suite for only one return


# adapted from jaraco.classes.properties:NonDataProperty
class NonDataProperty(object):
    """Like @property, but instance attributes may shadow it (non-data)."""

    def __init__(self, fget):
        self.fget = fget

    def __get__(self, obj, objtype=None):
        if obj is None:
            return self
        return self.fget(obj)


class test(Command):
    """Command to run unit tests after in-place build"""

    description = "run unit tests after in-place build"

    user_options = [
        ('test-module=', 'm', "Run 'test_suite' in specified module"),
        ('test-suite=', 's',
         "Run single test, case or suite (e.g. 'module.test_suite')"),
        ('test-runner=', 'r', "Test runner to use"),
    ]

    def initialize_options(self):
        self.test_suite = None
        self.test_module = None
        self.test_loader = None
        self.test_runner = None

    def finalize_options(self):
        # --test-module and --test-suite are mutually exclusive
        if self.test_suite and self.test_module:
            msg = "You may specify a module or a suite, but not both"
            raise DistutilsOptionError(msg)

        if self.test_suite is None:
            if self.test_module is None:
                self.test_suite = self.distribution.test_suite
            else:
                self.test_suite = self.test_module + ".test_suite"

        if self.test_loader is None:
            self.test_loader = getattr(self.distribution, 'test_loader', None)
        if self.test_loader is None:
            self.test_loader = "setuptools.command.test:ScanningLoader"
        if self.test_runner is None:
            self.test_runner = getattr(self.distribution, 'test_runner', None)

    @NonDataProperty
    def test_args(self):
        # extra argv passed to unittest.main; see _test_args
        return list(self._test_args())

    def _test_args(self):
        # fall back to unittest discovery when no suite was configured
        if not self.test_suite and sys.version_info >= (2, 7):
            yield 'discover'
        if self.verbose:
            yield '--verbose'
        if self.test_suite:
            yield self.test_suite

    def with_project_on_sys_path(self, func):
        """
        Backward compatibility for project_on_sys_path context.
        """
        with self.project_on_sys_path():
            func()

    @contextlib.contextmanager
    def project_on_sys_path(self, include_dists=[]):
        """Build the project (in place or via 2to3) and put it on sys.path.

        Restores sys.path, sys.modules, and the pkg_resources working_set
        on exit.
        """
        with_2to3 = six.PY3 and getattr(self.distribution, 'use_2to3', False)

        if with_2to3:
            # If we run 2to3 we can not do this inplace:

            # Ensure metadata is up-to-date
            self.reinitialize_command('build_py', inplace=0)
            self.run_command('build_py')
            bpy_cmd = self.get_finalized_command("build_py")
            build_path = normalize_path(bpy_cmd.build_lib)

            # Build extensions
            self.reinitialize_command('egg_info', egg_base=build_path)
            self.run_command('egg_info')

            self.reinitialize_command('build_ext', inplace=0)
            self.run_command('build_ext')
        else:
            # Without 2to3 inplace works fine:
            self.run_command('egg_info')

            # Build extensions in-place
            self.reinitialize_command('build_ext', inplace=1)
            self.run_command('build_ext')

        ei_cmd = self.get_finalized_command("egg_info")

        old_path = sys.path[:]
        old_modules = sys.modules.copy()

        try:
            project_path = normalize_path(ei_cmd.egg_base)
            sys.path.insert(0, project_path)
            working_set.__init__()
            add_activation_listener(lambda dist: dist.activate())
            require('%s==%s' % (ei_cmd.egg_name, ei_cmd.egg_version))
            with self.paths_on_pythonpath([project_path]):
                yield
        finally:
            sys.path[:] = old_path
            sys.modules.clear()
            sys.modules.update(old_modules)
            working_set.__init__()

    @staticmethod
    @contextlib.contextmanager
    def paths_on_pythonpath(paths):
        """
        Add the indicated paths to the head of the PYTHONPATH environment
        variable so that subprocesses will also see the packages at
        these paths.

        Do this in a context that restores the value on exit.
        """
        nothing = object()  # sentinel: distinguishes unset from empty
        orig_pythonpath = os.environ.get('PYTHONPATH', nothing)
        current_pythonpath = os.environ.get('PYTHONPATH', '')
        try:
            prefix = os.pathsep.join(paths)
            to_join = filter(None, [prefix, current_pythonpath])
            new_path = os.pathsep.join(to_join)
            if new_path:
                os.environ['PYTHONPATH'] = new_path
            yield
        finally:
            if orig_pythonpath is nothing:
                os.environ.pop('PYTHONPATH', None)
            else:
                os.environ['PYTHONPATH'] = orig_pythonpath

    @staticmethod
    def install_dists(dist):
        """
        Install the requirements indicated by self.distribution and
        return an iterable of the dists that were built.
        """
        ir_d = dist.fetch_build_eggs(dist.install_requires)
        tr_d = dist.fetch_build_eggs(dist.tests_require or [])
        # extras keyed ':marker' apply when the environment marker holds
        er_d = dist.fetch_build_eggs(
            v for k, v in dist.extras_require.items()
            if k.startswith(':') and evaluate_marker(k[1:])
        )
        return itertools.chain(ir_d, tr_d, er_d)

    def run(self):
        installed_dists = self.install_dists(self.distribution)

        cmd = ' '.join(self._argv)
        if self.dry_run:
            self.announce('skipping "%s" (dry run)' % cmd)
            return

        self.announce('running "%s"' % cmd)

        paths = map(operator.attrgetter('location'), installed_dists)
        with self.paths_on_pythonpath(paths):
            with self.project_on_sys_path():
                self.run_tests()

    def run_tests(self):
        # Purge modules under test from sys.modules. The test loader will
        # re-import them from the build location. Required when 2to3 is used
        # with namespace packages.
        if six.PY3 and getattr(self.distribution, 'use_2to3', False):
            module = self.test_suite.split('.')[0]
            if module in _namespace_packages:
                del_modules = []
                if module in sys.modules:
                    del_modules.append(module)
                module += '.'
                for name in sys.modules:
                    if name.startswith(module):
                        del_modules.append(name)
                list(map(sys.modules.__delitem__, del_modules))

        test = unittest.main(
            None, None, self._argv,
            testLoader=self._resolve_as_ep(self.test_loader),
            testRunner=self._resolve_as_ep(self.test_runner),
            exit=False,
        )
        if not test.result.wasSuccessful():
            msg = 'Test failed: %s' % test.result
            self.announce(msg, log.ERROR)
            raise DistutilsError(msg)

    @property
    def _argv(self):
        return ['unittest'] + self.test_args

    @staticmethod
    def _resolve_as_ep(val):
        """Load the indicated attribute value, called, as if it were
        specified as an entry point.
        """
        if val is None:
            return
        parsed = EntryPoint.parse("x=" + val)
        return parsed.resolve()()
from distutils.util import convert_path
from distutils import log
from distutils.errors import DistutilsOptionError
import distutils
import os

from setuptools.extern.six.moves import configparser
from setuptools import Command

__all__ = ['config_file', 'edit_config', 'option_base', 'setopt']


def config_file(kind="local"):
    """Get the filename of the distutils, local, global, or per-user config

    `kind` must be one of "local", "global", or "user"
    """
    if kind == 'local':
        return 'setup.cfg'
    if kind == 'global':
        # lives next to the distutils package itself
        return os.path.join(
            os.path.dirname(distutils.__file__), 'distutils.cfg')
    if kind == 'user':
        # POSIX uses a dotfile; other platforms do not
        dot = os.name == 'posix' and '.' or ''
        return os.path.expanduser(convert_path("~/%spydistutils.cfg" % dot))
    raise ValueError(
        "config_file() type must be 'local', 'global', or 'user'", kind)


def edit_config(filename, settings, dry_run=False):
    """Edit a configuration file to include `settings`

    `settings` is a dictionary of dictionaries or ``None`` values, keyed by
    command/section name.  A ``None`` value means to delete the entire section,
    while a dictionary lists settings to be changed or deleted in that section.
    A setting of ``None`` means to delete that setting.
    """
    log.debug("Reading configuration from %s", filename)
    opts = configparser.RawConfigParser()
    opts.read([filename])
    for section, options in settings.items():
        if options is None:
            log.info("Deleting section [%s] from %s", section, filename)
            opts.remove_section(section)
        else:
            if not opts.has_section(section):
                log.debug("Adding new section [%s] to %s", section, filename)
                opts.add_section(section)
            for option, value in options.items():
                if value is None:
                    log.debug(
                        "Deleting %s.%s from %s",
                        section, option, filename
                    )
                    opts.remove_option(section, option)
                    # drop the section once its last option is removed
                    if not opts.options(section):
                        log.info("Deleting empty [%s] section from %s",
                                 section, filename)
                        opts.remove_section(section)
                else:
                    log.debug(
                        "Setting %s.%s to %r in %s",
                        section, option, value, filename
                    )
                    opts.set(section, option, value)

    log.info("Writing %s", filename)
    if not dry_run:
        with open(filename, 'w') as f:
            opts.write(f)


class option_base(Command):
    """Abstract base class for commands that mess with config files"""

    user_options = [
        ('global-config', 'g',
         "save options to the site-wide distutils.cfg file"),
        ('user-config', 'u',
         "save options to the current user's pydistutils.cfg file"),
        ('filename=', 'f',
         "configuration file to use (default=setup.cfg)"),
    ]

    boolean_options = [
        'global-config', 'user-config',
    ]

    def initialize_options(self):
        self.global_config = None
        self.user_config = None
        self.filename = None

    def finalize_options(self):
        # exactly one target file may be selected; default to setup.cfg
        filenames = []
        if self.global_config:
            filenames.append(config_file('global'))
        if self.user_config:
            filenames.append(config_file('user'))
        if self.filename is not None:
            filenames.append(self.filename)
        if not filenames:
            filenames.append(config_file('local'))
        if len(filenames) > 1:
            raise DistutilsOptionError(
                "Must specify only one configuration file option",
                filenames
            )
        self.filename, = filenames


class setopt(option_base):
    """Save command-line options to a file"""

    description = "set an option in setup.cfg or another config file"

    user_options = [
        ('command=', 'c', 'command to set an option for'),
        ('option=', 'o', 'option to set'),
        ('set-value=', 's', 'value of the option'),
        ('remove', 'r', 'remove (unset) the value'),
    ] + option_base.user_options

    boolean_options = option_base.boolean_options + ['remove']

    def initialize_options(self):
        option_base.initialize_options(self)
        self.command = None
        self.option = None
        self.set_value = None
        self.remove = None

    def finalize_options(self):
        option_base.finalize_options(self)
        if self.command is None or self.option is None:
            raise DistutilsOptionError("Must specify --command *and* --option")
        if self.set_value is None and not self.remove:
            raise DistutilsOptionError("Must specify --set-value or --remove")

    def run(self):
        # option names use underscores in config files
        edit_config(
            self.filename, {
                self.command: {self.option.replace('-', '_'): self.set_value}
            },
            self.dry_run
        )
from distutils import log
import distutils.command.sdist as orig
import os
import sys
import io
import contextlib

from setuptools.extern import six

from .py36compat import sdist_add_defaults

import pkg_resources

_default_revctrl = list


def walk_revctrl(dirname=''):
    """Find all files under revision control"""
    for ep in pkg_resources.iter_entry_points('setuptools.file_finders'):
        for item in ep.load()(dirname):
            yield item


class sdist(sdist_add_defaults, orig.sdist):
    """Smart sdist that finds anything supported by revision control"""

    user_options = [
        ('formats=', None,
         "formats for source distribution (comma-separated list)"),
        ('keep-temp', 'k',
         "keep the distribution tree around after creating " +
         "archive file(s)"),
        ('dist-dir=', 'd',
         "directory to put the source distribution archive(s) in "
         "[default: dist]"),
    ]

    negative_opt = {}

    README_EXTENSIONS = ['', '.rst', '.txt', '.md']
    READMES = tuple('README{0}'.format(ext) for ext in README_EXTENSIONS)

    def run(self):
        self.run_command('egg_info')
        ei_cmd = self.get_finalized_command('egg_info')
        self.filelist = ei_cmd.filelist
        self.filelist.append(os.path.join(ei_cmd.egg_info, 'SOURCES.txt'))
        self.check_readme()

        # Run sub commands
        for cmd_name in self.get_sub_commands():
            self.run_command(cmd_name)

        self.make_distribution()

        # register the produced archives with the distribution
        dist_files = getattr(self.distribution, 'dist_files', [])
        for file in self.archive_files:
            data = ('sdist', '', file)
            if data not in dist_files:
                dist_files.append(data)

    def initialize_options(self):
        orig.sdist.initialize_options(self)

        self._default_to_gztar()

    def _default_to_gztar(self):
        # only needed on Python prior to 3.6.
        if sys.version_info >= (3, 6, 0, 'beta', 1):
            return
        self.formats = ['gztar']

    def make_distribution(self):
        """
        Workaround for #516
        """
        with self._remove_os_link():
            orig.sdist.make_distribution(self)

    @staticmethod
    @contextlib.contextmanager
    def _remove_os_link():
        """
        In a context, remove and restore os.link if it exists
        """

        class NoValue:
            pass

        orig_val = getattr(os, 'link', NoValue)
        try:
            del os.link
        except Exception:
            pass
        try:
            yield
        finally:
            if orig_val is not NoValue:
                setattr(os, 'link', orig_val)

    def __read_template_hack(self):
        # This grody hack closes the template file (MANIFEST.in) if an
        #  exception occurs during read_template.
        # Doing so prevents an error when easy_install attempts to delete the
        #  file.
        try:
            orig.sdist.read_template(self)
        except Exception:
            _, _, tb = sys.exc_info()
            tb.tb_next.tb_frame.f_locals['template'].close()
            raise

    # Beginning with Python 2.7.2, 3.1.4, and 3.2.1, this leaky file handle
    #  has been fixed, so only override the method if we're using an earlier
    #  Python.
    has_leaky_handle = (
        sys.version_info < (2, 7, 2)
        or (3, 0) <= sys.version_info < (3, 1, 4)
        or (3, 2) <= sys.version_info < (3, 2, 1)
    )
    if has_leaky_handle:
        read_template = __read_template_hack

    def _add_defaults_python(self):
        """getting python files"""
        if self.distribution.has_pure_modules():
            build_py = self.get_finalized_command('build_py')
            self.filelist.extend(build_py.get_source_files())
            # This functionality is incompatible with include_package_data, and
            # will in fact create an infinite recursion if include_package_data
            # is True.  Use of include_package_data will imply that
            # distutils-style automatic handling of package_data is disabled
            if not self.distribution.include_package_data:
                for _, src_dir, _, filenames in build_py.data_files:
                    self.filelist.extend([os.path.join(src_dir, filename)
                                          for filename in filenames])

    def _add_defaults_data_files(self):
        try:
            if six.PY2:
                sdist_add_defaults._add_defaults_data_files(self)
            else:
                super()._add_defaults_data_files()
        except TypeError:
            log.warn("data_files contains unexpected objects")

    def check_readme(self):
        for f in self.READMES:
            if os.path.exists(f):
                return
        # NOTE(review): for/else with no break — the else always runs when
        # the loop completes; equivalent to un-indented code after the loop.
        else:
            self.warn(
                "standard file not found: should have one of " +
                ', '.join(self.READMES)
            )

    def make_release_tree(self, base_dir, files):
        orig.sdist.make_release_tree(self, base_dir, files)

        # Save any egg_info command line options used to create this sdist
        dest = os.path.join(base_dir, 'setup.cfg')
        if hasattr(os, 'link') and os.path.exists(dest):
            # unlink and re-copy, since it might be hard-linked, and
            #  we don't want to change the source version
            os.unlink(dest)
            self.copy_file('setup.cfg', dest)

        self.get_finalized_command('egg_info').save_version_info(dest)

    def _manifest_is_not_generated(self):
        # check for special comment used in 2.7.1 and higher
        if not os.path.isfile(self.manifest):
            return False

        with io.open(self.manifest, 'rb') as fp:
            first_line = fp.readline()
        return (first_line !=
                '# file GENERATED by distutils, do NOT edit\n'.encode())

    def read_manifest(self):
        """Read the manifest file (named by 'self.manifest') and use it to
        fill in 'self.filelist', the list of files to include in the source
        distribution.
        """
        log.info("reading manifest file '%s'", self.manifest)
        # NOTE(review): handle is only closed on the success path; an
        # exception mid-loop leaks it — consider a with-statement.
        manifest = open(self.manifest, 'rb')
        for line in manifest:
            # The manifest must contain UTF-8. See #303.
            if six.PY3:
                try:
                    line = line.decode('UTF-8')
                except UnicodeDecodeError:
                    log.warn("%r not UTF-8 decodable -- skipping" % line)
                    continue
            # ignore comments and blank lines
            line = line.strip()
            if line.startswith('#') or not line:
                continue
            self.filelist.append(line)
        manifest.close()
from setuptools.command.setopt import edit_config, option_baseclass saveopts(option_base):"""Save command-line options to a file"""description = "save supplied options to setup.cfg or other config file"def run(self):dist = self.distributionsettings = {}for cmd in dist.command_options:if cmd == 'saveopts':continue # don't save our own options!for opt, (src, val) in dist.get_option_dict(cmd).items():if src == "command line":settings.setdefault(cmd, {})[opt] = valedit_config(self.filename, settings, self.dry_run)
from distutils.util import convert_pathfrom distutils import logfrom distutils.errors import DistutilsOptionErrorimport osimport shutilfrom setuptools.extern import sixfrom setuptools import Commandclass rotate(Command):"""Delete older distributions"""description = "delete older distributions, keeping N newest files"user_options = [('match=', 'm', "patterns to match (required)"),('dist-dir=', 'd', "directory where the distributions are"),('keep=', 'k', "number of matching distributions to keep"),]boolean_options = []def initialize_options(self):self.match = Noneself.dist_dir = Noneself.keep = Nonedef finalize_options(self):if self.match is None:raise DistutilsOptionError("Must specify one or more (comma-separated) match patterns ""(e.g. '.zip' or '.egg')")if self.keep is None:raise DistutilsOptionError("Must specify number of files to keep")try:self.keep = int(self.keep)except ValueError:raise DistutilsOptionError("--keep must be an integer")if isinstance(self.match, six.string_types):self.match = [convert_path(p.strip()) for p in self.match.split(',')]self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))def run(self):self.run_command("egg_info")from glob import globfor pattern in self.match:pattern = self.distribution.get_name() + '*' + patternfiles = glob(os.path.join(self.dist_dir, pattern))files = [(os.path.getmtime(f), f) for f in files]files.sort()files.reverse()log.info("%d file(s) matching %s", len(files), pattern)files = files[self.keep:]for (t, f) in files:log.info("Deleting %s", f)if not self.dry_run:if os.path.isdir(f):shutil.rmtree(f)else:os.unlink(f)
import distutils.command.register as origclass register(orig.register):__doc__ = orig.register.__doc__def run(self):# Make sure that we are using valid current name/version infoself.run_command('egg_info')orig.register.run(self)
import os
from glob import glob
from distutils.util import convert_path
from distutils.command import sdist

from setuptools.extern.six.moves import filter


class sdist_add_defaults:
    """
    Mix-in providing forward-compatibility for functionality as found in
    distutils on Python 3.7.

    Do not edit the code in this class except to update functionality
    as implemented in distutils. Instead, override in the subclass.
    """

    def add_defaults(self):
        """Add all the default files to self.filelist:
          - README or README.txt
          - setup.py
          - test/test*.py
          - all pure Python modules mentioned in setup script
          - all files pointed by package_data (build_py)
          - all files defined in data_files.
          - all files defined as scripts.
          - all C sources listed as part of extensions or C libraries
            in the setup script (doesn't catch C headers!)
        Warns if (README or README.txt) or setup.py are missing; everything
        else is optional.
        """
        self._add_defaults_standards()
        self._add_defaults_optional()
        self._add_defaults_python()
        self._add_defaults_data_files()
        self._add_defaults_ext()
        self._add_defaults_c_libs()
        self._add_defaults_scripts()

    @staticmethod
    def _cs_path_exists(fspath):
        """
        Case-sensitive path existence check

        >>> sdist_add_defaults._cs_path_exists(__file__)
        True

        >>> sdist_add_defaults._cs_path_exists(__file__.upper())
        False
        """
        if not os.path.exists(fspath):
            return False
        # make absolute so we always have a directory
        abspath = os.path.abspath(fspath)
        directory, filename = os.path.split(abspath)
        # listdir preserves case even on case-insensitive filesystems
        return filename in os.listdir(directory)

    def _add_defaults_standards(self):
        standards = [self.READMES, self.distribution.script_name]
        for fn in standards:
            if isinstance(fn, tuple):
                # tuple entries are alternatives: first existing one wins
                alts = fn
                got_it = False
                for fn in alts:
                    if self._cs_path_exists(fn):
                        got_it = True
                        self.filelist.append(fn)
                        break

                if not got_it:
                    self.warn("standard file not found: should have one of " +
                              ', '.join(alts))
            else:
                if self._cs_path_exists(fn):
                    self.filelist.append(fn)
                else:
                    self.warn("standard file '%s' not found" % fn)

    def _add_defaults_optional(self):
        optional = ['test/test*.py', 'setup.cfg']
        for pattern in optional:
            files = filter(os.path.isfile, glob(pattern))
            self.filelist.extend(files)

    def _add_defaults_python(self):
        # build_py is used to get:
        #  - python modules
        #  - files defined in package_data
        build_py = self.get_finalized_command('build_py')

        # getting python files
        if self.distribution.has_pure_modules():
            self.filelist.extend(build_py.get_source_files())

        # getting package_data files
        # (computed in build_py.data_files by build_py.finalize_options)
        for pkg, src_dir, build_dir, filenames in build_py.data_files:
            for filename in filenames:
                self.filelist.append(os.path.join(src_dir, filename))

    def _add_defaults_data_files(self):
        # getting distribution.data_files
        if self.distribution.has_data_files():
            for item in self.distribution.data_files:
                if isinstance(item, str):
                    # plain file
                    item = convert_path(item)
                    if os.path.isfile(item):
                        self.filelist.append(item)
                else:
                    # a (dirname, filenames) tuple
                    dirname, filenames = item
                    for f in filenames:
                        f = convert_path(f)
                        if os.path.isfile(f):
                            self.filelist.append(f)

    def _add_defaults_ext(self):
        if self.distribution.has_ext_modules():
            build_ext = self.get_finalized_command('build_ext')
            self.filelist.extend(build_ext.get_source_files())

    def _add_defaults_c_libs(self):
        if self.distribution.has_c_libraries():
            build_clib = self.get_finalized_command('build_clib')
            self.filelist.extend(build_clib.get_source_files())

    def _add_defaults_scripts(self):
        if self.distribution.has_scripts():
            build_scripts = self.get_finalized_command('build_scripts')
            self.filelist.extend(build_scripts.get_source_files())


if hasattr(sdist.sdist, '_add_defaults_standards'):
    # disable the functionality already available upstream
    class sdist_add_defaults:
        pass
<?xml version="1.0" encoding="UTF-8" standalone="yes"?><assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0"><assemblyIdentity version="1.0.0.0"processorArchitecture="X86"name="%(name)s"type="win32"/><!-- Identify the application security requirements. --><trustInfo xmlns="urn:schemas-microsoft-com:asm.v3"><security><requestedPrivileges><requestedExecutionLevel level="asInvoker" uiAccess="false"/></requestedPrivileges></security></trustInfo></assembly>
from distutils import logimport distutils.command.install_scripts as origimport osimport sysfrom pkg_resources import Distribution, PathMetadata, ensure_directoryclass install_scripts(orig.install_scripts):"""Do normal script install, plus any egg_info wrapper scripts"""def initialize_options(self):orig.install_scripts.initialize_options(self)self.no_ep = Falsedef run(self):import setuptools.command.easy_install as eiself.run_command("egg_info")if self.distribution.scripts:orig.install_scripts.run(self) # run first to set up self.outfileselse:self.outfiles = []if self.no_ep:# don't install entry point scripts into .egg file!returnei_cmd = self.get_finalized_command("egg_info")dist = Distribution(ei_cmd.egg_base, PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info),ei_cmd.egg_name, ei_cmd.egg_version,)bs_cmd = self.get_finalized_command('build_scripts')exec_param = getattr(bs_cmd, 'executable', None)bw_cmd = self.get_finalized_command("bdist_wininst")is_wininst = getattr(bw_cmd, '_is_running', False)writer = ei.ScriptWriterif is_wininst:exec_param = "python.exe"writer = ei.WindowsScriptWriterif exec_param == sys.executable:# In case the path to the Python executable contains a space, wrap# it so it's not split up.exec_param = [exec_param]# resolve the writer to the environmentwriter = writer.best()cmd = writer.command_spec_class.best().from_param(exec_param)for args in writer.get_args(dist, cmd.as_header()):self.write_script(*args)def write_script(self, script_name, contents, mode="t", *ignored):"""Write an executable file to the scripts directory"""from setuptools.command.easy_install import chmod, current_umasklog.info("Installing %s script to %s", script_name, self.install_dir)target = os.path.join(self.install_dir, script_name)self.outfiles.append(target)mask = current_umask()if not self.dry_run:ensure_directory(target)f = open(target, "w" + mode)f.write(contents)f.close()chmod(target, 0o777 - mask)
import os
import imp
from itertools import product, starmap
import distutils.command.install_lib as orig


class install_lib(orig.install_lib):
    """Don't add compiled flags to filenames of non-Python files"""

    def run(self):
        self.build()
        outfiles = self.install()
        if outfiles is not None:
            # always compile, in case we have any extension stubs to deal with
            self.byte_compile(outfiles)

    def get_exclusions(self):
        """
        Return a collections.Sized collections.Container of paths to be
        excluded for single_version_externally_managed
        installations.
        """
        all_packages = (
            pkg
            for ns_pkg in self._get_SVEM_NSPs()
            for pkg in self._all_packages(ns_pkg)
        )

        excl_specs = product(all_packages, self._gen_exclusion_paths())
        return set(starmap(self._exclude_pkg_path, excl_specs))

    def _exclude_pkg_path(self, pkg, exclusion_path):
        """
        Given a package name and exclusion path within that package,
        compute the full exclusion path.
        """
        parts = pkg.split('.') + [exclusion_path]
        return os.path.join(self.install_dir, *parts)

    @staticmethod
    def _all_packages(pkg_name):
        """
        >>> list(install_lib._all_packages('foo.bar.baz'))
        ['foo.bar.baz', 'foo.bar', 'foo']
        """
        while pkg_name:
            yield pkg_name
            pkg_name, sep, child = pkg_name.rpartition('.')

    def _get_SVEM_NSPs(self):
        """
        Get namespace packages (list) but only for
        single_version_externally_managed installations and empty otherwise.
        """
        # TODO: is it necessary to short-circuit here? i.e. what's the cost
        # if get_finalized_command is called even when namespace_packages is
        # False?
        if not self.distribution.namespace_packages:
            return []

        install_cmd = self.get_finalized_command('install')
        svem = install_cmd.single_version_externally_managed

        return self.distribution.namespace_packages if svem else []

    @staticmethod
    def _gen_exclusion_paths():
        """
        Generate file paths to be excluded for namespace packages (bytecode
        cache files).
        """
        # always exclude the package module itself
        yield '__init__.py'

        yield '__init__.pyc'
        yield '__init__.pyo'

        # imp.get_tag is absent on some implementations; no __pycache__ names
        # can be computed without it
        if not hasattr(imp, 'get_tag'):
            return

        base = os.path.join('__pycache__', '__init__.' + imp.get_tag())
        yield base + '.pyc'
        yield base + '.pyo'
        yield base + '.opt-1.pyc'
        yield base + '.opt-2.pyc'

    def copy_tree(
            self, infile, outfile,
            preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1
    ):
        assert preserve_mode and preserve_times and not preserve_symlinks
        exclude = self.get_exclusions()

        if not exclude:
            return orig.install_lib.copy_tree(self, infile, outfile)

        # Exclude namespace package __init__.py* files from the output

        from setuptools.archive_util import unpack_directory
        from distutils import log

        outfiles = []

        def pf(src, dst):
            if dst in exclude:
                log.warn("Skipping installation of %s (namespace package)",
                         dst)
                return False

            log.info("copying %s -> %s", src, os.path.dirname(dst))
            outfiles.append(dst)
            return dst

        unpack_directory(infile, outfile, pf)

        return outfiles

    def get_outputs(self):
        outputs = orig.install_lib.get_outputs(self)
        exclude = self.get_exclusions()
        if exclude:
            return [f for f in outputs if f not in exclude]
        return outputs
from distutils import log, dir_utilimport osfrom setuptools import Commandfrom setuptools import namespacesfrom setuptools.archive_util import unpack_archiveimport pkg_resourcesclass install_egg_info(namespaces.Installer, Command):"""Install an .egg-info directory for the package"""description = "Install an .egg-info directory for the package"user_options = [('install-dir=', 'd', "directory to install to"),]def initialize_options(self):self.install_dir = Nonedef finalize_options(self):self.set_undefined_options('install_lib',('install_dir', 'install_dir'))ei_cmd = self.get_finalized_command("egg_info")basename = pkg_resources.Distribution(None, None, ei_cmd.egg_name, ei_cmd.egg_version).egg_name() + '.egg-info'self.source = ei_cmd.egg_infoself.target = os.path.join(self.install_dir, basename)self.outputs = []def run(self):self.run_command('egg_info')if os.path.isdir(self.target) and not os.path.islink(self.target):dir_util.remove_tree(self.target, dry_run=self.dry_run)elif os.path.exists(self.target):self.execute(os.unlink, (self.target,), "Removing " + self.target)if not self.dry_run:pkg_resources.ensure_directory(self.target)self.execute(self.copytree, (), "Copying %s to %s" % (self.source, self.target))self.install_namespaces()def get_outputs(self):return self.outputsdef copytree(self):# Copy the .egg-info tree to site-packagesdef skimmer(src, dst):# filter out source-control directories; note that 'src' is always# a '/'-separated path, regardless of platform. 'dst' is a# platform-specific path.for skip in '.svn/', 'CVS/':if src.startswith(skip) or '/' + skip in src:return Noneself.outputs.append(dst)log.debug("Copying %s to %s", src, dst)return dstunpack_archive(self.source, self.target, skimmer)
from distutils.errors import DistutilsArgError
import inspect
import glob
import warnings
import platform
import distutils.command.install as orig

import setuptools

# Prior to numpy 1.9, NumPy relies on the '_install' name, so provide it for
# now. See https://github.com/pypa/setuptools/issues/199/
_install = orig.install


class install(orig.install):
    """Use easy_install to install the package, w/dependencies"""

    user_options = orig.install.user_options + [
        ('old-and-unmanageable', None, "Try not to use this!"),
        ('single-version-externally-managed', None,
         "used by system package builders to create 'flat' eggs"),
    ]
    boolean_options = orig.install.boolean_options + [
        'old-and-unmanageable', 'single-version-externally-managed',
    ]
    # Sub-commands setuptools adds on top of distutils' install.
    new_commands = [
        ('install_egg_info', lambda self: True),
        ('install_scripts', lambda self: True),
    ]
    _nc = dict(new_commands)

    def initialize_options(self):
        orig.install.initialize_options(self)
        self.old_and_unmanageable = None
        self.single_version_externally_managed = None

    def finalize_options(self):
        orig.install.finalize_options(self)
        if self.root:
            # --root implies a system-packager style "flat" install.
            self.single_version_externally_managed = True
        elif self.single_version_externally_managed:
            # NOTE(review): inside this elif, self.root is already falsy, so
            # the `not self.root` test below is redundant (always True).
            if not self.root and not self.record:
                raise DistutilsArgError(
                    "You must specify --record or --root when building system"
                    " packages"
                )

    def handle_extra_path(self):
        if self.root or self.single_version_externally_managed:
            # explicit backward-compatibility mode, allow extra_path to work
            return orig.install.handle_extra_path(self)

        # Ignore extra_path when installing an egg (or being run by another
        # command without --root or --single-version-externally-managed
        self.path_file = None
        self.extra_dirs = ''

    def run(self):
        # Explicit request for old-style install?  Just do it
        if self.old_and_unmanageable or self.single_version_externally_managed:
            return orig.install.run(self)

        if not self._called_from_setup(inspect.currentframe()):
            # Run in backward-compatibility mode to support bdist_* commands.
            orig.install.run(self)
        else:
            self.do_egg_install()

    @staticmethod
    def _called_from_setup(run_frame):
        """
        Attempt to detect whether run() was called from setup() or by another
        command.  If called by setup(), the parent caller will be the
        'run_command' method in 'distutils.dist', and *its* caller will be
        the 'run_commands' method.  If called any other way, the
        immediate caller *might* be 'run_command', but it won't have been
        called by 'run_commands'.  Return True in that case or if a call stack
        is unavailable.  Return False otherwise.
        """
        if run_frame is None:
            # Interpreters without frame introspection (e.g. IronPython
            # without -X:Frames): assume the setup() path.
            msg = "Call stack not available. bdist_* commands may fail."
            warnings.warn(msg)
            if platform.python_implementation() == 'IronPython':
                msg = "For best results, pass -X:Frames to enable call stack."
                warnings.warn(msg)
            return True
        # Two frames up is run()'s caller's caller.
        res = inspect.getouterframes(run_frame)[2]
        caller, = res[:1]
        info = inspect.getframeinfo(caller)
        caller_module = caller.f_globals.get('__name__', '')
        return (
            caller_module == 'distutils.dist'
            and info.function == 'run_commands'
        )

    def do_egg_install(self):
        """Build a bdist_egg and hand it to easy_install for installation."""
        easy_install = self.distribution.get_command_class('easy_install')

        cmd = easy_install(
            self.distribution, args="x", root=self.root, record=self.record,
        )
        cmd.ensure_finalized()  # finalize before bdist_egg munges install cmd
        cmd.always_copy_from = '.'  # make sure local-dir eggs get installed

        # pick up setup-dir .egg files only: no .egg-info
        cmd.package_index.scan(glob.glob('*.egg'))

        self.run_command('bdist_egg')
        args = [self.distribution.get_command_obj('bdist_egg').egg_output]

        if setuptools.bootstrap_install_from:
            # Bootstrap self-installation of setuptools
            args.insert(0, setuptools.bootstrap_install_from)

        cmd.args = args
        cmd.run()
        # Clear the module-level bootstrap flag so it is only consumed once.
        setuptools.bootstrap_install_from = None


# XXX Python 3.1 doesn't see _nc if this is inside the class
install.sub_commands = (
    [cmd for cmd in orig.install.sub_commands if cmd[0] not in install._nc] +
    install.new_commands
)
"""setuptools.command.egg_infoCreate a distribution's .egg-info directory and contents"""from distutils.filelist import FileList as _FileListfrom distutils.errors import DistutilsInternalErrorfrom distutils.util import convert_pathfrom distutils import logimport distutils.errorsimport distutils.filelistimport osimport reimport sysimport ioimport warningsimport timeimport collectionsfrom setuptools.extern import sixfrom setuptools.extern.six.moves import mapfrom setuptools import Commandfrom setuptools.command.sdist import sdistfrom setuptools.command.sdist import walk_revctrlfrom setuptools.command.setopt import edit_configfrom setuptools.command import bdist_eggfrom pkg_resources import (parse_requirements, safe_name, parse_version,safe_version, yield_lines, EntryPoint, iter_entry_points, to_filename)import setuptools.unicode_utils as unicode_utilsfrom setuptools.glob import globfrom pkg_resources.extern import packagingdef translate_pattern(glob):"""Translate a file path glob like '*.txt' in to a regular expression.This differs from fnmatch.translate which allows wildcards to matchdirectory separators. It also knows about '**/' which matches any number ofdirectories."""pat = ''# This will split on '/' within [character classes]. This is deliberate.chunks = glob.split(os.path.sep)sep = re.escape(os.sep)valid_char = '[^%s]' % (sep,)for c, chunk in enumerate(chunks):last_chunk = c == len(chunks) - 1# Chunks that are a literal ** are globstars. 
They match anything.if chunk == '**':if last_chunk:# Match anything if this is the last componentpat += '.*'else:# Match '(name/)*'pat += '(?:%s+%s)*' % (valid_char, sep)continue # Break here as the whole path component has been handled# Find any special characters in the remainderi = 0chunk_len = len(chunk)while i < chunk_len:char = chunk[i]if char == '*':# Match any number of name characterspat += valid_char + '*'elif char == '?':# Match a name characterpat += valid_charelif char == '[':# Character classinner_i = i + 1# Skip initial !/] charsif inner_i < chunk_len and chunk[inner_i] == '!':inner_i = inner_i + 1if inner_i < chunk_len and chunk[inner_i] == ']':inner_i = inner_i + 1# Loop till the closing ] is foundwhile inner_i < chunk_len and chunk[inner_i] != ']':inner_i = inner_i + 1if inner_i >= chunk_len:# Got to the end of the string without finding a closing ]# Do not treat this as a matching group, but as a literal [pat += re.escape(char)else:# Grab the insides of the [brackets]inner = chunk[i + 1:inner_i]char_class = ''# Class negationif inner[0] == '!':char_class = '^'inner = inner[1:]char_class += re.escape(inner)pat += '[%s]' % (char_class,)# Skip to the end ]i = inner_ielse:pat += re.escape(char)i += 1# Join each chunk with the dir separatorif not last_chunk:pat += seppat += r'\Z'return re.compile(pat, flags=re.MULTILINE|re.DOTALL)class egg_info(Command):description = "create a distribution's .egg-info directory"user_options = [('egg-base=', 'e', "directory containing .egg-info directories"" (default: top of the source tree)"),('tag-date', 'd', "Add date stamp (e.g. 
20050528) to version number"),('tag-build=', 'b', "Specify explicit tag to add to version number"),('no-date', 'D', "Don't include date stamp [default]"),]boolean_options = ['tag-date']negative_opt = {'no-date': 'tag-date',}def initialize_options(self):self.egg_name = Noneself.egg_version = Noneself.egg_base = Noneself.egg_info = Noneself.tag_build = Noneself.tag_date = 0self.broken_egg_info = Falseself.vtags = None##################################### allow the 'tag_svn_revision' to be detected and# set, supporting sdists built on older Setuptools.@propertydef tag_svn_revision(self):pass@tag_svn_revision.setterdef tag_svn_revision(self, value):pass####################################def save_version_info(self, filename):"""Materialize the value of date into thebuild tag. Install build keys in a deterministic orderto avoid arbitrary reordering on subsequent builds."""egg_info = collections.OrderedDict()# follow the order these keys would have been added# when PYTHONHASHSEED=0egg_info['tag_build'] = self.tags()egg_info['tag_date'] = 0edit_config(filename, dict(egg_info=egg_info))def finalize_options(self):self.egg_name = safe_name(self.distribution.get_name())self.vtags = self.tags()self.egg_version = self.tagged_version()parsed_version = parse_version(self.egg_version)try:is_version = isinstance(parsed_version, packaging.version.Version)spec = ("%s==%s" if is_version else "%s===%s")list(parse_requirements(spec % (self.egg_name, self.egg_version)))except ValueError:raise distutils.errors.DistutilsOptionError("Invalid distribution name or version syntax: %s-%s" %(self.egg_name, self.egg_version))if self.egg_base is None:dirs = self.distribution.package_dirself.egg_base = (dirs or {}).get('', os.curdir)self.ensure_dirname('egg_base')self.egg_info = to_filename(self.egg_name) + '.egg-info'if self.egg_base != os.curdir:self.egg_info = os.path.join(self.egg_base, self.egg_info)if '-' in self.egg_name:self.check_broken_egg_info()# Set package version for the benefit of 
dumber commands# (e.g. sdist, bdist_wininst, etc.)#self.distribution.metadata.version = self.egg_version# If we bootstrapped around the lack of a PKG-INFO, as might be the# case in a fresh checkout, make sure that any special tags get added# to the version info#pd = self.distribution._patched_distif pd is not None and pd.key == self.egg_name.lower():pd._version = self.egg_versionpd._parsed_version = parse_version(self.egg_version)self.distribution._patched_dist = Nonedef write_or_delete_file(self, what, filename, data, force=False):"""Write `data` to `filename` or delete if emptyIf `data` is non-empty, this routine is the same as ``write_file()``.If `data` is empty but not ``None``, this is the same as calling``delete_file(filename)`. If `data` is ``None``, then this is a no-opunless `filename` exists, in which case a warning is issued about theorphaned file (if `force` is false), or deleted (if `force` is true)."""if data:self.write_file(what, filename, data)elif os.path.exists(filename):if data is None and not force:log.warn("%s not set in setup(), but %s exists", what, filename)returnelse:self.delete_file(filename)def write_file(self, what, filename, data):"""Write `data` to `filename` (if not a dry run) after announcing it`what` is used in a log message to identify what is being writtento the file."""log.info("writing %s to %s", what, filename)if six.PY3:data = data.encode("utf-8")if not self.dry_run:f = open(filename, 'wb')f.write(data)f.close()def delete_file(self, filename):"""Delete `filename` (if not a dry run) after announcing it"""log.info("deleting %s", filename)if not self.dry_run:os.unlink(filename)def tagged_version(self):version = self.distribution.get_version()# egg_info may be called more than once for a distribution,# in which case the version string already contains all tags.if self.vtags and version.endswith(self.vtags):return safe_version(version)return safe_version(version + self.vtags)def run(self):self.mkpath(self.egg_info)installer = 
self.distribution.fetch_build_eggfor ep in iter_entry_points('egg_info.writers'):ep.require(installer=installer)writer = ep.resolve()writer(self, ep.name, os.path.join(self.egg_info, ep.name))# Get rid of native_libs.txt if it was put there by older bdist_eggnl = os.path.join(self.egg_info, "native_libs.txt")if os.path.exists(nl):self.delete_file(nl)self.find_sources()def tags(self):version = ''if self.tag_build:version += self.tag_buildif self.tag_date:version += time.strftime("-%Y%m%d")return versiondef find_sources(self):"""Generate SOURCES.txt manifest file"""manifest_filename = os.path.join(self.egg_info, "SOURCES.txt")mm = manifest_maker(self.distribution)mm.manifest = manifest_filenamemm.run()self.filelist = mm.filelistdef check_broken_egg_info(self):bei = self.egg_name + '.egg-info'if self.egg_base != os.curdir:bei = os.path.join(self.egg_base, bei)if os.path.exists(bei):log.warn("-" * 78 + '\n'"Note: Your current .egg-info directory has a '-' in its name;"'\nthis will not work correctly with "setup.py develop".\n\n''Please rename %s to %s to correct this problem.\n' + '-' * 78,bei, self.egg_info)self.broken_egg_info = self.egg_infoself.egg_info = bei # make it work for nowclass FileList(_FileList):# Implementations of the various MANIFEST.in commandsdef process_template_line(self, line):# Parse the line: split it up, make sure the right number of words# is there, and return the relevant words. 'action' is always# defined: it's the first word of the line. 
Which of the other# three are defined depends on the action; it'll be either# patterns, (dir and patterns), or (dir_pattern).(action, patterns, dir, dir_pattern) = self._parse_template_line(line)# OK, now we know that the action is valid and we have the# right number of words on the line for that action -- so we# can proceed with minimal error-checking.if action == 'include':self.debug_print("include " + ' '.join(patterns))for pattern in patterns:if not self.include(pattern):log.warn("warning: no files found matching '%s'", pattern)elif action == 'exclude':self.debug_print("exclude " + ' '.join(patterns))for pattern in patterns:if not self.exclude(pattern):log.warn(("warning: no previously-included files ""found matching '%s'"), pattern)elif action == 'global-include':self.debug_print("global-include " + ' '.join(patterns))for pattern in patterns:if not self.global_include(pattern):log.warn(("warning: no files found matching '%s' ""anywhere in distribution"), pattern)elif action == 'global-exclude':self.debug_print("global-exclude " + ' '.join(patterns))for pattern in patterns:if not self.global_exclude(pattern):log.warn(("warning: no previously-included files matching ""'%s' found anywhere in distribution"),pattern)elif action == 'recursive-include':self.debug_print("recursive-include %s %s" %(dir, ' '.join(patterns)))for pattern in patterns:if not self.recursive_include(dir, pattern):log.warn(("warning: no files found matching '%s' ""under directory '%s'"),pattern, dir)elif action == 'recursive-exclude':self.debug_print("recursive-exclude %s %s" %(dir, ' '.join(patterns)))for pattern in patterns:if not self.recursive_exclude(dir, pattern):log.warn(("warning: no previously-included files matching ""'%s' found under directory '%s'"),pattern, dir)elif action == 'graft':self.debug_print("graft " + dir_pattern)if not self.graft(dir_pattern):log.warn("warning: no directories found matching '%s'",dir_pattern)elif action == 'prune':self.debug_print("prune " + 
dir_pattern)if not self.prune(dir_pattern):log.warn(("no previously-included directories found ""matching '%s'"), dir_pattern)else:raise DistutilsInternalError("this cannot happen: invalid action '%s'" % action)def _remove_files(self, predicate):"""Remove all files from the file list that match the predicate.Return True if any matching files were removed"""found = Falsefor i in range(len(self.files) - 1, -1, -1):if predicate(self.files[i]):self.debug_print(" removing " + self.files[i])del self.files[i]found = Truereturn founddef include(self, pattern):"""Include files that match 'pattern'."""found = [f for f in glob(pattern) if not os.path.isdir(f)]self.extend(found)return bool(found)def exclude(self, pattern):"""Exclude files that match 'pattern'."""match = translate_pattern(pattern)return self._remove_files(match.match)def recursive_include(self, dir, pattern):"""Include all files anywhere in 'dir/' that match the pattern."""full_pattern = os.path.join(dir, '**', pattern)found = [f for f in glob(full_pattern, recursive=True)if not os.path.isdir(f)]self.extend(found)return bool(found)def recursive_exclude(self, dir, pattern):"""Exclude any file anywhere in 'dir/' that match the pattern."""match = translate_pattern(os.path.join(dir, '**', pattern))return self._remove_files(match.match)def graft(self, dir):"""Include all files from 'dir/'."""found = [itemfor match_dir in glob(dir)for item in distutils.filelist.findall(match_dir)]self.extend(found)return bool(found)def prune(self, dir):"""Filter out files from 'dir/'."""match = translate_pattern(os.path.join(dir, '**'))return self._remove_files(match.match)def global_include(self, pattern):"""Include all files anywhere in the current directory that match thepattern. 
This is very inefficient on large file trees."""if self.allfiles is None:self.findall()match = translate_pattern(os.path.join('**', pattern))found = [f for f in self.allfiles if match.match(f)]self.extend(found)return bool(found)def global_exclude(self, pattern):"""Exclude all files anywhere that match the pattern."""match = translate_pattern(os.path.join('**', pattern))return self._remove_files(match.match)def append(self, item):if item.endswith('\r'): # Fix older sdists built on Windowsitem = item[:-1]path = convert_path(item)if self._safe_path(path):self.files.append(path)def extend(self, paths):self.files.extend(filter(self._safe_path, paths))def _repair(self):"""Replace self.files with only safe pathsBecause some owners of FileList manipulate the underlying``files`` attribute directly, this method must be called torepair those paths."""self.files = list(filter(self._safe_path, self.files))def _safe_path(self, path):enc_warn = "'%s' not %s encodable -- skipping"# To avoid accidental trans-codings errors, first to unicodeu_path = unicode_utils.filesys_decode(path)if u_path is None:log.warn("'%s' in unexpected encoding -- skipping" % path)return False# Must ensure utf-8 encodabilityutf8_path = unicode_utils.try_encode(u_path, "utf-8")if utf8_path is None:log.warn(enc_warn, path, 'utf-8')return Falsetry:# accept is either way checks outif os.path.exists(u_path) or os.path.exists(utf8_path):return True# this will catch any encode errors decoding u_pathexcept UnicodeEncodeError:log.warn(enc_warn, path, sys.getfilesystemencoding())class manifest_maker(sdist):template = "MANIFEST.in"def initialize_options(self):self.use_defaults = 1self.prune = 1self.manifest_only = 1self.force_manifest = 1def finalize_options(self):passdef run(self):self.filelist = FileList()if not os.path.exists(self.manifest):self.write_manifest() # it must exist so it'll get in the listself.add_defaults()if 
os.path.exists(self.template):self.read_template()self.prune_file_list()self.filelist.sort()self.filelist.remove_duplicates()self.write_manifest()def _manifest_normalize(self, path):path = unicode_utils.filesys_decode(path)return path.replace(os.sep, '/')def write_manifest(self):"""Write the file list in 'self.filelist' to the manifest filenamed by 'self.manifest'."""self.filelist._repair()# Now _repairs should encodability, but not unicodefiles = [self._manifest_normalize(f) for f in self.filelist.files]msg = "writing manifest file '%s'" % self.manifestself.execute(write_file, (self.manifest, files), msg)def warn(self, msg):if not self._should_suppress_warning(msg):sdist.warn(self, msg)@staticmethoddef _should_suppress_warning(msg):"""suppress missing-file warnings from sdist"""return re.match(r"standard file .*not found", msg)def add_defaults(self):sdist.add_defaults(self)self.filelist.append(self.template)self.filelist.append(self.manifest)rcfiles = list(walk_revctrl())if rcfiles:self.filelist.extend(rcfiles)elif os.path.exists(self.manifest):self.read_manifest()ei_cmd = self.get_finalized_command('egg_info')self.filelist.graft(ei_cmd.egg_info)def prune_file_list(self):build = self.get_finalized_command('build')base_dir = self.distribution.get_fullname()self.filelist.prune(build.build_base)self.filelist.prune(base_dir)sep = re.escape(os.sep)self.filelist.exclude_pattern(r'(^|' + sep + r')(RCS|CVS|\.svn)' + sep,is_regex=1)def write_file(filename, contents):"""Create a file with the specified name and write 'contents' (asequence of strings without line terminators) to it."""contents = "\n".join(contents)# assuming the contents has been vetted for utf-8 encodingcontents = contents.encode("utf-8")with open(filename, "wb") as f: # always write POSIX-style manifestf.write(contents)def write_pkg_info(cmd, basename, filename):log.info("writing %s", filename)if not cmd.dry_run:metadata = cmd.distribution.metadatametadata.version, oldver = cmd.egg_version, 
metadata.versionmetadata.name, oldname = cmd.egg_name, metadata.namemetadata.long_description_content_type = getattr(cmd.distribution,'long_description_content_type')try:# write unescaped data to PKG-INFO, so older pkg_resources# can still parse itmetadata.write_pkg_info(cmd.egg_info)finally:metadata.name, metadata.version = oldname, oldversafe = getattr(cmd.distribution, 'zip_safe', None)bdist_egg.write_safety_flag(cmd.egg_info, safe)def warn_depends_obsolete(cmd, basename, filename):if os.path.exists(filename):log.warn("WARNING: 'depends.txt' is not used by setuptools 0.6!\n""Use the install_requires/extras_require setup() args instead.")def _write_requirements(stream, reqs):lines = yield_lines(reqs or ())append_cr = lambda line: line + '\n'lines = map(append_cr, lines)stream.writelines(lines)def write_requirements(cmd, basename, filename):dist = cmd.distributiondata = six.StringIO()_write_requirements(data, dist.install_requires)extras_require = dist.extras_require or {}for extra in sorted(extras_require):data.write('\n[{extra}]\n'.format(**vars()))_write_requirements(data, extras_require[extra])cmd.write_or_delete_file("requirements", filename, data.getvalue())def write_setup_requirements(cmd, basename, filename):data = io.StringIO()_write_requirements(data, cmd.distribution.setup_requires)cmd.write_or_delete_file("setup-requirements", filename, data.getvalue())def write_toplevel_names(cmd, basename, filename):pkgs = dict.fromkeys([k.split('.', 1)[0]for k in cmd.distribution.iter_distribution_names()])cmd.write_file("top-level names", filename, '\n'.join(sorted(pkgs)) + '\n')def overwrite_arg(cmd, basename, filename):write_arg(cmd, basename, filename, True)def write_arg(cmd, basename, filename, force=False):argname = os.path.splitext(basename)[0]value = getattr(cmd.distribution, argname, None)if value is not None:value = '\n'.join(value) + '\n'cmd.write_or_delete_file(argname, filename, value, force)def write_entries(cmd, basename, filename):ep = 
cmd.distribution.entry_pointsif isinstance(ep, six.string_types) or ep is None:data = epelif ep is not None:data = []for section, contents in sorted(ep.items()):if not isinstance(contents, six.string_types):contents = EntryPoint.parse_group(section, contents)contents = '\n'.join(sorted(map(str, contents.values())))data.append('[%s]\n%s\n\n' % (section, contents))data = ''.join(data)cmd.write_or_delete_file('entry points', filename, data, True)def get_pkg_info_revision():"""Get a -r### off of PKG-INFO Version in case this is an sdist ofa subversion revision."""warnings.warn("get_pkg_info_revision is deprecated.", DeprecationWarning)if os.path.exists('PKG-INFO'):with io.open('PKG-INFO') as f:for line in f:match = re.match(r"Version:.*-r(\d+)\s*$", line)if match:return int(match.group(1))return 0
#!/usr/bin/env python
"""
Easy Install
------------

A tool for doing automatic download/extract/build of distutils-based Python
packages.  For detailed documentation, see the accompanying EasyInstall.txt
file, or visit the `EasyInstall home page`__.

__ https://setuptools.readthedocs.io/en/latest/easy_install.html

"""

from glob import glob
from distutils.util import get_platform
from distutils.util import convert_path, subst_vars
from distutils.errors import (
    DistutilsArgError, DistutilsOptionError,
    DistutilsError, DistutilsPlatformError,
)
from distutils.command.install import INSTALL_SCHEMES, SCHEME_KEYS
from distutils import log, dir_util
from distutils.command.build_scripts import first_line_re
from distutils.spawn import find_executable
import sys
import os
import zipimport
import shutil
import tempfile
import zipfile
import re
import stat
import random
import textwrap
import warnings
import site
import struct
import contextlib
import subprocess
import shlex
import io

from setuptools.extern import six
from setuptools.extern.six.moves import configparser, map

from setuptools import Command
from setuptools.sandbox import run_setup
from setuptools.py31compat import get_path, get_config_vars
from setuptools.py27compat import rmtree_safe
from setuptools.command import setopt
from setuptools.archive_util import unpack_archive
from setuptools.package_index import (
    PackageIndex, parse_requirement_arg, URL_SCHEME,
)
from setuptools.command import bdist_egg, egg_info
from setuptools.wheel import Wheel
from pkg_resources import (
    yield_lines, normalize_path, resource_string, ensure_directory,
    get_distribution, find_distributions, Environment, Requirement,
    Distribution, PathMetadata, EggMetadata, WorkingSet, DistributionNotFound,
    VersionConflict, DEVELOP_DIST,
)
import pkg_resources.py31compat

# Turn on PEP440Warnings
warnings.filterwarnings("default", category=pkg_resources.PEP440Warning)

__all__ = [
    'samefile', 'easy_install', 'PthDistributions', 'extract_wininst_cfg',
    'main', 'get_exe_prefixes',
]


def is_64bit():
    # Pointer size is 8 bytes on 64-bit interpreters.
    return struct.calcsize("P") == 8


def samefile(p1, p2):
    """
    Determine if two paths reference the same file.

    Augments os.path.samefile to work on Windows and
    suppresses errors if the path doesn't exist.
    """
    both_exist = os.path.exists(p1) and os.path.exists(p2)
    use_samefile = hasattr(os.path, 'samefile') and both_exist
    if use_samefile:
        return os.path.samefile(p1, p2)
    # Fall back to a case/sep-normalized string comparison.
    norm_p1 = os.path.normpath(os.path.normcase(p1))
    norm_p2 = os.path.normpath(os.path.normcase(p2))
    return norm_p1 == norm_p2


if six.PY2:

    def _to_ascii(s):
        # Py2 str is already bytes.
        return s

    def isascii(s):
        try:
            six.text_type(s, 'ascii')
            return True
        except UnicodeError:
            return False
else:

    def _to_ascii(s):
        return s.encode('ascii')

    def isascii(s):
        try:
            s.encode('ascii')
            return True
        except UnicodeError:
            return False


# Collapse a dedented multi-line string into a single '; '-joined line.
_one_liner = lambda text: textwrap.dedent(text).strip().replace('\n', '; ')


class easy_install(Command):
    """Manage a download/build/install process"""
    description = "Find/get/install Python packages"
    command_consumes_arguments = True

    user_options = [
        ('prefix=', None, "installation prefix"),
        ("zip-ok", "z", "install package as a zipfile"),
        ("multi-version", "m", "make apps have to require() a version"),
        ("upgrade", "U", "force upgrade (searches PyPI for latest versions)"),
        ("install-dir=", "d", "install package to DIR"),
        ("script-dir=", "s", "install scripts to DIR"),
        ("exclude-scripts", "x", "Don't install scripts"),
        ("always-copy", "a", "Copy all needed packages to install dir"),
        ("index-url=", "i", "base URL of Python Package Index"),
        ("find-links=", "f", "additional URL(s) to search for packages"),
        ("build-directory=", "b",
         "download/extract/build in DIR; keep the results"),
        ('optimize=', 'O',
         "also compile with optimization: -O1 for \"python -O\", "
         "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
        ('record=', None,
         "filename in which to record list of installed files"),
        ('always-unzip', 'Z', "don't install as a zipfile, no matter what"),
        ('site-dirs=', 'S', "list of directories where .pth files work"),
        ('editable', 'e', "Install specified packages in editable form"),
        ('no-deps', 'N', "don't install dependencies"),
        ('allow-hosts=', 'H', "pattern(s) that hostnames must match"),
        ('local-snapshots-ok', 'l',
         "allow building eggs from local checkouts"),
        ('version', None, "print version information and exit"),
        ('no-find-links', None,
         "Don't load find-links defined in packages being installed")
    ]
    boolean_options = [
        'zip-ok', 'multi-version', 'exclude-scripts', 'upgrade', 'always-copy',
        'editable',
        'no-deps', 'local-snapshots-ok', 'version'
    ]

    if site.ENABLE_USER_SITE:
        help_msg = "install in user site-package '%s'" % site.USER_SITE
        user_options.append(('user', None, help_msg))
        boolean_options.append('user')

    negative_opt = {'always-unzip': 'zip-ok'}
    create_index = PackageIndex

    def initialize_options(self):
        # the --user option seems to be an opt-in one,
        # so the default should be False.
        self.user = 0
        self.zip_ok = self.local_snapshots_ok = None
        self.install_dir = self.script_dir = self.exclude_scripts = None
        self.index_url = None
        self.find_links = None
        self.build_directory = None
        self.args = None
        self.optimize = self.record = None
        self.upgrade = self.always_copy = self.multi_version = None
        self.editable = self.no_deps = self.allow_hosts = None
        self.root = self.prefix = self.no_report = None
        self.version = None
        self.install_purelib = None  # for pure module distributions
        self.install_platlib = None  # non-pure (dists w/ extensions)
        self.install_headers = None  # for C/C++ headers
        self.install_lib = None  # set to either purelib or platlib
        self.install_scripts = None
        self.install_data = None
        self.install_base = None
        self.install_platbase = None
        if site.ENABLE_USER_SITE:
            self.install_userbase = site.USER_BASE
            self.install_usersite = site.USER_SITE
        else:
            self.install_userbase = None
            self.install_usersite = None
        self.no_find_links = None

        # Options not specifiable via command line
        self.package_index = None
        self.pth_file = self.always_copy_from = None
        self.site_dirs = None
        self.installed_projects = {}
        self.sitepy_installed = False
        # Always read easy_install options, even if we are subclassed, or have
        # an independent instance created.  This ensures that defaults will
        # always come from the standard configuration file(s)' "easy_install"
        # section, even if this is a "develop" or "install" command, or some
        # other embedding.
        self._dry_run = None
        self.verbose = self.distribution.verbose
        self.distribution._set_command_options(
            self, self.distribution.get_option_dict('easy_install')
        )

    def delete_blockers(self, blockers):
        """Delete every path in `blockers` that exists (or is a dangling
        symlink)."""
        extant_blockers = (
            filename for filename in blockers
            if os.path.exists(filename) or os.path.islink(filename)
        )
        list(map(self._delete_path, extant_blockers))

    def _delete_path(self, path):
        log.info("Deleting %s", path)
        if self.dry_run:
            return

        is_tree = os.path.isdir(path) and not os.path.islink(path)
        remover = rmtree if is_tree else os.unlink
        remover(path)

    @staticmethod
    def _render_version():
        """
        Render the Setuptools version and installation details, then exit.
        """
        # NOTE(review): sys.version[:3] yields "3.1" on CPython 3.10+ --
        # cosmetic only, but worth fixing when the support matrix allows.
        ver = sys.version[:3]
        dist = get_distribution('setuptools')
        tmpl = 'setuptools {dist.version} from {dist.location} (Python {ver})'
        print(tmpl.format(**locals()))
        raise SystemExit()

    def finalize_options(self):
        self.version and self._render_version()

        py_version = sys.version.split()[0]
        prefix, exec_prefix = get_config_vars('prefix', 'exec_prefix')

        # Template variables available for $-substitution in directory
        # options (see _expand_attrs / subst_vars).
        self.config_vars = {
            'dist_name': self.distribution.get_name(),
            'dist_version': self.distribution.get_version(),
            'dist_fullname': self.distribution.get_fullname(),
            'py_version': py_version,
            # NOTE(review): slice/index tricks below assume single-digit
            # minor versions; they misbehave on Python 3.10+.
            'py_version_short': py_version[0:3],
            'py_version_nodot': py_version[0] + py_version[2],
            'sys_prefix': prefix,
            'prefix': prefix,
            'sys_exec_prefix': exec_prefix,
            'exec_prefix': exec_prefix,
            # Only python 3.2+ has abiflags
            'abiflags': getattr(sys, 'abiflags', ''),
        }

        if site.ENABLE_USER_SITE:
            self.config_vars['userbase'] = self.install_userbase
            self.config_vars['usersite'] = self.install_usersite

        self._fix_install_dir_for_user_site()

        self.expand_basedirs()
        self.expand_dirs()

        self._expand(
            'install_dir', 'script_dir', 'build_directory',
            'site_dirs',
        )
        # If a non-default installation directory was specified, default the
        # script directory to match it.
        if self.script_dir is None:
            self.script_dir = self.install_dir

        if self.no_find_links is None:
            self.no_find_links = False

        # Let install_dir get set by install_lib command, which in turn
        # gets its info from the install command, and takes into account
        # --prefix and --home and all that other crud.
        self.set_undefined_options(
            'install_lib', ('install_dir', 'install_dir')
        )
        # Likewise, set default script_dir from 'install_scripts.install_dir'
        self.set_undefined_options(
            'install_scripts', ('install_dir', 'script_dir')
        )

        if self.user and self.install_purelib:
            self.install_dir = self.install_purelib
            self.script_dir = self.install_scripts
        # default --record from the install command
        self.set_undefined_options('install', ('record', 'record'))
        # Should this be moved to the if statement below? It's not used
        # elsewhere
        normpath = map(normalize_path, sys.path)
        self.all_site_dirs = get_site_dirs()
        if self.site_dirs is not None:
            site_dirs = [
                os.path.expanduser(s.strip()) for s in
                self.site_dirs.split(',')
            ]
            for d in site_dirs:
                if not os.path.isdir(d):
                    log.warn("%s (in --site-dirs) does not exist", d)
                elif normalize_path(d) not in normpath:
                    raise DistutilsOptionError(
                        d + " (in --site-dirs) is not on sys.path"
                    )
                else:
                    self.all_site_dirs.append(normalize_path(d))
        if not self.editable:
            self.check_site_dir()
        self.index_url = self.index_url or "https://pypi.python.org/simple"
        self.shadow_path = self.all_site_dirs[:]
        for path_item in self.install_dir, normalize_path(self.script_dir):
            if path_item not in self.shadow_path:
                self.shadow_path.insert(0, path_item)

        if self.allow_hosts is not None:
            hosts = [s.strip() for s in self.allow_hosts.split(',')]
        else:
            hosts = ['*']
        if self.package_index is None:
            self.package_index = self.create_index(
                self.index_url, search_path=self.shadow_path, hosts=hosts,
            )
        self.local_index = Environment(self.shadow_path + sys.path)

        if self.find_links is not None:
            if isinstance(self.find_links, six.string_types):
                self.find_links = self.find_links.split()
        else:
            self.find_links = []
        if self.local_snapshots_ok:
            self.package_index.scan_egg_links(self.shadow_path + sys.path)
        if not self.no_find_links:
            self.package_index.add_find_links(self.find_links)
        self.set_undefined_options('install_lib', ('optimize', 'optimize'))
        if not isinstance(self.optimize, int):
            try:
                self.optimize = int(self.optimize)
                if not (0 <= self.optimize <= 2):
                    raise ValueError
            except ValueError:
                raise DistutilsOptionError("--optimize must be 0, 1, or 2")

        if self.editable and not self.build_directory:
            raise DistutilsArgError(
                "Must specify a build directory (-b) when using --editable"
            )
        if not self.args:
            raise DistutilsArgError(
                "No urls, filenames, or requirements specified (see --help)")

        self.outputs = []

    def _fix_install_dir_for_user_site(self):
        """
        Fix the install_dir if "--user" was used.
        """
        if not self.user or not site.ENABLE_USER_SITE:
            return

        self.create_home_path()
        if self.install_userbase is None:
            msg = "User base directory is not specified"
            raise DistutilsPlatformError(msg)
        self.install_base = self.install_platbase = self.install_userbase
        scheme_name = os.name.replace('posix', 'unix') + '_user'
        self.select_scheme(scheme_name)

    def _expand_attrs(self, attrs):
        # Expand ~ and $config_vars in each named directory attribute.
        for attr in attrs:
            val = getattr(self, attr)
            if val is not None:
                if os.name == 'posix' or os.name == 'nt':
                    val = os.path.expanduser(val)
                val = subst_vars(val, self.config_vars)
                setattr(self, attr, val)

    def expand_basedirs(self):
        """Calls `os.path.expanduser` on install_base, install_platbase and
        root."""
        self._expand_attrs(['install_base', 'install_platbase', 'root'])

    def expand_dirs(self):
        """Calls `os.path.expanduser` on install dirs."""
        dirs = [
            'install_purelib',
            'install_platlib',
            'install_lib',
            'install_headers',
            'install_scripts',
            'install_data',
        ]
        self._expand_attrs(dirs)

    def run(self):
        if self.verbose != self.distribution.verbose:
            log.set_verbosity(self.verbose)
        try:
            for spec in self.args:
                self.easy_install(spec, not self.no_deps)
            if self.record:
                outputs = self.outputs
                if self.root:  # strip any package prefix
                    root_len = len(self.root)
                    for counter in range(len(outputs)):
                        outputs[counter] = outputs[counter][root_len:]
                from distutils import file_util

                self.execute(
                    file_util.write_file, (self.record, outputs),
                    "writing list of installed files to '%s'" %
                    self.record
                )
            self.warn_deprecated_options()
        finally:
            # Restore the global log verbosity we may have overridden above.
            log.set_verbosity(self.distribution.verbose)

    def pseudo_tempname(self):
        """Return a pseudo-tempname base in the install directory.
        This code is intentionally naive; if a malicious party can write to
        the target directory you're already in deep doodoo.
        """
        try:
            pid = os.getpid()
        except Exception:
            pid = random.randint(0, sys.maxsize)
        return os.path.join(self.install_dir, "test-easy-install-%s" % pid)

    def warn_deprecated_options(self):
        # Hook for subclasses; nothing is deprecated at this level.
        pass

    def check_site_dir(self):
        """Verify that self.install_dir is .pth-capable dir, if needed"""
        instdir = normalize_path(self.install_dir)
        # NOTE(review): pth_file appears unused in the visible portion of
        # this method -- presumably referenced in the continuation below.
        pth_file = os.path.join(instdir, 'easy-install.pth')

        # Is it a configured, PYTHONPATH, implicit, or explicit site dir?
        is_site_dir = instdir in self.all_site_dirs

        if not is_site_dir and not self.multi_version:
            # No? 
Then directly test whether it does .pth file processingis_site_dir = self.check_pth_processing()else:# make sure we can write to target dirtestfile = self.pseudo_tempname() + '.write-test'test_exists = os.path.exists(testfile)try:if test_exists:os.unlink(testfile)open(testfile, 'w').close()os.unlink(testfile)except (OSError, IOError):self.cant_write_to_target()if not is_site_dir and not self.multi_version:# Can't install non-multi to non-site dirraise DistutilsError(self.no_default_version_msg())if is_site_dir:if self.pth_file is None:self.pth_file = PthDistributions(pth_file, self.all_site_dirs)else:self.pth_file = Noneif instdir not in map(normalize_path, _pythonpath()):# only PYTHONPATH dirs need a site.py, so pretend it's thereself.sitepy_installed = Trueelif self.multi_version and not os.path.exists(pth_file):self.sitepy_installed = True # don't need site.py in this caseself.pth_file = None # and don't create a .pth fileself.install_dir = instdir__cant_write_msg = textwrap.dedent("""can't create or remove files in install directoryThe following error occurred while trying to add or remove files in theinstallation directory:%sThe installation directory you specified (via --install-dir, --prefix, orthe distutils default setting) was:%s""").lstrip()__not_exists_id = textwrap.dedent("""This directory does not currently exist. Please create it and try again, orchoose a different installation directory (using the -d or --install-diroption).""").lstrip()__access_msg = textwrap.dedent("""Perhaps your account does not have write access to this directory? If theinstallation directory is a system-owned directory, you may need to sign inas the administrator or "root" account. 
If you do not have administrativeaccess to this machine, you may wish to choose a different installationdirectory, preferably one that is listed in your PYTHONPATH environmentvariable.For information on other options, you may wish to consult thedocumentation at:https://setuptools.readthedocs.io/en/latest/easy_install.htmlPlease make the appropriate changes for your system and try again.""").lstrip()def cant_write_to_target(self):msg = self.__cant_write_msg % (sys.exc_info()[1], self.install_dir,)if not os.path.exists(self.install_dir):msg += '\n' + self.__not_exists_idelse:msg += '\n' + self.__access_msgraise DistutilsError(msg)def check_pth_processing(self):"""Empirically verify whether .pth files are supported in inst. dir"""instdir = self.install_dirlog.info("Checking .pth file support in %s", instdir)pth_file = self.pseudo_tempname() + ".pth"ok_file = pth_file + '.ok'ok_exists = os.path.exists(ok_file)tmpl = _one_liner("""import osf = open({ok_file!r}, 'w')f.write('OK')f.close()""") + '\n'try:if ok_exists:os.unlink(ok_file)dirname = os.path.dirname(ok_file)pkg_resources.py31compat.makedirs(dirname, exist_ok=True)f = open(pth_file, 'w')except (OSError, IOError):self.cant_write_to_target()else:try:f.write(tmpl.format(**locals()))f.close()f = Noneexecutable = sys.executableif os.name == 'nt':dirname, basename = os.path.split(executable)alt = os.path.join(dirname, 'pythonw.exe')use_alt = (basename.lower() == 'python.exe' andos.path.exists(alt))if use_alt:# use pythonw.exe to avoid opening a console windowexecutable = altfrom distutils.spawn import spawnspawn([executable, '-E', '-c', 'pass'], 0)if os.path.exists(ok_file):log.info("TEST PASSED: %s appears to support .pth files",instdir)return Truefinally:if f:f.close()if os.path.exists(ok_file):os.unlink(ok_file)if os.path.exists(pth_file):os.unlink(pth_file)if not self.multi_version:log.warn("TEST FAILED: %s does NOT support .pth files", instdir)return Falsedef install_egg_scripts(self, dist):"""Write all the 
scripts for `dist`, unless scripts are excluded"""if not self.exclude_scripts and dist.metadata_isdir('scripts'):for script_name in dist.metadata_listdir('scripts'):if dist.metadata_isdir('scripts/' + script_name):# The "script" is a directory, likely a Python 3# __pycache__ directory, so skip it.continueself.install_script(dist, script_name,dist.get_metadata('scripts/' + script_name))self.install_wrapper_scripts(dist)def add_output(self, path):if os.path.isdir(path):for base, dirs, files in os.walk(path):for filename in files:self.outputs.append(os.path.join(base, filename))else:self.outputs.append(path)def not_editable(self, spec):if self.editable:raise DistutilsArgError("Invalid argument %r: you can't use filenames or URLs ""with --editable (except via the --find-links option)."% (spec,))def check_editable(self, spec):if not self.editable:returnif os.path.exists(os.path.join(self.build_directory, spec.key)):raise DistutilsArgError("%r already exists in %s; can't do a checkout there" %(spec.key, self.build_directory))@contextlib.contextmanagerdef _tmpdir(self):tmpdir = tempfile.mkdtemp(prefix=six.u("easy_install-"))try:# cast to str as workaround for #709 and #710 and #712yield str(tmpdir)finally:os.path.exists(tmpdir) and rmtree(rmtree_safe(tmpdir))def easy_install(self, spec, deps=False):if not self.editable:self.install_site_py()with self._tmpdir() as tmpdir:if not isinstance(spec, Requirement):if URL_SCHEME(spec):# It's a url, download it to tmpdir and processself.not_editable(spec)dl = self.package_index.download(spec, tmpdir)return self.install_item(None, dl, tmpdir, deps, True)elif os.path.exists(spec):# Existing file or directory, just process it directlyself.not_editable(spec)return self.install_item(None, spec, tmpdir, deps, True)else:spec = parse_requirement_arg(spec)self.check_editable(spec)dist = self.package_index.fetch_distribution(spec, tmpdir, self.upgrade, self.editable,not self.always_copy, self.local_index)if dist is None:msg = "Could not find 
suitable distribution for %r" % specif self.always_copy:msg += " (--always-copy skips system and development eggs)"raise DistutilsError(msg)elif dist.precedence == DEVELOP_DIST:# .egg-info dists don't need installing, just process depsself.process_distribution(spec, dist, deps, "Using")return distelse:return self.install_item(spec, dist.location, tmpdir, deps)def install_item(self, spec, download, tmpdir, deps, install_needed=False):# Installation is also needed if file in tmpdir or is not an egginstall_needed = install_needed or self.always_copyinstall_needed = install_needed or os.path.dirname(download) == tmpdirinstall_needed = install_needed or not download.endswith('.egg')install_needed = install_needed or (self.always_copy_from is not None andos.path.dirname(normalize_path(download)) ==normalize_path(self.always_copy_from))if spec and not install_needed:# at this point, we know it's a local .egg, we just don't know if# it's already installed.for dist in self.local_index[spec.project_name]:if dist.location == download:breakelse:install_needed = True # it's not in the local indexlog.info("Processing %s", os.path.basename(download))if install_needed:dists = self.install_eggs(spec, download, tmpdir)for dist in dists:self.process_distribution(spec, dist, deps)else:dists = [self.egg_distribution(download)]self.process_distribution(spec, dists[0], deps, "Using")if spec is not None:for dist in dists:if dist in spec:return distdef select_scheme(self, name):"""Sets the install directories by applying the install schemes."""# it's the caller's problem if they supply a bad name!scheme = INSTALL_SCHEMES[name]for key in SCHEME_KEYS:attrname = 'install_' + keyif getattr(self, attrname) is None:setattr(self, attrname, scheme[key])def process_distribution(self, requirement, dist, deps=True, *info):self.update_pth(dist)self.package_index.add(dist)if dist in 
self.local_index[dist.key]:self.local_index.remove(dist)self.local_index.add(dist)self.install_egg_scripts(dist)self.installed_projects[dist.key] = distlog.info(self.installation_report(requirement, dist, *info))if (dist.has_metadata('dependency_links.txt') andnot self.no_find_links):self.package_index.add_find_links(dist.get_metadata_lines('dependency_links.txt'))if not deps and not self.always_copy:returnelif requirement is not None and dist.key != requirement.key:log.warn("Skipping dependencies for %s", dist)return # XXX this is not the distribution we were looking forelif requirement is None or dist not in requirement:# if we wound up with a different version, resolve what we've gotdistreq = dist.as_requirement()requirement = Requirement(str(distreq))log.info("Processing dependencies for %s", requirement)try:distros = WorkingSet([]).resolve([requirement], self.local_index, self.easy_install)except DistributionNotFound as e:raise DistutilsError(str(e))except VersionConflict as e:raise DistutilsError(e.report())if self.always_copy or self.always_copy_from:# Force all the relevant distros to be copied or activatedfor dist in distros:if dist.key not in self.installed_projects:self.easy_install(dist.as_requirement())log.info("Finished processing dependencies for %s", requirement)def should_unzip(self, dist):if self.zip_ok is not None:return not self.zip_okif dist.has_metadata('not-zip-safe'):return Trueif not dist.has_metadata('zip-safe'):return Truereturn Falsedef maybe_move(self, spec, dist_filename, setup_base):dst = os.path.join(self.build_directory, spec.key)if os.path.exists(dst):msg = ("%r already exists in %s; build directory %s will not be kept")log.warn(msg, spec.key, self.build_directory, setup_base)return setup_baseif os.path.isdir(dist_filename):setup_base = dist_filenameelse:if os.path.dirname(dist_filename) == setup_base:os.unlink(dist_filename) # get it out of the tmp dircontents = os.listdir(setup_base)if len(contents) == 1:dist_filename = 
os.path.join(setup_base, contents[0])if os.path.isdir(dist_filename):# if the only thing there is a directory, move it insteadsetup_base = dist_filenameensure_directory(dst)shutil.move(setup_base, dst)return dstdef install_wrapper_scripts(self, dist):if self.exclude_scripts:returnfor args in ScriptWriter.best().get_args(dist):self.write_script(*args)def install_script(self, dist, script_name, script_text, dev_path=None):"""Generate a legacy script wrapper and install it"""spec = str(dist.as_requirement())is_script = is_python_script(script_text, script_name)if is_script:body = self._load_template(dev_path) % locals()script_text = ScriptWriter.get_header(script_text) + bodyself.write_script(script_name, _to_ascii(script_text), 'b')@staticmethoddef _load_template(dev_path):"""There are a couple of template scripts in the package. Thisfunction loads one of them and prepares it for use."""# See https://github.com/pypa/setuptools/issues/134 for info# on script file naming and downstream issues with SVR4name = 'script.tmpl'if dev_path:name = name.replace('.tmpl', ' (dev).tmpl')raw_bytes = resource_string('setuptools', name)return raw_bytes.decode('utf-8')def write_script(self, script_name, contents, mode="t", blockers=()):"""Write an executable file to the scripts directory"""self.delete_blockers( # clean up old .py/.pyw w/o a script[os.path.join(self.script_dir, x) for x in blockers])log.info("Installing %s script to %s", script_name, self.script_dir)target = os.path.join(self.script_dir, script_name)self.add_output(target)mask = current_umask()if not self.dry_run:ensure_directory(target)if os.path.exists(target):os.unlink(target)with open(target, "w" + mode) as f:f.write(contents)chmod(target, 0o777 - mask)def install_eggs(self, spec, dist_filename, tmpdir):# .egg dirs or files are already built, so just return themif dist_filename.lower().endswith('.egg'):return [self.install_egg(dist_filename, tmpdir)]elif dist_filename.lower().endswith('.exe'):return 
[self.install_exe(dist_filename, tmpdir)]elif dist_filename.lower().endswith('.whl'):return [self.install_wheel(dist_filename, tmpdir)]# Anything else, try to extract and buildsetup_base = tmpdirif os.path.isfile(dist_filename) and not dist_filename.endswith('.py'):unpack_archive(dist_filename, tmpdir, self.unpack_progress)elif os.path.isdir(dist_filename):setup_base = os.path.abspath(dist_filename)if (setup_base.startswith(tmpdir) # something we downloadedand self.build_directory and spec is not None):setup_base = self.maybe_move(spec, dist_filename, setup_base)# Find the setup.py filesetup_script = os.path.join(setup_base, 'setup.py')if not os.path.exists(setup_script):setups = glob(os.path.join(setup_base, '*', 'setup.py'))if not setups:raise DistutilsError("Couldn't find a setup script in %s" %os.path.abspath(dist_filename))if len(setups) > 1:raise DistutilsError("Multiple setup scripts in %s" %os.path.abspath(dist_filename))setup_script = setups[0]# Now run it, and return the resultif self.editable:log.info(self.report_editable(spec, setup_script))return []else:return self.build_and_install(setup_script, setup_base)def egg_distribution(self, egg_path):if os.path.isdir(egg_path):metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO'))else:metadata = EggMetadata(zipimport.zipimporter(egg_path))return Distribution.from_filename(egg_path, metadata=metadata)def install_egg(self, egg_path, tmpdir):destination = os.path.join(self.install_dir,os.path.basename(egg_path),)destination = os.path.abspath(destination)if not self.dry_run:ensure_directory(destination)dist = self.egg_distribution(egg_path)if not samefile(egg_path, destination):if os.path.isdir(destination) and not os.path.islink(destination):dir_util.remove_tree(destination, dry_run=self.dry_run)elif os.path.exists(destination):self.execute(os.unlink,(destination,),"Removing " + destination,)try:new_dist_is_zipped = Falseif os.path.isdir(egg_path):if egg_path.startswith(tmpdir):f, m = shutil.move, 
"Moving"else:f, m = shutil.copytree, "Copying"elif self.should_unzip(dist):self.mkpath(destination)f, m = self.unpack_and_compile, "Extracting"else:new_dist_is_zipped = Trueif egg_path.startswith(tmpdir):f, m = shutil.move, "Moving"else:f, m = shutil.copy2, "Copying"self.execute(f,(egg_path, destination),(m + " %s to %s") % (os.path.basename(egg_path),os.path.dirname(destination)),)update_dist_caches(destination,fix_zipimporter_caches=new_dist_is_zipped,)except Exception:update_dist_caches(destination, fix_zipimporter_caches=False)raiseself.add_output(destination)return self.egg_distribution(destination)def install_exe(self, dist_filename, tmpdir):# See if it's valid, get datacfg = extract_wininst_cfg(dist_filename)if cfg is None:raise DistutilsError("%s is not a valid distutils Windows .exe" % dist_filename)# Create a dummy distribution object until we build the real distrodist = Distribution(None,project_name=cfg.get('metadata', 'name'),version=cfg.get('metadata', 'version'), platform=get_platform(),)# Convert the .exe to an unpacked eggegg_path = os.path.join(tmpdir, dist.egg_name() + '.egg')dist.location = egg_pathegg_tmp = egg_path + '.tmp'_egg_info = os.path.join(egg_tmp, 'EGG-INFO')pkg_inf = os.path.join(_egg_info, 'PKG-INFO')ensure_directory(pkg_inf) # make sure EGG-INFO dir existsdist._provider = PathMetadata(egg_tmp, _egg_info) # XXXself.exe_to_egg(dist_filename, egg_tmp)# Write EGG-INFO/PKG-INFOif not os.path.exists(pkg_inf):f = open(pkg_inf, 'w')f.write('Metadata-Version: 1.0\n')for k, v in cfg.items('metadata'):if k != 'target_version':f.write('%s: %s\n' % (k.replace('_', '-').title(), v))f.close()script_dir = os.path.join(_egg_info, 'scripts')# delete entry-point scripts to avoid dupingself.delete_blockers([os.path.join(script_dir, args[0])for args in ScriptWriter.get_args(dist)])# Build .egg file from tmpdirbdist_egg.make_zipfile(egg_path, egg_tmp, verbose=self.verbose, dry_run=self.dry_run,)# install the .eggreturn self.install_egg(egg_path, 
tmpdir)def exe_to_egg(self, dist_filename, egg_tmp):"""Extract a bdist_wininst to the directories an egg would use"""# Check for .pth file and set up prefix translationsprefixes = get_exe_prefixes(dist_filename)to_compile = []native_libs = []top_level = {}def process(src, dst):s = src.lower()for old, new in prefixes:if s.startswith(old):src = new + src[len(old):]parts = src.split('/')dst = os.path.join(egg_tmp, *parts)dl = dst.lower()if dl.endswith('.pyd') or dl.endswith('.dll'):parts[-1] = bdist_egg.strip_module(parts[-1])top_level[os.path.splitext(parts[0])[0]] = 1native_libs.append(src)elif dl.endswith('.py') and old != 'SCRIPTS/':top_level[os.path.splitext(parts[0])[0]] = 1to_compile.append(dst)return dstif not src.endswith('.pth'):log.warn("WARNING: can't process %s", src)return None# extract, tracking .pyd/.dll->native_libs and .py -> to_compileunpack_archive(dist_filename, egg_tmp, process)stubs = []for res in native_libs:if res.lower().endswith('.pyd'): # create stubs for .pyd'sparts = res.split('/')resource = parts[-1]parts[-1] = bdist_egg.strip_module(parts[-1]) + '.py'pyfile = os.path.join(egg_tmp, *parts)to_compile.append(pyfile)stubs.append(pyfile)bdist_egg.write_stub(resource, pyfile)self.byte_compile(to_compile) # compile .py'sbdist_egg.write_safety_flag(os.path.join(egg_tmp, 'EGG-INFO'),bdist_egg.analyze_egg(egg_tmp, stubs)) # write zip-safety flagfor name in 'top_level', 'native_libs':if locals()[name]:txt = os.path.join(egg_tmp, 'EGG-INFO', name + '.txt')if not os.path.exists(txt):f = open(txt, 'w')f.write('\n'.join(locals()[name]) + '\n')f.close()def install_wheel(self, wheel_path, tmpdir):wheel = Wheel(wheel_path)assert wheel.is_compatible()destination = os.path.join(self.install_dir, wheel.egg_name())destination = os.path.abspath(destination)if not self.dry_run:ensure_directory(destination)if os.path.isdir(destination) and not os.path.islink(destination):dir_util.remove_tree(destination, dry_run=self.dry_run)elif 
os.path.exists(destination):self.execute(os.unlink,(destination,),"Removing " + destination,)try:self.execute(wheel.install_as_egg,(destination,),("Installing %s to %s") % (os.path.basename(wheel_path),os.path.dirname(destination)),)finally:update_dist_caches(destination, fix_zipimporter_caches=False)self.add_output(destination)return self.egg_distribution(destination)__mv_warning = textwrap.dedent("""Because this distribution was installed --multi-version, before you canimport modules from this package in an application, you will need to'import pkg_resources' and then use a 'require()' call similar to one ofthese examples, in order to select the desired version:pkg_resources.require("%(name)s") # latest installed versionpkg_resources.require("%(name)s==%(version)s") # this exact versionpkg_resources.require("%(name)s>=%(version)s") # this version or higher""").lstrip()__id_warning = textwrap.dedent("""Note also that the installation directory must be on sys.path at runtime forthis to work. (e.g. 
by being the application's script directory, by being onPYTHONPATH, or by being added to sys.path by your code.)""")def installation_report(self, req, dist, what="Installed"):"""Helpful installation message for display to package users"""msg = "\n%(what)s %(eggloc)s%(extras)s"if self.multi_version and not self.no_report:msg += '\n' + self.__mv_warningif self.install_dir not in map(normalize_path, sys.path):msg += '\n' + self.__id_warningeggloc = dist.locationname = dist.project_nameversion = dist.versionextras = '' # TODO: self.report_extras(req, dist)return msg % locals()__editable_msg = textwrap.dedent("""Extracted editable version of %(spec)s to %(dirname)sIf it uses setuptools in its setup script, you can activate it in"development" mode by going to that directory and running::%(python)s setup.py developSee the setuptools documentation for the "develop" command for more info.""").lstrip()def report_editable(self, spec, setup_script):dirname = os.path.dirname(setup_script)python = sys.executablereturn '\n' + self.__editable_msg % locals()def run_setup(self, setup_script, setup_base, args):sys.modules.setdefault('distutils.command.bdist_egg', bdist_egg)sys.modules.setdefault('distutils.command.egg_info', egg_info)args = list(args)if self.verbose > 2:v = 'v' * (self.verbose - 1)args.insert(0, '-' + v)elif self.verbose < 2:args.insert(0, '-q')if self.dry_run:args.insert(0, '-n')log.info("Running %s %s", setup_script[len(setup_base) + 1:], ' '.join(args))try:run_setup(setup_script, args)except SystemExit as v:raise DistutilsError("Setup script exited with %s" % (v.args[0],))def build_and_install(self, setup_script, setup_base):args = ['bdist_egg', '--dist-dir']dist_dir = tempfile.mkdtemp(prefix='egg-dist-tmp-', dir=os.path.dirname(setup_script))try:self._set_fetcher_options(os.path.dirname(setup_script))args.append(dist_dir)self.run_setup(setup_script, setup_base, args)all_eggs = Environment([dist_dir])eggs = []for key in all_eggs:for dist in 
all_eggs[key]:eggs.append(self.install_egg(dist.location, setup_base))if not eggs and not self.dry_run:log.warn("No eggs found in %s (setup script problem?)",dist_dir)return eggsfinally:rmtree(dist_dir)log.set_verbosity(self.verbose) # restore our log verbositydef _set_fetcher_options(self, base):"""When easy_install is about to run bdist_egg on a source dist, thatsource dist might have 'setup_requires' directives, requiringadditional fetching. Ensure the fetcher options given to easy_installare available to that command as well."""# find the fetch options from easy_install and write them out# to the setup.cfg file.ei_opts = self.distribution.get_option_dict('easy_install').copy()fetch_directives = ('find_links', 'site_dirs', 'index_url', 'optimize','site_dirs', 'allow_hosts',)fetch_options = {}for key, val in ei_opts.items():if key not in fetch_directives:continuefetch_options[key.replace('_', '-')] = val[1]# create a settings dictionary suitable for `edit_config`settings = dict(easy_install=fetch_options)cfg_filename = os.path.join(base, 'setup.cfg')setopt.edit_config(cfg_filename, settings)def update_pth(self, dist):if self.pth_file is None:returnfor d in self.pth_file[dist.key]: # drop old entriesif self.multi_version or d.location != dist.location:log.info("Removing %s from easy-install.pth file", d)self.pth_file.remove(d)if d.location in self.shadow_path:self.shadow_path.remove(d.location)if not self.multi_version:if dist.location in self.pth_file.paths:log.info("%s is already the active version in easy-install.pth",dist,)else:log.info("Adding %s to easy-install.pth file", dist)self.pth_file.add(dist) # add new entryif dist.location not in self.shadow_path:self.shadow_path.append(dist.location)if not self.dry_run:self.pth_file.save()if dist.key == 'setuptools':# Ensure that setuptools itself never becomes unavailable!# XXX should this check for latest version?filename = os.path.join(self.install_dir, 'setuptools.pth')if 
os.path.islink(filename):os.unlink(filename)f = open(filename, 'wt')f.write(self.pth_file.make_relative(dist.location) + '\n')f.close()def unpack_progress(self, src, dst):# Progress filter for unpackinglog.debug("Unpacking %s to %s", src, dst)return dst # only unpack-and-compile skips files for dry rundef unpack_and_compile(self, egg_path, destination):to_compile = []to_chmod = []def pf(src, dst):if dst.endswith('.py') and not src.startswith('EGG-INFO/'):to_compile.append(dst)elif dst.endswith('.dll') or dst.endswith('.so'):to_chmod.append(dst)self.unpack_progress(src, dst)return not self.dry_run and dst or Noneunpack_archive(egg_path, destination, pf)self.byte_compile(to_compile)if not self.dry_run:for f in to_chmod:mode = ((os.stat(f)[stat.ST_MODE]) | 0o555) & 0o7755chmod(f, mode)def byte_compile(self, to_compile):if sys.dont_write_bytecode:self.warn('byte-compiling is disabled, skipping.')returnfrom distutils.util import byte_compiletry:# try to make the byte compile messages quieterlog.set_verbosity(self.verbose - 1)byte_compile(to_compile, optimize=0, force=1, dry_run=self.dry_run)if self.optimize:byte_compile(to_compile, optimize=self.optimize, force=1,dry_run=self.dry_run,)finally:log.set_verbosity(self.verbose) # restore original verbosity__no_default_msg = textwrap.dedent("""bad install directory or PYTHONPATHYou are attempting to install a package to a directory that is noton PYTHONPATH and which Python does not read ".pth" files from. Theinstallation directory you specified (via --install-dir, --prefix, orthe distutils default setting) was:%sand your PYTHONPATH environment variable currently contains:%rHere are some of your options for correcting the problem:* You can choose a different installation directory, i.e., one that ison PYTHONPATH or supports .pth files* You can add the installation directory to the PYTHONPATH environmentvariable. 
(It must then also be on PYTHONPATH whenever you runPython and want to use the package(s) you are installing.)* You can set up the installation directory to support ".pth" files byusing one of the approaches described here:https://setuptools.readthedocs.io/en/latest/easy_install.html#custom-installation-locationsPlease make the appropriate changes for your system and try again.""").lstrip()def no_default_version_msg(self):template = self.__no_default_msgreturn template % (self.install_dir, os.environ.get('PYTHONPATH', ''))def install_site_py(self):"""Make sure there's a site.py in the target dir, if needed"""if self.sitepy_installed:return # already did it, or don't need tositepy = os.path.join(self.install_dir, "site.py")source = resource_string("setuptools", "site-patch.py")source = source.decode('utf-8')current = ""if os.path.exists(sitepy):log.debug("Checking existing site.py in %s", self.install_dir)with io.open(sitepy) as strm:current = strm.read()if not current.startswith('def __boot():'):raise DistutilsError("%s is not a setuptools-generated site.py; please"" remove it." 
% sitepy)

        if current != source:
            log.info("Creating %s", sitepy)
            if not self.dry_run:
                ensure_directory(sitepy)
                with io.open(sitepy, 'w', encoding='utf-8') as strm:
                    strm.write(source)
            self.byte_compile([sitepy])

        # Remember that site.py was handled so we don't redo the work.
        self.sitepy_installed = True

    def create_home_path(self):
        """Create directories under ~."""
        if not self.user:
            return
        home = convert_path(os.path.expanduser("~"))
        for name, path in six.iteritems(self.config_vars):
            # Only create directories that live under the user's home;
            # mode 0o700 keeps per-user install dirs private.
            if path.startswith(home) and not os.path.isdir(path):
                self.debug_print("os.makedirs('%s', 0o700)" % path)
                os.makedirs(path, 0o700)

    # Install-scheme templates keyed by os.name; '$base' is substituted
    # from --prefix (see _expand below).
    INSTALL_SCHEMES = dict(
        posix=dict(
            install_dir='$base/lib/python$py_version_short/site-packages',
            script_dir='$base/bin',
        ),
    )

    # Fallback scheme for platforms not listed in INSTALL_SCHEMES
    # (Windows-style layout).
    DEFAULT_SCHEME = dict(
        install_dir='$base/Lib/site-packages',
        script_dir='$base/Scripts',
    )

    def _expand(self, *attrs):
        """Expand '$var' placeholders in the named attributes using the
        'install' command's config_vars (and --prefix, if given)."""
        config_vars = self.get_finalized_command('install').config_vars

        if self.prefix:
            # Set default install_dir/scripts from --prefix
            config_vars = config_vars.copy()
            config_vars['base'] = self.prefix
            scheme = self.INSTALL_SCHEMES.get(os.name, self.DEFAULT_SCHEME)
            for attr, val in scheme.items():
                if getattr(self, attr, None) is None:
                    setattr(self, attr, val)

        from distutils.util import subst_vars

        for attr in attrs:
            val = getattr(self, attr)
            if val is not None:
                val = subst_vars(val, config_vars)
                if os.name == 'posix':
                    val = os.path.expanduser(val)
                setattr(self, attr, val)


def _pythonpath():
    # Split PYTHONPATH into entries, dropping empty strings.
    # NOTE: returns a lazy filter object on Python 3; callers only iterate it.
    items = os.environ.get('PYTHONPATH', '').split(os.pathsep)
    return filter(None, items)


def get_site_dirs():
    """Return a list of 'site' dirs"""

    sitedirs = []

    # start with PYTHONPATH
    sitedirs.extend(_pythonpath())

    prefixes = [sys.prefix]
    if sys.exec_prefix != sys.prefix:
        prefixes.append(sys.exec_prefix)
    for prefix in prefixes:
        if prefix:
            if sys.platform in ('os2emx', 'riscos'):
                sitedirs.append(os.path.join(prefix, "Lib", "site-packages"))
            elif os.sep == '/':
                sitedirs.extend([
                    os.path.join(
                        prefix,
                        "lib",
                        "python" + sys.version[:3],
                        "site-packages",
                    ),
                    os.path.join(prefix, "lib", "site-python"),
                ])
            else:
                sitedirs.extend([
                    prefix,
                    os.path.join(prefix, "lib", "site-packages"),
                ])
            if sys.platform == 'darwin':
                # for framework builds *only* we add the standard Apple
                # locations. Currently only per-user, but /Library and
                # /Network/Library could be added too
                if 'Python.framework' in prefix:
                    home = os.environ.get('HOME')
                    if home:
                        home_sp = os.path.join(
                            home,
                            'Library',
                            'Python',
                            sys.version[:3],
                            'site-packages',
                        )
                        sitedirs.append(home_sp)

    lib_paths = get_path('purelib'), get_path('platlib')
    for site_lib in lib_paths:
        if site_lib not in sitedirs:
            sitedirs.append(site_lib)

    if site.ENABLE_USER_SITE:
        sitedirs.append(site.USER_SITE)

    try:
        # site.getsitepackages() may be absent (e.g. inside some virtualenvs).
        sitedirs.extend(site.getsitepackages())
    except AttributeError:
        pass

    sitedirs = list(map(normalize_path, sitedirs))

    return sitedirs


def expand_paths(inputs):
    """Yield sys.path directories that might contain "old-style" packages"""

    seen = {}

    for dirname in inputs:
        dirname = normalize_path(dirname)
        if dirname in seen:
            continue

        seen[dirname] = 1
        if not os.path.isdir(dirname):
            continue

        files = os.listdir(dirname)
        yield dirname, files

        for name in files:
            if not name.endswith('.pth'):
                # We only care about the .pth files
                continue
            if name in ('easy-install.pth', 'setuptools.pth'):
                # Ignore .pth files that we control
                continue

            # Read the .pth file
            f = open(os.path.join(dirname, name))
            lines = list(yield_lines(f))
            f.close()

            # Yield existing non-dupe, non-import directory lines from it
            for line in lines:
                if not line.startswith("import"):
                    line = normalize_path(line.rstrip())
                    if line not in seen:
                        seen[line] = 1
                        if not os.path.isdir(line):
                            continue
                        yield line, os.listdir(line)


def extract_wininst_cfg(dist_filename):
    """Extract configuration data from a bdist_wininst .exe

    Returns a configparser.RawConfigParser, or None
    """
    f = open(dist_filename, 'rb')
    try:
        endrec = zipfile._EndRecData(f)
        if endrec is None:
            return None

        # Size of the data bdist_wininst prepends before the zip archive:
        # archive offset minus (central-dir offset + central-dir size).
        prepended = (endrec[9] - endrec[5]) - endrec[6]
        if prepended < 12:  # no wininst data here
            return None
        f.seek(prepended - 12)

        tag, cfglen, bmlen = struct.unpack("<iii", f.read(12))
        if tag not in (0x1234567A, 0x1234567B):
            return None  # not a valid tag

        f.seek(prepended - (12 + cfglen))
        init = {'version': '', 'target_version': ''}
        cfg = configparser.RawConfigParser(init)
        try:
            part = f.read(cfglen)
            # Read up to the first null byte.
            config = part.split(b'\0', 1)[0]
            # Now the config is in bytes, but for RawConfigParser, it should
            # be text, so decode it.
            config = config.decode(sys.getfilesystemencoding())
            cfg.readfp(six.StringIO(config))
        except configparser.Error:
            return None
        if not cfg.has_section('metadata') or not cfg.has_section('Setup'):
            return None
        return cfg

    finally:
        f.close()


def get_exe_prefixes(exe_filename):
    """Get exe->egg path translations for a given .exe file"""

    prefixes = [
        ('PURELIB/', ''),
        ('PLATLIB/pywin32_system32', ''),
        ('PLATLIB/', ''),
        ('SCRIPTS/', 'EGG-INFO/scripts/'),
        ('DATA/lib/site-packages', ''),
    ]
    z = zipfile.ZipFile(exe_filename)
    try:
        for info in z.infolist():
            name = info.filename
            parts = name.split('/')
            # An embedded .egg-info directory supplies the EGG-INFO mapping.
            if len(parts) == 3 and parts[2] == 'PKG-INFO':
                if parts[1].endswith('.egg-info'):
                    prefixes.insert(0, ('/'.join(parts[:2]), 'EGG-INFO/'))
                    break
            if len(parts) != 2 or not name.endswith('.pth'):
                continue
            if name.endswith('-nspkg.pth'):
                continue
            if parts[0].upper() in ('PURELIB', 'PLATLIB'):
                contents = z.read(name)
                if six.PY3:
                    contents = contents.decode()
                for pth in yield_lines(contents):
                    pth = pth.strip().replace('\\', '/')
                    if not pth.startswith('import'):
                        prefixes.append((('%s/%s/' % (parts[0], pth)), ''))
    finally:
        z.close()
    # Longest (most specific) prefixes first, matched case-insensitively.
    prefixes = [(x.lower(), y) for x, y in prefixes]
    prefixes.sort()
    prefixes.reverse()
    return prefixes


class PthDistributions(Environment):
    """A .pth file with Distribution paths in it"""

    dirty = False

    def __init__(self, filename, sitedirs=()):
        self.filename = filename
        self.sitedirs = list(map(normalize_path, sitedirs))
        self.basedir = normalize_path(os.path.dirname(self.filename))
        self._load()
        Environment.__init__(self, [], None, None)
        for path in yield_lines(self.paths):
            list(map(self.add, find_distributions(path, True)))

    def _load(self):
        """Read self.filename, populating self.paths with normalized,
        existing, non-duplicate entries; sets self.dirty if cleanup occurred.
        """
        self.paths = []
        saw_import = False
        seen = dict.fromkeys(self.sitedirs)
        if os.path.isfile(self.filename):
            f = open(self.filename, 'rt')
            for line in f:
                if line.startswith('import'):
                    saw_import = True
                    continue
                path = line.rstrip()
                self.paths.append(path)
                if not path.strip() or path.strip().startswith('#'):
                    continue
                # skip non-existent paths, in case somebody deleted a package
                # manually, and duplicate paths as well
                path = self.paths[-1] = normalize_path(
                    os.path.join(self.basedir, path)
                )
                if not os.path.exists(path) or path in seen:
                    self.paths.pop()  # skip it
                    self.dirty = True  # we cleaned up, so we're dirty now :)
                    continue
                seen[path] = 1
            f.close()

        if self.paths and not saw_import:
            self.dirty = True  # ensure anything we touch has import wrappers
        while self.paths and not self.paths[-1].strip():
            self.paths.pop()

    def save(self):
        """Write changed .pth file back to disk"""
        if not self.dirty:
            return

        rel_paths = list(map(self.make_relative, self.paths))
        if rel_paths:
            log.debug("Saving %s", self.filename)
            lines = self._wrap_lines(rel_paths)
            data = '\n'.join(lines) + '\n'

            # Replace a symlinked .pth with a real file before writing.
            if os.path.islink(self.filename):
                os.unlink(self.filename)
            with open(self.filename, 'wt') as f:
                f.write(data)

        elif os.path.exists(self.filename):
            log.debug("Deleting empty %s", self.filename)
            os.unlink(self.filename)

        self.dirty = False

    @staticmethod
    def _wrap_lines(lines):
        # Hook point: subclasses may wrap the path lines (see
        # RewritePthDistributions); the base class writes them as-is.
        return lines

    def add(self, dist):
        """Add `dist` to the distribution map"""
        new_path = (
            dist.location not in self.paths and (
                dist.location not in self.sitedirs or
                # account for '.' being in PYTHONPATH
                dist.location == os.getcwd()
            )
        )
        if new_path:
            self.paths.append(dist.location)
            self.dirty = True
        Environment.add(self, dist)

    def remove(self, dist):
        """Remove `dist` from the distribution map"""
        while dist.location in self.paths:
            self.paths.remove(dist.location)
            self.dirty = True
        Environment.remove(self, dist)

    def make_relative(self, path):
        """Return `path` rewritten relative to self.basedir, or unchanged
        if it does not live under basedir."""
        npath, last = os.path.split(normalize_path(path))
        baselen = len(self.basedir)
        parts = [last]
        sep = os.altsep == '/' and '/' or os.sep
        while len(npath) >= baselen:
            if npath == self.basedir:
                parts.append(os.curdir)
                parts.reverse()
                return sep.join(parts)
            npath, last = os.path.split(npath)
            parts.append(last)
        else:
            # Walked above basedir without matching: keep the absolute path.
            return path


class RewritePthDistributions(PthDistributions):
    """PthDistributions variant that brackets the path lines with code that
    re-inserts them at a stable position in sys.path."""

    @classmethod
    def _wrap_lines(cls, lines):
        yield cls.prelude
        for line in lines:
            yield line
        yield cls.postlude

    prelude = _one_liner("""
        import sys
        sys.__plen = len(sys.path)
        """)
    postlude = _one_liner("""
        import sys
        new = sys.path[sys.__plen:]
        del sys.path[sys.__plen:]
        p = getattr(sys, '__egginsert', 0)
        sys.path[p:p] = new
        sys.__egginsert = p + len(new)
        """)


# Opt-in switch for the sys.path rewriting technique above.
if os.environ.get('SETUPTOOLS_SYS_PATH_TECHNIQUE', 'raw') == 'rewrite':
    PthDistributions = RewritePthDistributions


def _first_line_re():
    """
    Return a regular expression based on first_line_re suitable for matching
    strings.
    """
    if isinstance(first_line_re.pattern, str):
        return first_line_re

    # first_line_re in Python >=3.1.4 and >=3.2.1 is a bytes pattern.
    return re.compile(first_line_re.pattern.decode())


def auto_chmod(func, arg, exc):
    """shutil.rmtree onerror handler: on Windows, make the file writable and
    retry; otherwise re-raise with the failing function and path appended."""
    if func in [os.unlink, os.remove] and os.name == 'nt':
        chmod(arg, stat.S_IWRITE)
        return func(arg)
    et, ev, _ = sys.exc_info()
    # NOTE(review): ev[0]/ev[1] index the exception instance, which only
    # works for BaseException subscripting on Python 2 — on Python 3 this
    # line itself raises TypeError. Verify intended Python support.
    six.reraise(et, (ev[0], ev[1] + (" %s %s" % (func, arg))))


def update_dist_caches(dist_path, fix_zipimporter_caches):
    """
    Fix any globally cached `dist_path` related data

    `dist_path` should be a path of a newly installed egg distribution (zipped
    or unzipped).

    sys.path_importer_cache contains finder objects that have been cached when
    importing data from the original distribution. Any such finders need to be
    cleared since the replacement distribution might be packaged differently,
    e.g. a zipped egg distribution might get replaced with an unzipped egg
    folder or vice versa. Having the old finders cached may then cause Python
    to attempt loading modules from the replacement distribution using an
    incorrect loader.

    zipimport.zipimporter objects are Python loaders charged with importing
    data packaged inside zip archives. If stale loaders referencing the
    original distribution, are left behind, they can fail to load modules from
    the replacement distribution. E.g. if an old zipimport.zipimporter instance
    is used to load data from a new zipped egg archive, it may cause the
    operation to attempt to locate the requested data in the wrong location -
    one indicated by the original distribution's zip archive directory
    information. Such an operation may then fail outright, e.g. report having
    read a 'bad local file header', or even worse, it may fail silently &
    return invalid data.

    zipimport._zip_directory_cache contains cached zip archive directory
    information for all existing zipimport.zipimporter instances and all such
    instances connected to the same archive share the same cached directory
    information.

    If asked, and the underlying Python implementation allows it, we can fix
    all existing zipimport.zipimporter instances instead of having to track
    them down and remove them one by one, by updating their shared cached zip
    archive directory information. This, of course, assumes that the
    replacement distribution is packaged as a zipped egg.

    If not asked to fix existing zipimport.zipimporter instances, we still do
    our best to clear any remaining zipimport.zipimporter related cached data
    that might somehow later get used when attempting to load data from the
    new distribution and thus cause such load operations to fail. Note that
    when tracking down such remaining stale data, we can not catch every
    conceivable usage from here, and we clear only those that we know of and
    have found to cause problems if left alive. Any remaining caches should be
    updated by whomever is in charge of maintaining them, i.e. they should be
    ready to handle us replacing their zip archives with new distributions at
    runtime.

    """
    # There are several other known sources of stale zipimport.zipimporter
    # instances that we do not clear here, but might if ever given a reason to
    # do so:
    # * Global setuptools pkg_resources.working_set (a.k.a. 'master working
    #   set') may contain distributions which may in turn contain their
    #   zipimport.zipimporter loaders.
    # * Several zipimport.zipimporter loaders held by local variables further
    #   up the function call stack when running the setuptools installation.
    # * Already loaded modules may have their __loader__ attribute set to the
    #   exact loader instance used when importing them. Python 3.4 docs state
    #   that this information is intended mostly for introspection and so is
    #   not expected to cause us problems.
    normalized_path = normalize_path(dist_path)
    _uncache(normalized_path, sys.path_importer_cache)
    if fix_zipimporter_caches:
        _replace_zip_directory_cache_data(normalized_path)
    else:
        # Here, even though we do not want to fix existing and now stale
        # zipimporter cache information, we still want to remove it. Related
        # to Python's zip archive directory information cache, we clear each
        # of its stale entries in two phases:
        #   1. Clear the entry so attempting to access zip archive information
        #      via any existing stale zipimport.zipimporter instances fails.
        #   2. Remove the entry from the cache so any newly constructed
        #      zipimport.zipimporter instances do not end up using old stale
        #      zip archive directory information.
        # This whole stale data removal step does not seem strictly necessary,
        # but has been left in because it was done before we started replacing
        # the zip archive directory information cache content if possible, and
        # there are no relevant unit tests that we can depend on to tell us if
        # this is really needed.
        _remove_and_clear_zip_directory_cache_data(normalized_path)


def _collect_zipimporter_cache_entries(normalized_path, cache):
    """
    Return zipimporter cache entry keys related to a given normalized path.

    Alternative path spellings (e.g. those using different character case or
    those using alternative path separators) related to the same path are
    included. Any sub-path entries are included as well, i.e. those
    corresponding to zip archives embedded in other zip archives.

    """
    result = []
    prefix_len = len(normalized_path)
    for p in cache:
        np = normalize_path(p)
        if (np.startswith(normalized_path) and
                np[prefix_len:prefix_len + 1] in (os.sep, '')):
            result.append(p)
    return result


def _update_zipimporter_cache(normalized_path, cache, updater=None):
    """
    Update zipimporter cache data for a given normalized path.

    Any sub-path entries are processed as well, i.e. those corresponding to
    zip archives embedded in other zip archives.

    Given updater is a callable taking a cache entry key and the original
    entry (after already removing the entry from the cache), and expected to
    update the entry and possibly return a new one to be inserted in its
    place. Returning None indicates that the entry should not be replaced
    with a new one. If no updater is given, the cache entries are simply
    removed without any additional processing, the same as if the updater
    simply returned None.

    """
    for p in _collect_zipimporter_cache_entries(normalized_path, cache):
        # N.B. pypy's custom zipimport._zip_directory_cache implementation
        # does not support the complete dict interface:
        # * Does not support item assignment, thus not allowing this function
        #   to be used only for removing existing cache entries.
        # * Does not support the dict.pop() method, forcing us to use the
        #   get/del patterns instead. For more detailed information see the
        #   following links:
        #   https://github.com/pypa/setuptools/issues/202#issuecomment-202913420
        #   http://bit.ly/2h9itJX
        old_entry = cache[p]
        del cache[p]
        new_entry = updater and updater(p, old_entry)
        if new_entry is not None:
            cache[p] = new_entry


def _uncache(normalized_path, cache):
    # Drop all cache entries related to the path, without replacement.
    _update_zipimporter_cache(normalized_path, cache)


def _remove_and_clear_zip_directory_cache_data(normalized_path):
    def clear_and_remove_cached_zip_archive_directory_data(path, old_entry):
        # Clear in place (phase 1); returning None removes the entry
        # from the cache (phase 2).
        old_entry.clear()

    _update_zipimporter_cache(
        normalized_path, zipimport._zip_directory_cache,
        updater=clear_and_remove_cached_zip_archive_directory_data)


# PyPy Python implementation does not allow directly writing to the
# zipimport._zip_directory_cache and so prevents us from attempting to correct
# its content. The best we can do there is clear the problematic cache content
# and have PyPy repopulate it as needed. The downside is that if there are any
# stale zipimport.zipimporter instances laying around, attempting to use them
# will fail due to not having its zip archive directory information available
# instead of being automatically corrected to use the new correct zip archive
# directory information.
if '__pypy__' in sys.builtin_module_names:
    _replace_zip_directory_cache_data = \
        _remove_and_clear_zip_directory_cache_data
else:

    def _replace_zip_directory_cache_data(normalized_path):
        def replace_cached_zip_archive_directory_data(path, old_entry):
            # N.B. In theory, we could load the zip directory information just
            # once for all updated path spellings, and then copy it locally
            # and update its contained path strings to contain the correct
            # spelling, but that seems like a way too invasive move (this
            # cache structure is not officially documented anywhere and could
            # in theory change with new Python releases) for no significant
            # benefit.
            old_entry.clear()
            zipimport.zipimporter(path)
            old_entry.update(zipimport._zip_directory_cache[path])
            return old_entry

        _update_zipimporter_cache(
            normalized_path, zipimport._zip_directory_cache,
            updater=replace_cached_zip_archive_directory_data)


def is_python(text, filename='<string>'):
    "Is this string a valid Python script?"
    try:
        compile(text, filename, 'exec')
    except (SyntaxError, TypeError):
        return False
    else:
        return True


def is_sh(executable):
    """Determine if the specified executable is a .sh (contains a #! line)"""
    try:
        with io.open(executable, encoding='latin-1') as fp:
            magic = fp.read(2)
    except (OSError, IOError):
        # NOTE(review): returns the (truthy) path when the file can't be
        # read, rather than False — presumably a deliberate "assume yes"
        # fallback; confirm callers rely on truthiness only.
        return executable
    return magic == '#!'


def nt_quote_arg(arg):
    """Quote a command line argument according to Windows parsing rules"""
    return subprocess.list2cmdline([arg])


def is_python_script(script_text, filename):
    """Is this text, as a whole, a Python script? (as opposed to shell/bat/etc.
    """
    if filename.endswith('.py') or filename.endswith('.pyw'):
        return True  # extension says it's Python
    if is_python(script_text, filename):
        return True  # it's syntactically valid Python
    if script_text.startswith('#!'):
        # It begins with a '#!' line, so check if 'python' is in it somewhere
        return 'python' in script_text.splitlines()[0].lower()

    return False  # Not any Python I can recognize


try:
    from os import chmod as _chmod
except ImportError:
    # Jython compatibility
    def _chmod(*args):
        pass


def chmod(path, mode):
    """Best-effort chmod: log and swallow os.error on failure."""
    log.debug("changing mode of %s to %o", path, mode)
    try:
        _chmod(path, mode)
    except os.error as e:
        log.debug("chmod failed: %s", e)


class CommandSpec(list):
    """
    A command spec for a #! header, specified as a list of arguments akin to
    those passed to Popen.
    """

    options = []
    split_args = dict()

    @classmethod
    def best(cls):
        """
        Choose the best CommandSpec class based on environmental conditions.
        """
        return cls

    @classmethod
    def _sys_executable(cls):
        # Honor the venv launcher override used on macOS framework builds.
        _default = os.path.normpath(sys.executable)
        return os.environ.get('__PYVENV_LAUNCHER__', _default)

    @classmethod
    def from_param(cls, param):
        """
        Construct a CommandSpec from a parameter to build_scripts, which may
        be None.
        """
        if isinstance(param, cls):
            return param
        if isinstance(param, list):
            return cls(param)
        if param is None:
            return cls.from_environment()
        # otherwise, assume it's a string.
        return cls.from_string(param)

    @classmethod
    def from_environment(cls):
        return cls([cls._sys_executable()])

    @classmethod
    def from_string(cls, string):
        """
        Construct a command spec from a simple string representing a command
        line parseable by shlex.split.
        """
        items = shlex.split(string, **cls.split_args)
        return cls(items)

    def install_options(self, script_text):
        """Capture the script's first-line options; force '-x' when the
        command line is not pure ASCII."""
        self.options = shlex.split(self._extract_options(script_text))
        cmdline = subprocess.list2cmdline(self)
        if not isascii(cmdline):
            self.options[:0] = ['-x']

    @staticmethod
    def _extract_options(orig_script):
        """
        Extract any options from the first line of the script.
        """
        first = (orig_script + '\n').splitlines()[0]
        match = _first_line_re().match(first)
        options = match.group(1) or '' if match else ''
        return options.strip()

    def as_header(self):
        return self._render(self + list(self.options))

    @staticmethod
    def _strip_quotes(item):
        _QUOTES = '"\''
        for q in _QUOTES:
            if item.startswith(q) and item.endswith(q):
                return item[1:-1]
        return item

    @staticmethod
    def _render(items):
        cmdline = subprocess.list2cmdline(
            CommandSpec._strip_quotes(item.strip()) for item in items)
        return '#!' + cmdline + '\n'


# For pbr compat; will be removed in a future version.
sys_executable = CommandSpec._sys_executable()


class WindowsCommandSpec(CommandSpec):
    # Windows command lines are not POSIX-quoted.
    split_args = dict(posix=False)


class ScriptWriter(object):
    """
    Encapsulates behavior around writing entry point scripts for console and
    gui apps.
    """

    template = textwrap.dedent(r"""
        # EASY-INSTALL-ENTRY-SCRIPT: %(spec)r,%(group)r,%(name)r
        __requires__ = %(spec)r
        import re
        import sys
        from pkg_resources import load_entry_point

        if __name__ == '__main__':
            sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
            sys.exit(
                load_entry_point(%(spec)r, %(group)r, %(name)r)()
            )
    """).lstrip()

    command_spec_class = CommandSpec

    @classmethod
    def get_script_args(cls, dist, executable=None, wininst=False):
        # for backward compatibility
        warnings.warn("Use get_args", DeprecationWarning)
        writer = (WindowsScriptWriter if wininst else ScriptWriter).best()
        header = cls.get_script_header("", executable, wininst)
        return writer.get_args(dist, header)

    @classmethod
    def get_script_header(cls, script_text, executable=None, wininst=False):
        # for backward compatibility
        warnings.warn("Use get_header", DeprecationWarning)
        if wininst:
            executable = "python.exe"
        cmd = cls.command_spec_class.best().from_param(executable)
        cmd.install_options(script_text)
        return cmd.as_header()

    @classmethod
    def get_args(cls, dist, header=None):
        """
        Yield write_script() argument tuples for a distribution's
        console_scripts and gui_scripts entry points.
        """
        if header is None:
            header = cls.get_header()
        spec = str(dist.as_requirement())
        for type_ in 'console', 'gui':
            group = type_ + '_scripts'
            for name, ep in dist.get_entry_map(group).items():
                cls._ensure_safe_name(name)
                # 'spec', 'group' and 'name' locals feed the template.
                script_text = cls.template % locals()
                args = cls._get_script_args(type_, name, header, script_text)
                for res in args:
                    yield res

    @staticmethod
    def _ensure_safe_name(name):
        """
        Prevent paths in *_scripts entry point names.
        """
        has_path_sep = re.search(r'[\\/]', name)
        if has_path_sep:
            raise ValueError("Path separators not allowed in script names")

    @classmethod
    def get_writer(cls, force_windows):
        # for backward compatibility
        warnings.warn("Use best", DeprecationWarning)
        return WindowsScriptWriter.best() if force_windows else cls.best()

    @classmethod
    def best(cls):
        """
        Select the best ScriptWriter for this environment.
        """
        if sys.platform == 'win32' or (os.name == 'java' and os._name == 'nt'):
            return WindowsScriptWriter.best()
        else:
            return cls

    @classmethod
    def _get_script_args(cls, type_, name, header, script_text):
        # Simply write the stub with no extension.
        yield (name, header + script_text)

    @classmethod
    def get_header(cls, script_text="", executable=None):
        """Create a #! line, getting options (if any) from script_text"""
        cmd = cls.command_spec_class.best().from_param(executable)
        cmd.install_options(script_text)
        return cmd.as_header()


class WindowsScriptWriter(ScriptWriter):
    command_spec_class = WindowsCommandSpec

    @classmethod
    def get_writer(cls):
        # for backward compatibility
        warnings.warn("Use best", DeprecationWarning)
        return cls.best()

    @classmethod
    def best(cls):
        """
        Select the best ScriptWriter suitable for Windows
        """
        writer_lookup = dict(
            executable=WindowsExecutableLauncherWriter,
            natural=cls,
        )
        # for compatibility, use the executable launcher by default
        launcher = os.environ.get('SETUPTOOLS_LAUNCHER', 'executable')
        return writer_lookup[launcher]

    @classmethod
    def _get_script_args(cls, type_, name, header, script_text):
        "For Windows, add a .py extension"
        ext = dict(console='.pya', gui='.pyw')[type_]
        if ext not in os.environ['PATHEXT'].lower().split(';'):
            msg = (
                "{ext} not listed in PATHEXT; scripts will not be "
                "recognized as executables."
            ).format(**locals())
            warnings.warn(msg, UserWarning)
        old = ['.pya', '.py', '-script.py', '.pyc', '.pyo', '.pyw', '.exe']
        old.remove(ext)
        header = cls._adjust_header(type_, header)
        blockers = [name + x for x in old]
        yield name + ext, header + script_text, 't', blockers

    @classmethod
    def _adjust_header(cls, type_, orig_header):
        """
        Make sure 'pythonw' is used for gui and and 'python' is used for
        console (regardless of what sys.executable is).
        """
        pattern = 'pythonw.exe'
        repl = 'python.exe'
        if type_ == 'gui':
            pattern, repl = repl, pattern
        pattern_ob = re.compile(re.escape(pattern), re.IGNORECASE)
        new_header = pattern_ob.sub(string=orig_header, repl=repl)
        return new_header if cls._use_header(new_header) else orig_header

    @staticmethod
    def _use_header(new_header):
        """
        Should _adjust_header use the replaced header?

        On non-windows systems, always use. On
        Windows systems, only use the replaced header if it resolves
        to an executable on the system.
        """
        clean_header = new_header[2:-1].strip('"')
        return sys.platform != 'win32' or find_executable(clean_header)


class WindowsExecutableLauncherWriter(WindowsScriptWriter):
    @classmethod
    def _get_script_args(cls, type_, name, header, script_text):
        """
        For Windows, add a .py extension and an .exe launcher
        """
        if type_ == 'gui':
            launcher_type = 'gui'
            ext = '-script.pyw'
            old = ['.pyw']
        else:
            launcher_type = 'cli'
            ext = '-script.py'
            old = ['.py', '.pyc', '.pyo']
        hdr = cls._adjust_header(type_, header)
        blockers = [name + x for x in old]
        yield (name + ext, hdr + script_text, 't', blockers)
        yield (
            name + '.exe', get_win_launcher(launcher_type),
            'b'  # write in binary mode
        )
        if not is_64bit():
            # install a manifest for the launcher to prevent Windows
            # from detecting it as an installer (which it will for
            #  launchers like easy_install.exe). Consider only
            #  adding a manifest for launchers detected as installers.
            #  See Distribute #143 for details.
            m_name = name + '.exe.manifest'
            yield (m_name, load_launcher_manifest(name), 't')


# for backward-compatibility
get_script_args = ScriptWriter.get_script_args
get_script_header = ScriptWriter.get_script_header


def get_win_launcher(type):
    """
    Load the Windows launcher (executable) suitable for launching a script.

    `type` should be either 'cli' or 'gui'

    Returns the executable as a byte string.
    """
    launcher_fn = '%s.exe' % type
    if is_64bit():
        launcher_fn = launcher_fn.replace(".", "-64.")
    else:
        launcher_fn = launcher_fn.replace(".", "-32.")
    return resource_string('setuptools', launcher_fn)


def load_launcher_manifest(name):
    """Load the launcher manifest template and substitute local names in."""
    manifest = pkg_resources.resource_string(__name__, 'launcher manifest.xml')
    if six.PY2:
        return manifest % vars()
    else:
        return manifest.decode('utf-8') % vars()


def rmtree(path, ignore_errors=False, onerror=auto_chmod):
    # shutil.rmtree with Windows-friendly error handling by default.
    return shutil.rmtree(path, ignore_errors, onerror)


def current_umask():
    # There is no portable read-only accessor for the umask; set-and-restore.
    tmp = os.umask(0o022)
    os.umask(tmp)
    return tmp


def bootstrap():
    # This function is called when setuptools*.egg is run using /bin/sh
    import setuptools

    argv0 = os.path.dirname(setuptools.__path__[0])
    sys.argv[0] = argv0
    sys.argv.append(argv0)
    main()


def main(argv=None, **kw):
    """Run 'setup.py -q easy_install -v <argv>' with help output patched."""
    from setuptools import setup
    from setuptools.dist import Distribution

    class DistributionWithoutHelpCommands(Distribution):
        common_usage = ""

        def _show_help(self, *args, **kw):
            with _patch_usage():
                Distribution._show_help(self, *args, **kw)

    if argv is None:
        argv = sys.argv[1:]

    with _patch_usage():
        setup(
            script_args=['-q', 'easy_install', '-v'] + argv,
            script_name=sys.argv[0] or 'easy_install',
            distclass=DistributionWithoutHelpCommands,
            **kw
        )


@contextlib.contextmanager
def _patch_usage():
    """Temporarily replace distutils.core.gen_usage with an easy_install
    specific usage message; always restored on exit."""
    import distutils.core

    USAGE = textwrap.dedent("""
        usage: %(script)s [options] requirement_or_url ...
           or: %(script)s --help
        """).lstrip()

    def gen_usage(script_name):
        return USAGE % dict(
            script=os.path.basename(script_name),
        )

    saved = distutils.core.gen_usage
    distutils.core.gen_usage = gen_usage
    try:
        yield
    finally:
        distutils.core.gen_usage = saved
"""Create a dist_info directoryAs defined in the wheel specification"""import osfrom distutils.core import Commandfrom distutils import logclass dist_info(Command):description = 'create a .dist-info directory'user_options = [('egg-base=', 'e', "directory containing .egg-info directories"" (default: top of the source tree)"),]def initialize_options(self):self.egg_base = Nonedef finalize_options(self):passdef run(self):egg_info = self.get_finalized_command('egg_info')egg_info.egg_base = self.egg_baseegg_info.finalize_options()egg_info.run()dist_info_dir = egg_info.egg_info[:-len('.egg-info')] + '.dist-info'log.info("creating '{}'".format(os.path.abspath(dist_info_dir)))bdist_wheel = self.get_finalized_command('bdist_wheel')bdist_wheel.egg2dist(egg_info.egg_info, dist_info_dir)
from distutils.util import convert_path
from distutils import log
from distutils.errors import DistutilsError, DistutilsOptionError
import os
import glob
import io

from setuptools.extern import six

from pkg_resources import Distribution, PathMetadata, normalize_path
from setuptools.command.easy_install import easy_install
from setuptools import namespaces
import setuptools


class develop(namespaces.DevelopInstaller, easy_install):
    """Set up package for development"""

    description = "install package in 'development mode'"

    user_options = easy_install.user_options + [
        ("uninstall", "u", "Uninstall this source package"),
        ("egg-path=", None, "Set the path to be used in the .egg-link file"),
    ]

    boolean_options = easy_install.boolean_options + ['uninstall']

    command_consumes_arguments = False  # override base

    def run(self):
        # -u/--uninstall removes the egg-link and namespace stubs;
        # otherwise perform a development ("editable") install.
        if self.uninstall:
            self.multi_version = True
            self.uninstall_link()
            self.uninstall_namespaces()
        else:
            self.install_for_development()
        self.warn_deprecated_options()

    def initialize_options(self):
        self.uninstall = None
        self.egg_path = None
        easy_install.initialize_options(self)
        self.setup_path = None
        self.always_copy_from = '.'  # always copy eggs installed in curdir

    def finalize_options(self):
        """Resolve egg paths and build the in-place Distribution object."""
        ei = self.get_finalized_command("egg_info")
        if ei.broken_egg_info:
            template = "Please rename %r to %r before using 'develop'"
            args = ei.egg_info, ei.broken_egg_info
            raise DistutilsError(template % args)
        self.args = [ei.egg_name]

        easy_install.finalize_options(self)
        self.expand_basedirs()
        self.expand_dirs()
        # pick up setup-dir .egg files only: no .egg-info
        self.package_index.scan(glob.glob('*.egg'))

        egg_link_fn = ei.egg_name + '.egg-link'
        self.egg_link = os.path.join(self.install_dir, egg_link_fn)
        self.egg_base = ei.egg_base
        if self.egg_path is None:
            self.egg_path = os.path.abspath(ei.egg_base)

        target = normalize_path(self.egg_base)
        egg_path = normalize_path(
            os.path.join(self.install_dir, self.egg_path)
        )
        if egg_path != target:
            raise DistutilsOptionError(
                "--egg-path must be a relative path from the install"
                " directory to " + target
            )

        # Make a distribution for the package's source
        self.dist = Distribution(
            target,
            PathMetadata(target, os.path.abspath(ei.egg_info)),
            project_name=ei.egg_name
        )

        self.setup_path = self._resolve_setup_path(
            self.egg_base,
            self.install_dir,
            self.egg_path,
        )

    @staticmethod
    def _resolve_setup_path(egg_base, install_dir, egg_path):
        """
        Generate a path from egg_base back to '.' where the
        setup script resides and ensure that path points to the
        setup path from $install_dir/$egg_path.
        """
        path_to_setup = egg_base.replace(os.sep, '/').rstrip('/')
        if path_to_setup != os.curdir:
            # One '../' per path component between egg_base and '.'.
            path_to_setup = '../' * (path_to_setup.count('/') + 1)
        resolved = normalize_path(
            os.path.join(install_dir, egg_path, path_to_setup)
        )
        if resolved != normalize_path(os.curdir):
            raise DistutilsOptionError(
                "Can't get a consistent path to setup script from"
                " installation directory", resolved, normalize_path(os.curdir)
            )
        return path_to_setup

    def install_for_development(self):
        """Build in place (or into build_lib for 2to3 projects) and link the
        project into the installation directory via an .egg-link file."""
        if six.PY3 and getattr(self.distribution, 'use_2to3', False):
            # If we run 2to3 we can not do this inplace:

            # Ensure metadata is up-to-date
            self.reinitialize_command('build_py', inplace=0)
            self.run_command('build_py')
            bpy_cmd = self.get_finalized_command("build_py")
            build_path = normalize_path(bpy_cmd.build_lib)

            # Build extensions
            self.reinitialize_command('egg_info', egg_base=build_path)
            self.run_command('egg_info')

            self.reinitialize_command('build_ext', inplace=0)
            self.run_command('build_ext')

            # Fixup egg-link and easy-install.pth
            ei_cmd = self.get_finalized_command("egg_info")
            self.egg_path = build_path
            self.dist.location = build_path
            # XXX
            self.dist._provider = PathMetadata(build_path, ei_cmd.egg_info)
        else:
            # Without 2to3 inplace works fine:
            self.run_command('egg_info')

            # Build extensions in-place
            self.reinitialize_command('build_ext', inplace=1)
            self.run_command('build_ext')

        self.install_site_py()  # ensure that target dir is site-safe
        if setuptools.bootstrap_install_from:
            self.easy_install(setuptools.bootstrap_install_from)
            setuptools.bootstrap_install_from = None

        self.install_namespaces()

        # create an .egg-link in the installation dir, pointing to our egg
        log.info("Creating %s (link to %s)", self.egg_link, self.egg_base)
        if not self.dry_run:
            with open(self.egg_link, "w") as f:
                f.write(self.egg_path + "\n" + self.setup_path)
        # postprocess the installed distro, fixing up .pth, installing scripts,
        # and handling requirements
        self.process_distribution(None, self.dist, not self.no_deps)

    def uninstall_link(self):
        """Remove the .egg-link file and the .pth entry pointing at us."""
        if os.path.exists(self.egg_link):
            log.info("Removing %s (link to %s)", self.egg_link, self.egg_base)
            egg_link_file = open(self.egg_link)
            contents = [line.rstrip() for line in egg_link_file]
            egg_link_file.close()
            # Refuse to delete a link we didn't write.
            if contents not in ([self.egg_path],
                                [self.egg_path, self.setup_path]):
                log.warn("Link points to %s: uninstall aborted", contents)
                return
            if not self.dry_run:
                os.unlink(self.egg_link)
        if not self.dry_run:
            self.update_pth(self.dist)  # remove any .pth link to us
        if self.distribution.scripts:
            # XXX should also check for entry point scripts!
            log.warn("Note: you must uninstall or replace scripts manually!")

    def install_egg_scripts(self, dist):
        if dist is not self.dist:
            # Installing a dependency, so fall back to normal behavior
            return easy_install.install_egg_scripts(self, dist)

        # create wrapper scripts in the script dir, pointing to dist.scripts

        # new-style...
        self.install_wrapper_scripts(dist)

        # ...and old-style
        for script_name in self.distribution.scripts or []:
            script_path = os.path.abspath(convert_path(script_name))
            script_name = os.path.basename(script_path)
            with io.open(script_path) as strm:
                script_text = strm.read()
            self.install_script(dist, script_name, script_text, script_path)

    def install_wrapper_scripts(self, dist):
        # Wrap the dist so scripts require the project by name only,
        # not a pinned version.
        dist = VersionlessRequirement(dist)
        return easy_install.install_wrapper_scripts(self, dist)


class VersionlessRequirement(object):
    """
    Adapt a pkg_resources.Distribution to simply return the project
    name as the 'requirement' so that scripts will work across
    multiple versions.

    >>> dist = Distribution(project_name='foo', version='1.0')
    >>> str(dist.as_requirement())
    'foo==1.0'
    >>> adapted_dist = VersionlessRequirement(dist)
    >>> str(adapted_dist.as_requirement())
    'foo'
    """

    def __init__(self, dist):
        self.__dist = dist

    def __getattr__(self, name):
        # Delegate everything else to the wrapped Distribution.
        return getattr(self.__dist, name)

    def as_requirement(self):
        return self.project_name
from glob import glob
from distutils.util import convert_path
import distutils.command.build_py as orig
import os
import fnmatch
import textwrap
import io
import distutils.errors
import itertools

from setuptools.extern import six
from setuptools.extern.six.moves import map, filter, filterfalse

try:
    from setuptools.lib2to3_ex import Mixin2to3
except ImportError:
    # lib2to3 unavailable: provide a no-op stand-in with the same interface
    class Mixin2to3:
        def run_2to3(self, files, doctests=True):
            "do nothing"


class build_py(orig.build_py, Mixin2to3):
    """Enhanced 'build_py' command that includes data files with packages

    The data files are specified via a 'package_data' argument to 'setup()'.
    See 'setuptools.dist.Distribution' for more details.

    Also, this version of the 'build_py' command allows you to specify both
    'py_modules' and 'packages' in the same setup operation.
    """

    def finalize_options(self):
        orig.build_py.finalize_options(self)
        self.package_data = self.distribution.package_data
        self.exclude_package_data = (self.distribution.exclude_package_data or
                                     {})
        # drop any cached value so __getattr__ recomputes data_files lazily
        if 'data_files' in self.__dict__:
            del self.__dict__['data_files']
        self.__updated_files = []
        self.__doctests_2to3 = []

    def run(self):
        """Build modules, packages, and copy data files to build directory"""
        if not self.py_modules and not self.packages:
            return

        if self.py_modules:
            self.build_modules()

        if self.packages:
            self.build_packages()
            self.build_package_data()

        self.run_2to3(self.__updated_files, False)
        self.run_2to3(self.__updated_files, True)
        self.run_2to3(self.__doctests_2to3, True)

        # Only compile actual .py files, using our base class' idea of what our
        # output files are.
        self.byte_compile(orig.build_py.get_outputs(self, include_bytecode=0))

    def __getattr__(self, attr):
        "lazily compute data files"
        if attr == 'data_files':
            self.data_files = self._get_data_files()
            return self.data_files
        return orig.build_py.__getattr__(self, attr)

    def build_module(self, module, module_file, package):
        if six.PY2 and isinstance(package, six.string_types):
            # avoid errors on Python 2 when unicode is passed (#190)
            package = package.split('.')
        outfile, copied = orig.build_py.build_module(self, module, module_file,
                                                     package)
        if copied:
            # track freshly-copied files so run() can feed them to 2to3
            self.__updated_files.append(outfile)
        return outfile, copied

    def _get_data_files(self):
        """Generate list of '(package,src_dir,build_dir,filenames)' tuples"""
        self.analyze_manifest()
        return list(map(self._get_pkg_data_files, self.packages or ()))

    def _get_pkg_data_files(self, package):
        # Locate package source directory
        src_dir = self.get_package_dir(package)

        # Compute package build directory
        build_dir = os.path.join(*([self.build_lib] + package.split('.')))

        # Strip directory from globbed filenames
        filenames = [
            os.path.relpath(file, src_dir)
            for file in self.find_data_files(package, src_dir)
        ]
        return package, src_dir, build_dir, filenames

    def find_data_files(self, package, src_dir):
        """Return filenames for package's data files in 'src_dir'"""
        patterns = self._get_platform_patterns(
            self.package_data,
            package,
            src_dir,
        )
        globs_expanded = map(glob, patterns)
        # flatten the expanded globs into an iterable of matches
        globs_matches = itertools.chain.from_iterable(globs_expanded)
        glob_files = filter(os.path.isfile, globs_matches)
        files = itertools.chain(
            self.manifest_files.get(package, []),
            glob_files,
        )
        return self.exclude_data_files(package, src_dir, files)

    def build_package_data(self):
        """Copy data files into build directory"""
        for package, src_dir, build_dir, filenames in self.data_files:
            for filename in filenames:
                target = os.path.join(build_dir, filename)
                self.mkpath(os.path.dirname(target))
                srcfile = os.path.join(src_dir, filename)
                outf, copied = self.copy_file(srcfile, target)
                srcfile = os.path.abspath(srcfile)
                # remember doctest files that must be run through 2to3
                if (copied and
                        srcfile in self.distribution.convert_2to3_doctests):
                    self.__doctests_2to3.append(outf)

    def analyze_manifest(self):
        # Map package name -> manifest files belonging to it, so that
        # include_package_data picks up MANIFEST-listed data files.
        self.manifest_files = mf = {}
        if not self.distribution.include_package_data:
            return
        src_dirs = {}
        for package in self.packages or ():
            # Locate package source directory
            src_dirs[assert_relative(self.get_package_dir(package))] = package

        self.run_command('egg_info')
        ei_cmd = self.get_finalized_command('egg_info')
        for path in ei_cmd.filelist.files:
            d, f = os.path.split(assert_relative(path))
            prev = None
            oldf = f
            # walk up the directory tree until we hit a known package dir
            while d and d != prev and d not in src_dirs:
                prev = d
                d, df = os.path.split(d)
                f = os.path.join(df, f)
            if d in src_dirs:
                if path.endswith('.py') and f == oldf:
                    continue  # it's a module, not data
                mf.setdefault(src_dirs[d], []).append(path)

    def get_data_files(self):
        pass  # Lazily compute data files in _get_data_files() function.

    def check_package(self, package, package_dir):
        """Check namespace packages' __init__ for declare_namespace"""
        try:
            return self.packages_checked[package]
        except KeyError:
            pass

        init_py = orig.build_py.check_package(self, package, package_dir)
        self.packages_checked[package] = init_py

        if not init_py or not self.distribution.namespace_packages:
            return init_py

        # only validate packages that are (or contain) declared namespaces
        for pkg in self.distribution.namespace_packages:
            if pkg == package or pkg.startswith(package + '.'):
                break
        else:
            return init_py

        with io.open(init_py, 'rb') as f:
            contents = f.read()
        if b'declare_namespace' not in contents:
            raise distutils.errors.DistutilsError(
                "Namespace package problem: %s is a namespace package, but "
                "its\n__init__.py does not call declare_namespace()! Please "
                'fix it.\n(See the setuptools manual under '
                '"Namespace Packages" for details.)\n"' % (package,)
            )
        return init_py

    def initialize_options(self):
        self.packages_checked = {}
        orig.build_py.initialize_options(self)

    def get_package_dir(self, package):
        res = orig.build_py.get_package_dir(self, package)
        # honor an alternate source root (used by sdist-generated setups)
        if self.distribution.src_root is not None:
            return os.path.join(self.distribution.src_root, res)
        return res

    def exclude_data_files(self, package, src_dir, files):
        """Filter filenames for package's data files in 'src_dir'"""
        files = list(files)
        patterns = self._get_platform_patterns(
            self.exclude_package_data,
            package,
            src_dir,
        )
        match_groups = (
            fnmatch.filter(files, pattern)
            for pattern in patterns
        )
        # flatten the groups of matches into an iterable of matches
        matches = itertools.chain.from_iterable(match_groups)
        bad = set(matches)
        keepers = (
            fn
            for fn in files
            if fn not in bad
        )
        # ditch dupes
        return list(_unique_everseen(keepers))

    @staticmethod
    def _get_platform_patterns(spec, package, src_dir):
        """
        yield platform-specific path patterns (suitable for glob
        or fn_match) from a glob-based spec (such as
        self.package_data or self.exclude_package_data)
        matching package in src_dir.
        """
        raw_patterns = itertools.chain(
            spec.get('', []),
            spec.get(package, []),
        )
        return (
            # Each pattern has to be converted to a platform-specific path
            os.path.join(src_dir, convert_path(pattern))
            for pattern in raw_patterns
        )


# from Python docs
def _unique_everseen(iterable, key=None):
    "List unique elements, preserving order. Remember all elements ever seen."
    # unique_everseen('AAAABBBCCDAABBB') --> A B C D
    # unique_everseen('ABBCcAD', str.lower) --> A B C D
    seen = set()
    seen_add = seen.add
    if key is None:
        for element in filterfalse(seen.__contains__, iterable):
            seen_add(element)
            yield element
    else:
        for element in iterable:
            k = key(element)
            if k not in seen:
                seen_add(k)
                yield element


def assert_relative(path):
    # Return *path* unchanged if relative; raise DistutilsSetupError otherwise.
    if not os.path.isabs(path):
        return path
    from distutils.errors import DistutilsSetupError

    msg = textwrap.dedent("""
        Error: setup script specifies an absolute path:

            %s

        setup() arguments must *always* be /-separated paths relative to the
        setup.py directory, *never* absolute paths.
        """).lstrip() % path
    raise DistutilsSetupError(msg)
import os
import sys
import itertools
import imp
from distutils.command.build_ext import build_ext as _du_build_ext
from distutils.file_util import copy_file
from distutils.ccompiler import new_compiler
from distutils.sysconfig import customize_compiler, get_config_var
from distutils.errors import DistutilsError
from distutils import log

from setuptools.extension import Library
from setuptools.extern import six

try:
    # Attempt to use Cython for building extensions, if available
    from Cython.Distutils.build_ext import build_ext as _build_ext
except ImportError:
    _build_ext = _du_build_ext

# make sure _config_vars is initialized
get_config_var("LDSHARED")
from distutils.sysconfig import _config_vars as _CONFIG_VARS


def _customize_compiler_for_shlib(compiler):
    # Configure *compiler* for building shared libraries (not extensions).
    if sys.platform == "darwin":
        # building .dylib requires additional compiler flags on OSX; here we
        # temporarily substitute the pyconfig.h variables so that distutils'
        # 'customize_compiler' uses them before we build the shared libraries.
        tmp = _CONFIG_VARS.copy()
        try:
            # XXX Help! I don't have any idea whether these are right...
            _CONFIG_VARS['LDSHARED'] = (
                "gcc -Wl,-x -dynamiclib -undefined dynamic_lookup")
            _CONFIG_VARS['CCSHARED'] = " -dynamiclib"
            _CONFIG_VARS['SO'] = ".dylib"
            customize_compiler(compiler)
        finally:
            # restore the real config vars no matter what
            _CONFIG_VARS.clear()
            _CONFIG_VARS.update(tmp)
    else:
        customize_compiler(compiler)


# Platform capability flags: whether dlopen-style stub loaders are usable
# and whether shared (vs. static) library builds are the default.
have_rtld = False
use_stubs = False
libtype = 'shared'

if sys.platform == "darwin":
    use_stubs = True
elif os.name != 'nt':
    try:
        import dl
        use_stubs = have_rtld = hasattr(dl, 'RTLD_NOW')
    except ImportError:
        pass

# helper: include a stub-loader line only when the 'dl' module is usable
if_dl = lambda s: s if have_rtld else ''


def get_abi3_suffix():
    """Return the file extension for an abi3-compliant Extension()"""
    for suffix, _, _ in (s for s in imp.get_suffixes() if s[2] == imp.C_EXTENSION):
        if '.abi3' in suffix:  # Unix
            return suffix
        elif suffix == '.pyd':  # Windows
            return suffix


class build_ext(_build_ext):
    def run(self):
        """Build extensions in build directory, then copy if --inplace"""
        old_inplace, self.inplace = self.inplace, 0
        _build_ext.run(self)
        self.inplace = old_inplace
        if old_inplace:
            self.copy_extensions_to_source()

    def copy_extensions_to_source(self):
        build_py = self.get_finalized_command('build_py')
        for ext in self.extensions:
            fullname = self.get_ext_fullname(ext.name)
            filename = self.get_ext_filename(fullname)
            modpath = fullname.split('.')
            package = '.'.join(modpath[:-1])
            package_dir = build_py.get_package_dir(package)
            dest_filename = os.path.join(package_dir,
                                         os.path.basename(filename))
            src_filename = os.path.join(self.build_lib, filename)

            # Always copy, even if source is older than destination, to ensure
            # that the right extensions for the current Python/platform are
            # used.
            copy_file(
                src_filename, dest_filename, verbose=self.verbose,
                dry_run=self.dry_run
            )
            if ext._needs_stub:
                self.write_stub(package_dir or os.curdir, ext, True)

    def get_ext_filename(self, fullname):
        filename = _build_ext.get_ext_filename(self, fullname)
        if fullname in self.ext_map:
            ext = self.ext_map[fullname]
            use_abi3 = (
                six.PY3
                and getattr(ext, 'py_limited_api')
                and get_abi3_suffix()
            )
            if use_abi3:
                # swap the version-specific suffix for the stable-ABI one
                so_ext = _get_config_var_837('EXT_SUFFIX')
                filename = filename[:-len(so_ext)]
                filename = filename + get_abi3_suffix()
            if isinstance(ext, Library):
                fn, ext = os.path.splitext(filename)
                return self.shlib_compiler.library_filename(fn, libtype)
            elif use_stubs and ext._links_to_dynamic:
                d, fn = os.path.split(filename)
                return os.path.join(d, 'dl-' + fn)
        return filename

    def initialize_options(self):
        _build_ext.initialize_options(self)
        self.shlib_compiler = None
        self.shlibs = []
        self.ext_map = {}

    def finalize_options(self):
        _build_ext.finalize_options(self)
        self.extensions = self.extensions or []
        self.check_extensions_list(self.extensions)
        self.shlibs = [ext for ext in self.extensions
                       if isinstance(ext, Library)]
        if self.shlibs:
            self.setup_shlib_compiler()
        for ext in self.extensions:
            ext._full_name = self.get_ext_fullname(ext.name)
        for ext in self.extensions:
            fullname = ext._full_name
            self.ext_map[fullname] = ext

            # distutils 3.1 will also ask for module names
            # XXX what to do with conflicts?
            self.ext_map[fullname.split('.')[-1]] = ext

            ltd = self.shlibs and self.links_to_dynamic(ext) or False
            ns = ltd and use_stubs and not isinstance(ext, Library)
            ext._links_to_dynamic = ltd
            ext._needs_stub = ns
            filename = ext._file_name = self.get_ext_filename(fullname)
            libdir = os.path.dirname(os.path.join(self.build_lib, filename))
            if ltd and libdir not in ext.library_dirs:
                ext.library_dirs.append(libdir)
            if ltd and use_stubs and os.curdir not in ext.runtime_library_dirs:
                ext.runtime_library_dirs.append(os.curdir)

    def setup_shlib_compiler(self):
        compiler = self.shlib_compiler = new_compiler(
            compiler=self.compiler, dry_run=self.dry_run, force=self.force
        )
        _customize_compiler_for_shlib(compiler)

        if self.include_dirs is not None:
            compiler.set_include_dirs(self.include_dirs)
        if self.define is not None:
            # 'define' option is a list of (name,value) tuples
            for (name, value) in self.define:
                compiler.define_macro(name, value)
        if self.undef is not None:
            for macro in self.undef:
                compiler.undefine_macro(macro)
        if self.libraries is not None:
            compiler.set_libraries(self.libraries)
        if self.library_dirs is not None:
            compiler.set_library_dirs(self.library_dirs)
        if self.rpath is not None:
            compiler.set_runtime_library_dirs(self.rpath)
        if self.link_objects is not None:
            compiler.set_link_objects(self.link_objects)

        # hack so distutils' build_extension() builds a library instead
        compiler.link_shared_object = link_shared_object.__get__(compiler)

    def get_export_symbols(self, ext):
        if isinstance(ext, Library):
            return ext.export_symbols
        return _build_ext.get_export_symbols(self, ext)

    def build_extension(self, ext):
        ext._convert_pyx_sources_to_lang()
        _compiler = self.compiler
        try:
            # Library instances build with the shlib compiler instead
            if isinstance(ext, Library):
                self.compiler = self.shlib_compiler
            _build_ext.build_extension(self, ext)
            if ext._needs_stub:
                cmd = self.get_finalized_command('build_py').build_lib
                self.write_stub(cmd, ext)
        finally:
            self.compiler = _compiler

    def links_to_dynamic(self, ext):
        """Return true if 'ext' links to a dynamic lib in the same package"""
        # XXX this should check to ensure the lib is actually being built
        # XXX as dynamic, and not just using a locally-found version or a
        # XXX static-compiled version
        libnames = dict.fromkeys([lib._full_name for lib in self.shlibs])
        pkg = '.'.join(ext._full_name.split('.')[:-1] + [''])
        return any(pkg + libname in libnames for libname in ext.libraries)

    def get_outputs(self):
        return _build_ext.get_outputs(self) + self.__get_stubs_outputs()

    def __get_stubs_outputs(self):
        # assemble the base name for each extension that needs a stub
        ns_ext_bases = (
            os.path.join(self.build_lib, *ext._full_name.split('.'))
            for ext in self.extensions
            if ext._needs_stub
        )
        # pair each base with the extension
        pairs = itertools.product(ns_ext_bases, self.__get_output_extensions())
        return list(base + fnext for base, fnext in pairs)

    def __get_output_extensions(self):
        yield '.py'
        yield '.pyc'
        if self.get_finalized_command('build_py').optimize:
            yield '.pyo'

    def write_stub(self, output_dir, ext, compile=False):
        log.info("writing stub loader for %s to %s", ext._full_name,
                 output_dir)
        stub_file = (os.path.join(output_dir, *ext._full_name.split('.')) +
                     '.py')
        if compile and os.path.exists(stub_file):
            raise DistutilsError(stub_file + " already exists! Please delete.")
        if not self.dry_run:
            f = open(stub_file, 'w')
            f.write(
                '\n'.join([
                    "def __bootstrap__():",
                    "   global __bootstrap__, __file__, __loader__",
                    "   import sys, os, pkg_resources, imp" + if_dl(", dl"),
                    "   __file__ = pkg_resources.resource_filename"
                    "(__name__,%r)"
                    % os.path.basename(ext._file_name),
                    "   del __bootstrap__",
                    "   if '__loader__' in globals():",
                    "       del __loader__",
                    if_dl("   old_flags = sys.getdlopenflags()"),
                    "   old_dir = os.getcwd()",
                    "   try:",
                    "     os.chdir(os.path.dirname(__file__))",
                    if_dl("     sys.setdlopenflags(dl.RTLD_NOW)"),
                    "     imp.load_dynamic(__name__,__file__)",
                    "   finally:",
                    if_dl("     sys.setdlopenflags(old_flags)"),
                    "     os.chdir(old_dir)",
                    "__bootstrap__()",
                    ""  # terminal \n
                ])
            )
            f.close()
        if compile:
            from distutils.util import byte_compile

            byte_compile([stub_file], optimize=0,
                         force=True, dry_run=self.dry_run)
            optimize = self.get_finalized_command('install_lib').optimize
            if optimize > 0:
                byte_compile([stub_file], optimize=optimize,
                             force=True, dry_run=self.dry_run)
            # the .py stub is only needed as compile input; remove it
            if os.path.exists(stub_file) and not self.dry_run:
                os.unlink(stub_file)


if use_stubs or os.name == 'nt':
    # Build shared libraries
    #
    def link_shared_object(
            self, objects, output_libname, output_dir=None, libraries=None,
            library_dirs=None, runtime_library_dirs=None, export_symbols=None,
            debug=0, extra_preargs=None, extra_postargs=None, build_temp=None,
            target_lang=None):
        self.link(
            self.SHARED_LIBRARY, objects, output_libname,
            output_dir, libraries, library_dirs, runtime_library_dirs,
            export_symbols, debug, extra_preargs, extra_postargs,
            build_temp, target_lang
        )
else:
    # Build static libraries everywhere else
    libtype = 'static'

    def link_shared_object(
            self, objects, output_libname, output_dir=None, libraries=None,
            library_dirs=None, runtime_library_dirs=None, export_symbols=None,
            debug=0, extra_preargs=None, extra_postargs=None, build_temp=None,
            target_lang=None):
        # XXX we need to either disallow these attrs on Library instances,
        # or warn/abort here if set, or something...
        # libraries=None, library_dirs=None, runtime_library_dirs=None,
        # export_symbols=None, extra_preargs=None, extra_postargs=None,
        # build_temp=None

        assert output_dir is None  # distutils build_ext doesn't pass this
        output_dir, filename = os.path.split(output_libname)
        basename, ext = os.path.splitext(filename)
        if self.library_filename("x").startswith('lib'):
            # strip 'lib' prefix; this is kludgy if some platform uses
            # a different prefix
            basename = basename[3:]

        self.create_static_lib(
            objects, basename, output_dir, debug, target_lang
        )


def _get_config_var_837(name):
    """
    In https://github.com/pypa/setuptools/pull/837, we discovered
    Python 3.3.0 exposes the extension suffix under the name 'SO'.
    """
    if sys.version_info < (3, 3, 1):
        name = 'SO'
    return get_config_var(name)
import distutils.command.build_clib as orig
from distutils.errors import DistutilsSetupError
from distutils import log

from setuptools.dep_util import newer_pairwise_group


class build_clib(orig.build_clib):
    """
    Override the default build_clib behaviour to do the following:

    1. Implement a rudimentary timestamp-based dependency system
       so 'compile()' doesn't run every time.
    2. Add more keys to the 'build_info' dictionary:
        * obj_deps - specify dependencies for each object compiled.
                     this should be a dictionary mapping a key
                     with the source filename to a list of
                     dependencies. Use an empty string for global
                     dependencies.
        * cflags   - specify a list of additional flags to pass to
                     the compiler.
    """

    def build_libraries(self, libraries):
        for lib_name, build_info in libraries:
            sources = build_info.get('sources')
            if sources is None or not isinstance(sources, (list, tuple)):
                raise DistutilsSetupError(
                    "in 'libraries' option (library '%s'), "
                    "'sources' must be present and must be "
                    "a list of source filenames" % lib_name)
            sources = list(sources)

            log.info("building '%s' library", lib_name)

            # 'obj_deps' maps each source file (or '' for shared entries)
            # to the extra files it depends on; validate its shape first.
            obj_deps = build_info.get('obj_deps', dict())
            if not isinstance(obj_deps, dict):
                raise DistutilsSetupError(
                    "in 'libraries' option (library '%s'), "
                    "'obj_deps' must be a dictionary of "
                    "type 'source: list'" % lib_name)

            # Dependencies recorded under the '' key apply to every source.
            global_deps = obj_deps.get('', list())
            if not isinstance(global_deps, (list, tuple)):
                raise DistutilsSetupError(
                    "in 'libraries' option (library '%s'), "
                    "'obj_deps' must be a dictionary of "
                    "type 'source: list'" % lib_name)

            # Build one dependency list per source for newer_pairwise_group;
            # each source is automatically a dependency of its own object.
            dependencies = []
            for source in sources:
                per_source = [source]
                per_source.extend(global_deps)
                extra_deps = obj_deps.get(source, list())
                if not isinstance(extra_deps, (list, tuple)):
                    raise DistutilsSetupError(
                        "in 'libraries' option (library '%s'), "
                        "'obj_deps' must be a dictionary of "
                        "type 'source: list'" % lib_name)
                per_source.extend(extra_deps)
                dependencies.append(per_source)

            expected_objects = self.compiler.object_filenames(
                sources,
                output_dir=self.build_temp,
            )

            # Recompile only when some object is older than its dependencies.
            if newer_pairwise_group(dependencies, expected_objects) != ([], []):
                # First, compile the source code to object files in the library
                # directory.  (This should probably change to putting object
                # files in a temporary build directory.)
                self.compiler.compile(
                    sources,
                    output_dir=self.build_temp,
                    macros=build_info.get('macros'),
                    include_dirs=build_info.get('include_dirs'),
                    extra_postargs=build_info.get('cflags'),
                    debug=self.debug,
                )

                # Now "link" the object files together into a static library.
                # (On Unix at least, this isn't really linking -- it just
                # builds an archive.  Whatever.)
                self.compiler.create_static_lib(
                    expected_objects,
                    lib_name,
                    output_dir=self.build_clib,
                    debug=self.debug,
                )
import distutils.command.bdist_wininst as origclass bdist_wininst(orig.bdist_wininst):def reinitialize_command(self, command, reinit_subcommands=0):"""Supplement reinitialize_command to work aroundhttp://bugs.python.org/issue20819"""cmd = self.distribution.reinitialize_command(command, reinit_subcommands)if command in ('install', 'install_lib'):cmd.install_lib = Nonereturn cmddef run(self):self._is_running = Truetry:orig.bdist_wininst.run(self)finally:self._is_running = False
import distutils.command.bdist_rpm as origclass bdist_rpm(orig.bdist_rpm):"""Override the default bdist_rpm behavior to do the following:1. Run egg_info to ensure the name and version are properly calculated.2. Always run 'install' using --single-version-externally-managed todisable eggs in RPM distributions.3. Replace dash with underscore in the version numbers for better RPMcompatibility."""def run(self):# ensure distro name is up-to-dateself.run_command('egg_info')orig.bdist_rpm.run(self)def _make_spec_file(self):version = self.distribution.get_version()rpmversion = version.replace('-', '_')spec = orig.bdist_rpm._make_spec_file(self)line23 = '%define version ' + versionline24 = '%define version ' + rpmversionspec = [line.replace("Source0: %{name}-%{version}.tar","Source0: %{name}-%{unmangled_version}.tar").replace("setup.py install ","setup.py install --single-version-externally-managed ").replace("%setup","%setup -n %{name}-%{unmangled_version}").replace(line23, line24)for line in spec]insert_loc = spec.index(line24) + 1unmangled_version = "%define unmangled_version " + versionspec.insert(insert_loc, unmangled_version)return spec
"""setuptools.command.bdist_eggBuild .egg distributions"""from distutils.errors import DistutilsSetupErrorfrom distutils.dir_util import remove_tree, mkpathfrom distutils import logfrom types import CodeTypeimport sysimport osimport reimport textwrapimport marshalfrom setuptools.extern import sixfrom pkg_resources import get_build_platform, Distribution, ensure_directoryfrom pkg_resources import EntryPointfrom setuptools.extension import Libraryfrom setuptools import Commandtry:# Python 2.7 or >=3.2from sysconfig import get_path, get_python_versiondef _get_purelib():return get_path("purelib")except ImportError:from distutils.sysconfig import get_python_lib, get_python_versiondef _get_purelib():return get_python_lib(False)def strip_module(filename):if '.' in filename:filename = os.path.splitext(filename)[0]if filename.endswith('module'):filename = filename[:-6]return filenamedef sorted_walk(dir):"""Do os.walk in a reproducible way,independent of indeterministic filesystem readdir order"""for base, dirs, files in os.walk(dir):dirs.sort()files.sort()yield base, dirs, filesdef write_stub(resource, pyfile):_stub_template = textwrap.dedent("""def __bootstrap__():global __bootstrap__, __loader__, __file__import sys, pkg_resources, imp__file__ = pkg_resources.resource_filename(__name__, %r)__loader__ = None; del __bootstrap__, __loader__imp.load_dynamic(__name__,__file__)__bootstrap__()""").lstrip()with open(pyfile, 'w') as f:f.write(_stub_template % resource)class bdist_egg(Command):description = "create an \"egg\" distribution"user_options = [('bdist-dir=', 'b',"temporary directory for creating the distribution"),('plat-name=', 'p', "platform name to embed in generated filenames ""(default: %s)" % get_build_platform()),('exclude-source-files', None,"remove all .py files from the generated egg"),('keep-temp', 'k',"keep the pseudo-installation tree around after " +"creating the distribution archive"),('dist-dir=', 'd',"directory to put final built distributions 
in"),('skip-build', None,"skip rebuilding everything (for testing/debugging)"),]boolean_options = ['keep-temp', 'skip-build', 'exclude-source-files']def initialize_options(self):self.bdist_dir = Noneself.plat_name = Noneself.keep_temp = 0self.dist_dir = Noneself.skip_build = 0self.egg_output = Noneself.exclude_source_files = Nonedef finalize_options(self):ei_cmd = self.ei_cmd = self.get_finalized_command("egg_info")self.egg_info = ei_cmd.egg_infoif self.bdist_dir is None:bdist_base = self.get_finalized_command('bdist').bdist_baseself.bdist_dir = os.path.join(bdist_base, 'egg')if self.plat_name is None:self.plat_name = get_build_platform()self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))if self.egg_output is None:# Compute filename of the output eggbasename = Distribution(None, None, ei_cmd.egg_name, ei_cmd.egg_version,get_python_version(),self.distribution.has_ext_modules() and self.plat_name).egg_name()self.egg_output = os.path.join(self.dist_dir, basename + '.egg')def do_install_data(self):# Hack for packages that install data to install's --install-libself.get_finalized_command('install').install_lib = self.bdist_dirsite_packages = os.path.normcase(os.path.realpath(_get_purelib()))old, self.distribution.data_files = self.distribution.data_files, []for item in old:if isinstance(item, tuple) and len(item) == 2:if os.path.isabs(item[0]):realpath = os.path.realpath(item[0])normalized = os.path.normcase(realpath)if normalized == site_packages or normalized.startswith(site_packages + os.sep):item = realpath[len(site_packages) + 1:], item[1]# XXX else: raise ???self.distribution.data_files.append(item)try:log.info("installing package data to %s", self.bdist_dir)self.call_command('install_data', force=0, root=None)finally:self.distribution.data_files = olddef get_outputs(self):return [self.egg_output]def call_command(self, cmdname, **kw):"""Invoke reinitialized command `cmdname` with keyword args"""for dirname in 
INSTALL_DIRECTORY_ATTRS:kw.setdefault(dirname, self.bdist_dir)kw.setdefault('skip_build', self.skip_build)kw.setdefault('dry_run', self.dry_run)cmd = self.reinitialize_command(cmdname, **kw)self.run_command(cmdname)return cmddef run(self):# Generate metadata firstself.run_command("egg_info")# We run install_lib before install_data, because some data hacks# pull their data path from the install_lib command.log.info("installing library code to %s", self.bdist_dir)instcmd = self.get_finalized_command('install')old_root = instcmd.rootinstcmd.root = Noneif self.distribution.has_c_libraries() and not self.skip_build:self.run_command('build_clib')cmd = self.call_command('install_lib', warn_dir=0)instcmd.root = old_rootall_outputs, ext_outputs = self.get_ext_outputs()self.stubs = []to_compile = []for (p, ext_name) in enumerate(ext_outputs):filename, ext = os.path.splitext(ext_name)pyfile = os.path.join(self.bdist_dir, strip_module(filename) +'.py')self.stubs.append(pyfile)log.info("creating stub loader for %s", ext_name)if not self.dry_run:write_stub(os.path.basename(ext_name), pyfile)to_compile.append(pyfile)ext_outputs[p] = ext_name.replace(os.sep, '/')if to_compile:cmd.byte_compile(to_compile)if self.distribution.data_files:self.do_install_data()# Make the EGG-INFO directoryarchive_root = self.bdist_diregg_info = os.path.join(archive_root, 'EGG-INFO')self.mkpath(egg_info)if self.distribution.scripts:script_dir = os.path.join(egg_info, 'scripts')log.info("installing scripts to %s", script_dir)self.call_command('install_scripts', install_dir=script_dir,no_ep=1)self.copy_metadata_to(egg_info)native_libs = os.path.join(egg_info, "native_libs.txt")if all_outputs:log.info("writing %s", native_libs)if not self.dry_run:ensure_directory(native_libs)libs_file = open(native_libs, 'wt')libs_file.write('\n'.join(all_outputs))libs_file.write('\n')libs_file.close()elif os.path.isfile(native_libs):log.info("removing %s", native_libs)if not 
self.dry_run:os.unlink(native_libs)write_safety_flag(os.path.join(archive_root, 'EGG-INFO'), self.zip_safe())if os.path.exists(os.path.join(self.egg_info, 'depends.txt')):log.warn("WARNING: 'depends.txt' will not be used by setuptools 0.6!\n""Use the install_requires/extras_require setup() args instead.")if self.exclude_source_files:self.zap_pyfiles()# Make the archivemake_zipfile(self.egg_output, archive_root, verbose=self.verbose,dry_run=self.dry_run, mode=self.gen_header())if not self.keep_temp:remove_tree(self.bdist_dir, dry_run=self.dry_run)# Add to 'Distribution.dist_files' so that the "upload" command worksgetattr(self.distribution, 'dist_files', []).append(('bdist_egg', get_python_version(), self.egg_output))def zap_pyfiles(self):log.info("Removing .py files from temporary directory")for base, dirs, files in walk_egg(self.bdist_dir):for name in files:path = os.path.join(base, name)if name.endswith('.py'):log.debug("Deleting %s", path)os.unlink(path)if base.endswith('__pycache__'):path_old = pathpattern = r'(?P<name>.+)\.(?P<magic>[^.]+)\.pyc'm = re.match(pattern, name)path_new = os.path.join(base, os.pardir, m.group('name') + '.pyc')log.info("Renaming file from [%s] to [%s]" % (path_old, path_new))try:os.remove(path_new)except OSError:passos.rename(path_old, path_new)def zip_safe(self):safe = getattr(self.distribution, 'zip_safe', None)if safe is not None:return safelog.warn("zip_safe flag not set; analyzing archive contents...")return analyze_egg(self.bdist_dir, self.stubs)def gen_header(self):epm = EntryPoint.parse_map(self.distribution.entry_points or '')ep = epm.get('setuptools.installation', {}).get('eggsecutable')if ep is None:return 'w' # not an eggsecutable, do it the usual way.if not ep.attrs or ep.extras:raise DistutilsSetupError("eggsecutable entry point (%r) cannot have 'extras' ""or refer to a module" % (ep,))pyver = sys.version[:3]pkg = ep.module_namefull = '.'.join(ep.attrs)base = ep.attrs[0]basename = os.path.basename(self.egg_output)header 
= ("#!/bin/sh\n"'if [ `basename $0` = "%(basename)s" ]\n''then exec python%(pyver)s -c "'"import sys, os; sys.path.insert(0, os.path.abspath('$0')); ""from %(pkg)s import %(base)s; sys.exit(%(full)s())"'" "$@"\n''else\n'' echo $0 is not the correct name for this egg file.\n'' echo Please rename it back to %(basename)s and try again.\n'' exec false\n''fi\n') % locals()if not self.dry_run:mkpath(os.path.dirname(self.egg_output), dry_run=self.dry_run)f = open(self.egg_output, 'w')f.write(header)f.close()return 'a'def copy_metadata_to(self, target_dir):"Copy metadata (egg info) to the target_dir"# normalize the path (so that a forward-slash in egg_info will# match using startswith below)norm_egg_info = os.path.normpath(self.egg_info)prefix = os.path.join(norm_egg_info, '')for path in self.ei_cmd.filelist.files:if path.startswith(prefix):target = os.path.join(target_dir, path[len(prefix):])ensure_directory(target)self.copy_file(path, target)def get_ext_outputs(self):"""Get a list of relative paths to C extensions in the output distro"""all_outputs = []ext_outputs = []paths = {self.bdist_dir: ''}for base, dirs, files in sorted_walk(self.bdist_dir):for filename in files:if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS:all_outputs.append(paths[base] + filename)for filename in dirs:paths[os.path.join(base, filename)] = (paths[base] +filename + '/')if self.distribution.has_ext_modules():build_cmd = self.get_finalized_command('build_ext')for ext in build_cmd.extensions:if isinstance(ext, Library):continuefullname = build_cmd.get_ext_fullname(ext.name)filename = build_cmd.get_ext_filename(fullname)if not os.path.basename(filename).startswith('dl-'):if os.path.exists(os.path.join(self.bdist_dir, filename)):ext_outputs.append(filename)return all_outputs, ext_outputsNATIVE_EXTENSIONS = dict.fromkeys('.dll .so .dylib .pyd'.split())def walk_egg(egg_dir):"""Walk an unpacked egg's contents, skipping the metadata directory"""walker = sorted_walk(egg_dir)base, dirs, files 
= next(walker)if 'EGG-INFO' in dirs:dirs.remove('EGG-INFO')yield base, dirs, filesfor bdf in walker:yield bdfdef analyze_egg(egg_dir, stubs):# check for existing flag in EGG-INFOfor flag, fn in safety_flags.items():if os.path.exists(os.path.join(egg_dir, 'EGG-INFO', fn)):return flagif not can_scan():return Falsesafe = Truefor base, dirs, files in walk_egg(egg_dir):for name in files:if name.endswith('.py') or name.endswith('.pyw'):continueelif name.endswith('.pyc') or name.endswith('.pyo'):# always scan, even if we already know we're not safesafe = scan_module(egg_dir, base, name, stubs) and safereturn safedef write_safety_flag(egg_dir, safe):# Write or remove zip safety flag file(s)for flag, fn in safety_flags.items():fn = os.path.join(egg_dir, fn)if os.path.exists(fn):if safe is None or bool(safe) != flag:os.unlink(fn)elif safe is not None and bool(safe) == flag:f = open(fn, 'wt')f.write('\n')f.close()safety_flags = {True: 'zip-safe',False: 'not-zip-safe',}def scan_module(egg_dir, base, name, stubs):"""Check whether module possibly uses unsafe-for-zipfile stuff"""filename = os.path.join(base, name)if filename[:-1] in stubs:return True # Extension modulepkg = base[len(egg_dir) + 1:].replace(os.sep, '.')module = pkg + (pkg and '.' 
or '') + os.path.splitext(name)[0]if sys.version_info < (3, 3):skip = 8 # skip magic & dateelse:skip = 12 # skip magic & date & file sizef = open(filename, 'rb')f.read(skip)code = marshal.load(f)f.close()safe = Truesymbols = dict.fromkeys(iter_symbols(code))for bad in ['__file__', '__path__']:if bad in symbols:log.warn("%s: module references %s", module, bad)safe = Falseif 'inspect' in symbols:for bad in ['getsource', 'getabsfile', 'getsourcefile', 'getfile''getsourcelines', 'findsource', 'getcomments', 'getframeinfo','getinnerframes', 'getouterframes', 'stack', 'trace']:if bad in symbols:log.warn("%s: module MAY be using inspect.%s", module, bad)safe = Falsereturn safedef iter_symbols(code):"""Yield names and strings used by `code` and its nested code objects"""for name in code.co_names:yield namefor const in code.co_consts:if isinstance(const, six.string_types):yield constelif isinstance(const, CodeType):for name in iter_symbols(const):yield namedef can_scan():if not sys.platform.startswith('java') and sys.platform != 'cli':# CPython, PyPy, etc.return Truelog.warn("Unable to analyze compiled code on this platform.")log.warn("Please ask the author to include a 'zip_safe'"" setting (either True or False) in the package's setup.py")# Attribute names of options for commands that might need to be convinced to# install to the egg build directoryINSTALL_DIRECTORY_ATTRS = ['install_lib', 'install_dir', 'install_data', 'install_base']def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=True,mode='w'):"""Create a zip file from all the files under 'base_dir'. The outputzip file will be named 'base_dir' + ".zip". Uses either the "zipfile"Python module (if available) or the InfoZIP "zip" utility (if installedand found on the default search path). If neither tool is available,raises DistutilsExecError. 
Returns the name of the output zip file."""import zipfilemkpath(os.path.dirname(zip_filename), dry_run=dry_run)log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir)def visit(z, dirname, names):for name in names:path = os.path.normpath(os.path.join(dirname, name))if os.path.isfile(path):p = path[len(base_dir) + 1:]if not dry_run:z.write(path, p)log.debug("adding '%s'", p)compression = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STOREDif not dry_run:z = zipfile.ZipFile(zip_filename, mode, compression=compression)for dirname, dirs, files in sorted_walk(base_dir):visit(z, dirname, files)z.close()else:for dirname, dirs, files in sorted_walk(base_dir):visit(None, dirname, files)return zip_filename
from distutils.errors import DistutilsOptionError

from setuptools.extern.six.moves import map

from setuptools.command.setopt import edit_config, option_base, config_file


def shquote(arg):
    """Quote an argument for later parsing by shlex.split()"""
    # repr() yields a quoted Python string literal, which shlex.split()
    # happens to accept for these simple cases.
    for c in '"', "'", "\\", "#":
        if c in arg:
            return repr(arg)
    # Any embedded whitespace also forces quoting.
    if arg.split() != [arg]:
        return repr(arg)
    return arg


class alias(option_base):
    """Define a shortcut that invokes one or more commands"""

    description = "define a shortcut to invoke one or more commands"
    # Positional args after the command name are consumed as the alias body.
    command_consumes_arguments = True

    user_options = [
        ('remove', 'r', 'remove (unset) the alias'),
    ] + option_base.user_options

    boolean_options = option_base.boolean_options + ['remove']

    def initialize_options(self):
        option_base.initialize_options(self)
        self.args = None
        self.remove = None

    def finalize_options(self):
        option_base.finalize_options(self)
        # --remove takes exactly one argument: the alias name to unset.
        if self.remove and len(self.args) != 1:
            raise DistutilsOptionError(
                "Must specify exactly one argument (the alias name) when "
                "using --remove")

    def run(self):
        # Aliases are stored in the 'aliases' section of the config file(s).
        aliases = self.distribution.get_option_dict('aliases')

        if not self.args:
            # No arguments: list all defined aliases and exit.
            print("Command Aliases")
            print("---------------")
            for alias in aliases:
                print("setup.py alias", format_alias(alias, aliases))
            return

        elif len(self.args) == 1:
            # One argument: either remove the alias (with --remove) or
            # show its current definition.
            alias, = self.args
            if self.remove:
                command = None  # writing None deletes the entry
            elif alias in aliases:
                print("setup.py alias", format_alias(alias, aliases))
                return
            else:
                print("No alias definition found for %r" % alias)
                return
        else:
            # Two or more arguments: define alias = rest of the args,
            # re-quoted so it round-trips through shlex.split().
            alias = self.args[0]
            command = ' '.join(map(shquote, self.args[1:]))

        edit_config(self.filename, {'aliases': {alias: command}}, self.dry_run)


def format_alias(name, aliases):
    """Render one alias as the 'setup.py alias' invocation that created it,
    prefixed with the option selecting the config file it lives in.
    """
    source, command = aliases[name]
    if source == config_file('global'):
        source = '--global-config '
    elif source == config_file('user'):
        source = '--user-config '
    elif source == config_file('local'):
        source = ''  # local setup.cfg is the default target
    else:
        source = '--filename=%r' % source
    return source + name + ' ' + command
# Public list of all setuptools-provided command modules in this package.
__all__ = [
    'alias', 'bdist_egg', 'bdist_rpm', 'build_ext', 'build_py', 'develop',
    'easy_install', 'egg_info', 'install', 'install_lib', 'rotate', 'saveopts',
    'sdist', 'setopt', 'test', 'install_egg_info', 'install_scripts',
    'register', 'bdist_wininst', 'upload_docs', 'upload', 'build_clib',
    'dist_info',
]

from distutils.command.bdist import bdist
import sys

# NOTE(review): imported but not referenced below -- presumably for its
# import-time side effects; confirm before removing.
from setuptools.command import install_scripts

if 'egg' not in bdist.format_commands:
    # Teach distutils' bdist command about the 'egg' distribution format
    # so `setup.py bdist --formats=egg` dispatches to bdist_egg.
    bdist.format_command['egg'] = ('bdist_egg', "Python .egg file")
    bdist.format_commands.append('egg')

# Don't leak working names into the package namespace.
del bdist, sys
"""A PEP 517 interface to setuptoolsPreviously, when a user or a command line tool (let's call it a "frontend")needed to make a request of setuptools to take a certain action, forexample, generating a list of installation requirements, the frontend wouldwould call "setup.py egg_info" or "setup.py bdist_wheel" on the command line.PEP 517 defines a different method of interfacing with setuptools. Ratherthan calling "setup.py" directly, the frontend should:1. Set the current directory to the directory with a setup.py file2. Import this module into a safe python interpreter (one in whichsetuptools can potentially set global variables or crash hard).3. Call one of the functions defined in PEP 517.What each function does is defined in PEP 517. However, here is a "casual"definition of the functions (this definition should not be relied on forbug reports or API stability):- `build_wheel`: build a wheel in the folder and return the basename- `get_requires_for_build_wheel`: get the `setup_requires` to build- `prepare_metadata_for_build_wheel`: get the `install_requires`- `build_sdist`: build an sdist in the folder and return the basename- `get_requires_for_build_sdist`: get the `setup_requires` to buildAgain, this is not a formal definition! 
Just a "taste" of the module."""import osimport sysimport tokenizeimport shutilimport contextlibimport setuptoolsimport distutilsclass SetupRequirementsError(BaseException):def __init__(self, specifiers):self.specifiers = specifiersclass Distribution(setuptools.dist.Distribution):def fetch_build_eggs(self, specifiers):raise SetupRequirementsError(specifiers)@classmethod@contextlib.contextmanagerdef patch(cls):"""Replacedistutils.dist.Distribution with this classfor the duration of this context."""orig = distutils.core.Distributiondistutils.core.Distribution = clstry:yieldfinally:distutils.core.Distribution = origdef _run_setup(setup_script='setup.py'):# Note that we can reuse our build directory between calls# Correctness comes first, then optimization later__file__ = setup_script__name__ = '__main__'f = getattr(tokenize, 'open', open)(__file__)code = f.read().replace('\\r\\n', '\\n')f.close()exec(compile(code, __file__, 'exec'), locals())def _fix_config(config_settings):config_settings = config_settings or {}config_settings.setdefault('--global-option', [])return config_settingsdef _get_build_requires(config_settings):config_settings = _fix_config(config_settings)requirements = ['setuptools', 'wheel']sys.argv = sys.argv[:1] + ['egg_info'] + \config_settings["--global-option"]try:with Distribution.patch():_run_setup()except SetupRequirementsError as e:requirements += e.specifiersreturn requirementsdef _get_immediate_subdirectories(a_dir):return [name for name in os.listdir(a_dir)if os.path.isdir(os.path.join(a_dir, name))]def get_requires_for_build_wheel(config_settings=None):config_settings = _fix_config(config_settings)return _get_build_requires(config_settings)def get_requires_for_build_sdist(config_settings=None):config_settings = _fix_config(config_settings)return _get_build_requires(config_settings)def prepare_metadata_for_build_wheel(metadata_directory, config_settings=None):sys.argv = sys.argv[:1] + ['dist_info', '--egg-base', 
metadata_directory]_run_setup()dist_info_directory = metadata_directorywhile True:dist_infos = [f for f in os.listdir(dist_info_directory)if f.endswith('.dist-info')]if len(dist_infos) == 0 and \len(_get_immediate_subdirectories(dist_info_directory)) == 1:dist_info_directory = os.path.join(dist_info_directory, os.listdir(dist_info_directory)[0])continueassert len(dist_infos) == 1break# PEP 517 requires that the .dist-info directory be placed in the# metadata_directory. To comply, we MUST copy the directory to the rootif dist_info_directory != metadata_directory:shutil.move(os.path.join(dist_info_directory, dist_infos[0]),metadata_directory)shutil.rmtree(dist_info_directory, ignore_errors=True)return dist_infos[0]def build_wheel(wheel_directory, config_settings=None,metadata_directory=None):config_settings = _fix_config(config_settings)wheel_directory = os.path.abspath(wheel_directory)sys.argv = sys.argv[:1] + ['bdist_wheel'] + \config_settings["--global-option"]_run_setup()if wheel_directory != 'dist':shutil.rmtree(wheel_directory)shutil.copytree('dist', wheel_directory)wheels = [f for f in os.listdir(wheel_directory)if f.endswith('.whl')]assert len(wheels) == 1return wheels[0]def build_sdist(sdist_directory, config_settings=None):config_settings = _fix_config(config_settings)sdist_directory = os.path.abspath(sdist_directory)sys.argv = sys.argv[:1] + ['sdist'] + \config_settings["--global-option"]_run_setup()if sdist_directory != 'dist':shutil.rmtree(sdist_directory)shutil.copytree('dist', sdist_directory)sdists = [f for f in os.listdir(sdist_directory)if f.endswith('.tar.gz')]assert len(sdists) == 1return sdists[0]
"""Utilities for extracting common archive formats"""import zipfileimport tarfileimport osimport shutilimport posixpathimport contextlibfrom distutils.errors import DistutilsErrorfrom pkg_resources import ensure_directory__all__ = ["unpack_archive", "unpack_zipfile", "unpack_tarfile", "default_filter","UnrecognizedFormat", "extraction_drivers", "unpack_directory",]class UnrecognizedFormat(DistutilsError):"""Couldn't recognize the archive type"""def default_filter(src, dst):"""The default progress/filter callback; returns True for all files"""return dstdef unpack_archive(filename, extract_dir, progress_filter=default_filter,drivers=None):"""Unpack `filename` to `extract_dir`, or raise ``UnrecognizedFormat```progress_filter` is a function taking two arguments: a source pathinternal to the archive ('/'-separated), and a filesystem path where itwill be extracted. The callback must return the desired extract path(which may be the same as the one passed in), or else ``None`` to skipthat file or directory. The callback can thus be used to report on theprogress of the extraction, as well as to filter the items extracted oralter their extraction paths.`drivers`, if supplied, must be a non-empty sequence of functions with thesame signature as this function (minus the `drivers` argument), that raise``UnrecognizedFormat`` if they do not support extracting the designatedarchive type. The `drivers` are tried in sequence until one is found thatdoes not raise an error, or until all are exhausted (in which case``UnrecognizedFormat`` is raised). 
If you do not supply a sequence ofdrivers, the module's ``extraction_drivers`` constant will be used, whichmeans that ``unpack_zipfile`` and ``unpack_tarfile`` will be tried, in thatorder."""for driver in drivers or extraction_drivers:try:driver(filename, extract_dir, progress_filter)except UnrecognizedFormat:continueelse:returnelse:raise UnrecognizedFormat("Not a recognized archive type: %s" % filename)def unpack_directory(filename, extract_dir, progress_filter=default_filter):""""Unpack" a directory, using the same interface as for archivesRaises ``UnrecognizedFormat`` if `filename` is not a directory"""if not os.path.isdir(filename):raise UnrecognizedFormat("%s is not a directory" % filename)paths = {filename: ('', extract_dir),}for base, dirs, files in os.walk(filename):src, dst = paths[base]for d in dirs:paths[os.path.join(base, d)] = src + d + '/', os.path.join(dst, d)for f in files:target = os.path.join(dst, f)target = progress_filter(src + f, target)if not target:# skip non-filescontinueensure_directory(target)f = os.path.join(base, f)shutil.copyfile(f, target)shutil.copystat(f, target)def unpack_zipfile(filename, extract_dir, progress_filter=default_filter):"""Unpack zip `filename` to `extract_dir`Raises ``UnrecognizedFormat`` if `filename` is not a zipfile (as determinedby ``zipfile.is_zipfile()``). See ``unpack_archive()`` for an explanationof the `progress_filter` argument."""if not zipfile.is_zipfile(filename):raise UnrecognizedFormat("%s is not a zip file" % (filename,))with zipfile.ZipFile(filename) as z:for info in z.infolist():name = info.filename# don't extract absolute paths or ones with .. in themif name.startswith('/') or '..' 
in name.split('/'):continuetarget = os.path.join(extract_dir, *name.split('/'))target = progress_filter(name, target)if not target:continueif name.endswith('/'):# directoryensure_directory(target)else:# fileensure_directory(target)data = z.read(info.filename)with open(target, 'wb') as f:f.write(data)unix_attributes = info.external_attr >> 16if unix_attributes:os.chmod(target, unix_attributes)def unpack_tarfile(filename, extract_dir, progress_filter=default_filter):"""Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir`Raises ``UnrecognizedFormat`` if `filename` is not a tarfile (as determinedby ``tarfile.open()``). See ``unpack_archive()`` for an explanationof the `progress_filter` argument."""try:tarobj = tarfile.open(filename)except tarfile.TarError:raise UnrecognizedFormat("%s is not a compressed or uncompressed tar file" % (filename,))with contextlib.closing(tarobj):# don't do any chowning!tarobj.chown = lambda *args: Nonefor member in tarobj:name = member.name# don't extract absolute paths or ones with .. in themif not name.startswith('/') and '..' not in name.split('/'):prelim_dst = os.path.join(extract_dir, *name.split('/'))# resolve any links and to extract the link targets as normal# fileswhile member is not None and (member.islnk() or member.issym()):linkpath = member.linknameif member.issym():base = posixpath.dirname(member.name)linkpath = posixpath.join(base, linkpath)linkpath = posixpath.normpath(linkpath)member = tarobj._getmember(linkpath)if member is not None and (member.isfile() or member.isdir()):final_dst = progress_filter(name, prelim_dst)if final_dst:if final_dst.endswith(os.sep):final_dst = final_dst[:-1]try:# XXX Ughtarobj._extract_member(member, final_dst)except tarfile.ExtractError:# chown/chmod/mkfifo/mknode/makedev failedpassreturn Trueextraction_drivers = unpack_directory, unpack_zipfile, unpack_tarfile
"""Extensions to the 'distutils' for large or complex distributions"""import osimport functoolsimport distutils.coreimport distutils.filelistfrom distutils.util import convert_pathfrom fnmatch import fnmatchcasefrom setuptools.extern.six.moves import filter, mapimport setuptools.versionfrom setuptools.extension import Extensionfrom setuptools.dist import Distribution, Featurefrom setuptools.depends import Requirefrom . import monkey__all__ = ['setup', 'Distribution', 'Feature', 'Command', 'Extension', 'Require','find_packages',]__version__ = setuptools.version.__version__bootstrap_install_from = None# If we run 2to3 on .py files, should we also convert docstrings?# Default: yes; assume that we can detect doctests reliablyrun_2to3_on_doctests = True# Standard package names for fixer packageslib2to3_fixer_packages = ['lib2to3.fixes']class PackageFinder(object):"""Generate a list of all Python packages found within a directory"""@classmethoddef find(cls, where='.', exclude=(), include=('*',)):"""Return a list all Python packages found within directory 'where''where' is the root directory which will be searched for packages. Itshould be supplied as a "cross-platform" (i.e. URL-style) path; it willbe converted to the appropriate local path syntax.'exclude' is a sequence of package names to exclude; '*' can be usedas a wildcard in the names, such that 'foo.*' will exclude allsubpackages of 'foo' (but not 'foo' itself).'include' is a sequence of package names to include. If it'sspecified, only the named packages will be included. If it's notspecified, all found packages will be included. 
'include' can containshell style wildcard patterns just like 'exclude'."""return list(cls._find_packages_iter(convert_path(where),cls._build_filter('ez_setup', '*__pycache__', *exclude),cls._build_filter(*include)))@classmethoddef _find_packages_iter(cls, where, exclude, include):"""All the packages found in 'where' that pass the 'include' filter, butnot the 'exclude' filter."""for root, dirs, files in os.walk(where, followlinks=True):# Copy dirs to iterate over it, then empty dirs.all_dirs = dirs[:]dirs[:] = []for dir in all_dirs:full_path = os.path.join(root, dir)rel_path = os.path.relpath(full_path, where)package = rel_path.replace(os.path.sep, '.')# Skip directory trees that are not valid packagesif ('.' in dir or not cls._looks_like_package(full_path)):continue# Should this package be included?if include(package) and not exclude(package):yield package# Keep searching subdirectories, as there may be more packages# down there, even if the parent was excluded.dirs.append(dir)@staticmethoddef _looks_like_package(path):"""Does a directory look like a package?"""return os.path.isfile(os.path.join(path, '__init__.py'))@staticmethoddef _build_filter(*patterns):"""Given a list of patterns, return a callable that will be true only ifthe input matches at least one of the patterns."""return lambda name: any(fnmatchcase(name, pat=pat) for pat in patterns)class PEP420PackageFinder(PackageFinder):@staticmethoddef _looks_like_package(path):return Truefind_packages = PackageFinder.finddef _install_setup_requires(attrs):# Note: do not use `setuptools.Distribution` directly, as# our PEP 517 backend patch `distutils.core.Distribution`.dist = distutils.core.Distribution(dict((k, v) for k, v in attrs.items()if k in ('dependency_links', 'setup_requires')))# Honor setup.cfg's options.dist.parse_config_files(ignore_option_errors=True)if dist.setup_requires:dist.fetch_build_eggs(dist.setup_requires)def setup(**attrs):# Make sure we have any requirements needed to interpret 
'attrs'._install_setup_requires(attrs)return distutils.core.setup(**attrs)setup.__doc__ = distutils.core.setup.__doc___Command = monkey.get_unpatched(distutils.core.Command)class Command(_Command):__doc__ = _Command.__doc__command_consumes_arguments = Falsedef __init__(self, dist, **kw):"""Construct the command for dist, updatingvars(self) with any keyword parameters."""_Command.__init__(self, dist)vars(self).update(kw)def reinitialize_command(self, command, reinit_subcommands=0, **kw):cmd = _Command.reinitialize_command(self, command, reinit_subcommands)vars(cmd).update(kw)return cmddef _find_all_simple(path):"""Find all files under 'path'"""results = (os.path.join(base, file)for base, dirs, files in os.walk(path, followlinks=True)for file in files)return filter(os.path.isfile, results)def findall(dir=os.curdir):"""Find all files under 'dir' and return the list of full filenames.Unless dir is '.', return full filenames with dir prepended."""files = _find_all_simple(dir)if dir == os.curdir:make_rel = functools.partial(os.path.relpath, start=dir)files = map(make_rel, files)return list(files)monkey.patch_all()
import osimport errnoimport sysdef _makedirs_31(path, exist_ok=False):try:os.makedirs(path)except OSError as exc:if not exist_ok or exc.errno != errno.EEXIST:raise# rely on compatibility behavior until mode considerations# and exists_ok considerations are disentangled.# See https://github.com/pypa/setuptools/pull/1083#issuecomment-315168663needs_makedirs = (sys.version_info < (3, 2, 5) or(3, 3) <= sys.version_info < (3, 3, 6) or(3, 4) <= sys.version_info < (3, 4, 1))makedirs = _makedirs_31 if needs_makedirs else os.makedirs
import sys


class VendorImporter:
    """
    A PEP 302 meta path importer for finding optionally-vendored
    or otherwise naturally-installed packages from root_name.
    """

    def __init__(self, root_name, vendored_names=(), vendor_pkg=None):
        self.root_name = root_name
        self.vendored_names = set(vendored_names)
        self.vendor_pkg = vendor_pkg or root_name.replace('extern', '_vendor')

    @property
    def search_path(self):
        """Search first the vendor package then as a natural package."""
        yield self.vendor_pkg + '.'
        yield ''

    def find_module(self, fullname, path=None):
        """
        Return self when fullname starts with root_name and the
        target module is one vendored through this importer.
        """
        leading, _, target = fullname.partition(self.root_name + '.')
        if leading:
            # fullname does not live under our root package.
            return None
        if any(target.startswith(vendored)
               for vendored in self.vendored_names):
            return self
        return None

    def load_module(self, fullname):
        """
        Iterate over the search path to locate and load fullname.
        """
        _, _, target = fullname.partition(self.root_name + '.')
        for prefix in self.search_path:
            candidate = prefix + target
            try:
                __import__(candidate)
                mod = sys.modules[candidate]
            except ImportError:
                continue
            sys.modules[fullname] = mod
            # mysterious hack:
            # Remove the reference to the extant package/module
            # on later Python versions to cause relative imports
            # in the vendor package to resolve the same modules
            # as those going through this importer.
            if sys.version_info > (3, 3):
                del sys.modules[candidate]
            return mod
        raise ImportError(
            "The '{target}' package is required; "
            "normally this is bundled with this package so if you get "
            "this warning, consult the packager of your "
            "distribution.".format(**locals())
        )

    def install(self):
        """
        Install this importer into sys.meta_path if not already present.
        """
        if self not in sys.meta_path:
            sys.meta_path.append(self)


names = 'packaging', 'pyparsing', 'six', 'appdirs'
VendorImporter(__name__, names).install()
"""Utilities for writing code that runs on Python 2 and 3"""# Copyright (c) 2010-2015 Benjamin Peterson## Permission is hereby granted, free of charge, to any person obtaining a copy# of this software and associated documentation files (the "Software"), to deal# in the Software without restriction, including without limitation the rights# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell# copies of the Software, and to permit persons to whom the Software is# furnished to do so, subject to the following conditions:## The above copyright notice and this permission notice shall be included in all# copies or substantial portions of the Software.## THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE# SOFTWARE.from __future__ import absolute_importimport functoolsimport itertoolsimport operatorimport sysimport types__author__ = "Benjamin Peterson <benjamin@python.org>"__version__ = "1.10.0"# Useful for very coarse version differentiation.PY2 = sys.version_info[0] == 2PY3 = sys.version_info[0] == 3PY34 = sys.version_info[0:2] >= (3, 4)if PY3:string_types = str,integer_types = int,class_types = type,text_type = strbinary_type = bytesMAXSIZE = sys.maxsizeelse:string_types = basestring,integer_types = (int, long)class_types = (type, types.ClassType)text_type = unicodebinary_type = strif sys.platform.startswith("java"):# Jython always uses 32 bits.MAXSIZE = int((1 << 31) - 1)else:# It's possible to have sizeof(long) != sizeof(Py_ssize_t).class X(object):def __len__(self):return 1 << 31try:len(X())except OverflowError:# 32-bitMAXSIZE = int((1 << 31) - 
1)else:# 64-bitMAXSIZE = int((1 << 63) - 1)del Xdef _add_doc(func, doc):"""Add documentation to a function."""func.__doc__ = docdef _import_module(name):"""Import module, returning the module after the last dot."""__import__(name)return sys.modules[name]class _LazyDescr(object):def __init__(self, name):self.name = namedef __get__(self, obj, tp):result = self._resolve()setattr(obj, self.name, result) # Invokes __set__.try:# This is a bit ugly, but it avoids running this again by# removing this descriptor.delattr(obj.__class__, self.name)except AttributeError:passreturn resultclass MovedModule(_LazyDescr):def __init__(self, name, old, new=None):super(MovedModule, self).__init__(name)if PY3:if new is None:new = nameself.mod = newelse:self.mod = olddef _resolve(self):return _import_module(self.mod)def __getattr__(self, attr):_module = self._resolve()value = getattr(_module, attr)setattr(self, attr, value)return valueclass _LazyModule(types.ModuleType):def __init__(self, name):super(_LazyModule, self).__init__(name)self.__doc__ = self.__class__.__doc__def __dir__(self):attrs = ["__doc__", "__name__"]attrs += [attr.name for attr in self._moved_attributes]return attrs# Subclasses should override this_moved_attributes = []class MovedAttribute(_LazyDescr):def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):super(MovedAttribute, self).__init__(name)if PY3:if new_mod is None:new_mod = nameself.mod = new_modif new_attr is None:if old_attr is None:new_attr = nameelse:new_attr = old_attrself.attr = new_attrelse:self.mod = old_modif old_attr is None:old_attr = nameself.attr = old_attrdef _resolve(self):module = _import_module(self.mod)return getattr(module, self.attr)class _SixMetaPathImporter(object):"""A meta path importer to import six.moves and its submodules.This class implements a PEP302 finder and loader. 
It should be compatiblewith Python 2.5 and all existing versions of Python3"""def __init__(self, six_module_name):self.name = six_module_nameself.known_modules = {}def _add_module(self, mod, *fullnames):for fullname in fullnames:self.known_modules[self.name + "." + fullname] = moddef _get_module(self, fullname):return self.known_modules[self.name + "." + fullname]def find_module(self, fullname, path=None):if fullname in self.known_modules:return selfreturn Nonedef __get_module(self, fullname):try:return self.known_modules[fullname]except KeyError:raise ImportError("This loader does not know module " + fullname)def load_module(self, fullname):try:# in case of a reloadreturn sys.modules[fullname]except KeyError:passmod = self.__get_module(fullname)if isinstance(mod, MovedModule):mod = mod._resolve()else:mod.__loader__ = selfsys.modules[fullname] = modreturn moddef is_package(self, fullname):"""Return true, if the named module is a package.We need this method to get correct spec objects withPython 3.4 (see PEP451)"""return hasattr(self.__get_module(fullname), "__path__")def get_code(self, fullname):"""Return NoneRequired, if is_package is implemented"""self.__get_module(fullname) # eventually raises ImportErrorreturn Noneget_source = get_code # same as get_code_importer = _SixMetaPathImporter(__name__)class _MovedItems(_LazyModule):"""Lazy loading of moved objects"""__path__ = [] # mark as package_moved_attributes = [MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),MovedAttribute("intern", "__builtin__", "sys"),MovedAttribute("map", "itertools", "builtins", "imap", "map"),MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),MovedAttribute("range", "__builtin__", 
"builtins", "xrange", "range"),MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),MovedAttribute("reduce", "__builtin__", "functools"),MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),MovedAttribute("StringIO", "StringIO", "io"),MovedAttribute("UserDict", "UserDict", "collections"),MovedAttribute("UserList", "UserList", "collections"),MovedAttribute("UserString", "UserString", "collections"),MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),MovedModule("builtins", "__builtin__"),MovedModule("configparser", "ConfigParser"),MovedModule("copyreg", "copy_reg"),MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),MovedModule("http_cookies", "Cookie", "http.cookies"),MovedModule("html_entities", "htmlentitydefs", "html.entities"),MovedModule("html_parser", "HTMLParser", "html.parser"),MovedModule("http_client", "httplib", "http.client"),MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),MovedModule("cPickle", "cPickle", "pickle"),MovedModule("queue", "Queue"),MovedModule("reprlib", "repr"),MovedModule("socketserver", "SocketServer"),MovedModule("_thread", "thread", "_thread"),MovedModule("tkinter", "Tkinter"),MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),MovedModule("tkinter_filedialog", 
"FileDialog", "tkinter.filedialog"),MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),MovedModule("tkinter_tix", "Tix", "tkinter.tix"),MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),MovedModule("tkinter_colorchooser", "tkColorChooser","tkinter.colorchooser"),MovedModule("tkinter_commondialog", "tkCommonDialog","tkinter.commondialog"),MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),MovedModule("tkinter_font", "tkFont", "tkinter.font"),MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),MovedModule("tkinter_tksimpledialog", "tkSimpleDialog","tkinter.simpledialog"),MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),]# Add windows specific modules.if sys.platform == "win32":_moved_attributes += [MovedModule("winreg", "_winreg"),]for attr in _moved_attributes:setattr(_MovedItems, attr.name, attr)if isinstance(attr, MovedModule):_importer._add_module(attr, "moves." 
del attr

_MovedItems._moved_attributes = _moved_attributes

moves = _MovedItems(__name__ + ".moves")
_importer._add_module(moves, "moves")


class Module_six_moves_urllib_parse(_LazyModule):

    """Lazy loading of moved objects in six.moves.urllib_parse"""


_urllib_parse_moved_attributes = [
    MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
    MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
    MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
    MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
    MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
    MovedAttribute("urljoin", "urlparse", "urllib.parse"),
    MovedAttribute("urlparse", "urlparse", "urllib.parse"),
    MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
    MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
    MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
    MovedAttribute("quote", "urllib", "urllib.parse"),
    MovedAttribute("quote_plus", "urllib", "urllib.parse"),
    MovedAttribute("unquote", "urllib", "urllib.parse"),
    MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
    MovedAttribute("urlencode", "urllib", "urllib.parse"),
    MovedAttribute("splitquery", "urllib", "urllib.parse"),
    MovedAttribute("splittag", "urllib", "urllib.parse"),
    MovedAttribute("splituser", "urllib", "urllib.parse"),
    MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
    MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
    MovedAttribute("uses_params", "urlparse", "urllib.parse"),
    MovedAttribute("uses_query", "urlparse", "urllib.parse"),
    MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
]
for attr in _urllib_parse_moved_attributes:
    setattr(Module_six_moves_urllib_parse, attr.name, attr)
del attr

Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes

_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
                      "moves.urllib_parse", "moves.urllib.parse")


class Module_six_moves_urllib_error(_LazyModule):

    """Lazy loading of moved objects in six.moves.urllib_error"""


_urllib_error_moved_attributes = [
    MovedAttribute("URLError", "urllib2", "urllib.error"),
    MovedAttribute("HTTPError", "urllib2", "urllib.error"),
    MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
]
for attr in _urllib_error_moved_attributes:
    setattr(Module_six_moves_urllib_error, attr.name, attr)
del attr

Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes

_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
                      "moves.urllib_error", "moves.urllib.error")


class Module_six_moves_urllib_request(_LazyModule):

    """Lazy loading of moved objects in six.moves.urllib_request"""


_urllib_request_moved_attributes = [
    MovedAttribute("urlopen", "urllib2", "urllib.request"),
    MovedAttribute("install_opener", "urllib2", "urllib.request"),
    MovedAttribute("build_opener", "urllib2", "urllib.request"),
    MovedAttribute("pathname2url", "urllib", "urllib.request"),
    MovedAttribute("url2pathname", "urllib", "urllib.request"),
    MovedAttribute("getproxies", "urllib", "urllib.request"),
    MovedAttribute("Request", "urllib2", "urllib.request"),
    MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
    MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
    MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
    MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
    MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
    MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
    MovedAttribute("FileHandler", "urllib2", "urllib.request"),
    MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
    MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
    MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
    MovedAttribute("urlretrieve", "urllib", "urllib.request"),
    MovedAttribute("urlcleanup", "urllib", "urllib.request"),
    MovedAttribute("URLopener", "urllib", "urllib.request"),
    MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
    MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
]
for attr in _urllib_request_moved_attributes:
    setattr(Module_six_moves_urllib_request, attr.name, attr)
del attr

Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes

_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
                      "moves.urllib_request", "moves.urllib.request")


class Module_six_moves_urllib_response(_LazyModule):

    """Lazy loading of moved objects in six.moves.urllib_response"""


_urllib_response_moved_attributes = [
    MovedAttribute("addbase", "urllib", "urllib.response"),
    MovedAttribute("addclosehook", "urllib", "urllib.response"),
    MovedAttribute("addinfo", "urllib", "urllib.response"),
    MovedAttribute("addinfourl", "urllib", "urllib.response"),
]
for attr in _urllib_response_moved_attributes:
    setattr(Module_six_moves_urllib_response, attr.name, attr)
del attr

Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes

_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
                      "moves.urllib_response", "moves.urllib.response")


class Module_six_moves_urllib_robotparser(_LazyModule):

    """Lazy loading of moved objects in six.moves.urllib_robotparser"""


_urllib_robotparser_moved_attributes = [
    MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
]
for attr in _urllib_robotparser_moved_attributes:
    setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
del attr

Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes

_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
                      "moves.urllib_robotparser", "moves.urllib.robotparser")


class Module_six_moves_urllib(types.ModuleType):

    """Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
    __path__ = []  # mark as package
    parse = _importer._get_module("moves.urllib_parse")
    error = _importer._get_module("moves.urllib_error")
    request = _importer._get_module("moves.urllib_request")
    response = _importer._get_module("moves.urllib_response")
    robotparser = _importer._get_module("moves.urllib_robotparser")

    def __dir__(self):
        return ['parse', 'error', 'request', 'response', 'robotparser']

_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
                      "moves.urllib")


def add_move(move):
    """Add an item to six.moves."""
    setattr(_MovedItems, move.name, move)


def remove_move(name):
    """Remove item from six.moves."""
    try:
        delattr(_MovedItems, name)
    except AttributeError:
        try:
            del moves.__dict__[name]
        except KeyError:
            raise AttributeError("no such move, %r" % (name,))


# Names of the function/method introspection attributes differ between
# Python 2 and Python 3; resolve them once here.
if PY3:
    _meth_func = "__func__"
    _meth_self = "__self__"

    _func_closure = "__closure__"
    _func_code = "__code__"
    _func_defaults = "__defaults__"
    _func_globals = "__globals__"
else:
    _meth_func = "im_func"
    _meth_self = "im_self"

    _func_closure = "func_closure"
    _func_code = "func_code"
    _func_defaults = "func_defaults"
    _func_globals = "func_globals"


try:
    advance_iterator = next
except NameError:
    def advance_iterator(it):
        return it.next()
next = advance_iterator


try:
    callable = callable
except NameError:
    def callable(obj):
        return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)


if PY3:
    def get_unbound_function(unbound):
        return unbound

    create_bound_method = types.MethodType

    def create_unbound_method(func, cls):
        return func

    Iterator = object
else:
    def get_unbound_function(unbound):
        return unbound.im_func

    def create_bound_method(func, obj):
        return types.MethodType(func, obj, obj.__class__)

    def create_unbound_method(func, cls):
        return types.MethodType(func, None, cls)

    class Iterator(object):

        def next(self):
            return type(self).__next__(self)

    callable = callable
_add_doc(get_unbound_function,
         """Get the function out of a possibly unbound function""")


get_method_function = operator.attrgetter(_meth_func)
get_method_self = operator.attrgetter(_meth_self)
get_function_closure = operator.attrgetter(_func_closure)
get_function_code = operator.attrgetter(_func_code)
get_function_defaults = operator.attrgetter(_func_defaults)
get_function_globals = operator.attrgetter(_func_globals)


if PY3:
    def iterkeys(d, **kw):
        return iter(d.keys(**kw))

    def itervalues(d, **kw):
        return iter(d.values(**kw))

    def iteritems(d, **kw):
        return iter(d.items(**kw))

    def iterlists(d, **kw):
        return iter(d.lists(**kw))

    viewkeys = operator.methodcaller("keys")

    viewvalues = operator.methodcaller("values")

    viewitems = operator.methodcaller("items")
else:
    def iterkeys(d, **kw):
        return d.iterkeys(**kw)

    def itervalues(d, **kw):
        return d.itervalues(**kw)

    def iteritems(d, **kw):
        return d.iteritems(**kw)

    def iterlists(d, **kw):
        return d.iterlists(**kw)

    viewkeys = operator.methodcaller("viewkeys")

    viewvalues = operator.methodcaller("viewvalues")

    viewitems = operator.methodcaller("viewitems")

_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
_add_doc(iteritems,
         "Return an iterator over the (key, value) pairs of a dictionary.")
_add_doc(iterlists,
         "Return an iterator over the (key, [values]) pairs of a dictionary.")


if PY3:
    def b(s):
        return s.encode("latin-1")

    def u(s):
        return s
    unichr = chr
    import struct
    int2byte = struct.Struct(">B").pack
    del struct
    byte2int = operator.itemgetter(0)
    indexbytes = operator.getitem
    iterbytes = iter
    import io
    StringIO = io.StringIO
    BytesIO = io.BytesIO
    _assertCountEqual = "assertCountEqual"
    if sys.version_info[1] <= 1:
        _assertRaisesRegex = "assertRaisesRegexp"
        _assertRegex = "assertRegexpMatches"
    else:
        _assertRaisesRegex = "assertRaisesRegex"
        _assertRegex = "assertRegex"
else:
    def b(s):
        return s
    # Workaround for standalone backslash

    def u(s):
        return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
    unichr = unichr
    int2byte = chr

    def byte2int(bs):
        return ord(bs[0])

    def indexbytes(buf, i):
        return ord(buf[i])
    iterbytes = functools.partial(itertools.imap, ord)
    import StringIO
    StringIO = BytesIO = StringIO.StringIO
    _assertCountEqual = "assertItemsEqual"
    _assertRaisesRegex = "assertRaisesRegexp"
    _assertRegex = "assertRegexpMatches"
_add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""")


def assertCountEqual(self, *args, **kwargs):
    return getattr(self, _assertCountEqual)(*args, **kwargs)


def assertRaisesRegex(self, *args, **kwargs):
    return getattr(self, _assertRaisesRegex)(*args, **kwargs)


def assertRegex(self, *args, **kwargs):
    return getattr(self, _assertRegex)(*args, **kwargs)


if PY3:
    exec_ = getattr(moves.builtins, "exec")

    def reraise(tp, value, tb=None):
        if value is None:
            value = tp()
        if value.__traceback__ is not tb:
            raise value.with_traceback(tb)
        raise value
else:
    def exec_(_code_, _globs_=None, _locs_=None):
        """Execute code in a namespace."""
        if _globs_ is None:
            frame = sys._getframe(1)
            _globs_ = frame.f_globals
            if _locs_ is None:
                _locs_ = frame.f_locals
            del frame
        elif _locs_ is None:
            _locs_ = _globs_
        exec("""exec _code_ in _globs_, _locs_""")

    exec_("""def reraise(tp, value, tb=None):
    raise tp, value, tb
""")


if sys.version_info[:2] == (3, 2):
    exec_("""def raise_from(value, from_value):
    if from_value is None:
        raise value
    raise value from from_value
""")
elif sys.version_info[:2] > (3, 2):
    exec_("""def raise_from(value, from_value):
    raise value from from_value
""")
else:
    def raise_from(value, from_value):
        raise value


print_ = getattr(moves.builtins, "print", None)
if print_ is None:
    def print_(*args, **kwargs):
        """The new-style print function for Python 2.4 and 2.5."""
        fp = kwargs.pop("file", sys.stdout)
        if fp is None:
            return

        def write(data):
            if not isinstance(data, basestring):
                data = str(data)
            # If the file has an encoding, encode unicode with it.
            if (isinstance(fp, file) and
                    isinstance(data, unicode) and
                    fp.encoding is not None):
                errors = getattr(fp, "errors", None)
                if errors is None:
                    errors = "strict"
                data = data.encode(fp.encoding, errors)
            fp.write(data)
        want_unicode = False
        sep = kwargs.pop("sep", None)
        if sep is not None:
            if isinstance(sep, unicode):
                want_unicode = True
            elif not isinstance(sep, str):
                raise TypeError("sep must be None or a string")
        end = kwargs.pop("end", None)
        if end is not None:
            if isinstance(end, unicode):
                want_unicode = True
            elif not isinstance(end, str):
                raise TypeError("end must be None or a string")
        if kwargs:
            raise TypeError("invalid keyword arguments to print()")
        if not want_unicode:
            for arg in args:
                if isinstance(arg, unicode):
                    want_unicode = True
                    break
        if want_unicode:
            newline = unicode("\n")
            space = unicode(" ")
        else:
            newline = "\n"
            space = " "
        if sep is None:
            sep = space
        if end is None:
            end = newline
        for i, arg in enumerate(args):
            if i:
                write(sep)
            write(arg)
        write(end)
if sys.version_info[:2] < (3, 3):
    # Python < 3.3's print() lacks the flush keyword; wrap it.
    _print = print_

    def print_(*args, **kwargs):
        fp = kwargs.get("file", sys.stdout)
        flush = kwargs.pop("flush", False)
        _print(*args, **kwargs)
        if flush and fp is not None:
            fp.flush()

_add_doc(reraise, """Reraise an exception.""")

if sys.version_info[0:2] < (3, 4):
    def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
              updated=functools.WRAPPER_UPDATES):
        def wrapper(f):
            f = functools.wraps(wrapped, assigned, updated)(f)
            f.__wrapped__ = wrapped
            return f
        return wrapper
else:
    wraps = functools.wraps


def with_metaclass(meta, *bases):
    """Create a base class with a metaclass."""
    # This requires a bit of explanation: the basic idea is to make a dummy
    # metaclass for one level of class instantiation that replaces itself with
    # the actual metaclass.
    class metaclass(meta):

        def __new__(cls, name, this_bases, d):
            return meta(name, bases, d)
    return type.__new__(metaclass, 'temporary_class', (), {})


def add_metaclass(metaclass):
    """Class decorator for creating a class with a metaclass."""
    def wrapper(cls):
        orig_vars = cls.__dict__.copy()
        slots = orig_vars.get('__slots__')
        if slots is not None:
            if isinstance(slots, str):
                slots = [slots]
            for slots_var in slots:
                orig_vars.pop(slots_var)
        orig_vars.pop('__dict__', None)
        orig_vars.pop('__weakref__', None)
        return metaclass(cls.__name__, cls.__bases__, orig_vars)
    return wrapper


def python_2_unicode_compatible(klass):
    """
    A decorator that defines __unicode__ and __str__ methods under Python 2.
    Under Python 3 it does nothing.

    To support Python 2 and 3 with a single code base, define a __str__ method
    returning text and apply this decorator to the class.
    """
    if PY2:
        if '__str__' not in klass.__dict__:
            raise ValueError("@python_2_unicode_compatible cannot be applied "
                             "to %s because it doesn't define __str__()." %
                             klass.__name__)
        klass.__unicode__ = klass.__str__
        klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
    return klass


# Complete the moves implementation.
# This code is at the end of this module to speed up module loading.
# Turn this module into a package.
__path__ = []  # required for PEP 302 and PEP 451
__package__ = __name__  # see PEP 366 @ReservedAssignment
if globals().get("__spec__") is not None:
    __spec__.submodule_search_locations = []  # PEP 451 @UndefinedVariable
# Remove other six meta path importers, since they cause problems. This can
# happen if six is removed from sys.modules and then reloaded. (Setuptools does
# this for some reason.)
if sys.meta_path:
    for i, importer in enumerate(sys.meta_path):
        # Here's some real nastiness: Another "instance" of the six module might
        # be floating around. Therefore, we can't use isinstance() to check for
        # the six meta path importer, since the other six instance will have
        # inserted an importer with different class.
        if (type(importer).__name__ == "_SixMetaPathImporter" and
                importer.name == __name__):
            del sys.meta_path[i]
            break
    del i, importer
# Finally, add the importer to the meta path import hook.
sys.meta_path.append(_importer)
# module pyparsing.py## Copyright (c) 2003-2016 Paul T. McGuire## Permission is hereby granted, free of charge, to any person obtaining# a copy of this software and associated documentation files (the# "Software"), to deal in the Software without restriction, including# without limitation the rights to use, copy, modify, merge, publish,# distribute, sublicense, and/or sell copies of the Software, and to# permit persons to whom the Software is furnished to do so, subject to# the following conditions:## The above copyright notice and this permission notice shall be# included in all copies or substantial portions of the Software.## THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.#__doc__ = \"""pyparsing module - Classes and methods to define and execute parsing grammarsThe pyparsing module is an alternative approach to creating and executing simple grammars,vs. the traditional lex/yacc approach, or the use of regular expressions. With pyparsing, youdon't need to learn a new syntax for defining grammars or matching expressions - the parsing moduleprovides a library of classes that you use to construct the grammar directly in Python.Here is a program to parse "Hello, World!" 
(or any greeting of the formC{"<salutation>, <addressee>!"}), built up using L{Word}, L{Literal}, and L{And} elements(L{'+'<ParserElement.__add__>} operator gives L{And} expressions, strings are auto-converted toL{Literal} expressions)::from pyparsing import Word, alphas# define grammar of a greetinggreet = Word(alphas) + "," + Word(alphas) + "!"hello = "Hello, World!"print (hello, "->", greet.parseString(hello))The program outputs the following::Hello, World! -> ['Hello', ',', 'World', '!']The Python representation of the grammar is quite readable, owing to the self-explanatoryclass names, and the use of '+', '|' and '^' operators.The L{ParseResults} object returned from L{ParserElement.parseString<ParserElement.parseString>} can be accessed as a nested list, a dictionary, or anobject with named attributes.The pyparsing module handles some of the problems that are typically vexing when writing text parsers:- extra or missing whitespace (the above program will also handle "Hello,World!", "Hello , World !", etc.)- quoted strings- embedded comments"""__version__ = "2.1.10"__versionTime__ = "07 Oct 2016 01:31 UTC"__author__ = "Paul McGuire <ptmcg@users.sourceforge.net>"import stringfrom weakref import ref as wkrefimport copyimport sysimport warningsimport reimport sre_constantsimport collectionsimport pprintimport tracebackimport typesfrom datetime import datetimetry:from _thread import RLockexcept ImportError:from threading import RLocktry:from collections import OrderedDict as _OrderedDictexcept ImportError:try:from ordereddict import OrderedDict as _OrderedDictexcept ImportError:_OrderedDict = None#~ sys.stderr.write( "testing pyparsing module, version %s, %s\n" % (__version__,__versionTime__ ) )__all__ = ['And', 'CaselessKeyword', 'CaselessLiteral', 'CharsNotIn', 'Combine', 'Dict', 'Each', 'Empty','FollowedBy', 'Forward', 'GoToColumn', 'Group', 'Keyword', 'LineEnd', 'LineStart', 'Literal','MatchFirst', 'NoMatch', 'NotAny', 'OneOrMore', 'OnlyOnce', 'Optional', 
'Or','ParseBaseException', 'ParseElementEnhance', 'ParseException', 'ParseExpression', 'ParseFatalException','ParseResults', 'ParseSyntaxException', 'ParserElement', 'QuotedString', 'RecursiveGrammarException','Regex', 'SkipTo', 'StringEnd', 'StringStart', 'Suppress', 'Token', 'TokenConverter','White', 'Word', 'WordEnd', 'WordStart', 'ZeroOrMore','alphanums', 'alphas', 'alphas8bit', 'anyCloseTag', 'anyOpenTag', 'cStyleComment', 'col','commaSeparatedList', 'commonHTMLEntity', 'countedArray', 'cppStyleComment', 'dblQuotedString','dblSlashComment', 'delimitedList', 'dictOf', 'downcaseTokens', 'empty', 'hexnums','htmlComment', 'javaStyleComment', 'line', 'lineEnd', 'lineStart', 'lineno','makeHTMLTags', 'makeXMLTags', 'matchOnlyAtCol', 'matchPreviousExpr', 'matchPreviousLiteral','nestedExpr', 'nullDebugAction', 'nums', 'oneOf', 'opAssoc', 'operatorPrecedence', 'printables','punc8bit', 'pythonStyleComment', 'quotedString', 'removeQuotes', 'replaceHTMLEntity','replaceWith', 'restOfLine', 'sglQuotedString', 'srange', 'stringEnd','stringStart', 'traceParseAction', 'unicodeString', 'upcaseTokens', 'withAttribute','indentedBlock', 'originalTextFor', 'ungroup', 'infixNotation','locatedExpr', 'withClass','CloseMatch', 'tokenMap', 'pyparsing_common',]system_version = tuple(sys.version_info)[:3]PY_3 = system_version[0] == 3if PY_3:_MAX_INT = sys.maxsizebasestring = strunichr = chr_ustr = str# build list of single arg builtins, that can be used as parse actionssingleArgBuiltins = [sum, len, sorted, reversed, list, tuple, set, any, all, min, max]else:_MAX_INT = sys.maxintrange = xrangedef _ustr(obj):"""Drop-in replacement for str(obj) that tries to be Unicode friendly. It first triesstr(obj). If that fails with a UnicodeEncodeError, then it tries unicode(obj). Itthen < returns the unicode object | encodes it with the default encoding | ... 
>."""if isinstance(obj,unicode):return objtry:# If this works, then _ustr(obj) has the same behaviour as str(obj), so# it won't break any existing code.return str(obj)except UnicodeEncodeError:# Else encode itret = unicode(obj).encode(sys.getdefaultencoding(), 'xmlcharrefreplace')xmlcharref = Regex('&#\d+;')xmlcharref.setParseAction(lambda t: '\\u' + hex(int(t[0][2:-1]))[2:])return xmlcharref.transformString(ret)# build list of single arg builtins, tolerant of Python version, that can be used as parse actionssingleArgBuiltins = []import __builtin__for fname in "sum len sorted reversed list tuple set any all min max".split():try:singleArgBuiltins.append(getattr(__builtin__,fname))except AttributeError:continue_generatorType = type((y for y in range(1)))def _xml_escape(data):"""Escape &, <, >, ", ', etc. in a string of data."""# ampersand must be replaced firstfrom_symbols = '&><"\''to_symbols = ('&'+s+';' for s in "amp gt lt quot apos".split())for from_,to_ in zip(from_symbols, to_symbols):data = data.replace(from_, to_)return dataclass _Constants(object):passalphas = string.ascii_uppercase + string.ascii_lowercasenums = "0123456789"hexnums = nums + "ABCDEFabcdef"alphanums = alphas + nums_bslash = chr(92)printables = "".join(c for c in string.printable if c not in string.whitespace)class ParseBaseException(Exception):"""base exception class for all parsing runtime exceptions"""# Performance tuning: we construct a *lot* of these, so keep this# constructor as small and fast as possibledef __init__( self, pstr, loc=0, msg=None, elem=None ):self.loc = locif msg is None:self.msg = pstrself.pstr = ""else:self.msg = msgself.pstr = pstrself.parserElement = elemself.args = (pstr, loc, msg)@classmethoddef _from_exception(cls, pe):"""internal factory method to simplify creating one type of ParseExceptionfrom another - avoids having __init__ signature conflicts among subclasses"""return cls(pe.pstr, pe.loc, pe.msg, pe.parserElement)def __getattr__( self, aname ):"""supported 
attributes by name are:- lineno - returns the line number of the exception text- col - returns the column number of the exception text- line - returns the line containing the exception text"""if( aname == "lineno" ):return lineno( self.loc, self.pstr )elif( aname in ("col", "column") ):return col( self.loc, self.pstr )elif( aname == "line" ):return line( self.loc, self.pstr )else:raise AttributeError(aname)def __str__( self ):return "%s (at char %d), (line:%d, col:%d)" % \( self.msg, self.loc, self.lineno, self.column )def __repr__( self ):return _ustr(self)def markInputline( self, markerString = ">!<" ):"""Extracts the exception line from the input string, and marksthe location of the exception with a special symbol."""line_str = self.lineline_column = self.column - 1if markerString:line_str = "".join((line_str[:line_column],markerString, line_str[line_column:]))return line_str.strip()def __dir__(self):return "lineno col line".split() + dir(type(self))class ParseException(ParseBaseException):"""Exception thrown when parse expressions don't match class;supported attributes by name are:- lineno - returns the line number of the exception text- col - returns the column number of the exception text- line - returns the line containing the exception textExample::try:Word(nums).setName("integer").parseString("ABC")except ParseException as pe:print(pe)print("column: {}".format(pe.col))prints::Expected integer (at char 0), (line:1, col:1)column: 1"""passclass ParseFatalException(ParseBaseException):"""user-throwable exception thrown when inconsistent parse contentis found; stops all parsing immediately"""passclass ParseSyntaxException(ParseFatalException):"""just like L{ParseFatalException}, but thrown internally when anL{ErrorStop<And._ErrorStop>} ('-' operator) indicates that parsing is to stopimmediately because an unbacktrackable syntax error has been found"""pass#~ class ReparseException(ParseBaseException):#~ """Experimental class - parse actions can raise this 
exception to cause#~ pyparsing to reparse the input string:#~ - with a modified input string, and/or#~ - with a modified start location#~ Set the values of the ReparseException in the constructor, and raise the#~ exception in a parse action to cause pyparsing to use the new string/location.#~ Setting the values as None causes no change to be made.#~ """#~ def __init_( self, newstring, restartLoc ):#~ self.newParseText = newstring#~ self.reparseLoc = restartLocclass RecursiveGrammarException(Exception):"""exception thrown by L{ParserElement.validate} if the grammar could be improperly recursive"""def __init__( self, parseElementList ):self.parseElementTrace = parseElementListdef __str__( self ):return "RecursiveGrammarException: %s" % self.parseElementTraceclass _ParseResultsWithOffset(object):def __init__(self,p1,p2):self.tup = (p1,p2)def __getitem__(self,i):return self.tup[i]def __repr__(self):return repr(self.tup[0])def setOffset(self,i):self.tup = (self.tup[0],i)class ParseResults(object):"""Structured parse results, to provide multiple means of access to the parsed data:- as a list (C{len(results)})- by list index (C{results[0], results[1]}, etc.)- by attribute (C{results.<resultsName>} - see L{ParserElement.setResultsName})Example::integer = Word(nums)date_str = (integer.setResultsName("year") + '/'+ integer.setResultsName("month") + '/'+ integer.setResultsName("day"))# equivalent form:# date_str = integer("year") + '/' + integer("month") + '/' + integer("day")# parseString returns a ParseResults objectresult = date_str.parseString("1999/12/31")def test(s, fn=repr):print("%s -> %s" % (s, fn(eval(s))))test("list(result)")test("result[0]")test("result['month']")test("result.day")test("'month' in result")test("'minutes' in result")test("result.dump()", str)prints::list(result) -> ['1999', '/', '12', '/', '31']result[0] -> '1999'result['month'] -> '12'result.day -> '31''month' in result -> True'minutes' in result -> Falseresult.dump() -> ['1999', '/', '12', '/', 
'31']- day: 31- month: 12- year: 1999"""def __new__(cls, toklist=None, name=None, asList=True, modal=True ):if isinstance(toklist, cls):return toklistretobj = object.__new__(cls)retobj.__doinit = Truereturn retobj# Performance tuning: we construct a *lot* of these, so keep this# constructor as small and fast as possibledef __init__( self, toklist=None, name=None, asList=True, modal=True, isinstance=isinstance ):if self.__doinit:self.__doinit = Falseself.__name = Noneself.__parent = Noneself.__accumNames = {}self.__asList = asListself.__modal = modalif toklist is None:toklist = []if isinstance(toklist, list):self.__toklist = toklist[:]elif isinstance(toklist, _generatorType):self.__toklist = list(toklist)else:self.__toklist = [toklist]self.__tokdict = dict()if name is not None and name:if not modal:self.__accumNames[name] = 0if isinstance(name,int):name = _ustr(name) # will always return a str, but use _ustr for consistencyself.__name = nameif not (isinstance(toklist, (type(None), basestring, list)) and toklist in (None,'',[])):if isinstance(toklist,basestring):toklist = [ toklist ]if asList:if isinstance(toklist,ParseResults):self[name] = _ParseResultsWithOffset(toklist.copy(),0)else:self[name] = _ParseResultsWithOffset(ParseResults(toklist[0]),0)self[name].__name = nameelse:try:self[name] = toklist[0]except (KeyError,TypeError,IndexError):self[name] = toklistdef __getitem__( self, i ):if isinstance( i, (int,slice) ):return self.__toklist[i]else:if i not in self.__accumNames:return self.__tokdict[i][-1][0]else:return ParseResults([ v[0] for v in self.__tokdict[i] ])def __setitem__( self, k, v, isinstance=isinstance ):if isinstance(v,_ParseResultsWithOffset):self.__tokdict[k] = self.__tokdict.get(k,list()) + [v]sub = v[0]elif isinstance(k,(int,slice)):self.__toklist[k] = vsub = velse:self.__tokdict[k] = self.__tokdict.get(k,list()) + [_ParseResultsWithOffset(v,0)]sub = vif isinstance(sub,ParseResults):sub.__parent = wkref(self)def __delitem__( self, i ):if 
isinstance(i,(int,slice)):mylen = len( self.__toklist )del self.__toklist[i]# convert int to sliceif isinstance(i, int):if i < 0:i += myleni = slice(i, i+1)# get removed indicesremoved = list(range(*i.indices(mylen)))removed.reverse()# fixup indices in token dictionaryfor name,occurrences in self.__tokdict.items():for j in removed:for k, (value, position) in enumerate(occurrences):occurrences[k] = _ParseResultsWithOffset(value, position - (position > j))else:del self.__tokdict[i]def __contains__( self, k ):return k in self.__tokdictdef __len__( self ): return len( self.__toklist )def __bool__(self): return ( not not self.__toklist )__nonzero__ = __bool__def __iter__( self ): return iter( self.__toklist )def __reversed__( self ): return iter( self.__toklist[::-1] )def _iterkeys( self ):if hasattr(self.__tokdict, "iterkeys"):return self.__tokdict.iterkeys()else:return iter(self.__tokdict)def _itervalues( self ):return (self[k] for k in self._iterkeys())def _iteritems( self ):return ((k, self[k]) for k in self._iterkeys())if PY_3:keys = _iterkeys"""Returns an iterator of all named result keys (Python 3.x only)."""values = _itervalues"""Returns an iterator of all named result values (Python 3.x only)."""items = _iteritems"""Returns an iterator of all named result key-value tuples (Python 3.x only)."""else:iterkeys = _iterkeys"""Returns an iterator of all named result keys (Python 2.x only)."""itervalues = _itervalues"""Returns an iterator of all named result values (Python 2.x only)."""iteritems = _iteritems"""Returns an iterator of all named result key-value tuples (Python 2.x only)."""def keys( self ):"""Returns all named result keys (as a list in Python 2.x, as an iterator in Python 3.x)."""return list(self.iterkeys())def values( self ):"""Returns all named result values (as a list in Python 2.x, as an iterator in Python 3.x)."""return list(self.itervalues())def items( self ):"""Returns all named result key-values (as a list of tuples in Python 2.x, as an iterator 
in Python 3.x)."""return list(self.iteritems())def haskeys( self ):"""Since keys() returns an iterator, this method is helpful in bypassingcode that looks for the existence of any defined results names."""return bool(self.__tokdict)def pop( self, *args, **kwargs):"""Removes and returns item at specified index (default=C{last}).Supports both C{list} and C{dict} semantics for C{pop()}. If passed noargument or an integer argument, it will use C{list} semanticsand pop tokens from the list of parsed tokens. If passed anon-integer argument (most likely a string), it will use C{dict}semantics and pop the corresponding value from any definedresults names. A second default return value argument issupported, just as in C{dict.pop()}.Example::def remove_first(tokens):tokens.pop(0)print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321']print(OneOrMore(Word(nums)).addParseAction(remove_first).parseString("0 123 321")) # -> ['123', '321']label = Word(alphas)patt = label("LABEL") + OneOrMore(Word(nums))print(patt.parseString("AAB 123 321").dump())# Use pop() in a parse action to remove named result (note that corresponding value is not# removed from list form of results)def remove_LABEL(tokens):tokens.pop("LABEL")return tokenspatt.addParseAction(remove_LABEL)print(patt.parseString("AAB 123 321").dump())prints::['AAB', '123', '321']- LABEL: AAB['AAB', '123', '321']"""if not args:args = [-1]for k,v in kwargs.items():if k == 'default':args = (args[0], v)else:raise TypeError("pop() got an unexpected keyword argument '%s'" % k)if (isinstance(args[0], int) orlen(args) == 1 orargs[0] in self):index = args[0]ret = self[index]del self[index]return retelse:defaultvalue = args[1]return defaultvaluedef get(self, key, defaultValue=None):"""Returns named result matching the given key, or if there is nosuch name, then returns the given C{defaultValue} or C{None} if noC{defaultValue} is specified.Similar to C{dict.get()}.Example::integer = Word(nums)date_str = 
integer("year") + '/' + integer("month") + '/' + integer("day")result = date_str.parseString("1999/12/31")print(result.get("year")) # -> '1999'print(result.get("hour", "not specified")) # -> 'not specified'print(result.get("hour")) # -> None"""if key in self:return self[key]else:return defaultValuedef insert( self, index, insStr ):"""Inserts new element at location index in the list of parsed tokens.Similar to C{list.insert()}.Example::print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321']# use a parse action to insert the parse location in the front of the parsed resultsdef insert_locn(locn, tokens):tokens.insert(0, locn)print(OneOrMore(Word(nums)).addParseAction(insert_locn).parseString("0 123 321")) # -> [0, '0', '123', '321']"""self.__toklist.insert(index, insStr)# fixup indices in token dictionaryfor name,occurrences in self.__tokdict.items():for k, (value, position) in enumerate(occurrences):occurrences[k] = _ParseResultsWithOffset(value, position + (position > index))def append( self, item ):"""Add single element to end of ParseResults list of elements.Example::print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321']# use a parse action to compute the sum of the parsed integers, and add it to the enddef append_sum(tokens):tokens.append(sum(map(int, tokens)))print(OneOrMore(Word(nums)).addParseAction(append_sum).parseString("0 123 321")) # -> ['0', '123', '321', 444]"""self.__toklist.append(item)def extend( self, itemseq ):"""Add sequence of elements to end of ParseResults list of elements.Example::patt = OneOrMore(Word(alphas))# use a parse action to append the reverse of the matched strings, to make a palindromedef make_palindrome(tokens):tokens.extend(reversed([t[::-1] for t in tokens]))return ''.join(tokens)print(patt.addParseAction(make_palindrome).parseString("lskdj sdlkjf lksd")) # -> 'lskdjsdlkjflksddsklfjkldsjdksl'"""if isinstance(itemseq, ParseResults):self += 
itemseqelse:self.__toklist.extend(itemseq)def clear( self ):"""Clear all elements and results names."""del self.__toklist[:]self.__tokdict.clear()def __getattr__( self, name ):try:return self[name]except KeyError:return ""if name in self.__tokdict:if name not in self.__accumNames:return self.__tokdict[name][-1][0]else:return ParseResults([ v[0] for v in self.__tokdict[name] ])else:return ""def __add__( self, other ):ret = self.copy()ret += otherreturn retdef __iadd__( self, other ):if other.__tokdict:offset = len(self.__toklist)addoffset = lambda a: offset if a<0 else a+offsetotheritems = other.__tokdict.items()otherdictitems = [(k, _ParseResultsWithOffset(v[0],addoffset(v[1])) )for (k,vlist) in otheritems for v in vlist]for k,v in otherdictitems:self[k] = vif isinstance(v[0],ParseResults):v[0].__parent = wkref(self)self.__toklist += other.__toklistself.__accumNames.update( other.__accumNames )return selfdef __radd__(self, other):if isinstance(other,int) and other == 0:# useful for merging many ParseResults using sum() builtinreturn self.copy()else:# this may raise a TypeError - so be itreturn other + selfdef __repr__( self ):return "(%s, %s)" % ( repr( self.__toklist ), repr( self.__tokdict ) )def __str__( self ):return '[' + ', '.join(_ustr(i) if isinstance(i, ParseResults) else repr(i) for i in self.__toklist) + ']'def _asStringList( self, sep='' ):out = []for item in self.__toklist:if out and sep:out.append(sep)if isinstance( item, ParseResults ):out += item._asStringList()else:out.append( _ustr(item) )return outdef asList( self ):"""Returns the parse results as a nested list of matching tokens, all converted to strings.Example::patt = OneOrMore(Word(alphas))result = patt.parseString("sldkj lsdkj sldkj")# even though the result prints in string-like form, it is actually a pyparsing ParseResultsprint(type(result), result) # -> <class 'pyparsing.ParseResults'> ['sldkj', 'lsdkj', 'sldkj']# Use asList() to create an actual listresult_list = 
def asDict( self ):
    """Return the named parse results as a nested dictionary
    (e.g. for JSON serialization)."""
    if PY_3:
        item_fn = self.items
    else:
        item_fn = self.iteritems

    def toItem(obj):
        # recurse into nested ParseResults: dict if it has names, list otherwise
        if isinstance(obj, ParseResults):
            if obj.haskeys():
                return obj.asDict()
            else:
                return [toItem(v) for v in obj]
        else:
            return obj

    return dict((k, toItem(v)) for k, v in item_fn())

def copy( self ):
    """Return a new shallow copy of this ParseResults object."""
    ret = ParseResults( self.__toklist )
    ret.__tokdict = self.__tokdict.copy()
    ret.__parent = self.__parent
    ret.__accumNames.update( self.__accumNames )
    ret.__name = self.__name
    return ret

def asXML( self, doctag=None, namedItemsOnly=False, indent="", formatted=True ):
    """(Deprecated) Render the parse results as an XML fragment; tags are
    created for tokens and lists that have defined results names."""
    nl = "\n"
    out = []
    # map token offsets back to their results names
    namedItems = dict((v[1], k) for (k, vlist) in self.__tokdict.items()
                      for v in vlist)
    nextLevelIndent = indent + "  "

    # collapse out indents if formatting is not desired
    if not formatted:
        indent = ""
        nextLevelIndent = ""
        nl = ""

    selfTag = None
    if doctag is not None:
        selfTag = doctag
    else:
        if self.__name:
            selfTag = self.__name

    if not selfTag:
        if namedItemsOnly:
            return ""
        else:
            selfTag = "ITEM"

    out += [nl, indent, "<", selfTag, ">"]

    for i, res in enumerate(self.__toklist):
        if isinstance(res, ParseResults):
            if i in namedItems:
                out += [res.asXML(namedItems[i],
                                  namedItemsOnly and doctag is None,
                                  nextLevelIndent,
                                  formatted)]
            else:
                out += [res.asXML(None,
                                  namedItemsOnly and doctag is None,
                                  nextLevelIndent,
                                  formatted)]
        else:
            # individual token, see if there is a name for it
            resTag = None
            if i in namedItems:
                resTag = namedItems[i]
            if not resTag:
                if namedItemsOnly:
                    continue
                else:
                    resTag = "ITEM"
            xmlBodyText = _xml_escape(_ustr(res))
            out += [nl, nextLevelIndent, "<", resTag, ">",
                    xmlBodyText,
                    "</", resTag, ">"]

    out += [nl, indent, "</", selfTag, ">"]
    return "".join(out)

def __lookup(self, sub):
    # reverse-lookup: find the results name under which 'sub' is stored
    for k, vlist in self.__tokdict.items():
        for v, loc in vlist:
            if sub is v:
                return k
    return None
def getName(self):
    """Return the results name for this token expression, or None.
    Useful when several alternative expressions could have matched at a
    particular location."""
    if self.__name:
        return self.__name
    elif self.__parent:
        par = self.__parent()
        if par:
            return par.__lookup(self)
        else:
            return None
    elif (len(self) == 1 and
          len(self.__tokdict) == 1 and
          next(iter(self.__tokdict.values()))[0][1] in (0, -1)):
        return next(iter(self.__tokdict.keys()))
    else:
        return None

def dump(self, indent='', depth=0, full=True):
    """Diagnostic method: list the contents of this ParseResults, one
    named result per line.  ``indent`` lets the string be embedded in a
    nested display of other data."""
    out = []
    NL = '\n'
    out.append( indent+_ustr(self.asList()) )
    if full:
        if self.haskeys():
            items = sorted((str(k), v) for k,v in self.items())
            for k,v in items:
                if out:
                    out.append(NL)
                out.append( "%s%s- %s: " % (indent,(' '*depth), k) )
                if isinstance(v,ParseResults):
                    if v:
                        out.append( v.dump(indent,depth+1) )
                    else:
                        out.append(_ustr(v))
                else:
                    out.append(repr(v))
        elif any(isinstance(vv,ParseResults) for vv in self):
            v = self
            for i,vv in enumerate(v):
                if isinstance(vv,ParseResults):
                    out.append("\n%s%s[%d]:\n%s%s%s" % (indent,(' '*(depth)),i,indent,(' '*(depth+1)),vv.dump(indent,depth+1) ))
                else:
                    out.append("\n%s%s[%d]:\n%s%s%s" % (indent,(' '*(depth)),i,indent,(' '*(depth+1)),_ustr(vv)))
    return "".join(out)

def pprint(self, *args, **kwargs):
    """Pretty-print the parsed tokens as a list using the ``pprint`` module;
    extra args are forwarded to ``pprint.pprint``."""
    pprint.pprint(self.asList(), *args, **kwargs)

# add support for pickle protocol
def __getstate__(self):
    return ( self.__toklist,
             ( self.__tokdict.copy(),
               self.__parent is not None and self.__parent() or None,
               self.__accumNames,
               self.__name ) )

def __setstate__(self,state):
    self.__toklist = state[0]
    (self.__tokdict,
     par,
     inAccumNames,
     self.__name) = state[1]
    self.__accumNames = {}
    self.__accumNames.update(inAccumNames)
    if par is not None:
        # re-establish the weak parent link dropped during pickling
        self.__parent = wkref(par)
    else:
        self.__parent = None

def __getnewargs__(self):
    return self.__toklist, self.__name, self.__asList, self.__modal

def __dir__(self):
    return (dir(type(self)) + list(self.keys()))

# FIX: ParseResults implements the MutableMapping interface.  The ABC moved
# to collections.abc in Python 3.3 and the collections.MutableMapping alias
# was removed in 3.10; fall back to the old location for Python 2.
try:
    collections.abc.MutableMapping.register(ParseResults)
except AttributeError:
    collections.MutableMapping.register(ParseResults)
See L{I{ParserElement.parseString}<ParserElement.parseString>} for more informationon parsing strings containing C{<TAB>}s, and suggested methods to maintain aconsistent view of the parsed string, the parse location, and line and columnpositions within the parsed string."""s = strgreturn 1 if 0<loc<len(s) and s[loc-1] == '\n' else loc - s.rfind("\n", 0, loc)def lineno(loc,strg):"""Returns current line number within a string, counting newlines as line separators.The first line is number 1.Note: the default parsing behavior is to expand tabs in the input stringbefore starting the parsing process. See L{I{ParserElement.parseString}<ParserElement.parseString>} for more informationon parsing strings containing C{<TAB>}s, and suggested methods to maintain aconsistent view of the parsed string, the parse location, and line and columnpositions within the parsed string."""return strg.count("\n",0,loc) + 1def line( loc, strg ):"""Returns the line of text containing loc within a string, counting newlines as line separators."""lastCR = strg.rfind("\n", 0, loc)nextCR = strg.find("\n", loc)if nextCR >= 0:return strg[lastCR+1:nextCR]else:return strg[lastCR+1:]def _defaultStartDebugAction( instring, loc, expr ):print (("Match " + _ustr(expr) + " at loc " + _ustr(loc) + "(%d,%d)" % ( lineno(loc,instring), col(loc,instring) )))def _defaultSuccessDebugAction( instring, startloc, endloc, expr, toks ):print ("Matched " + _ustr(expr) + " -> " + str(toks.asList()))def _defaultExceptionDebugAction( instring, loc, expr, exc ):print ("Exception raised:" + _ustr(exc))def nullDebugAction(*args):"""'Do-nothing' debug action, to suppress debugging output during parsing."""pass# Only works on Python 3.x - nonlocal is toxic to Python 2 installs#~ 'decorator to trim function calls to match the arity of the target'#~ def _trim_arity(func, maxargs=3):#~ if func in singleArgBuiltins:#~ return lambda s,l,t: func(t)#~ limit = 0#~ foundArity = False#~ def wrapper(*args):#~ nonlocal limit,foundArity#~ 
while 1:#~ try:#~ ret = func(*args[limit:])#~ foundArity = True#~ return ret#~ except TypeError:#~ if limit == maxargs or foundArity:#~ raise#~ limit += 1#~ continue#~ return wrapper# this version is Python 2.x-3.x cross-compatible'decorator to trim function calls to match the arity of the target'def _trim_arity(func, maxargs=2):if func in singleArgBuiltins:return lambda s,l,t: func(t)limit = [0]foundArity = [False]# traceback return data structure changed in Py3.5 - normalize back to plain tuplesif system_version[:2] >= (3,5):def extract_stack(limit=0):# special handling for Python 3.5.0 - extra deep call stack by 1offset = -3 if system_version == (3,5,0) else -2frame_summary = traceback.extract_stack(limit=-offset+limit-1)[offset]return [(frame_summary.filename, frame_summary.lineno)]def extract_tb(tb, limit=0):frames = traceback.extract_tb(tb, limit=limit)frame_summary = frames[-1]return [(frame_summary.filename, frame_summary.lineno)]else:extract_stack = traceback.extract_stackextract_tb = traceback.extract_tb# synthesize what would be returned by traceback.extract_stack at the call to# user's parse action 'func', so that we don't incur call penalty at parse timeLINE_DIFF = 6# IF ANY CODE CHANGES, EVEN JUST COMMENTS OR BLANK LINES, BETWEEN THE NEXT LINE AND# THE CALL TO FUNC INSIDE WRAPPER, LINE_DIFF MUST BE MODIFIED!!!!this_line = extract_stack(limit=2)[-1]pa_call_line_synth = (this_line[0], this_line[1]+LINE_DIFF)def wrapper(*args):while 1:try:ret = func(*args[limit[0]:])foundArity[0] = Truereturn retexcept TypeError:# re-raise TypeErrors if they did not come from our arity testingif foundArity[0]:raiseelse:try:tb = sys.exc_info()[-1]if not extract_tb(tb, limit=2)[-1][:2] == pa_call_line_synth:raisefinally:del tbif limit[0] <= maxargs:limit[0] += 1continueraise# copy func name to wrapper for sensible debug outputfunc_name = "<parse action>"try:func_name = getattr(func, '__name__',getattr(func, '__class__').__name__)except Exception:func_name = 
class ParserElement(object):
    """Abstract base level parser element class."""
    # default set of characters skipped as whitespace between tokens
    DEFAULT_WHITE_CHARS = " \n\t\r"
    verbose_stacktrace = False

    @staticmethod
    def setDefaultWhitespaceChars( chars ):
        r"""Override the default whitespace characters skipped before
        matching (space, tab, newline, carriage return).  Affects all
        elements created afterwards."""
        ParserElement.DEFAULT_WHITE_CHARS = chars

    @staticmethod
    def inlineLiteralsUsing(cls):
        """Set the class (e.g. Literal or Suppress) used to wrap plain
        string literals included in a parser expression."""
        ParserElement._literalStringClass = cls

    def __init__( self, savelist=False ):
        # per-instance parsing configuration; subclasses tune these flags
        self.parseAction = list()
        self.failAction = None
        #~ self.name = "<unknown>"  # don't define self.name, let subclasses try/except upcall
        self.strRepr = None
        self.resultsName = None
        self.saveAsList = savelist
        self.skipWhitespace = True
        self.whiteChars = ParserElement.DEFAULT_WHITE_CHARS
        self.copyDefaultWhiteChars = True
        self.mayReturnEmpty = False # used when checking for left-recursion
        self.keepTabs = False
        self.ignoreExprs = list()
        self.debug = False
        self.streamlined = False
        self.mayIndexError = True # used to optimize exception handling for subclasses that don't advance parse index
        self.errmsg = ""
        self.modalResults = True # used to mark results names as modal (report only last) or cumulative (list all)
        self.debugActions = ( None, None, None ) # custom debug actions
        self.re = None
        self.callPreparse = True # used to avoid redundant calls to preParse
        self.callDuringTry = False

    def copy( self ):
        """Return a copy of this ParserElement with independent parseAction
        and ignoreExprs lists, so copies can be customized separately."""
        cpy = copy.copy( self )
        cpy.parseAction = self.parseAction[:]
        cpy.ignoreExprs = self.ignoreExprs[:]
        if self.copyDefaultWhiteChars:
            cpy.whiteChars = ParserElement.DEFAULT_WHITE_CHARS
        return cpy
(at char 0), (line:1, col:1)Word(nums).setName("integer").parseString("ABC") # -> Exception: Expected integer (at char 0), (line:1, col:1)"""self.name = nameself.errmsg = "Expected " + self.nameif hasattr(self,"exception"):self.exception.msg = self.errmsgreturn selfdef setResultsName( self, name, listAllMatches=False ):"""Define name for referencing matching tokens as a nested attributeof the returned parse results.NOTE: this returns a *copy* of the original C{ParserElement} object;this is so that the client can define a basic element, such as aninteger, and reference it in multiple places with different names.You can also set results names using the abbreviated syntax,C{expr("name")} in place of C{expr.setResultsName("name")} -see L{I{__call__}<__call__>}.Example::date_str = (integer.setResultsName("year") + '/'+ integer.setResultsName("month") + '/'+ integer.setResultsName("day"))# equivalent form:date_str = integer("year") + '/' + integer("month") + '/' + integer("day")"""newself = self.copy()if name.endswith("*"):name = name[:-1]listAllMatches=Truenewself.resultsName = namenewself.modalResults = not listAllMatchesreturn newselfdef setBreak(self,breakFlag = True):"""Method to invoke the Python pdb debugger when this element isabout to be parsed. 
Set C{breakFlag} to True to enable, False todisable."""if breakFlag:_parseMethod = self._parsedef breaker(instring, loc, doActions=True, callPreParse=True):import pdbpdb.set_trace()return _parseMethod( instring, loc, doActions, callPreParse )breaker._originalParseMethod = _parseMethodself._parse = breakerelse:if hasattr(self._parse,"_originalParseMethod"):self._parse = self._parse._originalParseMethodreturn selfdef setParseAction( self, *fns, **kwargs ):"""Define action to perform when successfully matching parse element definition.Parse action fn is a callable method with 0-3 arguments, called as C{fn(s,loc,toks)},C{fn(loc,toks)}, C{fn(toks)}, or just C{fn()}, where:- s = the original string being parsed (see note below)- loc = the location of the matching substring- toks = a list of the matched tokens, packaged as a C{L{ParseResults}} objectIf the functions in fns modify the tokens, they can return them as the returnvalue from fn, and the modified list of tokens will replace the original.Otherwise, fn does not need to return any value.Optional keyword arguments:- callDuringTry = (default=C{False}) indicate if parse action should be run during lookaheads and alternate testingNote: the default parsing behavior is to expand tabs in the input stringbefore starting the parsing process. 
See L{I{parseString}<parseString>} for more informationon parsing strings containing C{<TAB>}s, and suggested methods to maintain aconsistent view of the parsed string, the parse location, and line and columnpositions within the parsed string.Example::integer = Word(nums)date_str = integer + '/' + integer + '/' + integerdate_str.parseString("1999/12/31") # -> ['1999', '/', '12', '/', '31']# use parse action to convert to ints at parse timeinteger = Word(nums).setParseAction(lambda toks: int(toks[0]))date_str = integer + '/' + integer + '/' + integer# note that integer fields are now ints, not stringsdate_str.parseString("1999/12/31") # -> [1999, '/', 12, '/', 31]"""self.parseAction = list(map(_trim_arity, list(fns)))self.callDuringTry = kwargs.get("callDuringTry", False)return selfdef addParseAction( self, *fns, **kwargs ):"""Add parse action to expression's list of parse actions. See L{I{setParseAction}<setParseAction>}.See examples in L{I{copy}<copy>}."""self.parseAction += list(map(_trim_arity, list(fns)))self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False)return selfdef addCondition(self, *fns, **kwargs):"""Add a boolean predicate function to expression's list of parse actions. SeeL{I{setParseAction}<setParseAction>} for function call signatures. 
Unlike C{setParseAction},functions passed to C{addCondition} need to return boolean success/fail of the condition.Optional keyword arguments:- message = define a custom message to be used in the raised exception- fatal = if True, will raise ParseFatalException to stop parsing immediately; otherwise will raise ParseExceptionExample::integer = Word(nums).setParseAction(lambda toks: int(toks[0]))year_int = integer.copy()year_int.addCondition(lambda toks: toks[0] >= 2000, message="Only support years 2000 and later")date_str = year_int + '/' + integer + '/' + integerresult = date_str.parseString("1999/12/31") # -> Exception: Only support years 2000 and later (at char 0), (line:1, col:1)"""msg = kwargs.get("message", "failed user-defined condition")exc_type = ParseFatalException if kwargs.get("fatal", False) else ParseExceptionfor fn in fns:def pa(s,l,t):if not bool(_trim_arity(fn)(s,l,t)):raise exc_type(s,l,msg)self.parseAction.append(pa)self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False)return selfdef setFailAction( self, fn ):"""Define action to perform if parsing fails at this expression.Fail acton fn is a callable function that takes the argumentsC{fn(s,loc,expr,err)} where:- s = string being parsed- loc = location where expression match was attempted and failed- expr = the parse expression that failed- err = the exception thrownThe function returns no value. 
It may throw C{L{ParseFatalException}}if it is desired to stop parsing immediately."""self.failAction = fnreturn selfdef _skipIgnorables( self, instring, loc ):exprsFound = Truewhile exprsFound:exprsFound = Falsefor e in self.ignoreExprs:try:while 1:loc,dummy = e._parse( instring, loc )exprsFound = Trueexcept ParseException:passreturn locdef preParse( self, instring, loc ):if self.ignoreExprs:loc = self._skipIgnorables( instring, loc )if self.skipWhitespace:wt = self.whiteCharsinstrlen = len(instring)while loc < instrlen and instring[loc] in wt:loc += 1return locdef parseImpl( self, instring, loc, doActions=True ):return loc, []def postParse( self, instring, loc, tokenlist ):return tokenlist#~ @profiledef _parseNoCache( self, instring, loc, doActions=True, callPreParse=True ):debugging = ( self.debug ) #and doActions )if debugging or self.failAction:#~ print ("Match",self,"at loc",loc,"(%d,%d)" % ( lineno(loc,instring), col(loc,instring) ))if (self.debugActions[0] ):self.debugActions[0]( instring, loc, self )if callPreParse and self.callPreparse:preloc = self.preParse( instring, loc )else:preloc = loctokensStart = preloctry:try:loc,tokens = self.parseImpl( instring, preloc, doActions )except IndexError:raise ParseException( instring, len(instring), self.errmsg, self )except ParseBaseException as err:#~ print ("Exception raised:", err)if self.debugActions[2]:self.debugActions[2]( instring, tokensStart, self, err )if self.failAction:self.failAction( instring, tokensStart, self, err )raiseelse:if callPreParse and self.callPreparse:preloc = self.preParse( instring, loc )else:preloc = loctokensStart = prelocif self.mayIndexError or loc >= len(instring):try:loc,tokens = self.parseImpl( instring, preloc, doActions )except IndexError:raise ParseException( instring, len(instring), self.errmsg, self )else:loc,tokens = self.parseImpl( instring, preloc, doActions )tokens = self.postParse( instring, loc, tokens )retTokens = ParseResults( tokens, self.resultsName, 
#~ @profile
def _parseNoCache( self, instring, loc, doActions=True, callPreParse=True ):
    """Core match driver: preparse, parseImpl, postParse, then run parse
    actions; returns (new loc, ParseResults).  Debug hooks and failAction
    are honored when enabled."""
    debugging = ( self.debug ) #and doActions )

    if debugging or self.failAction:
        #~ print ("Match",self,"at loc",loc,"(%d,%d)" % ( lineno(loc,instring), col(loc,instring) ))
        if (self.debugActions[0] ):
            self.debugActions[0]( instring, loc, self )
        if callPreParse and self.callPreparse:
            preloc = self.preParse( instring, loc )
        else:
            preloc = loc
        tokensStart = preloc
        try:
            try:
                loc,tokens = self.parseImpl( instring, preloc, doActions )
            except IndexError:
                raise ParseException( instring, len(instring), self.errmsg, self )
        except ParseBaseException as err:
            #~ print ("Exception raised:", err)
            if self.debugActions[2]:
                self.debugActions[2]( instring, tokensStart, self, err )
            if self.failAction:
                self.failAction( instring, tokensStart, self, err )
            raise
    else:
        if callPreParse and self.callPreparse:
            preloc = self.preParse( instring, loc )
        else:
            preloc = loc
        tokensStart = preloc
        if self.mayIndexError or loc >= len(instring):
            try:
                loc,tokens = self.parseImpl( instring, preloc, doActions )
            except IndexError:
                raise ParseException( instring, len(instring), self.errmsg, self )
        else:
            loc,tokens = self.parseImpl( instring, preloc, doActions )

    tokens = self.postParse( instring, loc, tokens )

    retTokens = ParseResults( tokens, self.resultsName, asList=self.saveAsList, modal=self.modalResults )
    if self.parseAction and (doActions or self.callDuringTry):
        if debugging:
            try:
                for fn in self.parseAction:
                    tokens = fn( instring, tokensStart, retTokens )
                    if tokens is not None:
                        retTokens = ParseResults( tokens,
                                                  self.resultsName,
                                                  asList=self.saveAsList and isinstance(tokens,(ParseResults,list)),
                                                  modal=self.modalResults )
            except ParseBaseException as err:
                #~ print "Exception raised in user parse action:", err
                if (self.debugActions[2] ):
                    self.debugActions[2]( instring, tokensStart, self, err )
                raise
        else:
            for fn in self.parseAction:
                tokens = fn( instring, tokensStart, retTokens )
                if tokens is not None:
                    retTokens = ParseResults( tokens,
                                              self.resultsName,
                                              asList=self.saveAsList and isinstance(tokens,(ParseResults,list)),
                                              modal=self.modalResults )
    if debugging:
        #~ print ("Matched",self,"->",retTokens.asList())
        if (self.debugActions[1] ):
            self.debugActions[1]( instring, tokensStart, loc, self, retTokens )

    return loc, retTokens

def tryParse( self, instring, loc ):
    # probe a match without running actions; normalize fatal -> plain exception
    try:
        return self._parse( instring, loc, doActions=False )[0]
    except ParseFatalException:
        raise ParseException( instring, loc, self.errmsg, self)

def canParseNext(self, instring, loc):
    """Return True if this expression would match at ``loc``."""
    try:
        self.tryParse(instring, loc)
    except (ParseException, IndexError):
        return False
    else:
        return True

class _UnboundedCache(object):
    # dict-backed cache with no size limit; closure-based accessors avoid
    # per-call attribute lookups
    def __init__(self):
        cache = {}
        self.not_in_cache = not_in_cache = object()

        def get(self, key):
            return cache.get(key, not_in_cache)

        def set(self, key, value):
            cache[key] = value

        def clear(self):
            cache.clear()

        self.get = types.MethodType(get, self)
        self.set = types.MethodType(set, self)
        self.clear = types.MethodType(clear, self)

if _OrderedDict is not None:
    class _FifoCache(object):
        # bounded FIFO cache built on OrderedDict (evicts oldest entry)
        def __init__(self, size):
            self.not_in_cache = not_in_cache = object()
            cache = _OrderedDict()

            def get(self, key):
                return cache.get(key, not_in_cache)

            def set(self, key, value):
                cache[key] = value
                if len(cache) > size:
                    cache.popitem(False)

            def clear(self):
                cache.clear()

            self.get = types.MethodType(get, self)
            self.set = types.MethodType(set, self)
            self.clear = types.MethodType(clear, self)

else:
    class _FifoCache(object):
        # bounded FIFO cache for platforms without OrderedDict: a deque
        # tracks insertion order for eviction
        def __init__(self, size):
            self.not_in_cache = not_in_cache = object()
            cache = {}
            key_fifo = collections.deque([], size)

            def get(self, key):
                return cache.get(key, not_in_cache)

            def set(self, key, value):
                cache[key] = value
                if len(cache) > size:
                    cache.pop(key_fifo.popleft(), None)
                key_fifo.append(key)

            def clear(self):
                cache.clear()
                key_fifo.clear()

            self.get = types.MethodType(get, self)
            self.set = types.MethodType(set, self)
            self.clear = types.MethodType(clear, self)

# argument cache for optimizing repeated calls when backtracking through recursive expressions
packrat_cache = {} # this is set later by enabledPackrat(); this is here so that resetCache() doesn't fail
packrat_cache_lock = RLock()
packrat_cache_stats = [0, 0]

# this method gets repeatedly called during backtracking with the same arguments -
# we can cache these arguments and save ourselves the trouble of re-parsing the contained expression
def _parseCache( self, instring, loc, doActions=True, callPreParse=True ):
    HIT, MISS = 0, 1
    lookup = (self, instring, loc, callPreParse, doActions)
    with ParserElement.packrat_cache_lock:
        cache = ParserElement.packrat_cache
        value = cache.get(lookup)
        if value is cache.not_in_cache:
            ParserElement.packrat_cache_stats[MISS] += 1
            try:
                value = self._parseNoCache(instring, loc, doActions, callPreParse)
            except ParseBaseException as pe:
                # cache a copy of the exception, without the traceback
                cache.set(lookup, pe.__class__(*pe.args))
                raise
            else:
                cache.set(lookup, (value[0], value[1].copy()))
                return value
        else:
            ParserElement.packrat_cache_stats[HIT] += 1
            if isinstance(value, Exception):
                raise value
            return (value[0], value[1].copy())

_parse = _parseNoCache

@staticmethod
def resetCache():
    ParserElement.packrat_cache.clear()
    ParserElement.packrat_cache_stats[:] = [0] * len(ParserElement.packrat_cache_stats)

_packratEnabled = False
logic.Repeated parse attempts at the same string location (which happensoften in many complex grammars) can immediately return a cached value,instead of re-executing parsing/validating code. Memoizing is done ofboth valid results and parsing exceptions.Parameters:- cache_size_limit - (default=C{128}) - if an integer value is providedwill limit the size of the packrat cache; if None is passed, thenthe cache size will be unbounded; if 0 is passed, the cache willbe effectively disabled.This speedup may break existing programs that use parse actions thathave side-effects. For this reason, packrat parsing is disabled whenyou first import pyparsing. To activate the packrat feature, yourprogram must call the class method C{ParserElement.enablePackrat()}. Ifyour program uses C{psyco} to "compile as you go", you must callC{enablePackrat} before calling C{psyco.full()}. If you do not do this,Python will crash. For best results, call C{enablePackrat()} immediatelyafter importing pyparsing.Example::import pyparsingpyparsing.ParserElement.enablePackrat()"""if not ParserElement._packratEnabled:ParserElement._packratEnabled = Trueif cache_size_limit is None:ParserElement.packrat_cache = ParserElement._UnboundedCache()else:ParserElement.packrat_cache = ParserElement._FifoCache(cache_size_limit)ParserElement._parse = ParserElement._parseCachedef parseString( self, instring, parseAll=False ):"""Execute the parse expression with the given string.This is the main interface to the client code, once the completeexpression has been built.If you want the grammar to require that the entire input string besuccessfully parsed, then set C{parseAll} to True (equivalent to endingthe grammar with C{L{StringEnd()}}).Note: C{parseString} implicitly calls C{expandtabs()} on the input string,in order to report proper column numbers in parse actions.If the input string contains tabs andthe grammar uses parse actions that use the C{loc} argument to index into thestring being parsed, you can ensure you 
have a consistent view of the inputstring by:- calling C{parseWithTabs} on your grammar before calling C{parseString}(see L{I{parseWithTabs}<parseWithTabs>})- define your parse action using the full C{(s,loc,toks)} signature, andreference the input string using the parse action's C{s} argument- explictly expand the tabs in your input string before callingC{parseString}Example::Word('a').parseString('aaaaabaaa') # -> ['aaaaa']Word('a').parseString('aaaaabaaa', parseAll=True) # -> Exception: Expected end of text"""ParserElement.resetCache()if not self.streamlined:self.streamline()#~ self.saveAsList = Truefor e in self.ignoreExprs:e.streamline()if not self.keepTabs:instring = instring.expandtabs()try:loc, tokens = self._parse( instring, 0 )if parseAll:loc = self.preParse( instring, loc )se = Empty() + StringEnd()se._parse( instring, loc )except ParseBaseException as exc:if ParserElement.verbose_stacktrace:raiseelse:# catch and re-raise exception from here, clears out pyparsing internal stack traceraise excelse:return tokensdef scanString( self, instring, maxMatches=_MAX_INT, overlap=False ):"""Scan the input string for expression matches. Each match will return thematching tokens, start location, and end location. May be called with optionalC{maxMatches} argument, to clip scanning after 'n' matches are found. IfC{overlap} is specified, then overlapping matches will be reported.Note that the start and end locations are reported relative to the stringbeing parsed. 
See L{I{parseString}<parseString>} for more information on parsingstrings with embedded tabs.Example::source = "sldjf123lsdjjkf345sldkjf879lkjsfd987"print(source)for tokens,start,end in Word(alphas).scanString(source):print(' '*start + '^'*(end-start))print(' '*start + tokens[0])prints::sldjf123lsdjjkf345sldkjf879lkjsfd987^^^^^sldjf^^^^^^^lsdjjkf^^^^^^sldkjf^^^^^^lkjsfd"""if not self.streamlined:self.streamline()for e in self.ignoreExprs:e.streamline()if not self.keepTabs:instring = _ustr(instring).expandtabs()instrlen = len(instring)loc = 0preparseFn = self.preParseparseFn = self._parseParserElement.resetCache()matches = 0try:while loc <= instrlen and matches < maxMatches:try:preloc = preparseFn( instring, loc )nextLoc,tokens = parseFn( instring, preloc, callPreParse=False )except ParseException:loc = preloc+1else:if nextLoc > loc:matches += 1yield tokens, preloc, nextLocif overlap:nextloc = preparseFn( instring, loc )if nextloc > loc:loc = nextLocelse:loc += 1else:loc = nextLocelse:loc = preloc+1except ParseBaseException as exc:if ParserElement.verbose_stacktrace:raiseelse:# catch and re-raise exception from here, clears out pyparsing internal stack traceraise excdef transformString( self, instring ):"""Extension to C{L{scanString}}, to modify matching text with modified tokens that maybe returned from a parse action. To use C{transformString}, define a grammar andattach a parse action to it that modifies the returned token list.Invoking C{transformString()} on a target string will then scan for matches,and replace the matched text patterns according to the logic in the parseaction. 
C{transformString()} returns the resulting transformed string.Example::wd = Word(alphas)wd.setParseAction(lambda toks: toks[0].title())print(wd.transformString("now is the winter of our discontent made glorious summer by this sun of york."))Prints::Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York."""out = []lastE = 0# force preservation of <TAB>s, to minimize unwanted transformation of string, and to# keep string locs straight between transformString and scanStringself.keepTabs = Truetry:for t,s,e in self.scanString( instring ):out.append( instring[lastE:s] )if t:if isinstance(t,ParseResults):out += t.asList()elif isinstance(t,list):out += telse:out.append(t)lastE = eout.append(instring[lastE:])out = [o for o in out if o]return "".join(map(_ustr,_flatten(out)))except ParseBaseException as exc:if ParserElement.verbose_stacktrace:raiseelse:# catch and re-raise exception from here, clears out pyparsing internal stack traceraise excdef searchString( self, instring, maxMatches=_MAX_INT ):"""Another extension to C{L{scanString}}, simplifying the access to the tokens foundto match the given parse expression. 
May be called with optionalC{maxMatches} argument, to clip searching after 'n' matches are found.Example::# a capitalized word starts with an uppercase letter, followed by zero or more lowercase letterscap_word = Word(alphas.upper(), alphas.lower())print(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity"))prints::['More', 'Iron', 'Lead', 'Gold', 'I']"""try:return ParseResults([ t for t,s,e in self.scanString( instring, maxMatches ) ])except ParseBaseException as exc:if ParserElement.verbose_stacktrace:raiseelse:# catch and re-raise exception from here, clears out pyparsing internal stack traceraise excdef split(self, instring, maxsplit=_MAX_INT, includeSeparators=False):"""Generator method to split a string using the given expression as a separator.May be called with optional C{maxsplit} argument, to limit the number of splits;and the optional C{includeSeparators} argument (default=C{False}), if the separatingmatching text should be included in the split results.Example::punc = oneOf(list(".,;:/-!?"))print(list(punc.split("This, this?, this sentence, is badly punctuated!")))prints::['This', ' this', '', ' this sentence', ' is badly punctuated', '']"""splits = 0last = 0for t,s,e in self.scanString(instring, maxMatches=maxsplit):yield instring[last:s]if includeSeparators:yield t[0]last = eyield instring[last:]def __add__(self, other ):"""Implementation of + operator - returns C{L{And}}. Adding strings to a ParserElementconverts them to L{Literal}s by default.Example::greet = Word(alphas) + "," + Word(alphas) + "!"hello = "Hello, World!"print (hello, "->", greet.parseString(hello))Prints::Hello, World! 
def __add__(self, other ):
    """
    Implementation of + operator - returns C{L{And}}.  Adding strings to a
    ParserElement converts them to L{Literal}s by default.

    Example::
        greet = Word(alphas) + "," + Word(alphas) + "!"
        hello = "Hello, World!"
        print (hello, "->", greet.parseString(hello))
    Prints::
        Hello, World! -> ['Hello', ',', 'World', '!']
    """
    if isinstance( other, basestring ):
        other = ParserElement._literalStringClass( other )
    if not isinstance( other, ParserElement ):
        warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
                SyntaxWarning, stacklevel=2)
        return None
    return And( [ self, other ] )

def __radd__(self, other ):
    """Implementation of + operator when left operand is not a C{L{ParserElement}}"""
    if isinstance( other, basestring ):
        other = ParserElement._literalStringClass( other )
    if not isinstance( other, ParserElement ):
        warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
                SyntaxWarning, stacklevel=2)
        return None
    return other + self

def __sub__(self, other):
    """Implementation of - operator, returns C{L{And}} with error stop"""
    if isinstance( other, basestring ):
        other = ParserElement._literalStringClass( other )
    if not isinstance( other, ParserElement ):
        warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
                SyntaxWarning, stacklevel=2)
        return None
    return And( [ self, And._ErrorStop(), other ] )

def __rsub__(self, other ):
    """Implementation of - operator when left operand is not a C{L{ParserElement}}"""
    if isinstance( other, basestring ):
        other = ParserElement._literalStringClass( other )
    if not isinstance( other, ParserElement ):
        warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
                SyntaxWarning, stacklevel=2)
        return None
    return other - self

def __mul__(self,other):
    """
    Implementation of * operator, allows use of C{expr * 3} in place of
    C{expr + expr + expr}.  Expressions may also be multiplied by a 2-integer
    tuple, similar to C{{min,max}} multipliers in regular expressions.  Tuples
    may also include C{None} as in:
     - C{expr*(n,None)} or C{expr*(n,)} is equivalent
       to C{expr*n + L{ZeroOrMore}(expr)}
       (read as "at least n instances of C{expr}")
     - C{expr*(None,n)} is equivalent to C{expr*(0,n)}
       (read as "0 to n instances of C{expr}")
     - C{expr*(None,None)} is equivalent to C{L{ZeroOrMore}(expr)}
     - C{expr*(1,None)} is equivalent to C{L{OneOrMore}(expr)}

    Note that C{expr*(None,n)} does not raise an exception if
    more than n exprs exist in the input stream; that is,
    C{expr*(None,n)} does not enforce a maximum number of expr
    occurrences.  If this behavior is desired, then write
    C{expr*(None,n) + ~expr}
    """
    if isinstance(other,int):
        minElements, optElements = other,0
    elif isinstance(other,tuple):
        # normalize 1-tuples / None entries to a (min, max) pair
        other = (other + (None, None))[:2]
        if other[0] is None:
            other = (0, other[1])
        if isinstance(other[0],int) and other[1] is None:
            if other[0] == 0:
                return ZeroOrMore(self)
            if other[0] == 1:
                return OneOrMore(self)
            else:
                return self*other[0] + ZeroOrMore(self)
        elif isinstance(other[0],int) and isinstance(other[1],int):
            minElements, optElements = other
            optElements -= minElements
        else:
            # bugfix: the types must be %-formatted into the message; passing them
            # as extra TypeError args left the placeholders unfilled in the output
            raise TypeError("cannot multiply 'ParserElement' and ('%s','%s') objects"
                            % (type(other[0]), type(other[1])))
    else:
        raise TypeError("cannot multiply 'ParserElement' and '%s' objects" % type(other))

    if minElements < 0:
        raise ValueError("cannot multiply ParserElement by negative value")
    if optElements < 0:
        raise ValueError("second tuple value must be greater or equal to first tuple value")
    if minElements == optElements == 0:
        raise ValueError("cannot multiply ParserElement by 0 or (0,0)")

    if (optElements):
        # build nested Optionals for the portion beyond the required minimum
        def makeOptionalList(n):
            if n>1:
                return Optional(self + makeOptionalList(n-1))
            else:
                return Optional(self)
        if minElements:
            if minElements == 1:
                ret = self + makeOptionalList(optElements)
            else:
                ret = And([self]*minElements) + makeOptionalList(optElements)
        else:
            ret = makeOptionalList(optElements)
    else:
        if minElements == 1:
            ret = self
        else:
            ret = And([self]*minElements)
    return ret

def __rmul__(self, other):
    """Implementation of * operator when left operand is not a C{L{ParserElement}}"""
    return self.__mul__(other)
other ):"""Implementation of | operator - returns C{L{MatchFirst}}"""if isinstance( other, basestring ):other = ParserElement._literalStringClass( other )if not isinstance( other, ParserElement ):warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),SyntaxWarning, stacklevel=2)return Nonereturn MatchFirst( [ self, other ] )def __ror__(self, other ):"""Implementation of | operator when left operand is not a C{L{ParserElement}}"""if isinstance( other, basestring ):other = ParserElement._literalStringClass( other )if not isinstance( other, ParserElement ):warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),SyntaxWarning, stacklevel=2)return Nonereturn other | selfdef __xor__(self, other ):"""Implementation of ^ operator - returns C{L{Or}}"""if isinstance( other, basestring ):other = ParserElement._literalStringClass( other )if not isinstance( other, ParserElement ):warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),SyntaxWarning, stacklevel=2)return Nonereturn Or( [ self, other ] )def __rxor__(self, other ):"""Implementation of ^ operator when left operand is not a C{L{ParserElement}}"""if isinstance( other, basestring ):other = ParserElement._literalStringClass( other )if not isinstance( other, ParserElement ):warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),SyntaxWarning, stacklevel=2)return Nonereturn other ^ selfdef __and__(self, other ):"""Implementation of & operator - returns C{L{Each}}"""if isinstance( other, basestring ):other = ParserElement._literalStringClass( other )if not isinstance( other, ParserElement ):warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),SyntaxWarning, stacklevel=2)return Nonereturn Each( [ self, other ] )def __rand__(self, other ):"""Implementation of & operator when left operand is not a C{L{ParserElement}}"""if isinstance( other, basestring ):other = 
ParserElement._literalStringClass( other )if not isinstance( other, ParserElement ):warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),SyntaxWarning, stacklevel=2)return Nonereturn other & selfdef __invert__( self ):"""Implementation of ~ operator - returns C{L{NotAny}}"""return NotAny( self )def __call__(self, name=None):"""Shortcut for C{L{setResultsName}}, with C{listAllMatches=False}.If C{name} is given with a trailing C{'*'} character, then C{listAllMatches} will bepassed as C{True}.If C{name} is omitted, same as calling C{L{copy}}.Example::# these are equivalentuserdata = Word(alphas).setResultsName("name") + Word(nums+"-").setResultsName("socsecno")userdata = Word(alphas)("name") + Word(nums+"-")("socsecno")"""if name is not None:return self.setResultsName(name)else:return self.copy()def suppress( self ):"""Suppresses the output of this C{ParserElement}; useful to keep punctuation fromcluttering up returned output."""return Suppress( self )def leaveWhitespace( self ):"""Disables the skipping of whitespace before matching the characters in theC{ParserElement}'s defined pattern. 
def setWhitespaceChars( self, chars ):
    """Overrides the default whitespace chars"""
    self.skipWhitespace = True
    self.whiteChars = chars
    self.copyDefaultWhiteChars = False
    return self

def parseWithTabs( self ):
    """
    Overrides default behavior to expand C{<TAB>}s to spaces before parsing the
    input string.  Must be called before C{parseString} when the input grammar
    contains elements that match C{<TAB>} characters.
    """
    self.keepTabs = True
    return self

def ignore( self, other ):
    """
    Define expression to be ignored (e.g., comments) while doing pattern
    matching; may be called repeatedly, to define multiple comment or other
    ignorable patterns.

    Example::
        patt = OneOrMore(Word(alphas))
        patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj']
        patt.ignore(cStyleComment)
        patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj', 'lskjd']
    """
    if isinstance(other, basestring):
        other = Suppress(other)
    if isinstance( other, Suppress ):
        # avoid registering the same Suppress expression twice
        if other not in self.ignoreExprs:
            self.ignoreExprs.append(other)
    else:
        self.ignoreExprs.append( Suppress( other.copy() ) )
    return self

def setDebugActions( self, startAction, successAction, exceptionAction ):
    """Enable display of debugging messages while doing pattern matching."""
    # fall back to the module-level default action for any that is not given
    self.debugActions = (startAction or _defaultStartDebugAction,
                         successAction or _defaultSuccessDebugAction,
                         exceptionAction or _defaultExceptionDebugAction)
    self.debug = True
    return self

def setDebug( self, flag=True ):
    """
    Enable display of debugging messages while doing pattern matching.
    Set C{flag} to True to enable, False to disable.

    Example::
        wd = Word(alphas).setName("alphaword")
        integer = Word(nums).setName("numword")
        term = wd | integer

        # turn on debugging for wd
        wd.setDebug()

        OneOrMore(term).parseString("abc 123 xyz 890")

    The default debug actions print a C{"Match <exprname> at loc <n>(<line>,<col>)"}
    message before each match attempt, then either a C{"Matched"} or an
    C{"Exception raised"} message.  Custom debug actions can be specified using
    L{setDebugActions}.  Use L{setName} to assign a human-readable name to the
    expression to make the debug output easier to follow.
    """
    if flag:
        self.setDebugActions( _defaultStartDebugAction, _defaultSuccessDebugAction, _defaultExceptionDebugAction )
    else:
        self.debug = False
    return self

def __str__( self ):
    return self.name

def __repr__( self ):
    return _ustr(self)

def streamline( self ):
    self.streamlined = True
    self.strRepr = None
    return self

def checkRecursion( self, parseElementList ):
    pass

def validate( self, validateTrace=None ):
    """
    Check defined expressions for valid structure, check for infinite recursive
    definitions.

    (bugfix: the default for C{validateTrace} was a mutable C{[]}; use C{None}
    instead - the argument is not referenced in the body, so behavior is
    unchanged for all callers.)
    """
    self.checkRecursion( [] )

def parseFile( self, file_or_filename, parseAll=False ):
    """
    Execute the parse expression on the given file or filename.
    If a filename is specified (instead of a file object),
    the entire file is opened, read, and closed before parsing.
    """
    try:
        file_contents = file_or_filename.read()
    except AttributeError:
        # not a file-like object - treat it as a path
        with open(file_or_filename, "r") as f:
            file_contents = f.read()
    try:
        return self.parseString(file_contents, parseAll)
    except ParseBaseException as exc:
        if ParserElement.verbose_stacktrace:
            raise
        else:
            # re-raise from here to hide pyparsing's internal stack frames
            raise exc
def __eq__(self,other):
    if isinstance(other, ParserElement):
        return self is other or vars(self) == vars(other)
    elif isinstance(other, basestring):
        # comparing against a string tests whether this expression matches it
        return self.matches(other)
    else:
        # NOTE(review): this compares the super() proxy object itself with
        # other, which falls back to identity - preserved as-is; verify intent
        return super(ParserElement,self)==other

def __ne__(self,other):
    return not (self == other)

def __hash__(self):
    return hash(id(self))

def __req__(self,other):
    return self == other

def __rne__(self,other):
    return not (self == other)

def matches(self, testString, parseAll=True):
    """
    Method for quick testing of a parser against a test string.  Good for
    simple inline microtests of sub expressions while building up larger parser.

    Parameters:
     - testString - to test against this expression for a match
     - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests

    Example::
        expr = Word(nums)
        assert expr.matches("100")
    """
    try:
        self.parseString(_ustr(testString), parseAll=parseAll)
        return True
    except ParseBaseException:
        return False

def runTests(self, tests, parseAll=True, comment='#', fullDump=True, printResults=True, failureTests=False):
    """
    Execute the parse expression on a series of test strings, showing each
    test, the parsed results or where the parse failed.  Quick and easy way to
    run a parse expression against a list of sample strings.

    Parameters:
     - tests - a list of separate test strings, or a multiline string of test strings
     - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests
     - comment - (default=C{'#'}) - expression for indicating embedded comments in the test
       string; pass None to disable comment filtering
     - fullDump - (default=C{True}) - dump results as list followed by results names in nested outline;
       if False, only dump nested list
     - printResults - (default=C{True}) prints test output to stdout
     - failureTests - (default=C{False}) indicates if these tests are expected to fail parsing

    Returns: a (success, results) tuple, where success indicates that all tests
    succeeded (or failed if C{failureTests} is True), and the results contain a
    list of lines of each test's output.

    Each test string must be on a single line.  To test a string that spans
    multiple lines, embed literal C{\\n} escapes and pass a raw string, e.g.::

        expr.runTest(r"this is a test\\n of strings that spans \\n 3 lines")
    """
    if isinstance(tests, basestring):
        tests = list(map(str.strip, tests.rstrip().splitlines()))
    if isinstance(comment, basestring):
        comment = Literal(comment)
    allResults = []
    comments = []
    success = True
    for t in tests:
        # comment lines (and blank lines inside a comment run) are buffered and
        # emitted as a header for the next real test line
        if comment is not None and comment.matches(t, False) or comments and not t:
            comments.append(t)
            continue
        if not t:
            continue
        out = ['\n'.join(comments), t]
        comments = []
        try:
            # turn embedded literal \n escapes into real newlines before parsing
            t = t.replace(r'\n','\n')
            result = self.parseString(t, parseAll=parseAll)
            out.append(result.dump(full=fullDump))
            success = success and not failureTests
        except ParseBaseException as pe:
            fatal = "(FATAL)" if isinstance(pe, ParseFatalException) else ""
            if '\n' in t:
                out.append(line(pe.loc, t))
                out.append(' '*(col(pe.loc,t)-1) + '^' + fatal)
            else:
                out.append(' '*pe.loc + '^' + fatal)
            out.append("FAIL: " + str(pe))
            success = success and failureTests
            result = pe
        except Exception as exc:
            out.append("FAIL-EXCEPTION: " + str(exc))
            success = success and failureTests
            result = exc

        if printResults:
            if fullDump:
                out.append('')
            print('\n'.join(out))

        allResults.append((t, result))

    return success, allResults

class Token(ParserElement):
    """Abstract C{ParserElement} subclass, for defining atomic matching patterns."""
    def __init__( self ):
        super(Token,self).__init__( savelist=False )

class Empty(Token):
    """An empty token, will always match."""
    def __init__( self ):
        super(Empty,self).__init__()
        self.name = "Empty"
        self.mayReturnEmpty = True
        self.mayIndexError = False

class NoMatch(Token):
    """A token that will never match."""
    def __init__( self ):
        super(NoMatch,self).__init__()
        self.name = "NoMatch"
        self.mayReturnEmpty = True
        self.mayIndexError = False
        self.errmsg = "Unmatchable token"

    def parseImpl( self, instring, loc, doActions=True ):
        raise ParseException(instring, loc, self.errmsg, self)
class Literal(Token):
    """
    Token to exactly match a specified string.

    Example::
        Literal('blah').parseString('blah')  # -> ['blah']
        Literal('blah').parseString('blahfooblah')  # -> ['blah']
        Literal('blah').parseString('bla')  # -> Exception: Expected "blah"

    For case-insensitive matching, use L{CaselessLiteral}.
    For keyword matching (force word break before and after the matched string),
    use L{Keyword} or L{CaselessKeyword}.
    """
    def __init__( self, matchString ):
        super(Literal,self).__init__()
        self.match = matchString
        self.matchLen = len(matchString)
        try:
            self.firstMatchChar = matchString[0]
        except IndexError:
            warnings.warn("null string passed to Literal; use Empty() instead",
                            SyntaxWarning, stacklevel=2)
            # degrade an empty literal into an always-matching Empty token
            self.__class__ = Empty
        self.name = '"%s"' % _ustr(self.match)
        self.errmsg = "Expected " + self.name
        self.mayReturnEmpty = False
        self.mayIndexError = False

    # Performance tuning: this routine gets called a *lot*
    # if this is a single character match string and the first character matches,
    # short-circuit as quickly as possible, and avoid calling startswith
    #~ @profile
    def parseImpl( self, instring, loc, doActions=True ):
        if (instring[loc] == self.firstMatchChar and
            (self.matchLen==1 or instring.startswith(self.match,loc)) ):
            return loc+self.matchLen, self.match
        raise ParseException(instring, loc, self.errmsg, self)
_L = Literal
ParserElement._literalStringClass = Literal

class Keyword(Token):
    """
    Token to exactly match a specified string as a keyword, that is, it must be
    immediately followed by a non-keyword character.  Compare with C{L{Literal}}:
     - C{Literal("if")} will match the leading C{'if'} in C{'ifAndOnlyIf'}.
     - C{Keyword("if")} will not; it will only match the leading C{'if'} in
       C{'if x=1'}, or C{'if(y==2)'}

    Accepts two optional constructor arguments in addition to the keyword string:
     - C{identChars} is a string of characters that would be valid identifier
       characters, defaulting to all alphanumerics + "_" and "$"
     - C{caseless} allows case-insensitive matching, default is C{False}.

    Example::
        Keyword("start").parseString("start")  # -> ['start']
        Keyword("start").parseString("starting")  # -> Exception

    For case-insensitive matching, use L{CaselessKeyword}.
    """
    DEFAULT_KEYWORD_CHARS = alphanums+"_$"

    def __init__( self, matchString, identChars=None, caseless=False ):
        super(Keyword,self).__init__()
        if identChars is None:
            identChars = Keyword.DEFAULT_KEYWORD_CHARS
        self.match = matchString
        self.matchLen = len(matchString)
        try:
            self.firstMatchChar = matchString[0]
        except IndexError:
            warnings.warn("null string passed to Keyword; use Empty() instead",
                            SyntaxWarning, stacklevel=2)
        self.name = '"%s"' % self.match
        self.errmsg = "Expected " + self.name
        self.mayReturnEmpty = False
        self.mayIndexError = False
        self.caseless = caseless
        if caseless:
            # pre-uppercase the match and ident chars so parseImpl compares cheaply
            self.caselessmatch = matchString.upper()
            identChars = identChars.upper()
        self.identChars = set(identChars)

    def parseImpl( self, instring, loc, doActions=True ):
        if self.caseless:
            if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and
                 (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) and
                 (loc == 0 or instring[loc-1].upper() not in self.identChars) ):
                return loc+self.matchLen, self.match
        else:
            if (instring[loc] == self.firstMatchChar and
                (self.matchLen==1 or instring.startswith(self.match,loc)) and
                (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen] not in self.identChars) and
                (loc == 0 or instring[loc-1] not in self.identChars) ):
                return loc+self.matchLen, self.match
        raise ParseException(instring, loc, self.errmsg, self)

    def copy(self):
        c = super(Keyword,self).copy()
        c.identChars = Keyword.DEFAULT_KEYWORD_CHARS
        return c

    @staticmethod
    def setDefaultKeywordChars( chars ):
        """Overrides the default Keyword chars"""
        Keyword.DEFAULT_KEYWORD_CHARS = chars

class CaselessLiteral(Literal):
    """
    Token to match a specified string, ignoring case of letters.
    Note: the matched results will always be in the case of the given
    match string, NOT the case of the input text.

    Example::
        OneOrMore(CaselessLiteral("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD', 'CMD']

    (Contrast with example for L{CaselessKeyword}.)
    """
    def __init__( self, matchString ):
        super(CaselessLiteral,self).__init__( matchString.upper() )
        # Preserve the defining literal.
        self.returnString = matchString
        self.name = "'%s'" % self.returnString
        self.errmsg = "Expected " + self.name

    def parseImpl( self, instring, loc, doActions=True ):
        if instring[ loc:loc+self.matchLen ].upper() == self.match:
            return loc+self.matchLen, self.returnString
        raise ParseException(instring, loc, self.errmsg, self)

class CaselessKeyword(Keyword):
    """
    Caseless version of L{Keyword}.

    Example::
        OneOrMore(CaselessKeyword("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD']

    (Contrast with example for L{CaselessLiteral}.)
    """
    def __init__( self, matchString, identChars=None ):
        super(CaselessKeyword,self).__init__( matchString, identChars, caseless=True )

    def parseImpl( self, instring, loc, doActions=True ):
        if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and
             (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) ):
            return loc+self.matchLen, self.match
        raise ParseException(instring, loc, self.errmsg, self)
class CloseMatch(Token):
    """
    A variation on L{Literal} which matches "close" matches, that is,
    strings with at most 'n' mismatching characters.  C{CloseMatch} takes parameters:
     - C{match_string} - string to be matched
     - C{maxMismatches} - (C{default=1}) maximum number of mismatches allowed to count as a match

    The results from a successful parse will contain the matched text from the
    input string and the following named results:
     - C{mismatches} - a list of the positions within the match_string where mismatches were found
     - C{original} - the original match_string used to compare against the input string

    If C{mismatches} is an empty list, then the match was an exact match.

    Example::
        patt = CloseMatch("ATCATCGAATGGA")
        patt.parseString("ATCATCGAAXGGA") # -> (['ATCATCGAAXGGA'], {'mismatches': [[9]], 'original': ['ATCATCGAATGGA']})
        patt.parseString("ATCAXCGAAXGGA") # -> Exception: Expected 'ATCATCGAATGGA' (with up to 1 mismatches) (at char 0), (line:1, col:1)

        # exact match
        patt.parseString("ATCATCGAATGGA") # -> (['ATCATCGAATGGA'], {'mismatches': [[]], 'original': ['ATCATCGAATGGA']})

        # close match allowing up to 2 mismatches
        patt = CloseMatch("ATCATCGAATGGA", maxMismatches=2)
        patt.parseString("ATCAXCGAAXGGA") # -> (['ATCAXCGAAXGGA'], {'mismatches': [[4, 9]], 'original': ['ATCATCGAATGGA']})
    """
    def __init__(self, match_string, maxMismatches=1):
        super(CloseMatch,self).__init__()
        self.name = match_string
        self.match_string = match_string
        self.maxMismatches = maxMismatches
        self.errmsg = "Expected %r (with up to %d mismatches)" % (self.match_string, self.maxMismatches)
        self.mayIndexError = False
        self.mayReturnEmpty = False

    def parseImpl( self, instring, loc, doActions=True ):
        start = loc
        instrlen = len(instring)
        maxloc = start + len(self.match_string)
        if maxloc <= instrlen:
            match_stringloc = 0
            mismatches = []
            maxMismatches = self.maxMismatches
            # walk the two strings in lockstep, collecting mismatch positions;
            # the for/else only succeeds when the mismatch budget is not exceeded
            for match_stringloc, pair in enumerate(zip(instring[loc:maxloc], self.match_string)):
                src, mat = pair
                if src != mat:
                    mismatches.append(match_stringloc)
                    if len(mismatches) > maxMismatches:
                        break
            else:
                loc = match_stringloc + 1
                results = ParseResults([instring[start:loc]])
                results['original'] = self.match_string
                results['mismatches'] = mismatches
                return loc, results
        raise ParseException(instring, loc, self.errmsg, self)
class Word(Token):
    """
    Token for matching words composed of allowed character sets.

    Defined with string containing all allowed initial characters, an optional
    string containing allowed body characters (if omitted, defaults to the
    initial character set), and an optional minimum, maximum, and/or exact
    length.  The default value for C{min} is 1 (a minimum value < 1 is not
    valid); the default values for C{max} and C{exact} are 0, meaning no
    maximum or exact length restriction.  An optional C{excludeChars} parameter
    can list characters that might be found in the input C{bodyChars} string;
    useful to define a word of all printables except for one or two characters,
    for instance.

    L{srange} is useful for defining custom character set strings using range
    notation from regular expression character sets.

    A common mistake is to use C{Word} to match a specific literal string, as
    in C{Word("Address")}.  Remember that C{Word} uses the string argument to
    define I{sets} of matchable characters - to match an exact literal string,
    use L{Literal} or L{Keyword}.

    pyparsing includes helper strings for building Words:
     - L{alphas}, L{nums}, L{alphanums}, L{hexnums}
     - L{alphas8bit} (alphabetic characters in ASCII range 128-255)
     - L{punc8bit} (non-alphabetic characters in ASCII range 128-255)
     - L{printables} (any non-whitespace character)

    Example::
        # a word composed of digits
        integer = Word(nums) # equivalent to Word("0123456789") or Word(srange("0-9"))

        # a word with a leading capital, and zero or more lowercase
        capital_word = Word(alphas.upper(), alphas.lower())

        # hostnames are alphanumeric, with leading alpha, and '-'
        hostname = Word(alphas, alphanums+'-')

        # any string of non-whitespace characters, except for ','
        csv_value = Word(printables, excludeChars=",")
    """
    def __init__( self, initChars, bodyChars=None, min=1, max=0, exact=0, asKeyword=False, excludeChars=None ):
        super(Word,self).__init__()
        if excludeChars:
            # remove excluded characters from both character sets up front
            initChars = ''.join(c for c in initChars if c not in excludeChars)
            if bodyChars:
                bodyChars = ''.join(c for c in bodyChars if c not in excludeChars)
        self.initCharsOrig = initChars
        self.initChars = set(initChars)
        if bodyChars :
            self.bodyCharsOrig = bodyChars
            self.bodyChars = set(bodyChars)
        else:
            # body defaults to the initial character set
            self.bodyCharsOrig = initChars
            self.bodyChars = set(initChars)

        self.maxSpecified = max > 0

        if min < 1:
            raise ValueError("cannot specify a minimum length < 1; use Optional(Word()) if zero-length word is permitted")

        self.minLen = min
        self.maxLen = max if max > 0 else _MAX_INT
        if exact > 0:
            self.maxLen = exact
            self.minLen = exact

        self.name = _ustr(self)
        self.errmsg = "Expected " + self.name
        self.mayIndexError = False
        self.asKeyword = asKeyword

        # fast path: with no length constraints and no space characters, the
        # whole word can be matched with a single compiled regular expression
        if ' ' not in self.initCharsOrig+self.bodyCharsOrig and (min==1 and max==0 and exact==0):
            if self.bodyCharsOrig == self.initCharsOrig:
                self.reString = "[%s]+" % _escapeRegexRangeChars(self.initCharsOrig)
            elif len(self.initCharsOrig) == 1:
                self.reString = "%s[%s]*" % \
                                (re.escape(self.initCharsOrig),
                                _escapeRegexRangeChars(self.bodyCharsOrig),)
            else:
                self.reString = "[%s][%s]*" % \
                                (_escapeRegexRangeChars(self.initCharsOrig),
                                _escapeRegexRangeChars(self.bodyCharsOrig),)
            if self.asKeyword:
                self.reString = r"\b"+self.reString+r"\b"
            try:
                self.re = re.compile( self.reString )
            except Exception:
                self.re = None

    def parseImpl( self, instring, loc, doActions=True ):
        if self.re:
            # regex fast path
            result = self.re.match(instring,loc)
            if not result:
                raise ParseException(instring, loc, self.errmsg, self)
            loc = result.end()
            return loc, result.group()

        if not(instring[ loc ] in self.initChars):
            raise ParseException(instring, loc, self.errmsg, self)

        start = loc
        loc += 1
        instrlen = len(instring)
        bodychars = self.bodyChars
        maxloc = min( start + self.maxLen, instrlen )
        while loc < maxloc and instring[loc] in bodychars:
            loc += 1

        throwException = False
        if loc - start < self.minLen:
            throwException = True
        if self.maxSpecified and loc < instrlen and instring[loc] in bodychars:
            # more body characters follow than the specified maximum allows
            throwException = True
        if self.asKeyword:
            # must not be embedded inside a longer run of body characters
            if (start>0 and instring[start-1] in bodychars) or (loc<instrlen and instring[loc] in bodychars):
                throwException = True

        if throwException:
            raise ParseException(instring, loc, self.errmsg, self)

        return loc, instring[start:loc]

    def __str__( self ):
        try:
            return super(Word,self).__str__()
        except Exception:
            pass

        if self.strRepr is None:

            def charsAsStr(s):
                # abbreviate long character sets in the repr
                if len(s)>4:
                    return s[:4]+"..."
                else:
                    return s

            if ( self.initCharsOrig != self.bodyCharsOrig ):
                self.strRepr = "W:(%s,%s)" % ( charsAsStr(self.initCharsOrig), charsAsStr(self.bodyCharsOrig) )
            else:
                self.strRepr = "W:(%s)" % charsAsStr(self.initCharsOrig)

        return self.strRepr
class Regex(Token):
    r"""
    Token for matching strings that match a given regular expression.  Defined
    with string specifying the regular expression in a form recognized by the
    inbuilt Python re module.  If the given regex contains named groups (defined
    using C{(?P<name>...)}), these will be preserved as named parse results.

    Example::
        realnum = Regex(r"[+-]?\d+\.\d*")
        date = Regex(r'(?P<year>\d{4})-(?P<month>\d\d?)-(?P<day>\d\d?)')
        # ref: http://stackoverflow.com/questions/267399/how-do-you-match-only-valid-roman-numerals-with-a-regular-expression
        roman = Regex(r"M{0,4}(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})")
    """
    compiledREtype = type(re.compile("[A-Z]"))

    def __init__( self, pattern, flags=0):
        """The parameters C{pattern} and C{flags} are passed to the C{re.compile()} function as-is. See the Python C{re} module for an explanation of the acceptable patterns and flags."""
        super(Regex,self).__init__()

        if isinstance(pattern, basestring):
            if not pattern:
                warnings.warn("null string passed to Regex; use Empty() instead",
                        SyntaxWarning, stacklevel=2)

            self.pattern = pattern
            self.flags = flags

            try:
                self.re = re.compile(self.pattern, self.flags)
                self.reString = self.pattern
            except sre_constants.error:
                warnings.warn("invalid pattern (%s) passed to Regex" % pattern,
                    SyntaxWarning, stacklevel=2)
                raise

        elif isinstance(pattern, Regex.compiledREtype):
            # accept an already-compiled regular expression object
            self.re = pattern
            self.pattern = \
                self.reString = str(pattern)
            self.flags = flags

        else:
            raise ValueError("Regex may only be constructed with a string or a compiled RE object")

        self.name = _ustr(self)
        self.errmsg = "Expected " + self.name
        self.mayIndexError = False
        self.mayReturnEmpty = True

    def parseImpl( self, instring, loc, doActions=True ):
        result = self.re.match(instring,loc)
        if not result:
            raise ParseException(instring, loc, self.errmsg, self)

        loc = result.end()
        d = result.groupdict()
        ret = ParseResults(result.group())
        if d:
            # expose named groups as named parse results
            for k in d:
                ret[k] = d[k]
        return loc,ret

    def __str__( self ):
        try:
            return super(Regex,self).__str__()
        except Exception:
            pass

        if self.strRepr is None:
            self.strRepr = "Re:(%s)" % repr(self.pattern)

        return self.strRepr
the following parameters:- quoteChar - string of one or more characters defining the quote delimiting string- escChar - character to escape quotes, typically backslash (default=C{None})- escQuote - special quote sequence to escape an embedded quote string (such as SQL's "" to escape an embedded ") (default=C{None})- multiline - boolean indicating whether quotes can span multiple lines (default=C{False})- unquoteResults - boolean indicating whether the matched text should be unquoted (default=C{True})- endQuoteChar - string of one or more characters defining the end of the quote delimited string (default=C{None} => same as quoteChar)- convertWhitespaceEscapes - convert escaped whitespace (C{'\t'}, C{'\n'}, etc.) to actual whitespace (default=C{True})Example::qs = QuotedString('"')print(qs.searchString('lsjdf "This is the quote" sldjf'))complex_qs = QuotedString('{{', endQuoteChar='}}')print(complex_qs.searchString('lsjdf {{This is the "quote"}} sldjf'))sql_qs = QuotedString('"', escQuote='""')print(sql_qs.searchString('lsjdf "This is the quote with ""embedded"" quotes" sldjf'))prints::[['This is the quote']][['This is the "quote"']][['This is the quote with "embedded" quotes']]"""def __init__( self, quoteChar, escChar=None, escQuote=None, multiline=False, unquoteResults=True, endQuoteChar=None, convertWhitespaceEscapes=True):super(QuotedString,self).__init__()# remove white space from quote chars - wont work anywayquoteChar = quoteChar.strip()if not quoteChar:warnings.warn("quoteChar cannot be the empty string",SyntaxWarning,stacklevel=2)raise SyntaxError()if endQuoteChar is None:endQuoteChar = quoteCharelse:endQuoteChar = endQuoteChar.strip()if not endQuoteChar:warnings.warn("endQuoteChar cannot be the empty string",SyntaxWarning,stacklevel=2)raise SyntaxError()self.quoteChar = quoteCharself.quoteCharLen = len(quoteChar)self.firstQuoteChar = quoteChar[0]self.endQuoteChar = endQuoteCharself.endQuoteCharLen = len(endQuoteChar)self.escChar = escCharself.escQuote = 
escQuoteself.unquoteResults = unquoteResultsself.convertWhitespaceEscapes = convertWhitespaceEscapesif multiline:self.flags = re.MULTILINE | re.DOTALLself.pattern = r'%s(?:[^%s%s]' % \( re.escape(self.quoteChar),_escapeRegexRangeChars(self.endQuoteChar[0]),(escChar is not None and _escapeRegexRangeChars(escChar) or '') )else:self.flags = 0self.pattern = r'%s(?:[^%s\n\r%s]' % \( re.escape(self.quoteChar),_escapeRegexRangeChars(self.endQuoteChar[0]),(escChar is not None and _escapeRegexRangeChars(escChar) or '') )if len(self.endQuoteChar) > 1:self.pattern += ('|(?:' + ')|(?:'.join("%s[^%s]" % (re.escape(self.endQuoteChar[:i]),_escapeRegexRangeChars(self.endQuoteChar[i]))for i in range(len(self.endQuoteChar)-1,0,-1)) + ')')if escQuote:self.pattern += (r'|(?:%s)' % re.escape(escQuote))if escChar:self.pattern += (r'|(?:%s.)' % re.escape(escChar))self.escCharReplacePattern = re.escape(self.escChar)+"(.)"self.pattern += (r')*%s' % re.escape(self.endQuoteChar))try:self.re = re.compile(self.pattern, self.flags)self.reString = self.patternexcept sre_constants.error:warnings.warn("invalid pattern (%s) passed to Regex" % self.pattern,SyntaxWarning, stacklevel=2)raiseself.name = _ustr(self)self.errmsg = "Expected " + self.nameself.mayIndexError = Falseself.mayReturnEmpty = Truedef parseImpl( self, instring, loc, doActions=True ):result = instring[loc] == self.firstQuoteChar and self.re.match(instring,loc) or Noneif not result:raise ParseException(instring, loc, self.errmsg, self)loc = result.end()ret = result.group()if self.unquoteResults:# strip off quotesret = ret[self.quoteCharLen:-self.endQuoteCharLen]if isinstance(ret,basestring):# replace escaped whitespaceif '\\' in ret and self.convertWhitespaceEscapes:ws_map = {r'\t' : '\t',r'\n' : '\n',r'\f' : '\f',r'\r' : '\r',}for wslit,wschar in ws_map.items():ret = ret.replace(wslit, wschar)# replace escaped charactersif self.escChar:ret = re.sub(self.escCharReplacePattern,"\g<1>",ret)# replace escaped quotesif self.escQuote:ret = 
ret.replace(self.escQuote, self.endQuoteChar)return loc, retdef __str__( self ):try:return super(QuotedString,self).__str__()except Exception:passif self.strRepr is None:self.strRepr = "quoted string, starting with %s ending with %s" % (self.quoteChar, self.endQuoteChar)return self.strReprclass CharsNotIn(Token):"""Token for matching words composed of characters I{not} in a given set (willinclude whitespace in matched characters if not listed in the provided exclusion set - see example).Defined with string containing all disallowed characters, and an optionalminimum, maximum, and/or exact length. The default value for C{min} is 1 (aminimum value < 1 is not valid); the default values for C{max} and C{exact}are 0, meaning no maximum or exact length restriction.Example::# define a comma-separated-value as anything that is not a ','csv_value = CharsNotIn(',')print(delimitedList(csv_value).parseString("dkls,lsdkjf,s12 34,@!#,213"))prints::['dkls', 'lsdkjf', 's12 34', '@!#', '213']"""def __init__( self, notChars, min=1, max=0, exact=0 ):super(CharsNotIn,self).__init__()self.skipWhitespace = Falseself.notChars = notCharsif min < 1:raise ValueError("cannot specify a minimum length < 1; use Optional(CharsNotIn()) if zero-length char group is permitted")self.minLen = minif max > 0:self.maxLen = maxelse:self.maxLen = _MAX_INTif exact > 0:self.maxLen = exactself.minLen = exactself.name = _ustr(self)self.errmsg = "Expected " + self.nameself.mayReturnEmpty = ( self.minLen == 0 )self.mayIndexError = Falsedef parseImpl( self, instring, loc, doActions=True ):if instring[loc] in self.notChars:raise ParseException(instring, loc, self.errmsg, self)start = locloc += 1notchars = self.notCharsmaxlen = min( start+self.maxLen, len(instring) )while loc < maxlen and \(instring[loc] not in notchars):loc += 1if loc - start < self.minLen:raise ParseException(instring, loc, self.errmsg, self)return loc, instring[start:loc]def __str__( self ):try:return super(CharsNotIn, self).__str__()except 
class CharsNotIn(Token):
    """Token for matching words composed of characters I{not} in a given set
    (will include whitespace in matched characters if not listed in the
    provided exclusion set - see example).

    Defined with a string containing all disallowed characters, and an
    optional minimum, maximum, and/or exact length.  The default value for
    C{min} is 1 (a minimum value < 1 is not valid); the default values for
    C{max} and C{exact} are 0, meaning no maximum or exact length restriction.
    """
    def __init__( self, notChars, min=1, max=0, exact=0 ):
        super(CharsNotIn,self).__init__()
        self.skipWhitespace = False
        self.notChars = notChars

        if min < 1:
            raise ValueError("cannot specify a minimum length < 1; use Optional(CharsNotIn()) if zero-length char group is permitted")

        self.minLen = min
        self.maxLen = max if max > 0 else _MAX_INT
        if exact > 0:
            # an exact length pins both bounds
            self.maxLen = exact
            self.minLen = exact

        self.name = _ustr(self)
        self.errmsg = "Expected " + self.name
        self.mayReturnEmpty = ( self.minLen == 0 )
        self.mayIndexError = False

    def parseImpl( self, instring, loc, doActions=True ):
        if instring[loc] in self.notChars:
            raise ParseException(instring, loc, self.errmsg, self)

        begin = loc
        loc += 1
        excluded = self.notChars
        stop = min( begin + self.maxLen, len(instring) )
        while loc < stop and instring[loc] not in excluded:
            loc += 1

        if loc - begin < self.minLen:
            raise ParseException(instring, loc, self.errmsg, self)

        return loc, instring[begin:loc]

    def __str__( self ):
        try:
            return super(CharsNotIn, self).__str__()
        except Exception:
            pass

        if self.strRepr is None:
            # abbreviate long exclusion sets in the repr
            if len(self.notChars) > 4:
                self.strRepr = "!W:(%s...)" % self.notChars[:4]
            else:
                self.strRepr = "!W:(%s)" % self.notChars

        return self.strRepr

class White(Token):
    """Special matching class for matching whitespace.  Normally, whitespace
    is ignored by pyparsing grammars.  This class is included when some
    whitespace structures are significant.  Define with a string containing
    the whitespace characters to be matched; default is C{" \\t\\r\\n"}.
    Also takes optional C{min}, C{max}, and C{exact} arguments, as defined
    for the C{L{Word}} class.
    """
    whiteStrs = {
        " " : "<SPC>",
        "\t": "<TAB>",
        "\n": "<LF>",
        "\r": "<CR>",
        "\f": "<FF>",
        }
    def __init__(self, ws=" \t\r\n", min=1, max=0, exact=0):
        super(White,self).__init__()
        self.matchWhite = ws
        # anything being matched here must not also be skipped as whitespace
        self.setWhitespaceChars( "".join(c for c in self.whiteChars if c not in self.matchWhite) )
        #~ self.leaveWhitespace()
        self.name = ("".join(White.whiteStrs[c] for c in self.matchWhite))
        self.mayReturnEmpty = True
        self.errmsg = "Expected " + self.name

        self.minLen = min
        self.maxLen = max if max > 0 else _MAX_INT
        if exact > 0:
            self.maxLen = exact
            self.minLen = exact

    def parseImpl( self, instring, loc, doActions=True ):
        if instring[loc] not in self.matchWhite:
            raise ParseException(instring, loc, self.errmsg, self)
        begin = loc
        loc += 1
        limit = min( begin + self.maxLen, len(instring) )
        while loc < limit and instring[loc] in self.matchWhite:
            loc += 1

        if loc - begin < self.minLen:
            raise ParseException(instring, loc, self.errmsg, self)

        return loc, instring[begin:loc]


class _PositionToken(Token):
    # base for zero-width tokens that assert a position in the input
    def __init__( self ):
        super(_PositionToken,self).__init__()
        self.name = self.__class__.__name__
        self.mayReturnEmpty = True
        self.mayIndexError = False

class GoToColumn(_PositionToken):
    """Token to advance to a specific column of input text; useful for
    tabular report scraping."""
    def __init__( self, colno ):
        super(GoToColumn,self).__init__()
        self.col = colno

    def preParse( self, instring, loc ):
        if col(loc,instring) != self.col:
            instrlen = len(instring)
            if self.ignoreExprs:
                loc = self._skipIgnorables( instring, loc )
            # consume whitespace until the target column (or non-space) is hit
            while loc < instrlen and instring[loc].isspace() and col( loc, instring ) != self.col :
                loc += 1
        return loc

    def parseImpl( self, instring, loc, doActions=True ):
        thiscol = col( loc, instring )
        if thiscol > self.col:
            raise ParseException( instring, loc, "Text not in expected column", self )
        newloc = loc + self.col - thiscol
        ret = instring[ loc: newloc ]
        return newloc, ret

class LineStart(_PositionToken):
    """Matches if current position is at the beginning of a line within the
    parse string."""
    def __init__( self ):
        super(LineStart,self).__init__()
        self.errmsg = "Expected start of line"

    def parseImpl( self, instring, loc, doActions=True ):
        if col(loc, instring) == 1:
            return loc, []
        raise ParseException(instring, loc, self.errmsg, self)

class LineEnd(_PositionToken):
    """Matches if current position is at the end of a line within the parse
    string."""
    def __init__( self ):
        super(LineEnd,self).__init__()
        # newline must not be skipped as ordinary whitespace here
        self.setWhitespaceChars( ParserElement.DEFAULT_WHITE_CHARS.replace("\n","") )
        self.errmsg = "Expected end of line"

    def parseImpl( self, instring, loc, doActions=True ):
        if loc < len(instring):
            if instring[loc] == "\n":
                return loc+1, "\n"
            raise ParseException(instring, loc, self.errmsg, self)
        elif loc == len(instring):
            return loc+1, []
        else:
            raise ParseException(instring, loc, self.errmsg, self)
class StringStart(_PositionToken):
    """Matches if current position is at the beginning of the parse string."""
    def __init__( self ):
        super(StringStart,self).__init__()
        self.errmsg = "Expected start of text"

    def parseImpl( self, instring, loc, doActions=True ):
        if loc != 0:
            # see if entire string up to here is just whitespace and ignoreables
            if loc != self.preParse( instring, 0 ):
                raise ParseException(instring, loc, self.errmsg, self)
        return loc, []

class StringEnd(_PositionToken):
    """Matches if current position is at the end of the parse string."""
    def __init__( self ):
        super(StringEnd,self).__init__()
        self.errmsg = "Expected end of text"

    def parseImpl( self, instring, loc, doActions=True ):
        if loc < len(instring):
            raise ParseException(instring, loc, self.errmsg, self)
        elif loc == len(instring):
            return loc+1, []
        elif loc > len(instring):
            return loc, []
        else:
            raise ParseException(instring, loc, self.errmsg, self)

class WordStart(_PositionToken):
    """Matches if the current position is at the beginning of a Word, and is
    not preceded by any character in a given set of C{wordChars}
    (default=C{printables}).  To emulate the C{\b} behavior of regular
    expressions, use C{WordStart(alphanums)}.  C{WordStart} will also match
    at the beginning of the string being parsed, or at the beginning of a
    line.
    """
    def __init__(self, wordChars = printables):
        super(WordStart,self).__init__()
        self.wordChars = set(wordChars)
        self.errmsg = "Not at the start of a word"

    def parseImpl(self, instring, loc, doActions=True ):
        if loc != 0:
            # fail if the previous char is a word char, or the current one isn't
            if (instring[loc-1] in self.wordChars or
                    instring[loc] not in self.wordChars):
                raise ParseException(instring, loc, self.errmsg, self)
        return loc, []

class WordEnd(_PositionToken):
    """Matches if the current position is at the end of a Word, and is not
    followed by any character in a given set of C{wordChars}
    (default=C{printables}).  To emulate the C{\b} behavior of regular
    expressions, use C{WordEnd(alphanums)}.  C{WordEnd} will also match at
    the end of the string being parsed, or at the end of a line.
    """
    def __init__(self, wordChars = printables):
        super(WordEnd,self).__init__()
        self.wordChars = set(wordChars)
        self.skipWhitespace = False
        self.errmsg = "Not at the end of a word"

    def parseImpl(self, instring, loc, doActions=True ):
        instrlen = len(instring)
        if instrlen > 0 and loc < instrlen:
            # fail if the current char is a word char, or the previous one isn't
            if (instring[loc] in self.wordChars or
                    instring[loc-1] not in self.wordChars):
                raise ParseException(instring, loc, self.errmsg, self)
        return loc, []
class ParseExpression(ParserElement):
    """Abstract subclass of ParserElement, for combining and
    post-processing parsed tokens."""
    def __init__( self, exprs, savelist = False ):
        super(ParseExpression,self).__init__(savelist)
        if isinstance( exprs, _generatorType ):
            exprs = list(exprs)

        if isinstance( exprs, basestring ):
            self.exprs = [ ParserElement._literalStringClass( exprs ) ]
        # NOTE(review): collections.Iterable is deprecated in favor of
        # collections.abc.Iterable on Python 3.3+; kept as-is here since the
        # module still supports Python 2 (basestring) -- confirm before changing
        elif isinstance( exprs, collections.Iterable ):
            exprs = list(exprs)
            # if sequence of strings provided, wrap with Literal
            if all(isinstance(expr, basestring) for expr in exprs):
                exprs = map(ParserElement._literalStringClass, exprs)
            self.exprs = list(exprs)
        else:
            try:
                self.exprs = list( exprs )
            except TypeError:
                self.exprs = [ exprs ]
        self.callPreparse = False

    def __getitem__( self, i ):
        return self.exprs[i]

    def append( self, other ):
        self.exprs.append( other )
        self.strRepr = None
        return self

    def leaveWhitespace( self ):
        """Extends C{leaveWhitespace} defined in base class, and also invokes
        C{leaveWhitespace} on all contained expressions."""
        self.skipWhitespace = False
        self.exprs = [ e.copy() for e in self.exprs ]
        for e in self.exprs:
            e.leaveWhitespace()
        return self

    def ignore( self, other ):
        if isinstance( other, Suppress ):
            if other not in self.ignoreExprs:
                super( ParseExpression, self).ignore( other )
                for e in self.exprs:
                    e.ignore( self.ignoreExprs[-1] )
        else:
            super( ParseExpression, self).ignore( other )
            for e in self.exprs:
                e.ignore( self.ignoreExprs[-1] )
        return self

    def __str__( self ):
        try:
            return super(ParseExpression,self).__str__()
        except Exception:
            pass

        if self.strRepr is None:
            self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.exprs) )
        return self.strRepr

    def streamline( self ):
        super(ParseExpression,self).streamline()

        for e in self.exprs:
            e.streamline()

        # collapse nested And's of the form And( And( And( a,b), c), d) to And( a,b,c,d )
        # but only if there are no parse actions or resultsNames on the nested And's
        # (likewise for Or's and MatchFirst's)
        if len(self.exprs) == 2:
            other = self.exprs[0]
            if ( isinstance( other, self.__class__ ) and
                    not(other.parseAction) and
                    other.resultsName is None and
                    not other.debug ):
                self.exprs = other.exprs[:] + [ self.exprs[1] ]
                self.strRepr = None
                self.mayReturnEmpty |= other.mayReturnEmpty
                self.mayIndexError  |= other.mayIndexError

            other = self.exprs[-1]
            if ( isinstance( other, self.__class__ ) and
                    not(other.parseAction) and
                    other.resultsName is None and
                    not other.debug ):
                self.exprs = self.exprs[:-1] + other.exprs[:]
                self.strRepr = None
                self.mayReturnEmpty |= other.mayReturnEmpty
                self.mayIndexError  |= other.mayIndexError

        self.errmsg = "Expected " + _ustr(self)

        return self

    def setResultsName( self, name, listAllMatches=False ):
        ret = super(ParseExpression,self).setResultsName(name,listAllMatches)
        return ret

    def validate( self, validateTrace=[] ):
        tmp = validateTrace[:]+[self]
        for e in self.exprs:
            e.validate(tmp)
        self.checkRecursion( [] )

    def copy(self):
        ret = super(ParseExpression,self).copy()
        ret.exprs = [e.copy() for e in self.exprs]
        return ret

class And(ParseExpression):
    """Requires all given C{ParseExpression}s to be found in the given order.
    Expressions may be separated by whitespace.
    May be constructed using the C{'+'} operator.
    May also be constructed using the C{'-'} operator, which will suppress
    backtracking.
    """

    class _ErrorStop(Empty):
        # sentinel expression inserted by the '-' operator; once passed,
        # failures raise ParseSyntaxException instead of backtracking
        def __init__(self, *args, **kwargs):
            super(And._ErrorStop,self).__init__(*args, **kwargs)
            self.name = '-'
            self.leaveWhitespace()

    def __init__( self, exprs, savelist = True ):
        super(And,self).__init__(exprs, savelist)
        self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs)
        self.setWhitespaceChars( self.exprs[0].whiteChars )
        self.skipWhitespace = self.exprs[0].skipWhitespace
        self.callPreparse = True

    def parseImpl( self, instring, loc, doActions=True ):
        # pass False as last arg to _parse for first element, since we already
        # pre-parsed the string as part of our And pre-parsing
        loc, resultlist = self.exprs[0]._parse( instring, loc, doActions, callPreParse=False )
        errorStop = False
        for e in self.exprs[1:]:
            if isinstance(e, And._ErrorStop):
                errorStop = True
                continue
            if errorStop:
                try:
                    loc, exprtokens = e._parse( instring, loc, doActions )
                except ParseSyntaxException:
                    raise
                except ParseBaseException as pe:
                    pe.__traceback__ = None
                    raise ParseSyntaxException._from_exception(pe)
                except IndexError:
                    raise ParseSyntaxException(instring, len(instring), self.errmsg, self)
            else:
                loc, exprtokens = e._parse( instring, loc, doActions )
            if exprtokens or exprtokens.haskeys():
                resultlist += exprtokens
        return loc, resultlist

    def __iadd__(self, other ):
        if isinstance( other, basestring ):
            other = ParserElement._literalStringClass( other )
        return self.append( other ) #And( [ self, other ] )

    def checkRecursion( self, parseElementList ):
        subRecCheckList = parseElementList[:] + [ self ]
        for e in self.exprs:
            e.checkRecursion( subRecCheckList )
            # once an element must consume input, recursion cannot continue
            if not e.mayReturnEmpty:
                break

    def __str__( self ):
        if hasattr(self,"name"):
            return self.name

        if self.strRepr is None:
            self.strRepr = "{" + " ".join(_ustr(e) for e in self.exprs) + "}"

        return self.strRepr
class Or(ParseExpression):
    """Requires that at least one C{ParseExpression} is found.  If two
    expressions match, the expression that matches the longest string will
    be used.  May be constructed using the C{'^'} operator.
    """
    def __init__( self, exprs, savelist = False ):
        super(Or,self).__init__(exprs, savelist)
        if self.exprs:
            self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs)
        else:
            self.mayReturnEmpty = True

    def parseImpl( self, instring, loc, doActions=True ):
        maxExcLoc = -1
        maxException = None
        matches = []
        for e in self.exprs:
            try:
                loc2 = e.tryParse( instring, loc )
            except ParseException as err:
                err.__traceback__ = None
                if err.loc > maxExcLoc:
                    maxException = err
                    maxExcLoc = err.loc
            except IndexError:
                if len(instring) > maxExcLoc:
                    maxException = ParseException(instring,len(instring),e.errmsg,self)
                    maxExcLoc = len(instring)
            else:
                # save match among all matches, to retry longest to shortest
                matches.append((loc2, e))

        if matches:
            matches.sort(key=lambda x: -x[0])
            for _,e in matches:
                try:
                    return e._parse( instring, loc, doActions )
                except ParseException as err:
                    err.__traceback__ = None
                    if err.loc > maxExcLoc:
                        maxException = err
                        maxExcLoc = err.loc

        if maxException is not None:
            maxException.msg = self.errmsg
            raise maxException
        else:
            raise ParseException(instring, loc, "no defined alternatives to match", self)

    def __ixor__(self, other ):
        if isinstance( other, basestring ):
            other = ParserElement._literalStringClass( other )
        return self.append( other ) #Or( [ self, other ] )

    def __str__( self ):
        if hasattr(self,"name"):
            return self.name

        if self.strRepr is None:
            self.strRepr = "{" + " ^ ".join(_ustr(e) for e in self.exprs) + "}"

        return self.strRepr

    def checkRecursion( self, parseElementList ):
        subRecCheckList = parseElementList[:] + [ self ]
        for e in self.exprs:
            e.checkRecursion( subRecCheckList )


class MatchFirst(ParseExpression):
    """Requires that at least one C{ParseExpression} is found.  If two
    expressions match, the first one listed is the one that will match.
    May be constructed using the C{'|'} operator.
    """
    def __init__( self, exprs, savelist = False ):
        super(MatchFirst,self).__init__(exprs, savelist)
        if self.exprs:
            self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs)
        else:
            self.mayReturnEmpty = True

    def parseImpl( self, instring, loc, doActions=True ):
        maxExcLoc = -1
        maxException = None
        for e in self.exprs:
            try:
                ret = e._parse( instring, loc, doActions )
                return ret
            except ParseException as err:
                if err.loc > maxExcLoc:
                    maxException = err
                    maxExcLoc = err.loc
            except IndexError:
                if len(instring) > maxExcLoc:
                    maxException = ParseException(instring,len(instring),e.errmsg,self)
                    maxExcLoc = len(instring)

        # only got here if no expression matched, raise exception for match that made it the furthest
        else:
            if maxException is not None:
                maxException.msg = self.errmsg
                raise maxException
            else:
                raise ParseException(instring, loc, "no defined alternatives to match", self)

    def __ior__(self, other ):
        if isinstance( other, basestring ):
            other = ParserElement._literalStringClass( other )
        return self.append( other ) #MatchFirst( [ self, other ] )

    def __str__( self ):
        if hasattr(self,"name"):
            return self.name

        if self.strRepr is None:
            self.strRepr = "{" + " | ".join(_ustr(e) for e in self.exprs) + "}"

        return self.strRepr

    def checkRecursion( self, parseElementList ):
        subRecCheckList = parseElementList[:] + [ self ]
        for e in self.exprs:
            e.checkRecursion( subRecCheckList )
class Each(ParseExpression):
    """Requires all given C{ParseExpression}s to be found, but in any order.
    Expressions may be separated by whitespace.
    May be constructed using the C{'&'} operator.
    """
    def __init__( self, exprs, savelist = True ):
        super(Each,self).__init__(exprs, savelist)
        self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs)
        self.skipWhitespace = True
        self.initExprGroups = True

    def parseImpl( self, instring, loc, doActions=True ):
        if self.initExprGroups:
            # lazily classify sub-expressions once, on first parse
            self.opt1map = dict((id(e.expr),e) for e in self.exprs if isinstance(e,Optional))
            opt1 = [ e.expr for e in self.exprs if isinstance(e,Optional) ]
            opt2 = [ e for e in self.exprs if e.mayReturnEmpty and not isinstance(e,Optional)]
            self.optionals = opt1 + opt2
            self.multioptionals = [ e.expr for e in self.exprs if isinstance(e,ZeroOrMore) ]
            self.multirequired = [ e.expr for e in self.exprs if isinstance(e,OneOrMore) ]
            self.required = [ e for e in self.exprs if not isinstance(e,(Optional,ZeroOrMore,OneOrMore)) ]
            self.required += self.multirequired
            self.initExprGroups = False
        tmpLoc = loc
        tmpReqd = self.required[:]
        tmpOpt  = self.optionals[:]
        matchOrder = []

        # keep trying the remaining expressions until a full pass matches nothing
        keepMatching = True
        while keepMatching:
            tmpExprs = tmpReqd + tmpOpt + self.multioptionals + self.multirequired
            failed = []
            for e in tmpExprs:
                try:
                    tmpLoc = e.tryParse( instring, tmpLoc )
                except ParseException:
                    failed.append(e)
                else:
                    matchOrder.append(self.opt1map.get(id(e),e))
                    if e in tmpReqd:
                        tmpReqd.remove(e)
                    elif e in tmpOpt:
                        tmpOpt.remove(e)
            if len(failed) == len(tmpExprs):
                keepMatching = False

        if tmpReqd:
            missing = ", ".join(_ustr(e) for e in tmpReqd)
            raise ParseException(instring,loc,"Missing one or more required elements (%s)" % missing )

        # add any unmatched Optionals, in case they have default values defined
        matchOrder += [e for e in self.exprs if isinstance(e,Optional) and e.expr in tmpOpt]

        resultlist = []
        for e in matchOrder:
            loc,results = e._parse(instring,loc,doActions)
            resultlist.append(results)

        finalResults = sum(resultlist, ParseResults([]))
        return loc, finalResults

    def __str__( self ):
        if hasattr(self,"name"):
            return self.name

        if self.strRepr is None:
            self.strRepr = "{" + " & ".join(_ustr(e) for e in self.exprs) + "}"

        return self.strRepr

    def checkRecursion( self, parseElementList ):
        subRecCheckList = parseElementList[:] + [ self ]
        for e in self.exprs:
            e.checkRecursion( subRecCheckList )
class ParseElementEnhance(ParserElement):
    """Abstract subclass of C{ParserElement}, for combining and
    post-processing parsed tokens."""
    def __init__( self, expr, savelist=False ):
        super(ParseElementEnhance,self).__init__(savelist)
        if isinstance( expr, basestring ):
            if issubclass(ParserElement._literalStringClass, Token):
                expr = ParserElement._literalStringClass(expr)
            else:
                expr = ParserElement._literalStringClass(Literal(expr))
        self.expr = expr
        self.strRepr = None
        if expr is not None:
            # inherit parsing characteristics from the wrapped expression
            self.mayIndexError = expr.mayIndexError
            self.mayReturnEmpty = expr.mayReturnEmpty
            self.setWhitespaceChars( expr.whiteChars )
            self.skipWhitespace = expr.skipWhitespace
            self.saveAsList = expr.saveAsList
            self.callPreparse = expr.callPreparse
            self.ignoreExprs.extend(expr.ignoreExprs)

    def parseImpl( self, instring, loc, doActions=True ):
        if self.expr is not None:
            return self.expr._parse( instring, loc, doActions, callPreParse=False )
        else:
            raise ParseException("",loc,self.errmsg,self)

    def leaveWhitespace( self ):
        self.skipWhitespace = False
        self.expr = self.expr.copy()
        if self.expr is not None:
            self.expr.leaveWhitespace()
        return self

    def ignore( self, other ):
        if isinstance( other, Suppress ):
            if other not in self.ignoreExprs:
                super( ParseElementEnhance, self).ignore( other )
                if self.expr is not None:
                    self.expr.ignore( self.ignoreExprs[-1] )
        else:
            super( ParseElementEnhance, self).ignore( other )
            if self.expr is not None:
                self.expr.ignore( self.ignoreExprs[-1] )
        return self

    def streamline( self ):
        super(ParseElementEnhance,self).streamline()
        if self.expr is not None:
            self.expr.streamline()
        return self

    def checkRecursion( self, parseElementList ):
        if self in parseElementList:
            raise RecursiveGrammarException( parseElementList+[self] )
        subRecCheckList = parseElementList[:] + [ self ]
        if self.expr is not None:
            self.expr.checkRecursion( subRecCheckList )

    def validate( self, validateTrace=[] ):
        tmp = validateTrace[:]+[self]
        if self.expr is not None:
            self.expr.validate(tmp)
        self.checkRecursion( [] )

    def __str__( self ):
        try:
            return super(ParseElementEnhance,self).__str__()
        except Exception:
            pass

        if self.strRepr is None and self.expr is not None:
            self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.expr) )
        return self.strRepr


class FollowedBy(ParseElementEnhance):
    """Lookahead matching of the given parse expression.  C{FollowedBy} does
    I{not} advance the parsing position within the input string, it only
    verifies that the specified parse expression matches at the current
    position.  C{FollowedBy} always returns a null token list.
    """
    def __init__( self, expr ):
        super(FollowedBy,self).__init__(expr)
        self.mayReturnEmpty = True

    def parseImpl( self, instring, loc, doActions=True ):
        # raise on failure; otherwise consume nothing and return no tokens
        self.expr.tryParse( instring, loc )
        return loc, []
class NotAny(ParseElementEnhance):
    """Lookahead to disallow matching with the given parse expression.
    C{NotAny} does I{not} advance the parsing position within the input
    string, it only verifies that the specified parse expression does
    I{not} match at the current position.  Also, C{NotAny} does I{not} skip
    over leading whitespace.  C{NotAny} always returns a null token list.
    May be constructed using the '~' operator.
    """
    def __init__( self, expr ):
        super(NotAny,self).__init__(expr)
        #~ self.leaveWhitespace()
        self.skipWhitespace = False  # do NOT use self.leaveWhitespace(), don't want to propagate to exprs
        self.mayReturnEmpty = True
        self.errmsg = "Found unwanted token, "+_ustr(self.expr)

    def parseImpl( self, instring, loc, doActions=True ):
        if self.expr.canParseNext(instring, loc):
            raise ParseException(instring, loc, self.errmsg, self)
        return loc, []

    def __str__( self ):
        if hasattr(self,"name"):
            return self.name

        if self.strRepr is None:
            self.strRepr = "~{" + _ustr(self.expr) + "}"

        return self.strRepr

class _MultipleMatch(ParseElementEnhance):
    # shared implementation for OneOrMore/ZeroOrMore, with optional stopOn sentinel
    def __init__( self, expr, stopOn=None):
        super(_MultipleMatch, self).__init__(expr)
        self.saveAsList = True
        ender = stopOn
        if isinstance(ender, basestring):
            ender = ParserElement._literalStringClass(ender)
        self.not_ender = ~ender if ender is not None else None

    def parseImpl( self, instring, loc, doActions=True ):
        # hoist bound methods out of the matching loop
        self_expr_parse = self.expr._parse
        self_skip_ignorables = self._skipIgnorables
        check_ender = self.not_ender is not None
        if check_ender:
            try_not_ender = self.not_ender.tryParse

        # must be at least one (but first see if we are the stopOn sentinel;
        # if so, fail)
        if check_ender:
            try_not_ender(instring, loc)
        loc, tokens = self_expr_parse( instring, loc, doActions, callPreParse=False )
        try:
            hasIgnoreExprs = (not not self.ignoreExprs)
            while 1:
                if check_ender:
                    try_not_ender(instring, loc)
                if hasIgnoreExprs:
                    preloc = self_skip_ignorables( instring, loc )
                else:
                    preloc = loc
                loc, tmptokens = self_expr_parse( instring, preloc, doActions )
                if tmptokens or tmptokens.haskeys():
                    tokens += tmptokens
        except (ParseException,IndexError):
            pass

        return loc, tokens

class OneOrMore(_MultipleMatch):
    """Repetition of one or more of the given expression.

    Parameters:
     - expr - expression that must match one or more times
     - stopOn - (default=C{None}) - expression for a terminating sentinel
       (only required if the sentinel would ordinarily match the repetition
       expression)
    """

    def __str__( self ):
        if hasattr(self,"name"):
            return self.name

        if self.strRepr is None:
            self.strRepr = "{" + _ustr(self.expr) + "}..."

        return self.strRepr

class ZeroOrMore(_MultipleMatch):
    """Optional repetition of zero or more of the given expression.

    Parameters:
     - expr - expression that must match zero or more times
     - stopOn - (default=C{None}) - expression for a terminating sentinel
       (only required if the sentinel would ordinarily match the repetition
       expression)
    """
    def __init__( self, expr, stopOn=None):
        super(ZeroOrMore,self).__init__(expr, stopOn=stopOn)
        self.mayReturnEmpty = True

    def parseImpl( self, instring, loc, doActions=True ):
        try:
            return super(ZeroOrMore, self).parseImpl(instring, loc, doActions)
        except (ParseException,IndexError):
            # zero matches is an acceptable outcome
            return loc, []

    def __str__( self ):
        if hasattr(self,"name"):
            return self.name

        if self.strRepr is None:
            self.strRepr = "[" + _ustr(self.expr) + "]..."

        return self.strRepr

class _NullToken(object):
    # falsy, empty-string sentinel used as Optional's "no default" marker
    def __bool__(self):
        return False
    __nonzero__ = __bool__
    def __str__(self):
        return ""

_optionalNotMatched = _NullToken()
class Optional(ParseElementEnhance):
    """Optional matching of the given expression.

    Parameters:
     - expr - expression that must match zero or more times
     - default (optional) - value to be returned if the optional expression
       is not found.
    """
    def __init__( self, expr, default=_optionalNotMatched ):
        super(Optional,self).__init__( expr, savelist=False )
        self.saveAsList = self.expr.saveAsList
        self.defaultValue = default
        self.mayReturnEmpty = True

    def parseImpl( self, instring, loc, doActions=True ):
        try:
            loc, tokens = self.expr._parse( instring, loc, doActions, callPreParse=False )
        except (ParseException,IndexError):
            if self.defaultValue is not _optionalNotMatched:
                # substitute the caller-supplied default, named if a resultsName is set
                if self.expr.resultsName:
                    tokens = ParseResults([ self.defaultValue ])
                    tokens[self.expr.resultsName] = self.defaultValue
                else:
                    tokens = [ self.defaultValue ]
            else:
                tokens = []
        return loc, tokens

    def __str__( self ):
        if hasattr(self,"name"):
            return self.name

        if self.strRepr is None:
            self.strRepr = "[" + _ustr(self.expr) + "]"

        return self.strRepr

class SkipTo(ParseElementEnhance):
    """Token for skipping over all undefined text until the matched
    expression is found.

    Parameters:
     - expr - target expression marking the end of the data to be skipped
     - include - (default=C{False}) if True, the target expression is also parsed
       (the skipped text and target expression are returned as a 2-element list).
     - ignore - (default=C{None}) used to define grammars (typically quoted strings and
       comments) that might contain false matches to the target expression
     - failOn - (default=C{None}) define expressions that are not allowed to be
       included in the skipped test; if found before the target expression is found,
       the SkipTo is not a match
    """
    def __init__( self, other, include=False, ignore=None, failOn=None ):
        super( SkipTo, self ).__init__( other )
        self.ignoreExpr = ignore
        self.mayReturnEmpty = True
        self.mayIndexError = False
        self.includeMatch = include
        self.asList = False
        if isinstance(failOn, basestring):
            self.failOn = ParserElement._literalStringClass(failOn)
        else:
            self.failOn = failOn
        self.errmsg = "No match found for "+_ustr(self.expr)

    def parseImpl( self, instring, loc, doActions=True ):
        startloc = loc
        instrlen = len(instring)
        expr = self.expr
        expr_parse = self.expr._parse
        # hoist optional-feature lookups out of the scan loop
        self_failOn_canParseNext = self.failOn.canParseNext if self.failOn is not None else None
        self_ignoreExpr_tryParse = self.ignoreExpr.tryParse if self.ignoreExpr is not None else None

        tmploc = loc
        while tmploc <= instrlen:
            if self_failOn_canParseNext is not None:
                # break if failOn expression matches
                if self_failOn_canParseNext(instring, tmploc):
                    break

            if self_ignoreExpr_tryParse is not None:
                # advance past ignore expressions
                while 1:
                    try:
                        tmploc = self_ignoreExpr_tryParse(instring, tmploc)
                    except ParseBaseException:
                        break

            try:
                expr_parse(instring, tmploc, doActions=False, callPreParse=False)
            except (ParseException, IndexError):
                # no match, advance loc in string
                tmploc += 1
            else:
                # matched skipto expr, done
                break

        else:
            # ran off the end of the input string without matching skipto expr, fail
            raise ParseException(instring, loc, self.errmsg, self)

        # build up return values
        loc = tmploc
        skiptext = instring[startloc:loc]
        skipresult = ParseResults(skiptext)

        if self.includeMatch:
            loc, mat = expr_parse(instring,loc,doActions,callPreParse=False)
            skipresult += mat

        return loc, skipresult
It is recommended that youexplicitly group the values inserted into the C{Forward}::fwdExpr << (a | b | c)Converting to use the '<<=' operator instead will avoid this problem.See L{ParseResults.pprint} for an example of a recursive parser created usingC{Forward}."""def __init__( self, other=None ):super(Forward,self).__init__( other, savelist=False )def __lshift__( self, other ):if isinstance( other, basestring ):other = ParserElement._literalStringClass(other)self.expr = otherself.strRepr = Noneself.mayIndexError = self.expr.mayIndexErrorself.mayReturnEmpty = self.expr.mayReturnEmptyself.setWhitespaceChars( self.expr.whiteChars )self.skipWhitespace = self.expr.skipWhitespaceself.saveAsList = self.expr.saveAsListself.ignoreExprs.extend(self.expr.ignoreExprs)return selfdef __ilshift__(self, other):return self << otherdef leaveWhitespace( self ):self.skipWhitespace = Falsereturn selfdef streamline( self ):if not self.streamlined:self.streamlined = Trueif self.expr is not None:self.expr.streamline()return selfdef validate( self, validateTrace=[] ):if self not in validateTrace:tmp = validateTrace[:]+[self]if self.expr is not None:self.expr.validate(tmp)self.checkRecursion([])def __str__( self ):if hasattr(self,"name"):return self.namereturn self.__class__.__name__ + ": ..."# stubbed out for now - creates awful memory and perf issuesself._revertClass = self.__class__self.__class__ = _ForwardNoRecursetry:if self.expr is not None:retString = _ustr(self.expr)else:retString = "None"finally:self.__class__ = self._revertClassreturn self.__class__.__name__ + ": " + retStringdef copy(self):if self.expr is not None:return super(Forward,self).copy()else:ret = Forward()ret <<= selfreturn retclass _ForwardNoRecurse(Forward):def __str__( self ):return "..."class TokenConverter(ParseElementEnhance):"""Abstract subclass of C{ParseExpression}, for converting parsed results."""def __init__( self, expr, savelist=False ):super(TokenConverter,self).__init__( expr )#, savelist 
)self.saveAsList = Falseclass Combine(TokenConverter):"""Converter to concatenate all matching tokens to a single string.By default, the matching patterns must also be contiguous in the input string;this can be disabled by specifying C{'adjacent=False'} in the constructor.Example::real = Word(nums) + '.' + Word(nums)print(real.parseString('3.1416')) # -> ['3', '.', '1416']# will also erroneously match the followingprint(real.parseString('3. 1416')) # -> ['3', '.', '1416']real = Combine(Word(nums) + '.' + Word(nums))print(real.parseString('3.1416')) # -> ['3.1416']# no match when there are internal spacesprint(real.parseString('3. 1416')) # -> Exception: Expected W:(0123...)"""def __init__( self, expr, joinString="", adjacent=True ):super(Combine,self).__init__( expr )# suppress whitespace-stripping in contained parse expressions, but re-enable it on the Combine itselfif adjacent:self.leaveWhitespace()self.adjacent = adjacentself.skipWhitespace = Trueself.joinString = joinStringself.callPreparse = Truedef ignore( self, other ):if self.adjacent:ParserElement.ignore(self, other)else:super( Combine, self).ignore( other )return selfdef postParse( self, instring, loc, tokenlist ):retToks = tokenlist.copy()del retToks[:]retToks += ParseResults([ "".join(tokenlist._asStringList(self.joinString)) ], modal=self.modalResults)if self.resultsName and retToks.haskeys():return [ retToks ]else:return retToksclass Group(TokenConverter):"""Converter to return the matched tokens as a list - useful for returning tokens of C{L{ZeroOrMore}} and C{L{OneOrMore}} expressions.Example::ident = Word(alphas)num = Word(nums)term = ident | numfunc = ident + Optional(delimitedList(term))print(func.parseString("fn a,b,100")) # -> ['fn', 'a', 'b', '100']func = ident + Group(Optional(delimitedList(term)))print(func.parseString("fn a,b,100")) # -> ['fn', ['a', 'b', '100']]"""def __init__( self, expr ):super(Group,self).__init__( expr )self.saveAsList = Truedef postParse( self, instring, loc, 
tokenlist ):return [ tokenlist ]class Dict(TokenConverter):"""Converter to return a repetitive expression as a list, but also as a dictionary.Each element can also be referenced using the first token in the expression as its key.Useful for tabular report scraping when the first column can be used as a item key.Example::data_word = Word(alphas)label = data_word + FollowedBy(':')attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join))text = "shape: SQUARE posn: upper left color: light blue texture: burlap"attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))# print attributes as plain groupsprint(OneOrMore(attr_expr).parseString(text).dump())# instead of OneOrMore(expr), parse using Dict(OneOrMore(Group(expr))) - Dict will auto-assign namesresult = Dict(OneOrMore(Group(attr_expr))).parseString(text)print(result.dump())# access named fields as dict entries, or output as dictprint(result['shape'])print(result.asDict())prints::['shape', 'SQUARE', 'posn', 'upper left', 'color', 'light blue', 'texture', 'burlap'][['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']]- color: light blue- posn: upper left- shape: SQUARE- texture: burlapSQUARE{'color': 'light blue', 'posn': 'upper left', 'texture': 'burlap', 'shape': 'SQUARE'}See more examples at L{ParseResults} of accessing fields by results name."""def __init__( self, expr ):super(Dict,self).__init__( expr )self.saveAsList = Truedef postParse( self, instring, loc, tokenlist ):for i,tok in enumerate(tokenlist):if len(tok) == 0:continueikey = tok[0]if isinstance(ikey,int):ikey = _ustr(tok[0]).strip()if len(tok)==1:tokenlist[ikey] = _ParseResultsWithOffset("",i)elif len(tok)==2 and not isinstance(tok[1],ParseResults):tokenlist[ikey] = _ParseResultsWithOffset(tok[1],i)else:dictvalue = tok.copy() #ParseResults(i)del dictvalue[0]if len(dictvalue)!= 1 or (isinstance(dictvalue,ParseResults) and 
dictvalue.haskeys()):tokenlist[ikey] = _ParseResultsWithOffset(dictvalue,i)else:tokenlist[ikey] = _ParseResultsWithOffset(dictvalue[0],i)if self.resultsName:return [ tokenlist ]else:return tokenlistclass Suppress(TokenConverter):"""Converter for ignoring the results of a parsed expression.Example::source = "a, b, c,d"wd = Word(alphas)wd_list1 = wd + ZeroOrMore(',' + wd)print(wd_list1.parseString(source))# often, delimiters that are useful during parsing are just in the# way afterward - use Suppress to keep them out of the parsed outputwd_list2 = wd + ZeroOrMore(Suppress(',') + wd)print(wd_list2.parseString(source))prints::['a', ',', 'b', ',', 'c', ',', 'd']['a', 'b', 'c', 'd'](See also L{delimitedList}.)"""def postParse( self, instring, loc, tokenlist ):return []def suppress( self ):return selfclass OnlyOnce(object):"""Wrapper for parse actions, to ensure they are only called once."""def __init__(self, methodCall):self.callable = _trim_arity(methodCall)self.called = Falsedef __call__(self,s,l,t):if not self.called:results = self.callable(s,l,t)self.called = Truereturn resultsraise ParseException(s,l,"")def reset(self):self.called = Falsedef traceParseAction(f):"""Decorator for debugging parse actions.When the parse action is called, this decorator will print C{">> entering I{method-name}(line:I{current_source_line}, I{parse_location}, I{matched_tokens})".}When the parse action completes, the decorator will print C{"<<"} followed by the returned value, or any exception that the parse action raised.Example::wd = Word(alphas)@traceParseActiondef remove_duplicate_chars(tokens):return ''.join(sorted(set(''.join(tokens)))wds = OneOrMore(wd).setParseAction(remove_duplicate_chars)print(wds.parseString("slkdjs sld sldd sdlf sdljf"))prints::>>entering remove_duplicate_chars(line: 'slkdjs sld sldd sdlf sdljf', 0, (['slkdjs', 'sld', 'sldd', 'sdlf', 'sdljf'], {}))<<leaving remove_duplicate_chars (ret: 'dfjkls')['dfjkls']"""f = _trim_arity(f)def z(*paArgs):thisFunc = 
f.__name__s,l,t = paArgs[-3:]if len(paArgs)>3:thisFunc = paArgs[0].__class__.__name__ + '.' + thisFuncsys.stderr.write( ">>entering %s(line: '%s', %d, %r)\n" % (thisFunc,line(l,s),l,t) )try:ret = f(*paArgs)except Exception as exc:sys.stderr.write( "<<leaving %s (exception: %s)\n" % (thisFunc,exc) )raisesys.stderr.write( "<<leaving %s (ret: %r)\n" % (thisFunc,ret) )return rettry:z.__name__ = f.__name__except AttributeError:passreturn z## global helpers#def delimitedList( expr, delim=",", combine=False ):"""Helper to define a delimited list of expressions - the delimiter defaults to ','.By default, the list elements and delimiters can have intervening whitespace, andcomments, but this can be overridden by passing C{combine=True} in the constructor.If C{combine} is set to C{True}, the matching tokens are returned as a single tokenstring, with the delimiters included; otherwise, the matching tokens are returnedas a list of tokens, with the delimiters suppressed.Example::delimitedList(Word(alphas)).parseString("aa,bb,cc") # -> ['aa', 'bb', 'cc']delimitedList(Word(hexnums), delim=':', combine=True).parseString("AA:BB:CC:DD:EE") # -> ['AA:BB:CC:DD:EE']"""dlName = _ustr(expr)+" ["+_ustr(delim)+" "+_ustr(expr)+"]..."if combine:return Combine( expr + ZeroOrMore( delim + expr ) ).setName(dlName)else:return ( expr + ZeroOrMore( Suppress( delim ) + expr ) ).setName(dlName)def countedArray( expr, intExpr=None ):"""Helper to define a counted list of expressions.This helper defines a pattern of the form::integer expr expr expr...where the leading integer tells how many expr expressions follow.The matched tokens returns the array of expr tokens as a list - the leading count token is suppressed.If C{intExpr} is specified, it should be a pyparsing expression that produces an integer value.Example::countedArray(Word(alphas)).parseString('2 ab cd ef') # -> ['ab', 'cd']# in this parser, the leading integer value is given in binary,# '10' indicating that 2 values are in the 
arraybinaryConstant = Word('01').setParseAction(lambda t: int(t[0], 2))countedArray(Word(alphas), intExpr=binaryConstant).parseString('10 ab cd ef') # -> ['ab', 'cd']"""arrayExpr = Forward()def countFieldParseAction(s,l,t):n = t[0]arrayExpr << (n and Group(And([expr]*n)) or Group(empty))return []if intExpr is None:intExpr = Word(nums).setParseAction(lambda t:int(t[0]))else:intExpr = intExpr.copy()intExpr.setName("arrayLen")intExpr.addParseAction(countFieldParseAction, callDuringTry=True)return ( intExpr + arrayExpr ).setName('(len) ' + _ustr(expr) + '...')def _flatten(L):ret = []for i in L:if isinstance(i,list):ret.extend(_flatten(i))else:ret.append(i)return retdef matchPreviousLiteral(expr):"""Helper to define an expression that is indirectly defined fromthe tokens matched in a previous expression, that is, it looksfor a 'repeat' of a previous expression. For example::first = Word(nums)second = matchPreviousLiteral(first)matchExpr = first + ":" + secondwill match C{"1:1"}, but not C{"1:2"}. Because this matches aprevious literal, will also match the leading C{"1:1"} in C{"1:10"}.If this is not desired, use C{matchPreviousExpr}.Do I{not} use with packrat parsing enabled."""rep = Forward()def copyTokenToRepeater(s,l,t):if t:if len(t) == 1:rep << t[0]else:# flatten t tokenstflat = _flatten(t.asList())rep << And(Literal(tt) for tt in tflat)else:rep << Empty()expr.addParseAction(copyTokenToRepeater, callDuringTry=True)rep.setName('(prev) ' + _ustr(expr))return repdef matchPreviousExpr(expr):"""Helper to define an expression that is indirectly defined fromthe tokens matched in a previous expression, that is, it looksfor a 'repeat' of a previous expression. For example::first = Word(nums)second = matchPreviousExpr(first)matchExpr = first + ":" + secondwill match C{"1:1"}, but not C{"1:2"}. 
Because this matches byexpressions, will I{not} match the leading C{"1:1"} in C{"1:10"};the expressions are evaluated first, and then compared, soC{"1"} is compared with C{"10"}.Do I{not} use with packrat parsing enabled."""rep = Forward()e2 = expr.copy()rep <<= e2def copyTokenToRepeater(s,l,t):matchTokens = _flatten(t.asList())def mustMatchTheseTokens(s,l,t):theseTokens = _flatten(t.asList())if theseTokens != matchTokens:raise ParseException("",0,"")rep.setParseAction( mustMatchTheseTokens, callDuringTry=True )expr.addParseAction(copyTokenToRepeater, callDuringTry=True)rep.setName('(prev) ' + _ustr(expr))return repdef _escapeRegexRangeChars(s):#~ escape these chars: ^-]for c in r"\^-]":s = s.replace(c,_bslash+c)s = s.replace("\n",r"\n")s = s.replace("\t",r"\t")return _ustr(s)def oneOf( strs, caseless=False, useRegex=True ):"""Helper to quickly define a set of alternative Literals, and makes sure to dolongest-first testing when there is a conflict, regardless of the input order,but returns a C{L{MatchFirst}} for best performance.Parameters:- strs - a string of space-delimited literals, or a collection of string literals- caseless - (default=C{False}) - treat all literals as caseless- useRegex - (default=C{True}) - as an optimization, will generate a Regexobject; otherwise, will generate a C{MatchFirst} object (if C{caseless=True}, orif creating a C{Regex} raises an exception)Example::comp_oper = oneOf("< = > <= >= !=")var = Word(alphas)number = Word(nums)term = var | numbercomparison_expr = term + comp_oper + termprint(comparison_expr.searchString("B = 12 AA=23 B<=AA AA>12"))prints::[['B', '=', '12'], ['AA', '=', '23'], ['B', '<=', 'AA'], ['AA', '>', '12']]"""if caseless:isequal = ( lambda a,b: a.upper() == b.upper() )masks = ( lambda a,b: b.upper().startswith(a.upper()) )parseElementClass = CaselessLiteralelse:isequal = ( lambda a,b: a == b )masks = ( lambda a,b: b.startswith(a) )parseElementClass = Literalsymbols = []if isinstance(strs,basestring):symbols = 
strs.split()elif isinstance(strs, collections.Iterable):symbols = list(strs)else:warnings.warn("Invalid argument to oneOf, expected string or iterable",SyntaxWarning, stacklevel=2)if not symbols:return NoMatch()i = 0while i < len(symbols)-1:cur = symbols[i]for j,other in enumerate(symbols[i+1:]):if ( isequal(other, cur) ):del symbols[i+j+1]breakelif ( masks(cur, other) ):del symbols[i+j+1]symbols.insert(i,other)cur = otherbreakelse:i += 1if not caseless and useRegex:#~ print (strs,"->", "|".join( [ _escapeRegexChars(sym) for sym in symbols] ))try:if len(symbols)==len("".join(symbols)):return Regex( "[%s]" % "".join(_escapeRegexRangeChars(sym) for sym in symbols) ).setName(' | '.join(symbols))else:return Regex( "|".join(re.escape(sym) for sym in symbols) ).setName(' | '.join(symbols))except Exception:warnings.warn("Exception creating Regex for oneOf, building MatchFirst",SyntaxWarning, stacklevel=2)# last resort, just use MatchFirstreturn MatchFirst(parseElementClass(sym) for sym in symbols).setName(' | '.join(symbols))def dictOf( key, value ):"""Helper to easily and clearly define a dictionary by specifying the respective patternsfor the key and value. Takes care of defining the C{L{Dict}}, C{L{ZeroOrMore}}, and C{L{Group}} tokensin the proper order. The key pattern can include delimiting markers or punctuation,as long as they are suppressed, thereby leaving the significant key text. 
The valuepattern can include named results, so that the C{Dict} results can include named tokenfields.Example::text = "shape: SQUARE posn: upper left color: light blue texture: burlap"attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))print(OneOrMore(attr_expr).parseString(text).dump())attr_label = labelattr_value = Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)# similar to Dict, but simpler call formatresult = dictOf(attr_label, attr_value).parseString(text)print(result.dump())print(result['shape'])print(result.shape) # object attribute access works tooprint(result.asDict())prints::[['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']]- color: light blue- posn: upper left- shape: SQUARE- texture: burlapSQUARESQUARE{'color': 'light blue', 'shape': 'SQUARE', 'posn': 'upper left', 'texture': 'burlap'}"""return Dict( ZeroOrMore( Group ( key + value ) ) )def originalTextFor(expr, asString=True):"""Helper to return the original, untokenized text for a given expression. Useful torestore the parsed fields of an HTML start tag into the raw tag text itself, or torevert separate tokens with intervening whitespace back to the original matchinginput text. By default, returns astring containing the original parsed text.If the optional C{asString} argument is passed as C{False}, then the return value is aC{L{ParseResults}} containing any results names that were originally matched, and asingle token containing the original matched text from the input string. 
So ifthe expression passed to C{L{originalTextFor}} contains expressions with definedresults names, you must set C{asString} to C{False} if you want to preserve thoseresults name values.Example::src = "this is test <b> bold <i>text</i> </b> normal text "for tag in ("b","i"):opener,closer = makeHTMLTags(tag)patt = originalTextFor(opener + SkipTo(closer) + closer)print(patt.searchString(src)[0])prints::['<b> bold <i>text</i> </b>']['<i>text</i>']"""locMarker = Empty().setParseAction(lambda s,loc,t: loc)endlocMarker = locMarker.copy()endlocMarker.callPreparse = FalsematchExpr = locMarker("_original_start") + expr + endlocMarker("_original_end")if asString:extractText = lambda s,l,t: s[t._original_start:t._original_end]else:def extractText(s,l,t):t[:] = [s[t.pop('_original_start'):t.pop('_original_end')]]matchExpr.setParseAction(extractText)matchExpr.ignoreExprs = expr.ignoreExprsreturn matchExprdef ungroup(expr):"""Helper to undo pyparsing's default grouping of And expressions, evenif all but one are non-empty."""return TokenConverter(expr).setParseAction(lambda t:t[0])def locatedExpr(expr):"""Helper to decorate a returned token with its starting and ending locations in the input string.This helper adds the following results names:- locn_start = location where matched expression begins- locn_end = location where matched expression ends- value = the actual parsed resultsBe careful if the input text contains C{<TAB>} characters, you may want to callC{L{ParserElement.parseWithTabs}}Example::wd = Word(alphas)for match in locatedExpr(wd).searchString("ljsdf123lksdjjf123lkkjj1222"):print(match)prints::[[0, 'ljsdf', 5]][[8, 'lksdjjf', 15]][[18, 'lkkjj', 23]]"""locator = Empty().setParseAction(lambda s,l,t: l)return Group(locator("locn_start") + expr("value") + locator.copy().leaveWhitespace()("locn_end"))# convenience constants for positional expressionsempty = Empty().setName("empty")lineStart = LineStart().setName("lineStart")lineEnd = 
LineEnd().setName("lineEnd")stringStart = StringStart().setName("stringStart")stringEnd = StringEnd().setName("stringEnd")_escapedPunc = Word( _bslash, r"\[]-*.$+^?()~ ", exact=2 ).setParseAction(lambda s,l,t:t[0][1])_escapedHexChar = Regex(r"\\0?[xX][0-9a-fA-F]+").setParseAction(lambda s,l,t:unichr(int(t[0].lstrip(r'\0x'),16)))_escapedOctChar = Regex(r"\\0[0-7]+").setParseAction(lambda s,l,t:unichr(int(t[0][1:],8)))_singleChar = _escapedPunc | _escapedHexChar | _escapedOctChar | Word(printables, excludeChars=r'\]', exact=1) | Regex(r"\w", re.UNICODE)_charRange = Group(_singleChar + Suppress("-") + _singleChar)_reBracketExpr = Literal("[") + Optional("^").setResultsName("negate") + Group( OneOrMore( _charRange | _singleChar ) ).setResultsName("body") + "]"def srange(s):r"""Helper to easily define string ranges for use in Word construction. Borrowssyntax from regexp '[]' string range definitions::srange("[0-9]") -> "0123456789"srange("[a-z]") -> "abcdefghijklmnopqrstuvwxyz"srange("[a-z$_]") -> "abcdefghijklmnopqrstuvwxyz$_"The input string must be enclosed in []'s, and the returned string is the expandedcharacter set joined into a single string.The values enclosed in the []'s may be:- a single character- an escaped character with a leading backslash (such as C{\-} or C{\]})- an escaped hex character with a leading C{'\x'} (C{\x21}, which is a C{'!'} character)(C{\0x##} is also supported for backwards compatibility)- an escaped octal character with a leading C{'\0'} (C{\041}, which is a C{'!'} character)- a range of any of the above, separated by a dash (C{'a-z'}, etc.)- any combination of the above (C{'aeiouy'}, C{'a-zA-Z0-9_$'}, etc.)"""_expanded = lambda p: p if not isinstance(p,ParseResults) else ''.join(unichr(c) for c in range(ord(p[0]),ord(p[1])+1))try:return "".join(_expanded(part) for part in _reBracketExpr.parseString(s).body)except Exception:return ""def matchOnlyAtCol(n):"""Helper method for defining parse actions that require matching at a specificcolumn 
in the input text."""def verifyCol(strg,locn,toks):if col(locn,strg) != n:raise ParseException(strg,locn,"matched token not at column %d" % n)return verifyColdef replaceWith(replStr):"""Helper method for common parse actions that simply return a literal value. Especiallyuseful when used with C{L{transformString<ParserElement.transformString>}()}.Example::num = Word(nums).setParseAction(lambda toks: int(toks[0]))na = oneOf("N/A NA").setParseAction(replaceWith(math.nan))term = na | numOneOrMore(term).parseString("324 234 N/A 234") # -> [324, 234, nan, 234]"""return lambda s,l,t: [replStr]def removeQuotes(s,l,t):"""Helper parse action for removing quotation marks from parsed quoted strings.Example::# by default, quotation marks are included in parsed resultsquotedString.parseString("'Now is the Winter of our Discontent'") # -> ["'Now is the Winter of our Discontent'"]# use removeQuotes to strip quotation marks from parsed resultsquotedString.setParseAction(removeQuotes)quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["Now is the Winter of our Discontent"]"""return t[0][1:-1]def tokenMap(func, *args):"""Helper to define a parse action by mapping a function to all elements of a ParseResults list.If any additionalargs are passed, they are forwarded to the given function as additional arguments afterthe token, as in C{hex_integer = Word(hexnums).setParseAction(tokenMap(int, 16))}, which will convert theparsed data to an integer using base 16.Example (compare the last to example in L{ParserElement.transformString}::hex_ints = OneOrMore(Word(hexnums)).setParseAction(tokenMap(int, 16))hex_ints.runTests('''00 11 22 aa FF 0a 0d 1a''')upperword = Word(alphas).setParseAction(tokenMap(str.upper))OneOrMore(upperword).runTests('''my kingdom for a horse''')wd = Word(alphas).setParseAction(tokenMap(str.title))OneOrMore(wd).setParseAction(' '.join).runTests('''now is the winter of our discontent made glorious summer by this sun of york''')prints::00 11 22 aa FF 
0a 0d 1a[0, 17, 34, 170, 255, 10, 13, 26]my kingdom for a horse['MY', 'KINGDOM', 'FOR', 'A', 'HORSE']now is the winter of our discontent made glorious summer by this sun of york['Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York']"""def pa(s,l,t):return [func(tokn, *args) for tokn in t]try:func_name = getattr(func, '__name__',getattr(func, '__class__').__name__)except Exception:func_name = str(func)pa.__name__ = func_namereturn paupcaseTokens = tokenMap(lambda t: _ustr(t).upper())"""(Deprecated) Helper parse action to convert tokens to upper case. Deprecated in favor of L{pyparsing_common.upcaseTokens}"""downcaseTokens = tokenMap(lambda t: _ustr(t).lower())"""(Deprecated) Helper parse action to convert tokens to lower case. Deprecated in favor of L{pyparsing_common.downcaseTokens}"""def _makeTags(tagStr, xml):"""Internal helper to construct opening and closing tag expressions, given a tag name"""if isinstance(tagStr,basestring):resname = tagStrtagStr = Keyword(tagStr, caseless=not xml)else:resname = tagStr.nametagAttrName = Word(alphas,alphanums+"_-:")if (xml):tagAttrValue = dblQuotedString.copy().setParseAction( removeQuotes )openTag = Suppress("<") + tagStr("tag") + \Dict(ZeroOrMore(Group( tagAttrName + Suppress("=") + tagAttrValue ))) + \Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">")else:printablesLessRAbrack = "".join(c for c in printables if c not in ">")tagAttrValue = quotedString.copy().setParseAction( removeQuotes ) | Word(printablesLessRAbrack)openTag = Suppress("<") + tagStr("tag") + \Dict(ZeroOrMore(Group( tagAttrName.setParseAction(downcaseTokens) + \Optional( Suppress("=") + tagAttrValue ) ))) + \Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">")closeTag = Combine(_L("</") + tagStr + ">")openTag = openTag.setResultsName("start"+"".join(resname.replace(":"," ").title().split())).setName("<%s>" % 
resname)closeTag = closeTag.setResultsName("end"+"".join(resname.replace(":"," ").title().split())).setName("</%s>" % resname)openTag.tag = resnamecloseTag.tag = resnamereturn openTag, closeTagdef makeHTMLTags(tagStr):"""Helper to construct opening and closing tag expressions for HTML, given a tag name. Matchestags in either upper or lower case, attributes with namespaces and with quoted or unquoted values.Example::text = '<td>More info at the <a href="http://pyparsing.wikispaces.com">pyparsing</a> wiki page</td>'# makeHTMLTags returns pyparsing expressions for the opening and closing tags as a 2-tuplea,a_end = makeHTMLTags("A")link_expr = a + SkipTo(a_end)("link_text") + a_endfor link in link_expr.searchString(text):# attributes in the <A> tag (like "href" shown here) are also accessible as named resultsprint(link.link_text, '->', link.href)prints::pyparsing -> http://pyparsing.wikispaces.com"""return _makeTags( tagStr, False )def makeXMLTags(tagStr):"""Helper to construct opening and closing tag expressions for XML, given a tag name. Matchestags only in the given upper/lower case.Example: similar to L{makeHTMLTags}"""return _makeTags( tagStr, True )def withAttribute(*args,**attrDict):"""Helper to create a validating parse action to be used with start tags createdwith C{L{makeXMLTags}} or C{L{makeHTMLTags}}. Use C{withAttribute} to qualify a starting tagwith a required attribute value, to avoid false matches on common tags such asC{<TD>} or C{<DIV>}.Call C{withAttribute} with a series of attribute names and values. Specify the listof filter attributes names and values as:- keyword arguments, as in C{(align="right")}, or- as an explicit dict with C{**} operator, when an attribute name is also a Pythonreserved word, as in C{**{"class":"Customer", "align":"right"}}- a list of name-value tuples, as in ( ("ns1:class", "Customer"), ("ns2:align","right") )For attribute names with a namespace prefix, you must use the second form. 
Attributenames are matched insensitive to upper/lower case.If just testing for C{class} (with or without a namespace), use C{L{withClass}}.To verify that the attribute exists, but without specifying a value, passC{withAttribute.ANY_VALUE} as the value.Example::html = '''<div>Some text<div type="grid">1 4 0 1 0</div><div type="graph">1,3 2,3 1,1</div><div>this has no type</div></div>'''div,div_end = makeHTMLTags("div")# only match div tag having a type attribute with value "grid"div_grid = div().setParseAction(withAttribute(type="grid"))grid_expr = div_grid + SkipTo(div | div_end)("body")for grid_header in grid_expr.searchString(html):print(grid_header.body)# construct a match with any div tag having a type attribute, regardless of the valuediv_any_type = div().setParseAction(withAttribute(type=withAttribute.ANY_VALUE))div_expr = div_any_type + SkipTo(div | div_end)("body")for div_header in div_expr.searchString(html):print(div_header.body)prints::1 4 0 1 01 4 0 1 01,3 2,3 1,1"""if args:attrs = args[:]else:attrs = attrDict.items()attrs = [(k,v) for k,v in attrs]def pa(s,l,tokens):for attrName,attrValue in attrs:if attrName not in tokens:raise ParseException(s,l,"no matching attribute " + attrName)if attrValue != withAttribute.ANY_VALUE and tokens[attrName] != attrValue:raise ParseException(s,l,"attribute '%s' has value '%s', must be '%s'" %(attrName, tokens[attrName], attrValue))return pawithAttribute.ANY_VALUE = object()def withClass(classname, namespace=''):"""Simplified version of C{L{withAttribute}} when matching on a div class - madedifficult because C{class} is a reserved word in Python.Example::html = '''<div>Some text<div class="grid">1 4 0 1 0</div><div class="graph">1,3 2,3 1,1</div><div>this <div> has no class</div></div>'''div,div_end = makeHTMLTags("div")div_grid = div().setParseAction(withClass("grid"))grid_expr = div_grid + SkipTo(div | div_end)("body")for grid_header in grid_expr.searchString(html):print(grid_header.body)div_any_type = 
div().setParseAction(withClass(withAttribute.ANY_VALUE))div_expr = div_any_type + SkipTo(div | div_end)("body")for div_header in div_expr.searchString(html):print(div_header.body)prints::1 4 0 1 01 4 0 1 01,3 2,3 1,1"""classattr = "%s:class" % namespace if namespace else "class"return withAttribute(**{classattr : classname})opAssoc = _Constants()opAssoc.LEFT = object()opAssoc.RIGHT = object()def infixNotation( baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')') ):"""Helper method for constructing grammars of expressions made up ofoperators working in a precedence hierarchy. Operators may be unary orbinary, left- or right-associative. Parse actions can also be attachedto operator expressions. The generated parser will also recognize the useof parentheses to override operator precedences (see example below).Note: if you define a deep operator list, you may see performance issueswhen using infixNotation. See L{ParserElement.enablePackrat} for amechanism to potentially improve your parser performance.Parameters:- baseExpr - expression representing the most basic element for the nested- opList - list of tuples, one for each operator precedence level in theexpression grammar; each tuple is of the form(opExpr, numTerms, rightLeftAssoc, parseAction), where:- opExpr is the pyparsing expression for the operator;may also be a string, which will be converted to a Literal;if numTerms is 3, opExpr is a tuple of two expressions, for thetwo operators separating the 3 terms- numTerms is the number of terms for this operator (mustbe 1, 2, or 3)- rightLeftAssoc is the indicator whether the operator isright or left associative, using the pyparsing-definedconstants C{opAssoc.RIGHT} and C{opAssoc.LEFT}.- parseAction is the parse action to be associated withexpressions matching this operator expression (theparse action tuple member may be omitted)- lpar - expression for matching left-parentheses (default=C{Suppress('(')})- rpar - expression for matching right-parentheses 
(default=C{Suppress(')')})Example::# simple example of four-function arithmetic with ints and variable namesinteger = pyparsing_common.signed_integervarname = pyparsing_common.identifierarith_expr = infixNotation(integer | varname,[('-', 1, opAssoc.RIGHT),(oneOf('* /'), 2, opAssoc.LEFT),(oneOf('+ -'), 2, opAssoc.LEFT),])arith_expr.runTests('''5+3*6(5+3)*6-2--11''', fullDump=False)prints::5+3*6[[5, '+', [3, '*', 6]]](5+3)*6[[[5, '+', 3], '*', 6]]-2--11[[['-', 2], '-', ['-', 11]]]"""ret = Forward()lastExpr = baseExpr | ( lpar + ret + rpar )for i,operDef in enumerate(opList):opExpr,arity,rightLeftAssoc,pa = (operDef + (None,))[:4]termName = "%s term" % opExpr if arity < 3 else "%s%s term" % opExprif arity == 3:if opExpr is None or len(opExpr) != 2:raise ValueError("if numterms=3, opExpr must be a tuple or list of two expressions")opExpr1, opExpr2 = opExprthisExpr = Forward().setName(termName)if rightLeftAssoc == opAssoc.LEFT:if arity == 1:matchExpr = FollowedBy(lastExpr + opExpr) + Group( lastExpr + OneOrMore( opExpr ) )elif arity == 2:if opExpr is not None:matchExpr = FollowedBy(lastExpr + opExpr + lastExpr) + Group( lastExpr + OneOrMore( opExpr + lastExpr ) )else:matchExpr = FollowedBy(lastExpr+lastExpr) + Group( lastExpr + OneOrMore(lastExpr) )elif arity == 3:matchExpr = FollowedBy(lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr) + \Group( lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr )else:raise ValueError("operator must be unary (1), binary (2), or ternary (3)")elif rightLeftAssoc == opAssoc.RIGHT:if arity == 1:# try to avoid LR with this extra testif not isinstance(opExpr, Optional):opExpr = Optional(opExpr)matchExpr = FollowedBy(opExpr.expr + thisExpr) + Group( opExpr + thisExpr )elif arity == 2:if opExpr is not None:matchExpr = FollowedBy(lastExpr + opExpr + thisExpr) + Group( lastExpr + OneOrMore( opExpr + thisExpr ) )else:matchExpr = FollowedBy(lastExpr + thisExpr) + Group( lastExpr + OneOrMore( thisExpr ) )elif arity == 3:matchExpr = 
FollowedBy(lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr) + \Group( lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr )else:raise ValueError("operator must be unary (1), binary (2), or ternary (3)")else:raise ValueError("operator must indicate right or left associativity")if pa:matchExpr.setParseAction( pa )thisExpr <<= ( matchExpr.setName(termName) | lastExpr )lastExpr = thisExprret <<= lastExprreturn retoperatorPrecedence = infixNotation"""(Deprecated) Former name of C{L{infixNotation}}, will be dropped in a future release."""dblQuotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"').setName("string enclosed in double quotes")sglQuotedString = Combine(Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("string enclosed in single quotes")quotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"'|Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("quotedString using single or double quotes")unicodeString = Combine(_L('u') + quotedString.copy()).setName("unicode string literal")def nestedExpr(opener="(", closer=")", content=None, ignoreExpr=quotedString.copy()):"""Helper method for defining nested lists enclosed in opening and closingdelimiters ("(" and ")" are the default).Parameters:- opener - opening character for a nested list (default=C{"("}); can also be a pyparsing expression- closer - closing character for a nested list (default=C{")"}); can also be a pyparsing expression- content - expression for items within the nested lists (default=C{None})- ignoreExpr - expression for ignoring opening and closing delimiters (default=C{quotedString})If an expression is not provided for the content argument, the nestedexpression will capture all whitespace-delimited content between delimitersas a list of separate values.Use the C{ignoreExpr} argument to define expressions that may containopening or closing characters that should not be treated as 
openingor closing characters for nesting, such as quotedString or a commentexpression. Specify multiple expressions using an C{L{Or}} or C{L{MatchFirst}}.The default is L{quotedString}, but if no expressions are to be ignored,then pass C{None} for this argument.Example::data_type = oneOf("void int short long char float double")decl_data_type = Combine(data_type + Optional(Word('*')))ident = Word(alphas+'_', alphanums+'_')number = pyparsing_common.numberarg = Group(decl_data_type + ident)LPAR,RPAR = map(Suppress, "()")code_body = nestedExpr('{', '}', ignoreExpr=(quotedString | cStyleComment))c_function = (decl_data_type("type")+ ident("name")+ LPAR + Optional(delimitedList(arg), [])("args") + RPAR+ code_body("body"))c_function.ignore(cStyleComment)source_code = '''int is_odd(int x) {return (x%2);}int dec_to_hex(char hchar) {if (hchar >= '0' && hchar <= '9') {return (ord(hchar)-ord('0'));} else {return (10+ord(hchar)-ord('A'));}}'''for func in c_function.searchString(source_code):print("%(name)s (%(type)s) args: %(args)s" % func)prints::is_odd (int) args: [['int', 'x']]dec_to_hex (int) args: [['char', 'hchar']]"""if opener == closer:raise ValueError("opening and closing strings cannot be the same")if content is None:if isinstance(opener,basestring) and isinstance(closer,basestring):if len(opener) == 1 and len(closer)==1:if ignoreExpr is not None:content = (Combine(OneOrMore(~ignoreExpr +CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS,exact=1))).setParseAction(lambda t:t[0].strip()))else:content = (empty.copy()+CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS).setParseAction(lambda t:t[0].strip()))else:if ignoreExpr is not None:content = (Combine(OneOrMore(~ignoreExpr +~Literal(opener) + ~Literal(closer) +CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1))).setParseAction(lambda t:t[0].strip()))else:content = (Combine(OneOrMore(~Literal(opener) + ~Literal(closer) +CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1))).setParseAction(lambda 
t:t[0].strip()))else:raise ValueError("opening and closing arguments must be strings if no content expression is given")ret = Forward()if ignoreExpr is not None:ret <<= Group( Suppress(opener) + ZeroOrMore( ignoreExpr | ret | content ) + Suppress(closer) )else:ret <<= Group( Suppress(opener) + ZeroOrMore( ret | content ) + Suppress(closer) )ret.setName('nested %s%s expression' % (opener,closer))return retdef indentedBlock(blockStatementExpr, indentStack, indent=True):"""Helper method for defining space-delimited indentation blocks, such asthose used to define block statements in Python source code.Parameters:- blockStatementExpr - expression defining syntax of statement thatis repeated within the indented block- indentStack - list created by caller to manage indentation stack(multiple statementWithIndentedBlock expressions within a single grammarshould share a common indentStack)- indent - boolean indicating whether block must be indented beyond thethe current level; set to False for block of left-most statements(default=C{True})A valid block must contain at least one C{blockStatement}.Example::data = '''def A(z):A1B = 100G = A2A2A3Bdef BB(a,b,c):BB1def BBA():bba1bba2bba3CDdef spam(x,y):def eggs(z):pass'''indentStack = [1]stmt = Forward()identifier = Word(alphas, alphanums)funcDecl = ("def" + identifier + Group( "(" + Optional( delimitedList(identifier) ) + ")" ) + ":")func_body = indentedBlock(stmt, indentStack)funcDef = Group( funcDecl + func_body )rvalue = Forward()funcCall = Group(identifier + "(" + Optional(delimitedList(rvalue)) + ")")rvalue << (funcCall | identifier | Word(nums))assignment = Group(identifier + "=" + rvalue)stmt << ( funcDef | assignment | identifier )module_body = OneOrMore(stmt)parseTree = module_body.parseString(data)parseTree.pprint()prints::[['def','A',['(', 'z', ')'],':',[['A1'], [['B', '=', '100']], [['G', '=', 'A2']], ['A2'], ['A3']]],'B',['def','BB',['(', 'a', 'b', 'c', ')'],':',[['BB1'], [['def', 'BBA', ['(', ')'], ':', [['bba1'], 
['bba2'], ['bba3']]]]]],'C','D',['def','spam',['(', 'x', 'y', ')'],':',[[['def', 'eggs', ['(', 'z', ')'], ':', [['pass']]]]]]]"""def checkPeerIndent(s,l,t):if l >= len(s): returncurCol = col(l,s)if curCol != indentStack[-1]:if curCol > indentStack[-1]:raise ParseFatalException(s,l,"illegal nesting")raise ParseException(s,l,"not a peer entry")def checkSubIndent(s,l,t):curCol = col(l,s)if curCol > indentStack[-1]:indentStack.append( curCol )else:raise ParseException(s,l,"not a subentry")def checkUnindent(s,l,t):if l >= len(s): returncurCol = col(l,s)if not(indentStack and curCol < indentStack[-1] and curCol <= indentStack[-2]):raise ParseException(s,l,"not an unindent")indentStack.pop()NL = OneOrMore(LineEnd().setWhitespaceChars("\t ").suppress())INDENT = (Empty() + Empty().setParseAction(checkSubIndent)).setName('INDENT')PEER = Empty().setParseAction(checkPeerIndent).setName('')UNDENT = Empty().setParseAction(checkUnindent).setName('UNINDENT')if indent:smExpr = Group( Optional(NL) +#~ FollowedBy(blockStatementExpr) +INDENT + (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) + UNDENT)else:smExpr = Group( Optional(NL) +(OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) )blockStatementExpr.ignore(_bslash + LineEnd())return smExpr.setName('indented block')alphas8bit = srange(r"[\0xc0-\0xd6\0xd8-\0xf6\0xf8-\0xff]")punc8bit = srange(r"[\0xa1-\0xbf\0xd7\0xf7]")anyOpenTag,anyCloseTag = makeHTMLTags(Word(alphas,alphanums+"_:").setName('any tag'))_htmlEntityMap = dict(zip("gt lt amp nbsp quot apos".split(),'><& "\''))commonHTMLEntity = Regex('&(?P<entity>' + '|'.join(_htmlEntityMap.keys()) +");").setName("common HTML entity")def replaceHTMLEntity(t):"""Helper parser action to replace common HTML entities with their special characters"""return _htmlEntityMap.get(t.entity)# it's easy to get these comment structures wrong - they're very common, so may as well make them availablecStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/').setName("C 
style comment")"Comment of the form C{/* ... */}"htmlComment = Regex(r"<!--[\s\S]*?-->").setName("HTML comment")"Comment of the form C{<!-- ... -->}"restOfLine = Regex(r".*").leaveWhitespace().setName("rest of line")dblSlashComment = Regex(r"//(?:\\\n|[^\n])*").setName("// comment")"Comment of the form C{// ... (to end of line)}"cppStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/'| dblSlashComment).setName("C++ style comment")"Comment of either form C{L{cStyleComment}} or C{L{dblSlashComment}}"javaStyleComment = cppStyleComment"Same as C{L{cppStyleComment}}"pythonStyleComment = Regex(r"#.*").setName("Python style comment")"Comment of the form C{# ... (to end of line)}"_commasepitem = Combine(OneOrMore(Word(printables, excludeChars=',') +Optional( Word(" \t") +~Literal(",") + ~LineEnd() ) ) ).streamline().setName("commaItem")commaSeparatedList = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("commaSeparatedList")"""(Deprecated) Predefined expression of 1 or more printable words or quoted strings, separated by commas.This expression is deprecated in favor of L{pyparsing_common.comma_separated_list}."""# some other useful expressions - using lower-case class name since we are really using this as a namespaceclass pyparsing_common:"""Here are some common low-level expressions that may be useful in jump-starting parser development:- numeric forms (L{integers<integer>}, L{reals<real>}, L{scientific notation<sci_real>})- common L{programming identifiers<identifier>}- network addresses (L{MAC<mac_address>}, L{IPv4<ipv4_address>}, L{IPv6<ipv6_address>})- ISO8601 L{dates<iso8601_date>} and L{datetime<iso8601_datetime>}- L{UUID<uuid>}- L{comma-separated list<comma_separated_list>}Parse actions:- C{L{convertToInteger}}- C{L{convertToFloat}}- C{L{convertToDate}}- C{L{convertToDatetime}}- C{L{stripHTMLTags}}- C{L{upcaseTokens}}- C{L{downcaseTokens}}Example::pyparsing_common.number.runTests('''# any int or real number, returned as 
the appropriate type100-100+1003.141596.02e231e-12''')pyparsing_common.fnumber.runTests('''# any int or real number, returned as float100-100+1003.141596.02e231e-12''')pyparsing_common.hex_integer.runTests('''# hex numbers100FF''')pyparsing_common.fraction.runTests('''# fractions1/2-3/4''')pyparsing_common.mixed_integer.runTests('''# mixed fractions11/2-3/41-3/4''')import uuidpyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID))pyparsing_common.uuid.runTests('''# uuid12345678-1234-5678-1234-567812345678''')prints::# any int or real number, returned as the appropriate type100[100]-100[-100]+100[100]3.14159[3.14159]6.02e23[6.02e+23]1e-12[1e-12]# any int or real number, returned as float100[100.0]-100[-100.0]+100[100.0]3.14159[3.14159]6.02e23[6.02e+23]1e-12[1e-12]# hex numbers100[256]FF[255]# fractions1/2[0.5]-3/4[-0.75]# mixed fractions1[1]1/2[0.5]-3/4[-0.75]1-3/4[1.75]# uuid12345678-1234-5678-1234-567812345678[UUID('12345678-1234-5678-1234-567812345678')]"""convertToInteger = tokenMap(int)"""Parse action for converting parsed integers to Python int"""convertToFloat = tokenMap(float)"""Parse action for converting parsed numbers to Python float"""integer = Word(nums).setName("integer").setParseAction(convertToInteger)"""expression that parses an unsigned integer, returns an int"""hex_integer = Word(hexnums).setName("hex integer").setParseAction(tokenMap(int,16))"""expression that parses a hexadecimal integer, returns an int"""signed_integer = Regex(r'[+-]?\d+').setName("signed integer").setParseAction(convertToInteger)"""expression that parses an integer with optional leading sign, returns an int"""fraction = (signed_integer().setParseAction(convertToFloat) + '/' + signed_integer().setParseAction(convertToFloat)).setName("fraction")"""fractional expression of an integer divided by an integer, returns a float"""fraction.addParseAction(lambda t: t[0]/t[-1])mixed_integer = (fraction | signed_integer + Optional(Optional('-').suppress() + fraction)).setName("fraction 
or mixed integer-fraction")"""mixed integer of the form 'integer - fraction', with optional leading integer, returns float"""mixed_integer.addParseAction(sum)real = Regex(r'[+-]?\d+\.\d*').setName("real number").setParseAction(convertToFloat)"""expression that parses a floating point number and returns a float"""sci_real = Regex(r'[+-]?\d+([eE][+-]?\d+|\.\d*([eE][+-]?\d+)?)').setName("real number with scientific notation").setParseAction(convertToFloat)"""expression that parses a floating point number with optional scientific notation and returns a float"""# streamlining this expression makes the docs nicer-lookingnumber = (sci_real | real | signed_integer).streamline()"""any numeric expression, returns the corresponding Python type"""fnumber = Regex(r'[+-]?\d+\.?\d*([eE][+-]?\d+)?').setName("fnumber").setParseAction(convertToFloat)"""any int or real number, returned as float"""identifier = Word(alphas+'_', alphanums+'_').setName("identifier")"""typical code identifier (leading alpha or '_', followed by 0 or more alphas, nums, or '_')"""ipv4_address = Regex(r'(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})(\.(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})){3}').setName("IPv4 address")"IPv4 address (C{0.0.0.0 - 255.255.255.255})"_ipv6_part = Regex(r'[0-9a-fA-F]{1,4}').setName("hex_integer")_full_ipv6_address = (_ipv6_part + (':' + _ipv6_part)*7).setName("full IPv6 address")_short_ipv6_address = (Optional(_ipv6_part + (':' + _ipv6_part)*(0,6)) + "::" + Optional(_ipv6_part + (':' + _ipv6_part)*(0,6))).setName("short IPv6 address")_short_ipv6_address.addCondition(lambda t: sum(1 for tt in t if pyparsing_common._ipv6_part.matches(tt)) < 8)_mixed_ipv6_address = ("::ffff:" + ipv4_address).setName("mixed IPv6 address")ipv6_address = Combine((_full_ipv6_address | _mixed_ipv6_address | _short_ipv6_address).setName("IPv6 address")).setName("IPv6 address")"IPv6 address (long, short, or mixed form)"mac_address = Regex(r'[0-9a-fA-F]{2}([:.-])[0-9a-fA-F]{2}(?:\1[0-9a-fA-F]{2}){4}').setName("MAC 
address")"MAC address xx:xx:xx:xx:xx (may also have '-' or '.' delimiters)"@staticmethoddef convertToDate(fmt="%Y-%m-%d"):"""Helper to create a parse action for converting parsed date string to Python datetime.dateParams -- fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%d"})Example::date_expr = pyparsing_common.iso8601_date.copy()date_expr.setParseAction(pyparsing_common.convertToDate())print(date_expr.parseString("1999-12-31"))prints::[datetime.date(1999, 12, 31)]"""def cvt_fn(s,l,t):try:return datetime.strptime(t[0], fmt).date()except ValueError as ve:raise ParseException(s, l, str(ve))return cvt_fn@staticmethoddef convertToDatetime(fmt="%Y-%m-%dT%H:%M:%S.%f"):"""Helper to create a parse action for converting parsed datetime string to Python datetime.datetimeParams -- fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%dT%H:%M:%S.%f"})Example::dt_expr = pyparsing_common.iso8601_datetime.copy()dt_expr.setParseAction(pyparsing_common.convertToDatetime())print(dt_expr.parseString("1999-12-31T23:59:59.999"))prints::[datetime.datetime(1999, 12, 31, 23, 59, 59, 999000)]"""def cvt_fn(s,l,t):try:return datetime.strptime(t[0], fmt)except ValueError as ve:raise ParseException(s, l, str(ve))return cvt_fniso8601_date = Regex(r'(?P<year>\d{4})(?:-(?P<month>\d\d)(?:-(?P<day>\d\d))?)?').setName("ISO8601 date")"ISO8601 date (C{yyyy-mm-dd})"iso8601_datetime = Regex(r'(?P<year>\d{4})-(?P<month>\d\d)-(?P<day>\d\d)[T ](?P<hour>\d\d):(?P<minute>\d\d)(:(?P<second>\d\d(\.\d*)?)?)?(?P<tz>Z|[+-]\d\d:?\d\d)?').setName("ISO8601 datetime")"ISO8601 datetime (C{yyyy-mm-ddThh:mm:ss.s(Z|+-00:00)}) - trailing seconds, milliseconds, and timezone optional; accepts separating C{'T'} or C{' '}"uuid = Regex(r'[0-9a-fA-F]{8}(-[0-9a-fA-F]{4}){3}-[0-9a-fA-F]{12}').setName("UUID")"UUID (C{xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx})"_html_stripper = anyOpenTag.suppress() | anyCloseTag.suppress()@staticmethoddef stripHTMLTags(s, l, tokens):"""Parse action to remove HTML tags 
from web page HTML sourceExample::# strip HTML links from normal texttext = '<td>More info at the <a href="http://pyparsing.wikispaces.com">pyparsing</a> wiki page</td>'td,td_end = makeHTMLTags("TD")table_text = td + SkipTo(td_end).setParseAction(pyparsing_common.stripHTMLTags)("body") + td_endprint(table_text.parseString(text).body) # -> 'More info at the pyparsing wiki page'"""return pyparsing_common._html_stripper.transformString(tokens[0])_commasepitem = Combine(OneOrMore(~Literal(",") + ~LineEnd() + Word(printables, excludeChars=',')+ Optional( White(" \t") ) ) ).streamline().setName("commaItem")comma_separated_list = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("comma separated list")"""Predefined expression of 1 or more printable words or quoted strings, separated by commas."""upcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).upper()))"""Parse action to convert tokens to upper case."""downcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).lower()))"""Parse action to convert tokens to lower case."""if __name__ == "__main__":selectToken = CaselessLiteral("select")fromToken = CaselessLiteral("from")ident = Word(alphas, alphanums + "_$")columnName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens)columnNameList = Group(delimitedList(columnName)).setName("columns")columnSpec = ('*' | columnNameList)tableName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens)tableNameList = Group(delimitedList(tableName)).setName("tables")simpleSQL = selectToken("command") + columnSpec("columns") + fromToken + tableNameList("tables")# demo runTests method, including embedded comments in test stringsimpleSQL.runTests("""# '*' as column list and dotted table nameselect * from SYS.XYZZY# caseless match on "SELECT", and casts back to "select"SELECT * from XYZZY, ABC# list of column names, and mixed case SELECT keywordSelect AA,BB,CC from Sys.dual# multiple tablesSelect A, B, C from Sys.dual, 
Table2# invalid SELECT keyword - should failXelect A, B, C from Sys.dual# incomplete command - should failSelect# invalid column name - should failSelect ^^^ frox Sys.dual""")pyparsing_common.number.runTests("""100-100+1003.141596.02e231e-12""")# any int or real number, returned as floatpyparsing_common.fnumber.runTests("""100-100+1003.141596.02e231e-12""")pyparsing_common.hex_integer.runTests("""100FF""")import uuidpyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID))pyparsing_common.uuid.runTests("""12345678-1234-5678-1234-567812345678""")
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function

import collections
import itertools
import re

from ._structures import Infinity


__all__ = ["parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"]


# Internal container for the parsed-out pieces of a PEP 440 version.
_Version = collections.namedtuple(
    "_Version",
    ["epoch", "release", "dev", "pre", "post", "local"],
)


def parse(version):
    """
    Parse the given version string and return either a :class:`Version` object
    or a :class:`LegacyVersion` object depending on if the given version is
    a valid PEP 440 version or a legacy version.
    """
    try:
        return Version(version)
    except InvalidVersion:
        return LegacyVersion(version)


class InvalidVersion(ValueError):
    """
    An invalid version was found, users should refer to PEP 440.
    """


class _BaseVersion(object):
    # Common comparison machinery. Subclasses are expected to assign a
    # totally-orderable ``self._key`` in their __init__.

    def __hash__(self):
        return hash(self._key)

    def __lt__(self, other):
        return self._compare(other, lambda s, o: s < o)

    def __le__(self, other):
        return self._compare(other, lambda s, o: s <= o)

    def __eq__(self, other):
        return self._compare(other, lambda s, o: s == o)

    def __ge__(self, other):
        return self._compare(other, lambda s, o: s >= o)

    def __gt__(self, other):
        return self._compare(other, lambda s, o: s > o)

    def __ne__(self, other):
        return self._compare(other, lambda s, o: s != o)

    def _compare(self, other, method):
        # Only versions from this hierarchy are comparable; defer to the
        # other operand otherwise.
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return method(self._key, other._key)


class LegacyVersion(_BaseVersion):
    """A version string that does not conform to PEP 440."""

    def __init__(self, version):
        self._version = str(version)
        self._key = _legacy_cmpkey(self._version)

    def __str__(self):
        return self._version

    def __repr__(self):
        return "<LegacyVersion({0})>".format(repr(str(self)))

    @property
    def public(self):
        return self._version

    @property
    def base_version(self):
        return self._version

    @property
    def local(self):
        # Legacy versions have no notion of a local segment.
        return None

    @property
    def is_prerelease(self):
        return False

    @property
    def is_postrelease(self):
        return False


_legacy_version_component_re = re.compile(
    r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE,
)

_legacy_version_replacement_map = {
    "pre": "c", "preview": "c", "-": "final-", "rc": "c", "dev": "@",
}


def _parse_version_parts(s):
    # Yield comparable chunks of a legacy version string, normalizing
    # alternate spellings and zero-padding numbers so plain string
    # comparison orders them numerically.
    for part in _legacy_version_component_re.split(s):
        part = _legacy_version_replacement_map.get(part, part)

        if not part or part == ".":
            continue

        if part[:1] in "0123456789":
            # pad for numeric comparison
            yield part.zfill(8)
        else:
            yield "*" + part

    # ensure that alpha/beta/candidate are before final
    yield "*final"


def _legacy_cmpkey(version):
    # We hardcode an epoch of -1 here. A PEP 440 version can only have a epoch
    # greater than or equal to 0. This will effectively put the LegacyVersion,
    # which uses the defacto standard originally implemented by setuptools,
    # as before all PEP 440 versions.
    epoch = -1

    # This scheme is taken from pkg_resources.parse_version setuptools prior
    # to it's adoption of the packaging library.
    parts = []
    for part in _parse_version_parts(version.lower()):
        if part.startswith("*"):
            # remove "-" before a prerelease tag
            if part < "*final":
                while parts and parts[-1] == "*final-":
                    parts.pop()

            # remove trailing zeros from each series of numeric parts
            while parts and parts[-1] == "00000000":
                parts.pop()

        parts.append(part)
    parts = tuple(parts)

    return epoch, parts


# Deliberately not anchored to the start and end of the string, to make it
# easier for 3rd party code to reuse
VERSION_PATTERN = r"""
    v?
    (?:
        (?:(?P<epoch>[0-9]+)!)?                           # epoch
        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
        (?P<pre>                                          # pre-release
            [-_\.]?
            (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
            [-_\.]?
            (?P<pre_n>[0-9]+)?
        )?
        (?P<post>                                         # post release
            (?:-(?P<post_n1>[0-9]+))
            |
            (?:
                [-_\.]?
                (?P<post_l>post|rev|r)
                [-_\.]?
                (?P<post_n2>[0-9]+)?
            )
        )?
        (?P<dev>                                          # dev release
            [-_\.]?
            (?P<dev_l>dev)
            [-_\.]?
            (?P<dev_n>[0-9]+)?
        )?
    )
    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
"""


class Version(_BaseVersion):
    """A PEP 440 compliant version, parsed and normalized."""

    _regex = re.compile(
        r"^\s*" + VERSION_PATTERN + r"\s*$",
        re.VERBOSE | re.IGNORECASE,
    )

    def __init__(self, version):
        # Validate the version and parse it into pieces
        match = self._regex.search(version)
        if not match:
            raise InvalidVersion("Invalid version: '{0}'".format(version))

        # Store the parsed out pieces of the version
        self._version = _Version(
            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
            release=tuple(int(i) for i in match.group("release").split(".")),
            pre=_parse_letter_version(
                match.group("pre_l"),
                match.group("pre_n"),
            ),
            post=_parse_letter_version(
                match.group("post_l"),
                match.group("post_n1") or match.group("post_n2"),
            ),
            dev=_parse_letter_version(
                match.group("dev_l"),
                match.group("dev_n"),
            ),
            local=_parse_local_version(match.group("local")),
        )

        # Generate a key which will be used for sorting
        self._key = _cmpkey(
            self._version.epoch,
            self._version.release,
            self._version.pre,
            self._version.post,
            self._version.dev,
            self._version.local,
        )

    def __repr__(self):
        return "<Version({0})>".format(repr(str(self)))

    def __str__(self):
        parts = []

        # Epoch
        if self._version.epoch != 0:
            parts.append("{0}!".format(self._version.epoch))

        # Release segment
        parts.append(".".join(str(x) for x in self._version.release))

        # Pre-release
        if self._version.pre is not None:
            parts.append("".join(str(x) for x in self._version.pre))

        # Post-release
        if self._version.post is not None:
            parts.append(".post{0}".format(self._version.post[1]))

        # Development release
        if self._version.dev is not None:
            parts.append(".dev{0}".format(self._version.dev[1]))

        # Local version segment
        if self._version.local is not None:
            parts.append(
                "+{0}".format(".".join(str(x) for x in self._version.local))
            )

        return "".join(parts)

    @property
    def public(self):
        # Everything up to (but excluding) the local segment.
        return str(self).split("+", 1)[0]

    @property
    def base_version(self):
        parts = []

        # Epoch
        if self._version.epoch != 0:
            parts.append("{0}!".format(self._version.epoch))

        # Release segment
        parts.append(".".join(str(x) for x in self._version.release))

        return "".join(parts)

    @property
    def local(self):
        # Returns the local segment as a string, or None when absent.
        version_string = str(self)
        if "+" in version_string:
            return version_string.split("+", 1)[1]

    @property
    def is_prerelease(self):
        return bool(self._version.dev or self._version.pre)

    @property
    def is_postrelease(self):
        return bool(self._version.post)


def _parse_letter_version(letter, number):
    # Normalize a (letter, number) pre/post/dev pair, or return None when
    # neither piece is present.
    if letter:
        # We consider there to be an implicit 0 in a pre-release if there is
        # not a numeral associated with it.
        if number is None:
            number = 0

        # We normalize any letters to their lower case form
        letter = letter.lower()

        # We consider some words to be alternate spellings of other words and
        # in those cases we want to normalize the spellings to our preferred
        # spelling.
        if letter == "alpha":
            letter = "a"
        elif letter == "beta":
            letter = "b"
        elif letter in ["c", "pre", "preview"]:
            letter = "rc"
        elif letter in ["rev", "r"]:
            letter = "post"

        return letter, int(number)
    if not letter and number:
        # We assume if we are given a number, but we are not given a letter
        # then this is using the implicit post release syntax (e.g. 1.0-1)
        letter = "post"

        return letter, int(number)


_local_version_seperators = re.compile(r"[\._-]")


def _parse_local_version(local):
    """
    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
    """
    if local is not None:
        return tuple(
            part.lower() if not part.isdigit() else int(part)
            for part in _local_version_seperators.split(local)
        )


def _cmpkey(epoch, release, pre, post, dev, local):
    # When we compare a release version, we want to compare it with all of the
    # trailing zeros removed. So we'll use a reverse the list, drop all the now
    # leading zeros until we come to something non zero, then take the rest
    # re-reverse it back into the correct order and make it a tuple and use
    # that for our sorting key.
    release = tuple(
        reversed(list(
            itertools.dropwhile(
                lambda x: x == 0,
                reversed(release),
            )
        ))
    )

    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
    # We'll do this by abusing the pre segment, but we _only_ want to do this
    # if there is not a pre or a post segment. If we have one of those then
    # the normal sorting rules will handle this case correctly.
    if pre is None and post is None and dev is not None:
        pre = -Infinity
    # Versions without a pre-release (except as noted above) should sort after
    # those with one.
    elif pre is None:
        pre = Infinity

    # Versions without a post segment should sort before those with one.
    if post is None:
        post = -Infinity

    # Versions without a development segment should sort after those with one.
    if dev is None:
        dev = Infinity

    if local is None:
        # Versions without a local segment should sort before those with one.
        local = -Infinity
    else:
        # Versions with a local segment need that segment parsed to implement
        # the sorting rules in PEP440.
        # - Alpha numeric segments sort before numeric segments
        # - Alpha numeric segments sort lexicographically
        # - Numeric segments sort numerically
        # - Shorter versions sort before longer versions when the prefixes
        #   match exactly
        local = tuple(
            (i, "") if isinstance(i, int) else (-Infinity, i)
            for i in local
        )

    return epoch, release, pre, post, dev, local
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function

import re

# Runs of '-', '_' and '.' all collapse to a single '-' during
# normalization; compiled once at module level.
_canonicalize_regex = re.compile(r"[-_.]+")


def canonicalize_name(name):
    """Return the PEP 503 normalized form of a project *name*.

    Runs of hyphens, underscores and periods are replaced with a single
    hyphen, and the result is lowercased.
    """
    # This is taken from PEP 503.
    collapsed = _canonicalize_regex.sub("-", name)
    return collapsed.lower()
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
"""Implementation of PEP 440 version specifiers (vendored ``packaging``).

Provides ``Specifier`` (PEP 440 operators), ``LegacySpecifier`` (setuptools
style comparisons) and ``SpecifierSet`` (a comma-separated conjunction of
specifiers), all sharing the ``BaseSpecifier`` interface.
"""
from __future__ import absolute_import, division, print_function

import abc
import functools
import itertools
import re

from ._compat import string_types, with_metaclass
from .version import Version, LegacyVersion, parse


class InvalidSpecifier(ValueError):
    """
    An invalid specifier was found, users should refer to PEP 440.
    """


class BaseSpecifier(with_metaclass(abc.ABCMeta, object)):
    """Abstract interface shared by individual specifiers and specifier sets."""

    @abc.abstractmethod
    def __str__(self):
        """
        Returns the str representation of this Specifier like object. This
        should be representative of the Specifier itself.
        """

    @abc.abstractmethod
    def __hash__(self):
        """
        Returns a hash value for this Specifier like object.
        """

    @abc.abstractmethod
    def __eq__(self, other):
        """
        Returns a boolean representing whether or not the two Specifier like
        objects are equal.
        """

    @abc.abstractmethod
    def __ne__(self, other):
        """
        Returns a boolean representing whether or not the two Specifier like
        objects are not equal.
        """

    @abc.abstractproperty
    def prereleases(self):
        """
        Returns whether or not pre-releases as a whole are allowed by this
        specifier.
        """

    @prereleases.setter
    def prereleases(self, value):
        """
        Sets whether or not pre-releases as a whole are allowed by this
        specifier.
        """

    @abc.abstractmethod
    def contains(self, item, prereleases=None):
        """
        Determines if the given item is contained within this specifier.
        """

    @abc.abstractmethod
    def filter(self, iterable, prereleases=None):
        """
        Takes an iterable of items and filters them so that only items which
        are contained within this specifier are allowed in it.
        """


class _IndividualSpecifier(BaseSpecifier):
    """Base class for a single ``<operator><version>`` clause.

    Subclasses supply ``_regex`` (parses the clause) and ``_operators``
    (maps operator text to a ``_compare_*`` method name).
    """

    _operators = {}

    def __init__(self, spec="", prereleases=None):
        # Parse the raw clause; reject anything the subclass regex can't match.
        match = self._regex.search(spec)
        if not match:
            raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec))

        # Keep the normalized (operator, version) pair as the identity of
        # this specifier; __eq__/__hash__/__str__ are all derived from it.
        self._spec = (
            match.group("operator").strip(),
            match.group("version").strip(),
        )

        # Store whether or not this Specifier should accept prereleases
        self._prereleases = prereleases

    def __repr__(self):
        pre = (
            ", prereleases={0!r}".format(self.prereleases)
            if self._prereleases is not None
            else ""
        )

        return "<{0}({1!r}{2})>".format(
            self.__class__.__name__,
            str(self),
            pre,
        )

    def __str__(self):
        return "{0}{1}".format(*self._spec)

    def __hash__(self):
        return hash(self._spec)

    def __eq__(self, other):
        # Strings are coerced through the constructor so "==1.0" compares
        # equal to Specifier("==1.0").
        if isinstance(other, string_types):
            try:
                other = self.__class__(other)
            except InvalidSpecifier:
                return NotImplemented
        elif not isinstance(other, self.__class__):
            return NotImplemented

        return self._spec == other._spec

    def __ne__(self, other):
        if isinstance(other, string_types):
            try:
                other = self.__class__(other)
            except InvalidSpecifier:
                return NotImplemented
        elif not isinstance(other, self.__class__):
            return NotImplemented

        return self._spec != other._spec

    def _get_operator(self, op):
        # Dispatch operator text (e.g. ">=") to the matching comparison
        # method (e.g. self._compare_greater_than_equal).
        return getattr(self, "_compare_{0}".format(self._operators[op]))

    def _coerce_version(self, version):
        if not isinstance(version, (LegacyVersion, Version)):
            version = parse(version)
        return version

    @property
    def operator(self):
        # The operator half of the parsed clause, e.g. "==".
        return self._spec[0]

    @property
    def version(self):
        # The version half of the parsed clause, e.g. "1.0".
        return self._spec[1]

    @property
    def prereleases(self):
        return self._prereleases

    @prereleases.setter
    def prereleases(self, value):
        self._prereleases = value

    def __contains__(self, item):
        return self.contains(item)

    def contains(self, item, prereleases=None):
        """Return True if *item* satisfies this specifier.

        ``prereleases`` overrides the specifier's own pre-release policy
        for this single call when it is not None.
        """
        # Determine if prereleases are to be allowed or not.
        if prereleases is None:
            prereleases = self.prereleases

        # Normalize item to a Version or LegacyVersion, this allows us to have
        # a shortcut for ``"2.0" in Specifier(">=2")``
        item = self._coerce_version(item)

        # Determine if we should be supporting prereleases in this specifier
        # or not, if we do not support prereleases then we can short circuit
        # logic if this version is a prerelease.
        if item.is_prerelease and not prereleases:
            return False

        # Actually do the comparison to determine if this item is contained
        # within this Specifier or not.
        return self._get_operator(self.operator)(item, self.version)

    def filter(self, iterable, prereleases=None):
        """Yield the items of *iterable* that satisfy this specifier.

        Pre-releases that match are held back and only yielded if nothing
        else matched (unless pre-releases are explicitly enabled).
        """
        yielded = False
        found_prereleases = []

        kw = {"prereleases": prereleases if prereleases is not None else True}

        # Attempt to iterate over all the values in the iterable and if any of
        # them match, yield them.
        for version in iterable:
            parsed_version = self._coerce_version(version)

            if self.contains(parsed_version, **kw):
                # If our version is a prerelease, and we were not set to allow
                # prereleases, then we'll store it for later in case nothing
                # else matches this specifier.
                if (parsed_version.is_prerelease and not
                        (prereleases or self.prereleases)):
                    found_prereleases.append(version)
                # Either this is not a prerelease, or we should have been
                # accepting prereleases from the beginning.
                else:
                    yielded = True
                    yield version

        # Now that we've iterated over everything, determine if we've yielded
        # any values, and if we have not and we have any prereleases stored up
        # then we will go ahead and yield the prereleases.
        if not yielded and found_prereleases:
            for version in found_prereleases:
                yield version


class LegacySpecifier(_IndividualSpecifier):
    """A setuptools-style (non PEP 440) specifier such as ``>=1.0``."""

    _regex_str = (
        r"""
        (?P<operator>(==|!=|<=|>=|<|>))
        \s*
        (?P<version>
            [^,;\s)]* # Since this is a "legacy" specifier, and the version
                      # string can be just about anything, we match everything
                      # except for whitespace, a semi-colon for marker support,
                      # a closing paren since versions can be enclosed in
                      # them, and a comma since it's a version separator.
        )
        """
    )

    _regex = re.compile(
        r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)

    _operators = {
        "==": "equal",
        "!=": "not_equal",
        "<=": "less_than_equal",
        ">=": "greater_than_equal",
        "<": "less_than",
        ">": "greater_than",
    }

    def _coerce_version(self, version):
        # Legacy specifiers always compare via LegacyVersion string ordering.
        if not isinstance(version, LegacyVersion):
            version = LegacyVersion(str(version))
        return version

    def _compare_equal(self, prospective, spec):
        return prospective == self._coerce_version(spec)

    def _compare_not_equal(self, prospective, spec):
        return prospective != self._coerce_version(spec)

    def _compare_less_than_equal(self, prospective, spec):
        return prospective <= self._coerce_version(spec)

    def _compare_greater_than_equal(self, prospective, spec):
        return prospective >= self._coerce_version(spec)

    def _compare_less_than(self, prospective, spec):
        return prospective < self._coerce_version(spec)

    def _compare_greater_than(self, prospective, spec):
        return prospective > self._coerce_version(spec)


def _require_version_compare(fn):
    # Decorator: the wrapped comparison only applies to real PEP 440
    # Versions; LegacyVersions never satisfy a PEP 440 Specifier.
    @functools.wraps(fn)
    def wrapped(self, prospective, spec):
        if not isinstance(prospective, Version):
            return False
        return fn(self, prospective, spec)
    return wrapped


class Specifier(_IndividualSpecifier):
    """A PEP 440 specifier such as ``~=1.4``, ``==1.0.*`` or ``===foo``."""

    _regex_str = (
        r"""
        (?P<operator>(~=|==|!=|<=|>=|<|>|===))
        (?P<version>
            (?:
                # The identity operators allow for an escape hatch that will
                # do an exact string match of the version you wish to install.
                # This will not be parsed by PEP 440 and we cannot determine
                # any semantic meaning from it. This operator is discouraged
                # but included entirely as an escape hatch.
                (?<====)  # Only match for the identity operator
                \s*
                [^\s]*    # We just match everything, except for whitespace
                          # since we are only testing for strict identity.
            )
            |
            (?:
                # The (non)equality operators allow for wild card and local
                # versions to be specified so we have to define these two
                # operators separately to enable that.
                (?<===|!=)            # Only match for equals and not equals
                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)*   # release
                (?:                   # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?

                # You cannot use a wild card and a dev or local version
                # together so group them with a | and make them optional.
                (?:
                    (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
                    (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)?  # local
                    |
                    \.\*  # Wild card syntax of .*
                )?
            )
            |
            (?:
                # The compatible operator requires at least two digits in the
                # release segment.
                (?<=~=)               # Only match for the compatible operator
                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)+   # release  (We have a + instead of a *)
                (?:                   # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?
                (?:[-_\.]?dev[-_\.]?[0-9]*)?  # dev release
            )
            |
            (?:
                # All other operators only allow a sub set of what the
                # (non)equality operators do. Specifically they do not allow
                # local versions to be specified nor do they allow the prefix
                # matching wild cards.
                (?<!==|!=|~=)         # We have special cases for these
                                      # operators so we want to make sure they
                                      # don't match here.
                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)*   # release
                (?:                   # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?
                (?:[-_\.]?dev[-_\.]?[0-9]*)?  # dev release
            )
        )
        """
    )

    _regex = re.compile(
        r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)

    _operators = {
        "~=": "compatible",
        "==": "equal",
        "!=": "not_equal",
        "<=": "less_than_equal",
        ">=": "greater_than_equal",
        "<": "less_than",
        ">": "greater_than",
        "===": "arbitrary",
    }

    @_require_version_compare
    def _compare_compatible(self, prospective, spec):
        # Compatible releases have an equivalent combination of >= and ==. That
        # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
        # implement this in terms of the other specifiers instead of
        # implementing it ourselves. The only thing we need to do is construct
        # the other specifiers.

        # We want everything but the last item in the version, but we want to
        # ignore post and dev releases and we want to treat the pre-release as
        # it's own separate segment.
        prefix = ".".join(
            list(
                itertools.takewhile(
                    lambda x: (not x.startswith("post") and not
                               x.startswith("dev")),
                    _version_split(spec),
                )
            )[:-1]
        )

        # Add the prefix notation to the end of our string
        prefix += ".*"

        return (self._get_operator(">=")(prospective, spec) and
                self._get_operator("==")(prospective, prefix))

    @_require_version_compare
    def _compare_equal(self, prospective, spec):
        # We need special logic to handle prefix matching
        if spec.endswith(".*"):
            # In the case of prefix matching we want to ignore local segment.
            prospective = Version(prospective.public)
            # Split the spec out by dots, and pretend that there is an implicit
            # dot in between a release segment and a pre-release segment.
            spec = _version_split(spec[:-2])  # Remove the trailing .*

            # Split the prospective version out by dots, and pretend that there
            # is an implicit dot in between a release segment and a pre-release
            # segment.
            prospective = _version_split(str(prospective))

            # Shorten the prospective version to be the same length as the spec
            # so that we can determine if the specifier is a prefix of the
            # prospective version or not.
            prospective = prospective[:len(spec)]

            # Pad out our two sides with zeros so that they both equal the same
            # length.
            spec, prospective = _pad_version(spec, prospective)
        else:
            # Convert our spec string into a Version
            spec = Version(spec)

            # If the specifier does not have a local segment, then we want to
            # act as if the prospective version also does not have a local
            # segment.
            if not spec.local:
                prospective = Version(prospective.public)

        return prospective == spec

    @_require_version_compare
    def _compare_not_equal(self, prospective, spec):
        return not self._compare_equal(prospective, spec)

    @_require_version_compare
    def _compare_less_than_equal(self, prospective, spec):
        return prospective <= Version(spec)

    @_require_version_compare
    def _compare_greater_than_equal(self, prospective, spec):
        return prospective >= Version(spec)

    @_require_version_compare
    def _compare_less_than(self, prospective, spec):
        # Convert our spec to a Version instance, since we'll want to work with
        # it as a version.
        spec = Version(spec)

        # Check to see if the prospective version is less than the spec
        # version. If it's not we can short circuit and just return False now
        # instead of doing extra unneeded work.
        if not prospective < spec:
            return False

        # This special case is here so that, unless the specifier itself
        # includes is a pre-release version, that we do not accept pre-release
        # versions for the version mentioned in the specifier (e.g. <3.1 should
        # not match 3.1.dev0, but should match 3.0.dev0).
        if not spec.is_prerelease and prospective.is_prerelease:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False

        # If we've gotten to here, it means that prospective version is both
        # less than the spec version *and* it's not a pre-release of the same
        # version in the spec.
        return True

    @_require_version_compare
    def _compare_greater_than(self, prospective, spec):
        # Convert our spec to a Version instance, since we'll want to work with
        # it as a version.
        spec = Version(spec)

        # Check to see if the prospective version is greater than the spec
        # version. If it's not we can short circuit and just return False now
        # instead of doing extra unneeded work.
        if not prospective > spec:
            return False

        # This special case is here so that, unless the specifier itself
        # includes is a post-release version, that we do not accept
        # post-release versions for the version mentioned in the specifier
        # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0).
        if not spec.is_postrelease and prospective.is_postrelease:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False

        # Ensure that we do not allow a local version of the version mentioned
        # in the specifier, which is technically greater than, to match.
        if prospective.local is not None:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False

        # If we've gotten to here, it means that prospective version is both
        # greater than the spec version *and* it's not a pre-release of the
        # same version in the spec.
        return True

    def _compare_arbitrary(self, prospective, spec):
        # === is a plain case-insensitive string comparison; no version
        # semantics are applied at all.
        return str(prospective).lower() == str(spec).lower()

    @property
    def prereleases(self):
        # If there is an explicit prereleases set for this, then we'll just
        # blindly use that.
        if self._prereleases is not None:
            return self._prereleases

        # Look at all of our specifiers and determine if they are inclusive
        # operators, and if they are if they are including an explicit
        # prerelease.
        operator, version = self._spec
        if operator in ["==", ">=", "<=", "~=", "==="]:
            # The == specifier can include a trailing .*, if it does we
            # want to remove before parsing.
            if operator == "==" and version.endswith(".*"):
                version = version[:-2]

            # Parse the version, and if it is a pre-release then this
            # specifier allows pre-releases.
            if parse(version).is_prerelease:
                return True

        return False

    @prereleases.setter
    def prereleases(self, value):
        self._prereleases = value


# Splits an alphanumeric segment like "1b2" into ("1", "b2") so each part
# sorts as its own dotted segment.
_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")


def _version_split(version):
    """Split a version string on dots, also splitting trailing pre-release
    markers (e.g. "1.4b1" -> ["1", "4", "b1"])."""
    result = []
    for item in version.split("."):
        match = _prefix_regex.search(item)
        if match:
            result.extend(match.groups())
        else:
            result.append(item)
    return result


def _pad_version(left, right):
    """Zero-pad the shorter release segment so both split versions have
    release segments of equal length (used for prefix matching)."""
    left_split, right_split = [], []

    # Get the release segment of our versions
    left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left)))
    right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))

    # Get the rest of our versions
    left_split.append(left[len(left_split[0]):])
    right_split.append(right[len(right_split[0]):])

    # Insert our padding
    left_split.insert(
        1,
        ["0"] * max(0, len(right_split[0]) - len(left_split[0])),
    )
    right_split.insert(
        1,
        ["0"] * max(0, len(left_split[0]) - len(right_split[0])),
    )

    return (
        list(itertools.chain(*left_split)),
        list(itertools.chain(*right_split)),
    )


class SpecifierSet(BaseSpecifier):
    """A comma-separated conjunction of individual specifiers."""

    def __init__(self, specifiers="", prereleases=None):
        # Split on , to break each individual specifier into it's own item, and
        # strip each item to remove leading/trailing whitespace.
        specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]

        # Parse each individual specifier, attempting first to make it a
        # Specifier and falling back to a LegacySpecifier.
        parsed = set()
        for specifier in specifiers:
            try:
                parsed.add(Specifier(specifier))
            except InvalidSpecifier:
                parsed.add(LegacySpecifier(specifier))

        # Turn our parsed specifiers into a frozen set and save them for later.
        self._specs = frozenset(parsed)

        # Store our prereleases value so we can use it later to determine if
        # we accept prereleases or not.
        self._prereleases = prereleases

    def __repr__(self):
        pre = (
            ", prereleases={0!r}".format(self.prereleases)
            if self._prereleases is not None
            else ""
        )

        return "<SpecifierSet({0!r}{1})>".format(str(self), pre)

    def __str__(self):
        return ",".join(sorted(str(s) for s in self._specs))

    def __hash__(self):
        return hash(self._specs)

    def __and__(self, other):
        # The & operator unions the underlying specifier clauses
        # (set intersection of the versions they allow).
        if isinstance(other, string_types):
            other = SpecifierSet(other)
        elif not isinstance(other, SpecifierSet):
            return NotImplemented

        specifier = SpecifierSet()
        specifier._specs = frozenset(self._specs | other._specs)

        # An explicit prerelease override on either side wins; conflicting
        # explicit overrides are an error.
        if self._prereleases is None and other._prereleases is not None:
            specifier._prereleases = other._prereleases
        elif self._prereleases is not None and other._prereleases is None:
            specifier._prereleases = self._prereleases
        elif self._prereleases == other._prereleases:
            specifier._prereleases = self._prereleases
        else:
            raise ValueError(
                "Cannot combine SpecifierSets with True and False prerelease "
                "overrides."
            )

        return specifier

    def __eq__(self, other):
        if isinstance(other, string_types):
            other = SpecifierSet(other)
        elif isinstance(other, _IndividualSpecifier):
            other = SpecifierSet(str(other))
        elif not isinstance(other, SpecifierSet):
            return NotImplemented

        return self._specs == other._specs

    def __ne__(self, other):
        if isinstance(other, string_types):
            other = SpecifierSet(other)
        elif isinstance(other, _IndividualSpecifier):
            other = SpecifierSet(str(other))
        elif not isinstance(other, SpecifierSet):
            return NotImplemented

        return self._specs != other._specs

    def __len__(self):
        return len(self._specs)

    def __iter__(self):
        return iter(self._specs)

    @property
    def prereleases(self):
        # If we have been given an explicit prerelease modifier, then we'll
        # pass that through here.
        if self._prereleases is not None:
            return self._prereleases

        # If we don't have any specifiers, and we don't have a forced value,
        # then we'll just return None since we don't know if this should have
        # pre-releases or not.
        if not self._specs:
            return None

        # Otherwise we'll see if any of the given specifiers accept
        # prereleases, if any of them do we'll return True, otherwise False.
        return any(s.prereleases for s in self._specs)

    @prereleases.setter
    def prereleases(self, value):
        self._prereleases = value

    def __contains__(self, item):
        return self.contains(item)

    def contains(self, item, prereleases=None):
        """Return True if *item* satisfies every specifier in the set."""
        # Ensure that our item is a Version or LegacyVersion instance.
        if not isinstance(item, (LegacyVersion, Version)):
            item = parse(item)

        # Determine if we're forcing a prerelease or not, if we're not forcing
        # one for this particular filter call, then we'll use whatever the
        # SpecifierSet thinks for whether or not we should support prereleases.
        if prereleases is None:
            prereleases = self.prereleases

        # We can determine if we're going to allow pre-releases by looking to
        # see if any of the underlying items supports them. If none of them do
        # and this item is a pre-release then we do not allow it and we can
        # short circuit that here.
        # Note: This means that 1.0.dev1 would not be contained in something
        #       like >=1.0.devabc however it would be in >=1.0.debabc,>0.0.dev0
        if not prereleases and item.is_prerelease:
            return False

        # We simply dispatch to the underlying specs here to make sure that the
        # given version is contained within all of them.
        # Note: This use of all() here means that an empty set of specifiers
        #       will always return True, this is an explicit design decision.
        return all(
            s.contains(item, prereleases=prereleases)
            for s in self._specs
        )

    def filter(self, iterable, prereleases=None):
        """Filter *iterable* down to the items every specifier accepts."""
        # Determine if we're forcing a prerelease or not, if we're not forcing
        # one for this particular filter call, then we'll use whatever the
        # SpecifierSet thinks for whether or not we should support prereleases.
        if prereleases is None:
            prereleases = self.prereleases

        # If we have any specifiers, then we want to wrap our iterable in the
        # filter method for each one, this will act as a logical AND amongst
        # each specifier.
        if self._specs:
            for spec in self._specs:
                iterable = spec.filter(iterable, prereleases=bool(prereleases))
            return iterable
        # If we do not have any specifiers, then we need to have a rough filter
        # which will filter out any pre-releases, unless there are no final
        # releases, and which will filter out LegacyVersion in general.
        else:
            filtered = []
            found_prereleases = []

            for item in iterable:
                # Ensure that we have some kind of Version class for this item.
                if not isinstance(item, (LegacyVersion, Version)):
                    parsed_version = parse(item)
                else:
                    parsed_version = item

                # Filter out any item which is parsed as a LegacyVersion
                if isinstance(parsed_version, LegacyVersion):
                    continue

                # Store any item which is a pre-release for later unless we've
                # already found a final version or we are accepting prereleases
                if parsed_version.is_prerelease and not prereleases:
                    if not filtered:
                        found_prereleases.append(item)
                else:
                    filtered.append(item)

            # If we've found no items except for pre-releases, then we'll go
            # ahead and use the pre-releases
            if not filtered and found_prereleases and prereleases is None:
                return found_prereleases

            return filtered
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
"""PEP 508 requirement string parsing (vendored ``packaging.requirements``)."""
from __future__ import absolute_import, division, print_function

import string
import re

from pkg_resources.extern.pyparsing import stringStart, stringEnd, originalTextFor, ParseException
from pkg_resources.extern.pyparsing import ZeroOrMore, Word, Optional, Regex, Combine
from pkg_resources.extern.pyparsing import Literal as L  # noqa
from pkg_resources.extern.six.moves.urllib import parse as urlparse

from .markers import MARKER_EXPR, Marker
from .specifiers import LegacySpecifier, Specifier, SpecifierSet


class InvalidRequirement(ValueError):
    """
    An invalid requirement was found, users should refer to PEP 508.
    """


# --- pyparsing grammar for PEP 508 requirement strings -----------------------

ALPHANUM = Word(string.ascii_letters + string.digits)

LBRACKET = L("[").suppress()
RBRACKET = L("]").suppress()
LPAREN = L("(").suppress()
RPAREN = L(")").suppress()
COMMA = L(",").suppress()
SEMICOLON = L(";").suppress()
AT = L("@").suppress()

PUNCTUATION = Word("-_.")
IDENTIFIER_END = ALPHANUM | (ZeroOrMore(PUNCTUATION) + ALPHANUM)
IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END))

NAME = IDENTIFIER("name")
EXTRA = IDENTIFIER

URI = Regex(r'[^ ]+')("url")
URL = (AT + URI)

EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA)
EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras")

# Version clauses reuse the specifier regexes so the grammar and the
# Specifier classes can never disagree about what a clause looks like.
VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE)
VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE)

VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY
VERSION_MANY = Combine(VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE),
                       joinString=",", adjacent=False)("_raw_spec")
_VERSION_SPEC = Optional(((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY))
_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or '')

VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier")
VERSION_SPEC.setParseAction(lambda s, l, t: t[1])

# Note: MARKER_EXPR() copies the Forward imported from .markers so the
# parse action attached below does not leak back into that module.
MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker")
MARKER_EXPR.setParseAction(
    lambda s, l, t: Marker(s[t._original_start:t._original_end])
)
MARKER_SEPERATOR = SEMICOLON
MARKER = MARKER_SEPERATOR + MARKER_EXPR

VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER)
URL_AND_MARKER = URL + Optional(MARKER)

NAMED_REQUIREMENT = \
    NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER)

REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd


class Requirement(object):
    """Parse a requirement.

    Parse a given requirement string into its parts, such as name, specifier,
    URL, and extras. Raises InvalidRequirement on a badly-formed requirement
    string.
    """

    # TODO: Can we test whether something is contained within a requirement?
    #       If so how do we do that? Do we need to test against the _name_ of
    #       the thing as well as the version? What about the markers?
    # TODO: Can we normalize the name and extra name?

    def __init__(self, requirement_string):
        try:
            req = REQUIREMENT.parseString(requirement_string)
        except ParseException as e:
            # Include a small window of text around the failure position.
            raise InvalidRequirement(
                "Invalid requirement, parse error at \"{0!r}\"".format(
                    requirement_string[e.loc:e.loc + 8]))

        self.name = req.name
        if req.url:
            parsed_url = urlparse.urlparse(req.url)
            # NOTE(review): the second clause is logically implied by the
            # first (not (a and b) is already True whenever both are falsy),
            # so any URL missing scheme *or* netloc is rejected — confirm
            # that is the intended policy before simplifying.
            if not (parsed_url.scheme and parsed_url.netloc) or (
                    not parsed_url.scheme and not parsed_url.netloc):
                raise InvalidRequirement("Invalid URL given")
            self.url = req.url
        else:
            self.url = None
        self.extras = set(req.extras.asList() if req.extras else [])
        self.specifier = SpecifierSet(req.specifier)
        self.marker = req.marker if req.marker else None

    def __str__(self):
        # Reassemble a canonical PEP 508 string from the parsed parts.
        parts = [self.name]

        if self.extras:
            parts.append("[{0}]".format(",".join(sorted(self.extras))))

        if self.specifier:
            parts.append(str(self.specifier))

        if self.url:
            parts.append("@ {0}".format(self.url))

        if self.marker:
            parts.append("; {0}".format(self.marker))

        return "".join(parts)

    def __repr__(self):
        return "<Requirement({0!r})>".format(str(self))
# This file is dual licensed under the terms of the Apache License, Version# 2.0, and the BSD License. See the LICENSE file in the root of this repository# for complete details.from __future__ import absolute_import, division, print_functionimport operatorimport osimport platformimport sysfrom pkg_resources.extern.pyparsing import ParseException, ParseResults, stringStart, stringEndfrom pkg_resources.extern.pyparsing import ZeroOrMore, Group, Forward, QuotedStringfrom pkg_resources.extern.pyparsing import Literal as L # noqafrom ._compat import string_typesfrom .specifiers import Specifier, InvalidSpecifier__all__ = ["InvalidMarker", "UndefinedComparison", "UndefinedEnvironmentName","Marker", "default_environment",]class InvalidMarker(ValueError):"""An invalid marker was found, users should refer to PEP 508."""class UndefinedComparison(ValueError):"""An invalid operation was attempted on a value that doesn't support it."""class UndefinedEnvironmentName(ValueError):"""A name was attempted to be used that does not exist inside of theenvironment."""class Node(object):def __init__(self, value):self.value = valuedef __str__(self):return str(self.value)def __repr__(self):return "<{0}({1!r})>".format(self.__class__.__name__, str(self))def serialize(self):raise NotImplementedErrorclass Variable(Node):def serialize(self):return str(self)class Value(Node):def serialize(self):return '"{0}"'.format(self)class Op(Node):def serialize(self):return str(self)VARIABLE = (L("implementation_version") |L("platform_python_implementation") |L("implementation_name") |L("python_full_version") |L("platform_release") |L("platform_version") |L("platform_machine") |L("platform_system") |L("python_version") |L("sys_platform") |L("os_name") |L("os.name") | # PEP-345L("sys.platform") | # PEP-345L("platform.version") | # PEP-345L("platform.machine") | # PEP-345L("platform.python_implementation") | # PEP-345L("python_implementation") | # undocumented setuptools legacyL("extra"))ALIASES = 
{'os.name': 'os_name','sys.platform': 'sys_platform','platform.version': 'platform_version','platform.machine': 'platform_machine','platform.python_implementation': 'platform_python_implementation','python_implementation': 'platform_python_implementation'}VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0])))VERSION_CMP = (L("===") |L("==") |L(">=") |L("<=") |L("!=") |L("~=") |L(">") |L("<"))MARKER_OP = VERSION_CMP | L("not in") | L("in")MARKER_OP.setParseAction(lambda s, l, t: Op(t[0]))MARKER_VALUE = QuotedString("'") | QuotedString('"')MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0]))BOOLOP = L("and") | L("or")MARKER_VAR = VARIABLE | MARKER_VALUEMARKER_ITEM = Group(MARKER_VAR + MARKER_OP + MARKER_VAR)MARKER_ITEM.setParseAction(lambda s, l, t: tuple(t[0]))LPAREN = L("(").suppress()RPAREN = L(")").suppress()MARKER_EXPR = Forward()MARKER_ATOM = MARKER_ITEM | Group(LPAREN + MARKER_EXPR + RPAREN)MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR)MARKER = stringStart + MARKER_EXPR + stringEnddef _coerce_parse_result(results):if isinstance(results, ParseResults):return [_coerce_parse_result(i) for i in results]else:return resultsdef _format_marker(marker, first=True):assert isinstance(marker, (list, tuple, string_types))# Sometimes we have a structure like [[...]] which is a single item list# where the single item is itself it's own list. 
In that case we want skip# the rest of this function so that we don't get extraneous () on the# outside.if (isinstance(marker, list) and len(marker) == 1 andisinstance(marker[0], (list, tuple))):return _format_marker(marker[0])if isinstance(marker, list):inner = (_format_marker(m, first=False) for m in marker)if first:return " ".join(inner)else:return "(" + " ".join(inner) + ")"elif isinstance(marker, tuple):return " ".join([m.serialize() for m in marker])else:return marker_operators = {"in": lambda lhs, rhs: lhs in rhs,"not in": lambda lhs, rhs: lhs not in rhs,"<": operator.lt,"<=": operator.le,"==": operator.eq,"!=": operator.ne,">=": operator.ge,">": operator.gt,}def _eval_op(lhs, op, rhs):try:spec = Specifier("".join([op.serialize(), rhs]))except InvalidSpecifier:passelse:return spec.contains(lhs)oper = _operators.get(op.serialize())if oper is None:raise UndefinedComparison("Undefined {0!r} on {1!r} and {2!r}.".format(op, lhs, rhs))return oper(lhs, rhs)_undefined = object()def _get_env(environment, name):value = environment.get(name, _undefined)if value is _undefined:raise UndefinedEnvironmentName("{0!r} does not exist in evaluation environment.".format(name))return valuedef _evaluate_markers(markers, environment):groups = [[]]for marker in markers:assert isinstance(marker, (list, tuple, string_types))if isinstance(marker, list):groups[-1].append(_evaluate_markers(marker, environment))elif isinstance(marker, tuple):lhs, op, rhs = markerif isinstance(lhs, Variable):lhs_value = _get_env(environment, lhs.value)rhs_value = rhs.valueelse:lhs_value = lhs.valuerhs_value = _get_env(environment, rhs.value)groups[-1].append(_eval_op(lhs_value, op, rhs_value))else:assert marker in ["and", "or"]if marker == "or":groups.append([])return any(all(item) for item in groups)def format_full_version(info):version = '{0.major}.{0.minor}.{0.micro}'.format(info)kind = info.releaselevelif kind != 'final':version += kind[0] + str(info.serial)return versiondef default_environment():if 
hasattr(sys, 'implementation'):iver = format_full_version(sys.implementation.version)implementation_name = sys.implementation.nameelse:iver = '0'implementation_name = ''return {"implementation_name": implementation_name,"implementation_version": iver,"os_name": os.name,"platform_machine": platform.machine(),"platform_release": platform.release(),"platform_system": platform.system(),"platform_version": platform.version(),"python_full_version": platform.python_version(),"platform_python_implementation": platform.python_implementation(),"python_version": platform.python_version()[:3],"sys_platform": sys.platform,}class Marker(object):def __init__(self, marker):try:self._markers = _coerce_parse_result(MARKER.parseString(marker))except ParseException as e:err_str = "Invalid marker: {0!r}, parse error at {1!r}".format(marker, marker[e.loc:e.loc + 8])raise InvalidMarker(err_str)def __str__(self):return _format_marker(self._markers)def __repr__(self):return "<Marker({0!r})>".format(str(self))def evaluate(self, environment=None):"""Evaluate a marker.Return the boolean from evaluating the given marker against theenvironment. environment is an optional argument to override all orpart of the determined environment.The environment is determined from the current Python process."""current_environment = default_environment()if environment is not None:current_environment.update(environment)return _evaluate_markers(self._markers, current_environment)
# This file is dual licensed under the terms of the Apache License, Version# 2.0, and the BSD License. See the LICENSE file in the root of this repository# for complete details.from __future__ import absolute_import, division, print_functionclass Infinity(object):def __repr__(self):return "Infinity"def __hash__(self):return hash(repr(self))def __lt__(self, other):return Falsedef __le__(self, other):return Falsedef __eq__(self, other):return isinstance(other, self.__class__)def __ne__(self, other):return not isinstance(other, self.__class__)def __gt__(self, other):return Truedef __ge__(self, other):return Truedef __neg__(self):return NegativeInfinityInfinity = Infinity()class NegativeInfinity(object):def __repr__(self):return "-Infinity"def __hash__(self):return hash(repr(self))def __lt__(self, other):return Truedef __le__(self, other):return Truedef __eq__(self, other):return isinstance(other, self.__class__)def __ne__(self, other):return not isinstance(other, self.__class__)def __gt__(self, other):return Falsedef __ge__(self, other):return Falsedef __neg__(self):return InfinityNegativeInfinity = NegativeInfinity()
# This file is dual licensed under the terms of the Apache License, Version# 2.0, and the BSD License. See the LICENSE file in the root of this repository# for complete details.from __future__ import absolute_import, division, print_functionimport sysPY2 = sys.version_info[0] == 2PY3 = sys.version_info[0] == 3# flake8: noqaif PY3:string_types = str,else:string_types = basestring,def with_metaclass(meta, *bases):"""Create a base class with a metaclass."""# This requires a bit of explanation: the basic idea is to make a dummy# metaclass for one level of class instantiation that replaces itself with# the actual metaclass.class metaclass(meta):def __new__(cls, name, this_bases, d):return meta(name, bases, d)return type.__new__(metaclass, 'temporary_class', (), {})
# This file is dual licensed under the terms of the Apache License, Version# 2.0, and the BSD License. See the LICENSE file in the root of this repository# for complete details.from __future__ import absolute_import, division, print_functionfrom .__about__ import (__author__, __copyright__, __email__, __license__, __summary__, __title__,__uri__, __version__)__all__ = ["__title__", "__summary__", "__uri__", "__version__", "__author__","__email__", "__license__", "__copyright__",]
# This file is dual licensed under the terms of the Apache License, Version# 2.0, and the BSD License. See the LICENSE file in the root of this repository# for complete details.from __future__ import absolute_import, division, print_function__all__ = ["__title__", "__summary__", "__uri__", "__version__", "__author__","__email__", "__license__", "__copyright__",]__title__ = "packaging"__summary__ = "Core utilities for Python packages"__uri__ = "https://github.com/pypa/packaging"__version__ = "16.8"__author__ = "Donald Stufft and individual contributors"__email__ = "donald@stufft.io"__license__ = "BSD or Apache License, Version 2.0"__copyright__ = "Copyright 2014-2016 %s" % __author__
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2005-2010 ActiveState Software Inc.
# Copyright (c) 2013 Eddy Petrișor
"""Utilities for determining application-specific dirs.

See <http://github.com/ActiveState/appdirs> for details and usage.
"""
# Dev Notes:
# - MSDN on where to store app data files:
#   http://support.microsoft.com/default.aspx?scid=kb;en-us;310294#XSLTH3194121123120121120120
# - Mac OS X: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html
# - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html

__version_info__ = (1, 4, 0)
__version__ = '.'.join(map(str, __version_info__))


import sys
import os

PY3 = sys.version_info[0] == 3

if PY3:
    # Alias so the Windows helpers below can call unicode() on both majors.
    unicode = str

if sys.platform.startswith('java'):
    import platform
    os_name = platform.java_ver()[3][0]
    if os_name.startswith('Windows'):  # "Windows XP", "Windows 7", etc.
        system = 'win32'
    elif os_name.startswith('Mac'):  # "Mac OS X", etc.
        system = 'darwin'
    else:  # "Linux", "SunOS", "FreeBSD", etc.
        # Setting this to "linux2" is not ideal, but only Windows or Mac
        # are actually checked for and the rest of the module expects
        # *sys.platform* style strings.
        system = 'linux2'
else:
    system = sys.platform


def user_data_dir(appname=None, appauthor=None, version=None, roaming=False):
    r"""Return full path to the user-specific data dir for this application.

    "appname" is the name of the application; if None, just the system
        base directory is returned.
    "appauthor" (Windows only) is the owning company name; falls back to
        appname. Pass False to disable it.
    "version" is an optional version path element appended to the path,
        e.g. "<major>.<minor>". Only applied when appname is present.
    "roaming" (boolean, default False): on Windows, use the roaming
        appdata directory so the data syncs on login for users on a
        network with roaming profiles.

    Typical user data directories are:
        Mac OS X:  ~/Library/Application Support/<AppName>
        Unix:      ~/.local/share/<AppName>    # or $XDG_DATA_HOME, if defined
        Win XP:    C:\Documents and Settings\<username>\Application Data\<AppAuthor>\<AppName>
        Win 7:     C:\Users\<username>\AppData\{Local,Roaming}\<AppAuthor>\<AppName>
    """
    if system == "win32":
        if appauthor is None:
            appauthor = appname
        const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA"
        path = os.path.normpath(_get_win_folder(const))
        if appname:
            if appauthor is not False:
                path = os.path.join(path, appauthor, appname)
            else:
                path = os.path.join(path, appname)
    elif system == 'darwin':
        path = os.path.expanduser('~/Library/Application Support/')
        if appname:
            path = os.path.join(path, appname)
    else:
        path = os.getenv('XDG_DATA_HOME', os.path.expanduser("~/.local/share"))
        if appname:
            path = os.path.join(path, appname)
    if appname and version:
        path = os.path.join(path, version)
    return path


def site_data_dir(appname=None, appauthor=None, version=None, multipath=False):
    """Return full path to the user-shared data dir for this application.

    "appname", "appauthor" and "version" as for user_data_dir().
    "multipath" (*nix only): if True, return the entire list of data dirs
        (os.pathsep-separated). By default only the first item from
        $XDG_DATA_DIRS is returned, or '/usr/local/share/<AppName>' if
        XDG_DATA_DIRS is not set.

    Typical site data directories are:
        Mac OS X:  /Library/Application Support/<AppName>
        Unix:      /usr/local/share/<AppName> or /usr/share/<AppName>
        Win XP:    C:\\Documents and Settings\\All Users\\Application Data\\<AppAuthor>\\<AppName>
        Win 7:     C:\\ProgramData\\<AppAuthor>\\<AppName>   # hidden, but writeable

    WARNING: Do not use this on Windows Vista — "C:\\ProgramData" is a
    hidden *system* directory there.
    """
    if system == "win32":
        if appauthor is None:
            appauthor = appname
        path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA"))
        if appname:
            if appauthor is not False:
                path = os.path.join(path, appauthor, appname)
            else:
                path = os.path.join(path, appname)
    elif system == 'darwin':
        path = os.path.expanduser('/Library/Application Support')
        if appname:
            path = os.path.join(path, appname)
    else:
        # XDG default for $XDG_DATA_DIRS
        # only first, if multipath is False
        path = os.getenv('XDG_DATA_DIRS',
                         os.pathsep.join(['/usr/local/share', '/usr/share']))
        pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)]
        if appname:
            if version:
                appname = os.path.join(appname, version)
            pathlist = [os.sep.join([x, appname]) for x in pathlist]

        if multipath:
            path = os.pathsep.join(pathlist)
        else:
            path = pathlist[0]
        # NOTE: the *nix branch returns here, so the version join below
        # applies only to the win32/darwin branches (version is already
        # folded into appname above).
        return path

    if appname and version:
        path = os.path.join(path, version)
    return path


def user_config_dir(appname=None, appauthor=None, version=None, roaming=False):
    r"""Return full path to the user-specific config dir for this application.

    Parameters as for user_data_dir().

    Typical user config directories are:
        Mac OS X:  same as user_data_dir
        Unix:      ~/.config/<AppName>    # or $XDG_CONFIG_HOME, if defined
        Win *:     same as user_data_dir
    """
    if system in ["win32", "darwin"]:
        path = user_data_dir(appname, appauthor, None, roaming)
    else:
        path = os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config"))
        if appname:
            path = os.path.join(path, appname)
    if appname and version:
        path = os.path.join(path, version)
    return path


def site_config_dir(appname=None, appauthor=None, version=None, multipath=False):
    """Return full path to the user-shared config dir for this application.

    "appname", "appauthor" and "version" as for user_data_dir().
    "multipath" (*nix only): if True, return the entire list of config
        dirs. By default only the first item from $XDG_CONFIG_DIRS is
        returned, or '/etc/xdg/<AppName>' if XDG_CONFIG_DIRS is not set.

    Typical site config directories are:
        Mac OS X:  same as site_data_dir
        Unix:      /etc/xdg/<AppName> or $XDG_CONFIG_DIRS[i]/<AppName>
        Win *:     same as site_data_dir

    WARNING: Do not use this on Windows Vista (see site_data_dir note).
    """
    if system in ["win32", "darwin"]:
        path = site_data_dir(appname, appauthor)
        if appname and version:
            path = os.path.join(path, version)
    else:
        # XDG default for $XDG_CONFIG_DIRS
        # only first, if multipath is False
        path = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg')
        pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)]
        if appname:
            if version:
                appname = os.path.join(appname, version)
            pathlist = [os.sep.join([x, appname]) for x in pathlist]

        if multipath:
            path = os.pathsep.join(pathlist)
        else:
            path = pathlist[0]
    return path


def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True):
    r"""Return full path to the user-specific cache dir for this application.

    "appname", "appauthor" and "version" as for user_data_dir().
    "opinion" (boolean) can be False to disable appending "Cache" to the
        base app data dir on Windows (see note below).

    Typical user cache directories are:
        Mac OS X:  ~/Library/Caches/<AppName>
        Unix:      ~/.cache/<AppName>    # (XDG default)
        Win XP:    C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Cache
        Vista:     C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Cache

    On Windows the MSDN docs only suggest CSIDL_LOCAL_APPDATA, which is
    identical to the non-roaming user_data_dir; apps typically nest cache
    data under it, hence the opinionated "Cache" suffix.
    """
    if system == "win32":
        if appauthor is None:
            appauthor = appname
        path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA"))
        if appname:
            if appauthor is not False:
                path = os.path.join(path, appauthor, appname)
            else:
                path = os.path.join(path, appname)
            if opinion:
                path = os.path.join(path, "Cache")
    elif system == 'darwin':
        path = os.path.expanduser('~/Library/Caches')
        if appname:
            path = os.path.join(path, appname)
    else:
        path = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache'))
        if appname:
            path = os.path.join(path, appname)
    if appname and version:
        path = os.path.join(path, version)
    return path


def user_log_dir(appname=None, appauthor=None, version=None, opinion=True):
    r"""Return full path to the user-specific log dir for this application.

    "appname", "appauthor" and "version" as for user_data_dir().
    "opinion" (boolean) can be False to disable appending "Logs" (Windows)
        or "log" (Unix) to the base directory.

    Typical user log directories are:
        Mac OS X:  ~/Library/Logs/<AppName>
        Unix:      ~/.cache/<AppName>/log    # or under $XDG_CACHE_HOME if defined
        Win XP:    C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Logs
        Vista:     C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Logs
    """
    if system == "darwin":
        # NOTE(review): unlike the other functions, this joins appname
        # unconditionally, so appname=None raises on Mac — upstream behavior,
        # preserved as-is.
        path = os.path.join(
            os.path.expanduser('~/Library/Logs'),
            appname)
    elif system == "win32":
        path = user_data_dir(appname, appauthor, version)
        # version was already applied by user_data_dir; suppress the
        # trailing join below.
        version = False
        if opinion:
            path = os.path.join(path, "Logs")
    else:
        path = user_cache_dir(appname, appauthor, version)
        version = False
        if opinion:
            path = os.path.join(path, "log")
    if appname and version:
        path = os.path.join(path, version)
    return path


class AppDirs(object):
    """Convenience wrapper for getting application dirs."""

    def __init__(self, appname, appauthor=None, version=None, roaming=False,
                 multipath=False):
        self.appname = appname
        self.appauthor = appauthor
        self.version = version
        self.roaming = roaming
        self.multipath = multipath

    @property
    def user_data_dir(self):
        return user_data_dir(self.appname, self.appauthor,
                             version=self.version, roaming=self.roaming)

    @property
    def site_data_dir(self):
        return site_data_dir(self.appname, self.appauthor,
                             version=self.version, multipath=self.multipath)

    @property
    def user_config_dir(self):
        return user_config_dir(self.appname, self.appauthor,
                               version=self.version, roaming=self.roaming)

    @property
    def site_config_dir(self):
        return site_config_dir(self.appname, self.appauthor,
                               version=self.version, multipath=self.multipath)

    @property
    def user_cache_dir(self):
        return user_cache_dir(self.appname, self.appauthor,
                              version=self.version)

    @property
    def user_log_dir(self):
        return user_log_dir(self.appname, self.appauthor,
                            version=self.version)


# ---- internal support stuff

def _get_win_folder_from_registry(csidl_name):
    """This is a fallback technique at best. I'm not sure if using the
    registry for this guarantees us the correct answer for all CSIDL_*
    names.
    """
    # BUG FIX: the original unconditionally imported `_winreg`, which does
    # not exist on Python 3 (renamed to `winreg`); appdirs >= 1.4.1 uses
    # the same PY3 switch as below.
    if PY3:
        import winreg as _winreg
    else:
        import _winreg

    shell_folder_name = {
        "CSIDL_APPDATA": "AppData",
        "CSIDL_COMMON_APPDATA": "Common AppData",
        "CSIDL_LOCAL_APPDATA": "Local AppData",
    }[csidl_name]

    key = _winreg.OpenKey(
        _winreg.HKEY_CURRENT_USER,
        r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders"
    )
    dir, type = _winreg.QueryValueEx(key, shell_folder_name)
    return dir


def _get_win_folder_with_pywin32(csidl_name):
    from win32com.shell import shellcon, shell
    dir = shell.SHGetFolderPath(0, getattr(shellcon, csidl_name), 0, 0)
    # Try to make this a unicode path because SHGetFolderPath does
    # not return unicode strings when there is unicode data in the
    # path.
    try:
        dir = unicode(dir)

        # Downgrade to short path name if have highbit chars. See
        # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
        has_high_char = False
        for c in dir:
            if ord(c) > 255:
                has_high_char = True
                break
        if has_high_char:
            try:
                import win32api
                dir = win32api.GetShortPathName(dir)
            except ImportError:
                pass
    except UnicodeError:
        pass
    return dir


def _get_win_folder_with_ctypes(csidl_name):
    import ctypes

    csidl_const = {
        "CSIDL_APPDATA": 26,
        "CSIDL_COMMON_APPDATA": 35,
        "CSIDL_LOCAL_APPDATA": 28,
    }[csidl_name]

    buf = ctypes.create_unicode_buffer(1024)
    ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)

    # Downgrade to short path name if have highbit chars. See
    # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
    has_high_char = False
    for c in buf:
        if ord(c) > 255:
            has_high_char = True
            break
    if has_high_char:
        buf2 = ctypes.create_unicode_buffer(1024)
        if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
            buf = buf2

    return buf.value


def _get_win_folder_with_jna(csidl_name):
    import array
    from com.sun import jna
    from com.sun.jna.platform import win32

    buf_size = win32.WinDef.MAX_PATH * 2
    buf = array.zeros('c', buf_size)
    shell = win32.Shell32.INSTANCE
    shell.SHGetFolderPath(None, getattr(win32.ShlObj, csidl_name), None,
                          win32.ShlObj.SHGFP_TYPE_CURRENT, buf)
    dir = jna.Native.toString(buf.tostring()).rstrip("\0")

    # Downgrade to short path name if have highbit chars. See
    # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
    has_high_char = False
    for c in dir:
        if ord(c) > 255:
            has_high_char = True
            break
    if has_high_char:
        buf = array.zeros('c', buf_size)
        kernel = win32.Kernel32.INSTANCE
        # BUG FIX: original read `kernal.GetShortPathName(...)` — a
        # NameError whenever the short-path fallback was taken.
        if kernel.GetShortPathName(dir, buf, buf_size):
            dir = jna.Native.toString(buf.tostring()).rstrip("\0")

    return dir


if system == "win32":
    try:
        import win32com.shell
        _get_win_folder = _get_win_folder_with_pywin32
    except ImportError:
        try:
            from ctypes import windll
            _get_win_folder = _get_win_folder_with_ctypes
        except ImportError:
            try:
                import com.sun.jna
                _get_win_folder = _get_win_folder_with_jna
            except ImportError:
                _get_win_folder = _get_win_folder_from_registry


# ---- self test code

if __name__ == "__main__":
    appname = "MyApp"
    appauthor = "MyCompany"

    props = ("user_data_dir", "site_data_dir",
             "user_config_dir", "site_config_dir",
             "user_cache_dir", "user_log_dir")

    print("-- app dirs (with optional 'version')")
    dirs = AppDirs(appname, appauthor, version="1.0")
    for prop in props:
        print("%s: %s" % (prop, getattr(dirs, prop)))

    print("\n-- app dirs (without optional 'version')")
    dirs = AppDirs(appname, appauthor)
    for prop in props:
        print("%s: %s" % (prop, getattr(dirs, prop)))

    print("\n-- app dirs (without optional 'appauthor')")
    dirs = AppDirs(appname)
    for prop in props:
        print("%s: %s" % (prop, getattr(dirs, prop)))

    print("\n-- app dirs (with disabled 'appauthor')")
    dirs = AppDirs(appname, appauthor=False)
    for prop in props:
        print("%s: %s" % (prop, getattr(dirs, prop)))
# coding: utf-8"""Package resource API--------------------A resource is a logical file contained within a package, or a logicalsubdirectory thereof. The package resource API expects resource namesto have their path parts separated with ``/``, *not* whatever the localpath separator is. Do not use os.path operations to manipulate resourcenames being passed into the API.The package resource API is designed to work with normal filesystem packages,.egg files, and unpacked .egg files. It can also work in a limited way with.zip files and with custom PEP 302 loaders that support the ``get_data()``method."""from __future__ import absolute_importimport sysimport osimport ioimport timeimport reimport typesimport zipfileimport zipimportimport warningsimport statimport functoolsimport pkgutilimport operatorimport platformimport collectionsimport plistlibimport email.parserimport errnoimport tempfileimport textwrapimport itertoolsimport inspectfrom pkgutil import get_importertry:import _impexcept ImportError:# Python 3.2 compatibilityimport imp as _impfrom pkg_resources.extern import sixfrom pkg_resources.extern.six.moves import urllib, map, filter# capture these to bypass sandboxingfrom os import utimetry:from os import mkdir, rename, unlinkWRITE_SUPPORT = Trueexcept ImportError:# no write support, probably under GAEWRITE_SUPPORT = Falsefrom os import open as os_openfrom os.path import isdir, splittry:import importlib.machinery as importlib_machinery# access attribute to force import under delayed import mechanisms.importlib_machinery.__name__except ImportError:importlib_machinery = Nonefrom . 
import py31compatfrom pkg_resources.extern import appdirsfrom pkg_resources.extern import packaging__import__('pkg_resources.extern.packaging.version')__import__('pkg_resources.extern.packaging.specifiers')__import__('pkg_resources.extern.packaging.requirements')__import__('pkg_resources.extern.packaging.markers')if (3, 0) < sys.version_info < (3, 3):raise RuntimeError("Python 3.3 or later is required")if six.PY2:# Those builtin exceptions are only defined in Python 3PermissionError = NoneNotADirectoryError = None# declare some globals that will be defined later to# satisfy the linters.require = Noneworking_set = Noneclass PEP440Warning(RuntimeWarning):"""Used when there is an issue with a version or specifier not complying withPEP 440."""class _SetuptoolsVersionMixin(object):def __hash__(self):return super(_SetuptoolsVersionMixin, self).__hash__()def __lt__(self, other):if isinstance(other, tuple):return tuple(self) < otherelse:return super(_SetuptoolsVersionMixin, self).__lt__(other)def __le__(self, other):if isinstance(other, tuple):return tuple(self) <= otherelse:return super(_SetuptoolsVersionMixin, self).__le__(other)def __eq__(self, other):if isinstance(other, tuple):return tuple(self) == otherelse:return super(_SetuptoolsVersionMixin, self).__eq__(other)def __ge__(self, other):if isinstance(other, tuple):return tuple(self) >= otherelse:return super(_SetuptoolsVersionMixin, self).__ge__(other)def __gt__(self, other):if isinstance(other, tuple):return tuple(self) > otherelse:return super(_SetuptoolsVersionMixin, self).__gt__(other)def __ne__(self, other):if isinstance(other, tuple):return tuple(self) != otherelse:return super(_SetuptoolsVersionMixin, self).__ne__(other)def __getitem__(self, key):return tuple(self)[key]def __iter__(self):component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE)replace = {'pre': 'c','preview': 'c','-': 'final-','rc': 'c','dev': '@',}.getdef _parse_version_parts(s):for part in component_re.split(s):part = replace(part, 
part)if not part or part == '.':continueif part[:1] in '0123456789':# pad for numeric comparisonyield part.zfill(8)else:yield '*' + part# ensure that alpha/beta/candidate are before finalyield '*final'def old_parse_version(s):parts = []for part in _parse_version_parts(s.lower()):if part.startswith('*'):# remove '-' before a prerelease tagif part < '*final':while parts and parts[-1] == '*final-':parts.pop()# remove trailing zeros from each series of numeric partswhile parts and parts[-1] == '00000000':parts.pop()parts.append(part)return tuple(parts)# Warn for use of this functionwarnings.warn("You have iterated over the result of ""pkg_resources.parse_version. This is a legacy behavior which is ""inconsistent with the new version class introduced in setuptools ""8.0. In most cases, conversion to a tuple is unnecessary. For ""comparison of versions, sort the Version instances directly. If ""you have another use case requiring the tuple, please file a ""bug with the setuptools project describing that need.",RuntimeWarning,stacklevel=1,)for part in old_parse_version(str(self)):yield partclass SetuptoolsVersion(_SetuptoolsVersionMixin, packaging.version.Version):passclass SetuptoolsLegacyVersion(_SetuptoolsVersionMixin,packaging.version.LegacyVersion):passdef parse_version(v):try:return SetuptoolsVersion(v)except packaging.version.InvalidVersion:return SetuptoolsLegacyVersion(v)_state_vars = {}def _declare_state(vartype, **kw):globals().update(kw)_state_vars.update(dict.fromkeys(kw, vartype))def __getstate__():state = {}g = globals()for k, v in _state_vars.items():state[k] = g['_sget_' + v](g[k])return statedef __setstate__(state):g = globals()for k, v in state.items():g['_sset_' + _state_vars[k]](k, g[k], v)return statedef _sget_dict(val):return val.copy()def _sset_dict(key, ob, state):ob.clear()ob.update(state)def _sget_object(val):return val.__getstate__()def _sset_object(key, ob, state):ob.__setstate__(state)_sget_none = _sset_none = lambda *args: Nonedef 
get_supported_platform():"""Return this platform's maximum compatible version.distutils.util.get_platform() normally reports the minimum versionof Mac OS X that would be required to *use* extensions produced bydistutils. But what we want when checking compatibility is to know theversion of Mac OS X that we are *running*. To allow usage of packages thatexplicitly require a newer version of Mac OS X, we must also know thecurrent version of the OS.If this condition occurs for any other platform with a version in itsplatform strings, this function should be extended accordingly."""plat = get_build_platform()m = macosVersionString.match(plat)if m is not None and sys.platform == "darwin":try:plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3))except ValueError:# not Mac OS Xpassreturn plat__all__ = [# Basic resource access and distribution/entry point discovery'require', 'run_script', 'get_provider', 'get_distribution','load_entry_point', 'get_entry_map', 'get_entry_info','iter_entry_points','resource_string', 'resource_stream', 'resource_filename','resource_listdir', 'resource_exists', 'resource_isdir',# Environmental control'declare_namespace', 'working_set', 'add_activation_listener','find_distributions', 'set_extraction_path', 'cleanup_resources','get_default_cache',# Primary implementation classes'Environment', 'WorkingSet', 'ResourceManager','Distribution', 'Requirement', 'EntryPoint',# Exceptions'ResolutionError', 'VersionConflict', 'DistributionNotFound','UnknownExtra', 'ExtractionError',# Warnings'PEP440Warning',# Parsing functions and string utilities'parse_requirements', 'parse_version', 'safe_name', 'safe_version','get_platform', 'compatible_platforms', 'yield_lines', 'split_sections','safe_extra', 'to_filename', 'invalid_marker', 'evaluate_marker',# filesystem utilities'ensure_directory', 'normalize_path',# Distribution "precedence" constants'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST',# "Provider" interfaces, 
implementations, and registration/lookup APIs'IMetadataProvider', 'IResourceProvider', 'FileMetadata','PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider','NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider','register_finder', 'register_namespace_handler', 'register_loader_type','fixup_namespace_packages', 'get_importer',# Deprecated/backward compatibility only'run_main', 'AvailableDistributions',]class ResolutionError(Exception):"""Abstract base for dependency resolution errors"""def __repr__(self):return self.__class__.__name__ + repr(self.args)class VersionConflict(ResolutionError):"""An already-installed version conflicts with the requested version.Should be initialized with the installed Distribution and the requestedRequirement."""_template = "{self.dist} is installed but {self.req} is required"@propertydef dist(self):return self.args[0]@propertydef req(self):return self.args[1]def report(self):return self._template.format(**locals())def with_context(self, required_by):"""If required_by is non-empty, return a version of self that is aContextualVersionConflict."""if not required_by:return selfargs = self.args + (required_by,)return ContextualVersionConflict(*args)class ContextualVersionConflict(VersionConflict):"""A VersionConflict that accepts a third parameter, the set of therequirements that required the installed Distribution."""_template = VersionConflict._template + ' by {self.required_by}'@propertydef required_by(self):return self.args[2]class DistributionNotFound(ResolutionError):"""A requested distribution was not found"""_template = ("The '{self.req}' distribution was not found ""and is required by {self.requirers_str}")@propertydef req(self):return self.args[0]@propertydef requirers(self):return self.args[1]@propertydef requirers_str(self):if not self.requirers:return 'the application'return ', '.join(self.requirers)def report(self):return self._template.format(**locals())def __str__(self):return self.report()class 
UnknownExtra(ResolutionError):"""Distribution doesn't have an "extra feature" of the given name"""_provider_factories = {}PY_MAJOR = sys.version[:3]EGG_DIST = 3BINARY_DIST = 2SOURCE_DIST = 1CHECKOUT_DIST = 0DEVELOP_DIST = -1def register_loader_type(loader_type, provider_factory):"""Register `provider_factory` to make providers for `loader_type``loader_type` is the type or class of a PEP 302 ``module.__loader__``,and `provider_factory` is a function that, passed a *module* object,returns an ``IResourceProvider`` for that module."""_provider_factories[loader_type] = provider_factorydef get_provider(moduleOrReq):"""Return an IResourceProvider for the named module or requirement"""if isinstance(moduleOrReq, Requirement):return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]try:module = sys.modules[moduleOrReq]except KeyError:__import__(moduleOrReq)module = sys.modules[moduleOrReq]loader = getattr(module, '__loader__', None)return _find_adapter(_provider_factories, loader)(module)def _macosx_vers(_cache=[]):if not _cache:version = platform.mac_ver()[0]# fallback for MacPortsif version == '':plist = '/System/Library/CoreServices/SystemVersion.plist'if os.path.exists(plist):if hasattr(plistlib, 'readPlist'):plist_content = plistlib.readPlist(plist)if 'ProductVersion' in plist_content:version = plist_content['ProductVersion']_cache.append(version.split('.'))return _cache[0]def _macosx_arch(machine):return {'PowerPC': 'ppc', 'Power_Macintosh': 'ppc'}.get(machine, machine)def get_build_platform():"""Return this platform's string for platform-specific distributionsXXX Currently this is the same as ``distutils.util.get_platform()``, but itneeds some hacks for Linux and Mac OS X."""try:# Python 2.7 or >=3.2from sysconfig import get_platformexcept ImportError:from distutils.util import get_platformplat = get_platform()if sys.platform == "darwin" and not plat.startswith('macosx-'):try:version = _macosx_vers()machine = os.uname()[4].replace(" ", "_")return 
"macosx-%d.%d-%s" % (int(version[0]), int(version[1]),_macosx_arch(machine),)except ValueError:# if someone is running a non-Mac darwin system, this will fall# through to the default implementationpassreturn platmacosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")# XXX backward compatget_platform = get_build_platformdef compatible_platforms(provided, required):"""Can code for the `provided` platform run on the `required` platform?Returns true if either platform is ``None``, or the platforms are equal.XXX Needs compatibility checks for Linux and other unixy OSes."""if provided is None or required is None or provided == required:# easy casereturn True# Mac OS X special casesreqMac = macosVersionString.match(required)if reqMac:provMac = macosVersionString.match(provided)# is this a Mac package?if not provMac:# this is backwards compatibility for packages built before# setuptools 0.6. All packages built after this point will# use the new macosx designation.provDarwin = darwinVersionString.match(provided)if provDarwin:dversion = int(provDarwin.group(1))macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2))if dversion == 7 and macosversion >= "10.3" or \dversion == 8 and macosversion >= "10.4":return True# egg isn't macosx or legacy darwinreturn False# are they the same major version and machine type?if provMac.group(1) != reqMac.group(1) or \provMac.group(3) != reqMac.group(3):return False# is the required OS major update >= the provided one?if int(provMac.group(2)) > int(reqMac.group(2)):return Falsereturn True# XXX Linux and other platforms' special cases should go herereturn Falsedef run_script(dist_spec, script_name):"""Locate distribution `dist_spec` and run its `script_name` script"""ns = sys._getframe(1).f_globalsname = ns['__name__']ns.clear()ns['__name__'] = namerequire(dist_spec)[0].run_script(script_name, ns)# backward compatibilityrun_main = run_scriptdef 
get_distribution(dist):"""Return a current distribution object for a Requirement or string"""if isinstance(dist, six.string_types):dist = Requirement.parse(dist)if isinstance(dist, Requirement):dist = get_provider(dist)if not isinstance(dist, Distribution):raise TypeError("Expected string, Requirement, or Distribution", dist)return distdef load_entry_point(dist, group, name):"""Return `name` entry point of `group` for `dist` or raise ImportError"""return get_distribution(dist).load_entry_point(group, name)def get_entry_map(dist, group=None):"""Return the entry point map for `group`, or the full entry map"""return get_distribution(dist).get_entry_map(group)def get_entry_info(dist, group, name):"""Return the EntryPoint object for `group`+`name`, or ``None``"""return get_distribution(dist).get_entry_info(group, name)class IMetadataProvider:def has_metadata(name):"""Does the package's distribution contain the named metadata?"""def get_metadata(name):"""The named metadata resource as a string"""def get_metadata_lines(name):"""Yield named metadata resource as list of non-blank non-comment linesLeading and trailing whitespace is stripped from each line, and lineswith ``#`` as the first non-blank character are omitted."""def metadata_isdir(name):"""Is the named metadata a directory? 
(like ``os.path.isdir()``)"""def metadata_listdir(name):"""List of metadata names in the directory (like ``os.listdir()``)"""def run_script(script_name, namespace):"""Execute the named script in the supplied namespace dictionary"""class IResourceProvider(IMetadataProvider):"""An object that provides access to package resources"""def get_resource_filename(manager, resource_name):"""Return a true filesystem path for `resource_name``manager` must be an ``IResourceManager``"""def get_resource_stream(manager, resource_name):"""Return a readable file-like object for `resource_name``manager` must be an ``IResourceManager``"""def get_resource_string(manager, resource_name):"""Return a string containing the contents of `resource_name``manager` must be an ``IResourceManager``"""def has_resource(resource_name):"""Does the package contain the named resource?"""def resource_isdir(resource_name):"""Is the named resource a directory? (like ``os.path.isdir()``)"""def resource_listdir(resource_name):"""List of resource names in the directory (like ``os.listdir()``)"""class WorkingSet(object):"""A collection of active distributions on sys.path (or a similar list)"""def __init__(self, entries=None):"""Create working set from list of path entries (default=sys.path)"""self.entries = []self.entry_keys = {}self.by_key = {}self.callbacks = []if entries is None:entries = sys.pathfor entry in entries:self.add_entry(entry)@classmethoddef _build_master(cls):"""Prepare the master working set."""ws = cls()try:from __main__ import __requires__except ImportError:# The main program does not list any requirementsreturn ws# ensure the requirements are mettry:ws.require(__requires__)except VersionConflict:return cls._build_from_requirements(__requires__)return ws@classmethoddef _build_from_requirements(cls, req_spec):"""Build a working set from a requirement spec. 
Rewrites sys.path."""# try it without defaults already on sys.path# by starting with an empty pathws = cls([])reqs = parse_requirements(req_spec)dists = ws.resolve(reqs, Environment())for dist in dists:ws.add(dist)# add any missing entries from sys.pathfor entry in sys.path:if entry not in ws.entries:ws.add_entry(entry)# then copy back to sys.pathsys.path[:] = ws.entriesreturn wsdef add_entry(self, entry):"""Add a path item to ``.entries``, finding any distributions on it``find_distributions(entry, True)`` is used to find distributionscorresponding to the path entry, and they are added. `entry` isalways appended to ``.entries``, even if it is already present.(This is because ``sys.path`` can contain the same value more thanonce, and the ``.entries`` of the ``sys.path`` WorkingSet should alwaysequal ``sys.path``.)"""self.entry_keys.setdefault(entry, [])self.entries.append(entry)for dist in find_distributions(entry, True):self.add(dist, entry, False)def __contains__(self, dist):"""True if `dist` is the active distribution for its project"""return self.by_key.get(dist.key) == distdef find(self, req):"""Find a distribution matching requirement `req`If there is an active distribution for the requested project, thisreturns it as long as it meets the version requirement specified by`req`. 
But, if there is an active distribution for the project and itdoes *not* meet the `req` requirement, ``VersionConflict`` is raised.If there is no active distribution for the requested project, ``None``is returned."""dist = self.by_key.get(req.key)if dist is not None and dist not in req:# XXX add more inforaise VersionConflict(dist, req)return distdef iter_entry_points(self, group, name=None):"""Yield entry point objects from `group` matching `name`If `name` is None, yields all entry points in `group` from alldistributions in the working set, otherwise only ones matchingboth `group` and `name` are yielded (in distribution order)."""for dist in self:entries = dist.get_entry_map(group)if name is None:for ep in entries.values():yield epelif name in entries:yield entries[name]def run_script(self, requires, script_name):"""Locate distribution for `requires` and run `script_name` script"""ns = sys._getframe(1).f_globalsname = ns['__name__']ns.clear()ns['__name__'] = nameself.require(requires)[0].run_script(script_name, ns)def __iter__(self):"""Yield distributions for non-duplicate projects in the working setThe yield order is the order in which the items' path entries wereadded to the working set."""seen = {}for item in self.entries:if item not in self.entry_keys:# workaround a cache issuecontinuefor key in self.entry_keys[item]:if key not in seen:seen[key] = 1yield self.by_key[key]def add(self, dist, entry=None, insert=True, replace=False):"""Add `dist` to working set, associated with `entry`If `entry` is unspecified, it defaults to the ``.location`` of `dist`.On exit from this routine, `entry` is added to the end of the workingset's ``.entries`` (if it wasn't already present).`dist` is only added to the working set if it's for a project thatdoesn't already have a distribution in the set, unless `replace=True`.If it's added, any callbacks registered with the ``subscribe()`` methodwill be called."""if insert:dist.insert_on(self.entries, entry, replace=replace)if entry is 
None:entry = dist.locationkeys = self.entry_keys.setdefault(entry, [])keys2 = self.entry_keys.setdefault(dist.location, [])if not replace and dist.key in self.by_key:# ignore hidden distrosreturnself.by_key[dist.key] = distif dist.key not in keys:keys.append(dist.key)if dist.key not in keys2:keys2.append(dist.key)self._added_new(dist)def resolve(self, requirements, env=None, installer=None,replace_conflicting=False, extras=None):"""List all distributions needed to (recursively) meet `requirements``requirements` must be a sequence of ``Requirement`` objects. `env`,if supplied, should be an ``Environment`` instance. Ifnot supplied, it defaults to all distributions available within anyentry or distribution in the working set. `installer`, if supplied,will be invoked with each requirement that cannot be met by analready-installed distribution; it should return a ``Distribution`` or``None``.Unless `replace_conflicting=True`, raises a VersionConflict exceptionifany requirements are found on the path that have the correct name butthe wrong version. Otherwise, if an `installer` is supplied it will beinvoked to obtain the correct version of the requirement and activateit.`extras` is a list of the extras to be used with these requirements.This is important because extra requirements may look like `my_req;extra = "my_extra"`, which would otherwise be interpreted as a purelyoptional requirement. 
Instead, we want to be able to assert that theserequirements are truly required."""# set up the stackrequirements = list(requirements)[::-1]# set of processed requirementsprocessed = {}# key -> distbest = {}to_activate = []req_extras = _ReqExtras()# Mapping of requirement to set of distributions that required it;# useful for reporting info about conflicts.required_by = collections.defaultdict(set)while requirements:# process dependencies breadth-firstreq = requirements.pop(0)if req in processed:# Ignore cyclic or redundant dependenciescontinueif not req_extras.markers_pass(req, extras):continuedist = best.get(req.key)if dist is None:# Find the best distribution and add it to the mapdist = self.by_key.get(req.key)if dist is None or (dist not in req and replace_conflicting):ws = selfif env is None:if dist is None:env = Environment(self.entries)else:# Use an empty environment and workingset to avoid# any further conflicts with the conflicting# distributionenv = Environment([])ws = WorkingSet([])dist = best[req.key] = env.best_match(req, ws, installer,replace_conflicting=replace_conflicting)if dist is None:requirers = required_by.get(req, None)raise DistributionNotFound(req, requirers)to_activate.append(dist)if dist not in req:# Oops, the "best" so far conflicts with a dependencydependent_req = required_by[req]raise VersionConflict(dist, req).with_context(dependent_req)# push the new requirements onto the stacknew_requirements = dist.requires(req.extras)[::-1]requirements.extend(new_requirements)# Register the new requirements needed by reqfor new_requirement in new_requirements:required_by[new_requirement].add(req.project_name)req_extras[new_requirement] = req.extrasprocessed[req] = True# return list of distros to activatereturn to_activatedef find_plugins(self, plugin_env, full_env=None, installer=None, fallback=True):"""Find all activatable distributions in `plugin_env`Example usage::distributions, errors = working_set.find_plugins(Environment(plugin_dirlist))# add 
plugins+libs to sys.pathmap(working_set.add, distributions)# display errorsprint('Could not load', errors)The `plugin_env` should be an ``Environment`` instance that containsonly distributions that are in the project's "plugin directory" ordirectories. The `full_env`, if supplied, should be an ``Environment``contains all currently-available distributions. If `full_env` is notsupplied, one is created automatically from the ``WorkingSet`` thismethod is called on, which will typically mean that every directory on``sys.path`` will be scanned for distributions.`installer` is a standard installer callback as used by the``resolve()`` method. The `fallback` flag indicates whether we shouldattempt to resolve older versions of a plugin if the newest versioncannot be resolved.This method returns a 2-tuple: (`distributions`, `error_info`), where`distributions` is a list of the distributions found in `plugin_env`that were loadable, along with any other distributions that are neededto resolve their dependencies. `error_info` is a dictionary mappingunloadable plugin distributions to an exception instance describing theerror that occurred. 
Usually this will be a ``DistributionNotFound`` or``VersionConflict`` instance."""plugin_projects = list(plugin_env)# scan project names in alphabetic orderplugin_projects.sort()error_info = {}distributions = {}if full_env is None:env = Environment(self.entries)env += plugin_envelse:env = full_env + plugin_envshadow_set = self.__class__([])# put all our entries in shadow_setlist(map(shadow_set.add, self))for project_name in plugin_projects:for dist in plugin_env[project_name]:req = [dist.as_requirement()]try:resolvees = shadow_set.resolve(req, env, installer)except ResolutionError as v:# save error infoerror_info[dist] = vif fallback:# try the next older version of projectcontinueelse:# give up on this project, keep goingbreakelse:list(map(shadow_set.add, resolvees))distributions.update(dict.fromkeys(resolvees))# success, no need to try any more versions of this projectbreakdistributions = list(distributions)distributions.sort()return distributions, error_infodef require(self, *requirements):"""Ensure that distributions matching `requirements` are activated`requirements` must be a string or a (possibly-nested) sequencethereof, specifying the distributions and versions required. 
Thereturn value is a sequence of the distributions that needed to beactivated to fulfill the requirements; all relevant distributions areincluded, even if they were already activated in this working set."""needed = self.resolve(parse_requirements(requirements))for dist in needed:self.add(dist)return neededdef subscribe(self, callback, existing=True):"""Invoke `callback` for all distributionsIf `existing=True` (default),call on all existing ones, as well."""if callback in self.callbacks:returnself.callbacks.append(callback)if not existing:returnfor dist in self:callback(dist)def _added_new(self, dist):for callback in self.callbacks:callback(dist)def __getstate__(self):return (self.entries[:], self.entry_keys.copy(), self.by_key.copy(),self.callbacks[:])def __setstate__(self, e_k_b_c):entries, keys, by_key, callbacks = e_k_b_cself.entries = entries[:]self.entry_keys = keys.copy()self.by_key = by_key.copy()self.callbacks = callbacks[:]class _ReqExtras(dict):"""Map each requirement to the extras that demanded it."""def markers_pass(self, req, extras=None):"""Evaluate markers for req against each extra thatdemanded it.Return False if the req has a marker and failsevaluation. Otherwise, return True."""extra_evals = (req.marker.evaluate({'extra': extra})for extra in self.get(req, ()) + (extras or (None,)))return not req.marker or any(extra_evals)class Environment(object):"""Searchable snapshot of distributions on a search path"""def __init__(self, search_path=None, platform=get_supported_platform(),python=PY_MAJOR):"""Snapshot distributions available on a search pathAny distributions found on `search_path` are added to the environment.`search_path` should be a sequence of ``sys.path`` items. If notsupplied, ``sys.path`` is used.`platform` is an optional string specifying the name of the platformthat platform-specific distributions must be compatible with. Ifunspecified, it defaults to the current platform. 
`python` is anoptional string naming the desired version of Python (e.g. ``'3.3'``);it defaults to the current version.You may explicitly set `platform` (and/or `python`) to ``None`` if youwish to map *all* distributions, not just those compatible with therunning platform or Python version."""self._distmap = {}self.platform = platformself.python = pythonself.scan(search_path)def can_add(self, dist):"""Is distribution `dist` acceptable for this environment?The distribution must match the platform and python versionrequirements specified when this environment was created, or Falseis returned."""return (self.python is None or dist.py_version is Noneor dist.py_version == self.python) \and compatible_platforms(dist.platform, self.platform)def remove(self, dist):"""Remove `dist` from the environment"""self._distmap[dist.key].remove(dist)def scan(self, search_path=None):"""Scan `search_path` for distributions usable in this environmentAny distributions found are added to the environment.`search_path` should be a sequence of ``sys.path`` items. If notsupplied, ``sys.path`` is used. 
Only distributions conforming tothe platform/python version defined at initialization are added."""if search_path is None:search_path = sys.pathfor item in search_path:for dist in find_distributions(item):self.add(dist)def __getitem__(self, project_name):"""Return a newest-to-oldest list of distributions for `project_name`Uses case-insensitive `project_name` comparison, assuming all theproject's distributions use their project's name converted to alllowercase as their key."""distribution_key = project_name.lower()return self._distmap.get(distribution_key, [])def add(self, dist):"""Add `dist` if we ``can_add()`` it and it has not already been added"""if self.can_add(dist) and dist.has_version():dists = self._distmap.setdefault(dist.key, [])if dist not in dists:dists.append(dist)dists.sort(key=operator.attrgetter('hashcmp'), reverse=True)def best_match(self, req, working_set, installer=None, replace_conflicting=False):"""Find distribution best matching `req` and usable on `working_set`This calls the ``find(req)`` method of the `working_set` to see if asuitable distribution is already active. (This may raise``VersionConflict`` if an unsuitable version of the project is alreadyactive in the specified `working_set`.) If a suitable distributionisn't active, this method returns the newest distribution in theenvironment that meets the ``Requirement`` in `req`. If no suitabledistribution is found, and `installer` is supplied, then the result ofcalling the environment's ``obtain(req, installer)`` method will bereturned."""try:dist = working_set.find(req)except VersionConflict:if not replace_conflicting:raisedist = Noneif dist is not None:return distfor dist in self[req.key]:if dist in req:return dist# try to download/installreturn self.obtain(req, installer)def obtain(self, requirement, installer=None):"""Obtain a distribution matching `requirement` (e.g. via download)Obtain a distro that matches requirement (e.g. via download). 
In thebase ``Environment`` class, this routine just returns``installer(requirement)``, unless `installer` is None, in which caseNone is returned instead. This method is a hook that allows subclassesto attempt other ways of obtaining a distribution before falling backto the `installer` argument."""if installer is not None:return installer(requirement)def __iter__(self):"""Yield the unique project names of the available distributions"""for key in self._distmap.keys():if self[key]:yield keydef __iadd__(self, other):"""In-place addition of a distribution or environment"""if isinstance(other, Distribution):self.add(other)elif isinstance(other, Environment):for project in other:for dist in other[project]:self.add(dist)else:raise TypeError("Can't add %r to environment" % (other,))return selfdef __add__(self, other):"""Add an environment or distribution to an environment"""new = self.__class__([], platform=None, python=None)for env in self, other:new += envreturn new# XXX backward compatibilityAvailableDistributions = Environmentclass ExtractionError(RuntimeError):"""An error occurred extracting a resourceThe following attributes are available from instances of this exception:managerThe resource manager that raised this exceptioncache_pathThe base directory for resource extractionoriginal_errorThe exception instance that caused extraction to fail"""class ResourceManager:"""Manage resource extraction and packages"""extraction_path = Nonedef __init__(self):self.cached_files = {}def resource_exists(self, package_or_requirement, resource_name):"""Does the named resource exist?"""return get_provider(package_or_requirement).has_resource(resource_name)def resource_isdir(self, package_or_requirement, resource_name):"""Is the named resource an existing directory?"""return get_provider(package_or_requirement).resource_isdir(resource_name)def resource_filename(self, package_or_requirement, resource_name):"""Return a true filesystem path for specified resource"""return 
get_provider(package_or_requirement).get_resource_filename(self, resource_name)def resource_stream(self, package_or_requirement, resource_name):"""Return a readable file-like object for specified resource"""return get_provider(package_or_requirement).get_resource_stream(self, resource_name)def resource_string(self, package_or_requirement, resource_name):"""Return specified resource as a string"""return get_provider(package_or_requirement).get_resource_string(self, resource_name)def resource_listdir(self, package_or_requirement, resource_name):"""List the contents of the named resource directory"""return get_provider(package_or_requirement).resource_listdir(resource_name)def extraction_error(self):"""Give an error message for problems extracting file(s)"""old_exc = sys.exc_info()[1]cache_path = self.extraction_path or get_default_cache()tmpl = textwrap.dedent("""Can't extract file(s) to egg cacheThe following error occurred while trying to extract file(s)to the Python egg cache:{old_exc}The Python egg cache directory is currently set to:{cache_path}Perhaps your account does not have write access to this directory?You can change the cache directory by setting the PYTHON_EGG_CACHEenvironment variable to point to an accessible directory.""").lstrip()err = ExtractionError(tmpl.format(**locals()))err.manager = selferr.cache_path = cache_patherr.original_error = old_excraise errdef get_cache_path(self, archive_name, names=()):"""Return absolute location in cache for `archive_name` and `names`The parent directory of the resulting path will be created if it doesnot already exist. `archive_name` should be the base filename of theenclosing egg (which may not be the name of the enclosing zipfile!),including its ".egg" extension. 
`names`, if provided, should be asequence of path name parts "under" the egg's extraction location.This method should only be called by resource providers that need toobtain an extraction location, and only for names they intend toextract, as it tracks the generated names for possible cleanup later."""extract_path = self.extraction_path or get_default_cache()target_path = os.path.join(extract_path, archive_name + '-tmp', *names)try:_bypass_ensure_directory(target_path)except:self.extraction_error()self._warn_unsafe_extraction_path(extract_path)self.cached_files[target_path] = 1return target_path@staticmethoddef _warn_unsafe_extraction_path(path):"""If the default extraction path is overridden and set to an insecurelocation, such as /tmp, it opens up an opportunity for an attacker toreplace an extracted file with an unauthorized payload. Warn the userif a known insecure location is used.See Distribute #375 for more details."""if os.name == 'nt' and not path.startswith(os.environ['windir']):# On Windows, permissions are generally restrictive by default# and temp directories are not writable by other users, so# bypass the warning.returnmode = os.stat(path).st_modeif mode & stat.S_IWOTH or mode & stat.S_IWGRP:msg = ("%s is writable by group/others and vulnerable to attack ""when ""used with get_resource_filename. Consider a more secure ""location (set with .set_extraction_path or the ""PYTHON_EGG_CACHE environment variable)." % path)warnings.warn(msg, UserWarning)def postprocess(self, tempname, filename):"""Perform any platform-specific postprocessing of `tempname`This is where Mac header rewrites should be done; other platforms don'thave anything special they should do.Resource providers should call this method ONLY after successfullyextracting a compressed resource. 
They must NOT call it on resourcesthat are already in the filesystem.`tempname` is the current (temporary) name of the file, and `filename`is the name it will be renamed to by the caller after this routinereturns."""if os.name == 'posix':# Make the resource executablemode = ((os.stat(tempname).st_mode) | 0o555) & 0o7777os.chmod(tempname, mode)def set_extraction_path(self, path):"""Set the base path where resources will be extracted to, if needed.If you do not call this routine before any extractions take place, thepath defaults to the return value of ``get_default_cache()``. (Whichis based on the ``PYTHON_EGG_CACHE`` environment variable, with variousplatform-specific fallbacks. See that routine's documentation for moredetails.)Resources are extracted to subdirectories of this path based uponinformation given by the ``IResourceProvider``. You may set this to atemporary directory, but then you must call ``cleanup_resources()`` todelete the extracted files when done. There is no guarantee that``cleanup_resources()`` will be able to remove all extracted files.(Note: you may not change the extraction path for a given resourcemanager once resources have been extracted, unless you first call``cleanup_resources()``.)"""if self.cached_files:raise ValueError("Can't change extraction path, files already extracted")self.extraction_path = pathdef cleanup_resources(self, force=False):"""Delete all extracted resource files and directories, returning a listof the file and directory names that could not be successfully removed.This function does not have any concurrency protection, so it shouldgenerally only be called when the extraction path is a temporarydirectory exclusive to a single process. 
def get_default_cache():
    """
    Return the ``PYTHON_EGG_CACHE`` environment variable
    or a platform-relevant user cache dir for an app
    named "Python-Eggs".
    """
    return (
        os.environ.get('PYTHON_EGG_CACHE')
        or appdirs.user_cache_dir(appname='Python-Eggs')
    )


def safe_name(name):
    """Convert an arbitrary string to a standard distribution name

    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
    """
    return re.sub('[^A-Za-z0-9.]+', '-', name)


def safe_version(version):
    """Convert an arbitrary string to a standard version string"""
    try:
        # normalize the version
        return str(packaging.version.Version(version))
    except packaging.version.InvalidVersion:
        version = version.replace(' ', '.')
        return re.sub('[^A-Za-z0-9.]+', '-', version)


def safe_extra(extra):
    """Convert an arbitrary string to a standard 'extra' name

    Any runs of non-alphanumeric characters are replaced with a single '_',
    and the result is always lowercased.
    """
    return re.sub('[^A-Za-z0-9.-]+', '_', extra).lower()


def to_filename(name):
    """Convert a project or version name to its filename-escaped form

    Any '-' characters are currently replaced with '_'.
    """
    return name.replace('-', '_')


def invalid_marker(text):
    """
    Validate text as a PEP 508 environment marker; return an exception
    if invalid or False otherwise.
    """
    try:
        evaluate_marker(text)
    except SyntaxError as e:
        e.filename = None
        e.lineno = None
        return e
    return False


def evaluate_marker(text, extra=None):
    """
    Evaluate a PEP 508 environment marker.
    Return a boolean indicating the marker result in this environment.
    Raise SyntaxError if marker is invalid.

    This implementation uses the 'pyparsing' module.
    """
    try:
        marker = packaging.markers.Marker(text)
        return marker.evaluate()
    except packaging.markers.InvalidMarker as e:
        raise SyntaxError(e)


class NullProvider:
    """Try to implement resources and metadata for arbitrary PEP 302 loaders"""
302 loaders"""egg_name = Noneegg_info = Noneloader = Nonedef __init__(self, module):self.loader = getattr(module, '__loader__', None)self.module_path = os.path.dirname(getattr(module, '__file__', ''))def get_resource_filename(self, manager, resource_name):return self._fn(self.module_path, resource_name)def get_resource_stream(self, manager, resource_name):return io.BytesIO(self.get_resource_string(manager, resource_name))def get_resource_string(self, manager, resource_name):return self._get(self._fn(self.module_path, resource_name))def has_resource(self, resource_name):return self._has(self._fn(self.module_path, resource_name))def has_metadata(self, name):return self.egg_info and self._has(self._fn(self.egg_info, name))def get_metadata(self, name):if not self.egg_info:return ""value = self._get(self._fn(self.egg_info, name))return value.decode('utf-8') if six.PY3 else valuedef get_metadata_lines(self, name):return yield_lines(self.get_metadata(name))def resource_isdir(self, resource_name):return self._isdir(self._fn(self.module_path, resource_name))def metadata_isdir(self, name):return self.egg_info and self._isdir(self._fn(self.egg_info, name))def resource_listdir(self, resource_name):return self._listdir(self._fn(self.module_path, resource_name))def metadata_listdir(self, name):if self.egg_info:return self._listdir(self._fn(self.egg_info, name))return []def run_script(self, script_name, namespace):script = 'scripts/' + script_nameif not self.has_metadata(script):raise ResolutionError("Script {script!r} not found in metadata at {self.egg_info!r}".format(**locals()),)script_text = self.get_metadata(script).replace('\r\n', '\n')script_text = script_text.replace('\r', '\n')script_filename = self._fn(self.egg_info, script)namespace['__file__'] = script_filenameif os.path.exists(script_filename):source = open(script_filename).read()code = compile(source, script_filename, 'exec')exec(code, namespace, namespace)else:from linecache import cachecache[script_filename] = 
(len(script_text), 0, script_text.split('\n'), script_filename)script_code = compile(script_text, script_filename, 'exec')exec(script_code, namespace, namespace)def _has(self, path):raise NotImplementedError("Can't perform this operation for unregistered loader type")def _isdir(self, path):raise NotImplementedError("Can't perform this operation for unregistered loader type")def _listdir(self, path):raise NotImplementedError("Can't perform this operation for unregistered loader type")def _fn(self, base, resource_name):if resource_name:return os.path.join(base, *resource_name.split('/'))return basedef _get(self, path):if hasattr(self.loader, 'get_data'):return self.loader.get_data(path)raise NotImplementedError("Can't perform this operation for loaders without 'get_data()'")register_loader_type(object, NullProvider)class EggProvider(NullProvider):"""Provider based on a virtual filesystem"""def __init__(self, module):NullProvider.__init__(self, module)self._setup_prefix()def _setup_prefix(self):# we assume here that our metadata may be nested inside a "basket"# of multiple eggs; that's why we use module_path instead of .archivepath = self.module_pathold = Nonewhile path != old:if _is_egg_path(path):self.egg_name = os.path.basename(path)self.egg_info = os.path.join(path, 'EGG-INFO')self.egg_root = pathbreakold = pathpath, base = os.path.split(path)class DefaultProvider(EggProvider):"""Provides access to package resources in the filesystem"""def _has(self, path):return os.path.exists(path)def _isdir(self, path):return os.path.isdir(path)def _listdir(self, path):return os.listdir(path)def get_resource_stream(self, manager, resource_name):return open(self._fn(self.module_path, resource_name), 'rb')def _get(self, path):with open(path, 'rb') as stream:return stream.read()@classmethoddef _register(cls):loader_cls = getattr(importlib_machinery,'SourceFileLoader',type(None),)register_loader_type(loader_cls, cls)DefaultProvider._register()class 
class EmptyProvider(NullProvider):
    """Provider that returns nothing for all requests"""

    _isdir = _has = lambda self, path: False
    _get = lambda self, path: ''
    _listdir = lambda self, path: []
    module_path = None

    def __init__(self):
        pass


empty_provider = EmptyProvider()


class ZipManifests(dict):
    """
    zip manifest builder
    """

    @classmethod
    def build(cls, path):
        """
        Build a dictionary similar to the zipimport directory
        caches, except instead of tuples, store ZipInfo objects.

        Use a platform-specific path separator (os.sep) for the path keys
        for compatibility with pypy on Windows.
        """
        with zipfile.ZipFile(path) as zfile:
            items = (
                (
                    name.replace('/', os.sep),
                    zfile.getinfo(name),
                )
                for name in zfile.namelist()
            )
            return dict(items)

    load = build


class MemoizedZipManifests(ZipManifests):
    """
    Memoized zipfile manifests.
    """
    manifest_mod = collections.namedtuple('manifest_mod', 'manifest mtime')

    def load(self, path):
        """
        Load a manifest at path or return a suitable manifest already loaded.
        """
        path = os.path.normpath(path)
        mtime = os.stat(path).st_mtime

        # rebuild only when the archive changed on disk
        if path not in self or self[path].mtime != mtime:
            manifest = self.build(path)
            self[path] = self.manifest_mod(manifest, mtime)

        return self[path].manifest


class ZipProvider(EggProvider):
    """Resource support for zips and eggs"""

    eagers = None
    _zip_manifests = MemoizedZipManifests()

    def __init__(self, module):
        EggProvider.__init__(self, module)
        self.zip_pre = self.loader.archive + os.sep

    def _zipinfo_name(self, fspath):
        # Convert a virtual filename (full path to file) into a zipfile subpath
        # usable with the zipimport directory cache for our target archive
        if fspath.startswith(self.zip_pre):
            return fspath[len(self.zip_pre):]
        raise AssertionError(
            "%s is not a subpath of %s" % (fspath, self.zip_pre)
        )

    def _parts(self, zip_path):
        # Convert a zipfile subpath into an egg-relative path part list.
        # pseudo-fs path
        fspath = self.zip_pre + zip_path
        if fspath.startswith(self.egg_root + os.sep):
            return fspath[len(self.egg_root) + 1:].split(os.sep)
        raise AssertionError(
            "%s is not a subpath of %s" % (fspath, self.egg_root)
        )
self.egg_root))@propertydef zipinfo(self):return self._zip_manifests.load(self.loader.archive)def get_resource_filename(self, manager, resource_name):if not self.egg_name:raise NotImplementedError("resource_filename() only supported for .egg, not .zip")# no need to lock for extraction, since we use temp nameszip_path = self._resource_to_zip(resource_name)eagers = self._get_eager_resources()if '/'.join(self._parts(zip_path)) in eagers:for name in eagers:self._extract_resource(manager, self._eager_to_zip(name))return self._extract_resource(manager, zip_path)@staticmethoddef _get_date_and_size(zip_stat):size = zip_stat.file_size# ymdhms+wday, yday, dstdate_time = zip_stat.date_time + (0, 0, -1)# 1980 offset already donetimestamp = time.mktime(date_time)return timestamp, sizedef _extract_resource(self, manager, zip_path):if zip_path in self._index():for name in self._index()[zip_path]:last = self._extract_resource(manager, os.path.join(zip_path, name))# return the extracted directory namereturn os.path.dirname(last)timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])if not WRITE_SUPPORT:raise IOError('"os.rename" and "os.unlink" are not supported ''on this platform')try:real_path = manager.get_cache_path(self.egg_name, self._parts(zip_path))if self._is_current(real_path, zip_path):return real_pathoutf, tmpnam = _mkstemp(".$extract",dir=os.path.dirname(real_path),)os.write(outf, self.loader.get_data(zip_path))os.close(outf)utime(tmpnam, (timestamp, timestamp))manager.postprocess(tmpnam, real_path)try:rename(tmpnam, real_path)except os.error:if os.path.isfile(real_path):if self._is_current(real_path, zip_path):# the file became current since it was checked above,# so proceed.return real_path# Windows, del old file and retryelif os.name == 'nt':unlink(real_path)rename(tmpnam, real_path)return real_pathraiseexcept os.error:# report a user-friendly errormanager.extraction_error()return real_pathdef _is_current(self, file_path, zip_path):"""Return True if the 
file_path is current for this zip_path"""timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])if not os.path.isfile(file_path):return Falsestat = os.stat(file_path)if stat.st_size != size or stat.st_mtime != timestamp:return False# check that the contents matchzip_contents = self.loader.get_data(zip_path)with open(file_path, 'rb') as f:file_contents = f.read()return zip_contents == file_contentsdef _get_eager_resources(self):if self.eagers is None:eagers = []for name in ('native_libs.txt', 'eager_resources.txt'):if self.has_metadata(name):eagers.extend(self.get_metadata_lines(name))self.eagers = eagersreturn self.eagersdef _index(self):try:return self._dirindexexcept AttributeError:ind = {}for path in self.zipinfo:parts = path.split(os.sep)while parts:parent = os.sep.join(parts[:-1])if parent in ind:ind[parent].append(parts[-1])breakelse:ind[parent] = [parts.pop()]self._dirindex = indreturn inddef _has(self, fspath):zip_path = self._zipinfo_name(fspath)return zip_path in self.zipinfo or zip_path in self._index()def _isdir(self, fspath):return self._zipinfo_name(fspath) in self._index()def _listdir(self, fspath):return list(self._index().get(self._zipinfo_name(fspath), ()))def _eager_to_zip(self, resource_name):return self._zipinfo_name(self._fn(self.egg_root, resource_name))def _resource_to_zip(self, resource_name):return self._zipinfo_name(self._fn(self.module_path, resource_name))register_loader_type(zipimport.zipimporter, ZipProvider)class FileMetadata(EmptyProvider):"""Metadata handler for standalone PKG-INFO filesUsage::metadata = FileMetadata("/path/to/PKG-INFO")This provider rejects all data and metadata requests except for PKG-INFO,which is treated as existing, and will be the contents of the file atthe provided location."""def __init__(self, path):self.path = pathdef has_metadata(self, name):return name == 'PKG-INFO' and os.path.isfile(self.path)def get_metadata(self, name):if name != 'PKG-INFO':raise KeyError("No metadata except PKG-INFO is 
available")with io.open(self.path, encoding='utf-8', errors="replace") as f:metadata = f.read()self._warn_on_replacement(metadata)return metadatadef _warn_on_replacement(self, metadata):# Python 2.7 compat for: replacement_char = '�'replacement_char = b'\xef\xbf\xbd'.decode('utf-8')if replacement_char in metadata:tmpl = "{self.path} could not be properly decoded in UTF-8"msg = tmpl.format(**locals())warnings.warn(msg)def get_metadata_lines(self, name):return yield_lines(self.get_metadata(name))class PathMetadata(DefaultProvider):"""Metadata provider for egg directoriesUsage::# Development eggs:egg_info = "/path/to/PackageName.egg-info"base_dir = os.path.dirname(egg_info)metadata = PathMetadata(base_dir, egg_info)dist_name = os.path.splitext(os.path.basename(egg_info))[0]dist = Distribution(basedir, project_name=dist_name, metadata=metadata)# Unpacked egg directories:egg_path = "/path/to/PackageName-ver-pyver-etc.egg"metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO'))dist = Distribution.from_filename(egg_path, metadata=metadata)"""def __init__(self, path, egg_info):self.module_path = pathself.egg_info = egg_infoclass EggMetadata(ZipProvider):"""Metadata provider for .egg files"""def __init__(self, importer):"""Create a metadata provider from a zipimporter"""self.zip_pre = importer.archive + os.sepself.loader = importerif importer.prefix:self.module_path = os.path.join(importer.archive, importer.prefix)else:self.module_path = importer.archiveself._setup_prefix()_declare_state('dict', _distribution_finders={})def register_finder(importer_type, distribution_finder):"""Register `distribution_finder` to find distributions in sys.path items`importer_type` is the type or class of a PEP 302 "Importer" (sys.path itemhandler), and `distribution_finder` is a callable that, passed a pathitem and the importer instance, yields ``Distribution`` instances found onthat path item. 
See ``pkg_resources.find_on_path`` for an example."""_distribution_finders[importer_type] = distribution_finderdef find_distributions(path_item, only=False):"""Yield distributions accessible via `path_item`"""importer = get_importer(path_item)finder = _find_adapter(_distribution_finders, importer)return finder(importer, path_item, only)def find_eggs_in_zip(importer, path_item, only=False):"""Find eggs in zip files; possibly multiple nested eggs."""if importer.archive.endswith('.whl'):# wheels are not supported with this finder# they don't have PKG-INFO metadata, and won't ever contain eggsreturnmetadata = EggMetadata(importer)if metadata.has_metadata('PKG-INFO'):yield Distribution.from_filename(path_item, metadata=metadata)if only:# don't yield nested distrosreturnfor subitem in metadata.resource_listdir('/'):if _is_egg_path(subitem):subpath = os.path.join(path_item, subitem)dists = find_eggs_in_zip(zipimport.zipimporter(subpath), subpath)for dist in dists:yield distelif subitem.lower().endswith('.dist-info'):subpath = os.path.join(path_item, subitem)submeta = EggMetadata(zipimport.zipimporter(subpath))submeta.egg_info = subpathyield Distribution.from_location(path_item, subitem, submeta)register_finder(zipimport.zipimporter, find_eggs_in_zip)def find_nothing(importer, path_item, only=False):return ()register_finder(object, find_nothing)def _by_version_descending(names):"""Given a list of filenames, return them in descending orderby version number.>>> names = 'bar', 'foo', 'Python-2.7.10.egg', 'Python-2.7.2.egg'>>> _by_version_descending(names)['Python-2.7.10.egg', 'Python-2.7.2.egg', 'foo', 'bar']>>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.egg'>>> _by_version_descending(names)['Setuptools-1.2.3.egg', 'Setuptools-1.2.3b1.egg']>>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.post1.egg'>>> _by_version_descending(names)['Setuptools-1.2.3.post1.egg', 'Setuptools-1.2.3b1.egg']"""def _by_version(name):"""Parse each component of the filename"""name, ext 
= os.path.splitext(name)parts = itertools.chain(name.split('-'), [ext])return [packaging.version.parse(part) for part in parts]return sorted(names, key=_by_version, reverse=True)def find_on_path(importer, path_item, only=False):"""Yield distributions accessible on a sys.path directory"""path_item = _normalize_cached(path_item)if _is_unpacked_egg(path_item):yield Distribution.from_filename(path_item, metadata=PathMetadata(path_item, os.path.join(path_item, 'EGG-INFO')))returnentries = safe_listdir(path_item)# for performance, before sorting by version,# screen entries for only those that will yield# distributionsfiltered = (entryfor entry in entriesif dist_factory(path_item, entry, only))# scan for .egg and .egg-info in directorypath_item_entries = _by_version_descending(filtered)for entry in path_item_entries:fullpath = os.path.join(path_item, entry)factory = dist_factory(path_item, entry, only)for dist in factory(fullpath):yield distdef dist_factory(path_item, entry, only):"""Return a dist_factory for a path_item and entry"""lower = entry.lower()is_meta = any(map(lower.endswith, ('.egg-info', '.dist-info')))return (distributions_from_metadataif is_meta elsefind_distributionsif not only and _is_egg_path(entry) elseresolve_egg_linkif not only and lower.endswith('.egg-link') elseNoDists())class NoDists:""">>> bool(NoDists())False>>> list(NoDists()('anything'))[]"""def __bool__(self):return Falseif six.PY2:__nonzero__ = __bool__def __call__(self, fullpath):return iter(())def safe_listdir(path):"""Attempt to list contents of path, but suppress some exceptions."""try:return os.listdir(path)except (PermissionError, NotADirectoryError):passexcept OSError as e:# Ignore the directory if does not exist, not a directory or# permission deniedignorable = (e.errno in (errno.ENOTDIR, errno.EACCES, errno.ENOENT)# Python 2 on Windows needs to be handled this way :(or getattr(e, "winerror", None) == 267)if not ignorable:raisereturn ()def distributions_from_metadata(path):root = 
os.path.dirname(path)if os.path.isdir(path):if len(os.listdir(path)) == 0:# empty metadata dir; skipreturnmetadata = PathMetadata(root, path)else:metadata = FileMetadata(path)entry = os.path.basename(path)yield Distribution.from_location(root, entry, metadata, precedence=DEVELOP_DIST,)def non_empty_lines(path):"""Yield non-empty lines from file at path"""with open(path) as f:for line in f:line = line.strip()if line:yield linedef resolve_egg_link(path):"""Given a path to an .egg-link, resolve distributionspresent in the referenced path."""referenced_paths = non_empty_lines(path)resolved_paths = (os.path.join(os.path.dirname(path), ref)for ref in referenced_paths)dist_groups = map(find_distributions, resolved_paths)return next(dist_groups, ())register_finder(pkgutil.ImpImporter, find_on_path)if hasattr(importlib_machinery, 'FileFinder'):register_finder(importlib_machinery.FileFinder, find_on_path)_declare_state('dict', _namespace_handlers={})_declare_state('dict', _namespace_packages={})def register_namespace_handler(importer_type, namespace_handler):"""Register `namespace_handler` to declare namespace packages`importer_type` is the type or class of a PEP 302 "Importer" (sys.path itemhandler), and `namespace_handler` is a callable like this::def namespace_handler(importer, path_entry, moduleName, module):# return a path_entry to use for child packagesNamespace handlers are only called if the importer object has alreadyagreed that it can handle the relevant path item, and they should onlyreturn a subpath if the module __path__ does not already contain anequivalent subpath. 
For an example namespace handler, see``pkg_resources.file_ns_handler``."""_namespace_handlers[importer_type] = namespace_handlerdef _handle_ns(packageName, path_item):"""Ensure that named package includes a subpath of path_item (if needed)"""importer = get_importer(path_item)if importer is None:return Noneloader = importer.find_module(packageName)if loader is None:return Nonemodule = sys.modules.get(packageName)if module is None:module = sys.modules[packageName] = types.ModuleType(packageName)module.__path__ = []_set_parent_ns(packageName)elif not hasattr(module, '__path__'):raise TypeError("Not a package:", packageName)handler = _find_adapter(_namespace_handlers, importer)subpath = handler(importer, path_item, packageName, module)if subpath is not None:path = module.__path__path.append(subpath)loader.load_module(packageName)_rebuild_mod_path(path, packageName, module)return subpathdef _rebuild_mod_path(orig_path, package_name, module):"""Rebuild module.__path__ ensuring that all entries are orderedcorresponding to their sys.path order"""sys_path = [_normalize_cached(p) for p in sys.path]def safe_sys_path_index(entry):"""Workaround for #520 and #513."""try:return sys_path.index(entry)except ValueError:return float('inf')def position_in_sys_path(path):"""Return the ordinal of the path based on its position in sys.path"""path_parts = path.split(os.sep)module_parts = package_name.count('.') + 1parts = path_parts[:-module_parts]return safe_sys_path_index(_normalize_cached(os.sep.join(parts)))if not isinstance(orig_path, list):# Is this behavior useful when module.__path__ is not a list?returnorig_path.sort(key=position_in_sys_path)module.__path__[:] = [_normalize_cached(p) for p in orig_path]def declare_namespace(packageName):"""Declare that package 'packageName' is a namespace package"""_imp.acquire_lock()try:if packageName in _namespace_packages:returnpath, parent = sys.path, Noneif '.' 
in packageName:parent = '.'.join(packageName.split('.')[:-1])declare_namespace(parent)if parent not in _namespace_packages:__import__(parent)try:path = sys.modules[parent].__path__except AttributeError:raise TypeError("Not a package:", parent)# Track what packages are namespaces, so when new path items are added,# they can be updated_namespace_packages.setdefault(parent, []).append(packageName)_namespace_packages.setdefault(packageName, [])for path_item in path:# Ensure all the parent's path items are reflected in the child,# if they apply_handle_ns(packageName, path_item)finally:_imp.release_lock()def fixup_namespace_packages(path_item, parent=None):"""Ensure that previously-declared namespace packages include path_item"""_imp.acquire_lock()try:for package in _namespace_packages.get(parent, ()):subpath = _handle_ns(package, path_item)if subpath:fixup_namespace_packages(subpath, package)finally:_imp.release_lock()def file_ns_handler(importer, path_item, packageName, module):"""Compute an ns-package subpath for a filesystem or zipfile importer"""subpath = os.path.join(path_item, packageName.split('.')[-1])normalized = _normalize_cached(subpath)for item in module.__path__:if _normalize_cached(item) == normalized:breakelse:# Only return the path if it's not already therereturn subpathregister_namespace_handler(pkgutil.ImpImporter, file_ns_handler)register_namespace_handler(zipimport.zipimporter, file_ns_handler)if hasattr(importlib_machinery, 'FileFinder'):register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler)def null_ns_handler(importer, path_item, packageName, module):return Noneregister_namespace_handler(object, null_ns_handler)def normalize_path(filename):"""Normalize a file/dir name for comparison purposes"""return os.path.normcase(os.path.realpath(filename))def _normalize_cached(filename, _cache={}):try:return _cache[filename]except KeyError:_cache[filename] = result = normalize_path(filename)return resultdef _is_egg_path(path):"""Determine 
if given path appears to be an egg."""return path.lower().endswith('.egg')def _is_unpacked_egg(path):"""Determine if given path appears to be an unpacked egg."""return (_is_egg_path(path) andos.path.isfile(os.path.join(path, 'EGG-INFO', 'PKG-INFO')))def _set_parent_ns(packageName):parts = packageName.split('.')name = parts.pop()if parts:parent = '.'.join(parts)setattr(sys.modules[parent], name, sys.modules[packageName])def yield_lines(strs):"""Yield non-empty/non-comment lines of a string or sequence"""if isinstance(strs, six.string_types):for s in strs.splitlines():s = s.strip()# skip blank lines/commentsif s and not s.startswith('#'):yield selse:for ss in strs:for s in yield_lines(ss):yield sMODULE = re.compile(r"\w+(\.\w+)*$").matchEGG_NAME = re.compile(r"""(?P<name>[^-]+) (-(?P<ver>[^-]+) (-py(?P<pyver>[^-]+) (-(?P<plat>.+))?)?)?""",re.VERBOSE | re.IGNORECASE,).matchclass EntryPoint(object):"""Object representing an advertised importable object"""def __init__(self, name, module_name, attrs=(), extras=(), dist=None):if not MODULE(module_name):raise ValueError("Invalid module name", module_name)self.name = nameself.module_name = module_nameself.attrs = tuple(attrs)self.extras = tuple(extras)self.dist = distdef __str__(self):s = "%s = %s" % (self.name, self.module_name)if self.attrs:s += ':' + '.'.join(self.attrs)if self.extras:s += ' [%s]' % ','.join(self.extras)return sdef __repr__(self):return "EntryPoint.parse(%r)" % str(self)def load(self, require=True, *args, **kwargs):"""Require packages for this EntryPoint, then resolve it."""if not require or args or kwargs:warnings.warn("Parameters to load are deprecated. 
Call .resolve and "".require separately.",DeprecationWarning,stacklevel=2,)if require:self.require(*args, **kwargs)return self.resolve()def resolve(self):"""Resolve the entry point from its module and attrs."""module = __import__(self.module_name, fromlist=['__name__'], level=0)try:return functools.reduce(getattr, self.attrs, module)except AttributeError as exc:raise ImportError(str(exc))def require(self, env=None, installer=None):if self.extras and not self.dist:raise UnknownExtra("Can't require() without a distribution", self)# Get the requirements for this entry point with all its extras and# then resolve them. We have to pass `extras` along when resolving so# that the working set knows what extras we want. Otherwise, for# dist-info distributions, the working set will assume that the# requirements for that extra are purely optional and skip over them.reqs = self.dist.requires(self.extras)items = working_set.resolve(reqs, env, installer, extras=self.extras)list(map(working_set.add, items))pattern = re.compile(r'\s*'r'(?P<name>.+?)\s*'r'=\s*'r'(?P<module>[\w.]+)\s*'r'(:\s*(?P<attr>[\w.]+))?\s*'r'(?P<extras>\[.*\])?\s*$')@classmethoddef parse(cls, src, dist=None):"""Parse a single entry point from string `src`Entry point syntax follows the form::name = some.module:some.attr [extra1, extra2]The entry name and module name are required, but the ``:attrs`` and``[extras]`` parts are optional"""m = cls.pattern.match(src)if not m:msg = "EntryPoint must be in 'name=module:attrs [extras]' format"raise ValueError(msg, src)res = m.groupdict()extras = cls._parse_extras(res['extras'])attrs = res['attr'].split('.') if res['attr'] else ()return cls(res['name'], res['module'], attrs, extras, dist)@classmethoddef _parse_extras(cls, extras_spec):if not extras_spec:return ()req = Requirement.parse('x' + extras_spec)if req.specs:raise ValueError()return req.extras@classmethoddef parse_group(cls, group, lines, dist=None):"""Parse an entry point group"""if not MODULE(group):raise 
ValueError("Invalid group name", group)this = {}for line in yield_lines(lines):ep = cls.parse(line, dist)if ep.name in this:raise ValueError("Duplicate entry point", group, ep.name)this[ep.name] = epreturn this@classmethoddef parse_map(cls, data, dist=None):"""Parse a map of entry point groups"""if isinstance(data, dict):data = data.items()else:data = split_sections(data)maps = {}for group, lines in data:if group is None:if not lines:continueraise ValueError("Entry points must be listed in groups")group = group.strip()if group in maps:raise ValueError("Duplicate group name", group)maps[group] = cls.parse_group(group, lines, dist)return mapsdef _remove_md5_fragment(location):if not location:return ''parsed = urllib.parse.urlparse(location)if parsed[-1].startswith('md5='):return urllib.parse.urlunparse(parsed[:-1] + ('',))return locationdef _version_from_file(lines):"""Given an iterable of lines from a Metadata file, returnthe value of the Version field, if present, or None otherwise."""is_version_line = lambda line: line.lower().startswith('version:')version_lines = filter(is_version_line, lines)line = next(iter(version_lines), '')_, _, value = line.partition(':')return safe_version(value.strip()) or Noneclass Distribution(object):"""Wrap an actual or potential sys.path entry w/metadata"""PKG_INFO = 'PKG-INFO'def __init__(self, location=None, metadata=None, project_name=None,version=None, py_version=PY_MAJOR, platform=None,precedence=EGG_DIST):self.project_name = safe_name(project_name or 'Unknown')if version is not None:self._version = safe_version(version)self.py_version = py_versionself.platform = platformself.location = locationself.precedence = precedenceself._provider = metadata or empty_provider@classmethoddef from_location(cls, location, basename, metadata=None, **kw):project_name, version, py_version, platform = [None] * 4basename, ext = os.path.splitext(basename)if ext.lower() in _distributionImpl:cls = _distributionImpl[ext.lower()]match = 
EGG_NAME(basename)if match:project_name, version, py_version, platform = match.group('name', 'ver', 'pyver', 'plat')return cls(location, metadata, project_name=project_name, version=version,py_version=py_version, platform=platform, **kw)._reload_version()def _reload_version(self):return self@propertydef hashcmp(self):return (self.parsed_version,self.precedence,self.key,_remove_md5_fragment(self.location),self.py_version or '',self.platform or '',)def __hash__(self):return hash(self.hashcmp)def __lt__(self, other):return self.hashcmp < other.hashcmpdef __le__(self, other):return self.hashcmp <= other.hashcmpdef __gt__(self, other):return self.hashcmp > other.hashcmpdef __ge__(self, other):return self.hashcmp >= other.hashcmpdef __eq__(self, other):if not isinstance(other, self.__class__):# It's not a Distribution, so they are not equalreturn Falsereturn self.hashcmp == other.hashcmpdef __ne__(self, other):return not self == other# These properties have to be lazy so that we don't have to load any# metadata until/unless it's actually needed. (i.e., some distributions# may not know their name or version without loading PKG-INFO)@propertydef key(self):try:return self._keyexcept AttributeError:self._key = key = self.project_name.lower()return key@propertydef parsed_version(self):if not hasattr(self, "_parsed_version"):self._parsed_version = parse_version(self.version)return self._parsed_versiondef _warn_legacy_version(self):LV = packaging.version.LegacyVersionis_legacy = isinstance(self._parsed_version, LV)if not is_legacy:return# While an empty version is technically a legacy version and# is not a valid PEP 440 version, it's also unlikely to# actually come from someone and instead it is more likely that# it comes from setuptools attempting to parse a filename and# including it in the list. 
So for that we'll gate this warning# on if the version is anything at all or not.if not self.version:returntmpl = textwrap.dedent("""'{project_name} ({version})' is being parsed as a legacy,non PEP 440,version. You may find odd behavior and sort order.In particular it will be sorted as less than 0.0. Itis recommended to migrate to PEP 440 compatibleversions.""").strip().replace('\n', ' ')warnings.warn(tmpl.format(**vars(self)), PEP440Warning)@propertydef version(self):try:return self._versionexcept AttributeError:version = _version_from_file(self._get_metadata(self.PKG_INFO))if version is None:tmpl = "Missing 'Version:' header and/or %s file"raise ValueError(tmpl % self.PKG_INFO, self)return version@propertydef _dep_map(self):try:return self.__dep_mapexcept AttributeError:dm = self.__dep_map = {None: []}for name in 'requires.txt', 'depends.txt':for extra, reqs in split_sections(self._get_metadata(name)):if extra:if ':' in extra:extra, marker = extra.split(':', 1)if invalid_marker(marker):# XXX warnreqs = []elif not evaluate_marker(marker):reqs = []extra = safe_extra(extra) or Nonedm.setdefault(extra, []).extend(parse_requirements(reqs))return dmdef requires(self, extras=()):"""List of Requirements needed for this distro if `extras` are used"""dm = self._dep_mapdeps = []deps.extend(dm.get(None, ()))for ext in extras:try:deps.extend(dm[safe_extra(ext)])except KeyError:raise UnknownExtra("%s has no such extra feature %r" % (self, ext))return depsdef _get_metadata(self, name):if self.has_metadata(name):for line in self.get_metadata_lines(name):yield linedef activate(self, path=None, replace=False):"""Ensure distribution is importable on `path` (default=sys.path)"""if path is None:path = sys.pathself.insert_on(path, replace=replace)if path is sys.path:fixup_namespace_packages(self.location)for pkg in self._get_metadata('namespace_packages.txt'):if pkg in sys.modules:declare_namespace(pkg)def egg_name(self):"""Return what this distribution's standard .egg filename should 
be"""filename = "%s-%s-py%s" % (to_filename(self.project_name), to_filename(self.version),self.py_version or PY_MAJOR)if self.platform:filename += '-' + self.platformreturn filenamedef __repr__(self):if self.location:return "%s (%s)" % (self, self.location)else:return str(self)def __str__(self):try:version = getattr(self, 'version', None)except ValueError:version = Noneversion = version or "[unknown version]"return "%s %s" % (self.project_name, version)def __getattr__(self, attr):"""Delegate all unrecognized public attributes to .metadata provider"""if attr.startswith('_'):raise AttributeError(attr)return getattr(self._provider, attr)@classmethoddef from_filename(cls, filename, metadata=None, **kw):return cls.from_location(_normalize_cached(filename), os.path.basename(filename), metadata,**kw)def as_requirement(self):"""Return a ``Requirement`` that matches this distribution exactly"""if isinstance(self.parsed_version, packaging.version.Version):spec = "%s==%s" % (self.project_name, self.parsed_version)else:spec = "%s===%s" % (self.project_name, self.parsed_version)return Requirement.parse(spec)def load_entry_point(self, group, name):"""Return the `name` entry point of `group` or raise ImportError"""ep = self.get_entry_info(group, name)if ep is None:raise ImportError("Entry point %r not found" % ((group, name),))return ep.load()def get_entry_map(self, group=None):"""Return the entry point map for `group`, or the full entry map"""try:ep_map = self._ep_mapexcept AttributeError:ep_map = self._ep_map = EntryPoint.parse_map(self._get_metadata('entry_points.txt'), self)if group is not None:return ep_map.get(group, {})return ep_mapdef get_entry_info(self, group, name):"""Return the EntryPoint object for `group`+`name`, or ``None``"""return self.get_entry_map(group).get(name)def insert_on(self, path, loc=None, replace=False):"""Ensure self.location is on pathIf replace=False (default):- If location is already in path anywhere, do nothing.- Else:- If it's an egg and its 
parent directory is on path,insert just ahead of the parent.- Else: add to the end of path.If replace=True:- If location is already on path anywhere (not eggs)or higher priority than its parent (eggs)do nothing.- Else:- If it's an egg and its parent directory is on path,insert just ahead of the parent,removing any lower-priority entries.- Else: add it to the front of path."""loc = loc or self.locationif not loc:returnnloc = _normalize_cached(loc)bdir = os.path.dirname(nloc)npath = [(p and _normalize_cached(p) or p) for p in path]for p, item in enumerate(npath):if item == nloc:if replace:breakelse:# don't modify path (even removing duplicates) if# found and not replacereturnelif item == bdir and self.precedence == EGG_DIST:# if it's an .egg, give it precedence over its directory# UNLESS it's already been added to sys.path and replace=Falseif (not replace) and nloc in npath[p:]:returnif path is sys.path:self.check_version_conflict()path.insert(p, loc)npath.insert(p, nloc)breakelse:if path is sys.path:self.check_version_conflict()if replace:path.insert(0, loc)else:path.append(loc)return# p is the spot where we found or inserted loc; now remove duplicateswhile True:try:np = npath.index(nloc, p + 1)except ValueError:breakelse:del npath[np], path[np]# ha!p = npreturndef check_version_conflict(self):if self.key == 'setuptools':# ignore the inevitable setuptools self-conflicts :(returnnsp = dict.fromkeys(self._get_metadata('namespace_packages.txt'))loc = normalize_path(self.location)for modname in self._get_metadata('top_level.txt'):if (modname not in sys.modules or modname in nspor modname in _namespace_packages):continueif modname in ('pkg_resources', 'setuptools', 'site'):continuefn = getattr(sys.modules[modname], '__file__', None)if fn and (normalize_path(fn).startswith(loc) orfn.startswith(self.location)):continueissue_warning("Module %s was already imported from %s, but %s is being added"" to sys.path" % (modname, fn, self.location),)def 
has_version(self):try:self.versionexcept ValueError:issue_warning("Unbuilt egg for " + repr(self))return Falsereturn Truedef clone(self, **kw):"""Copy this distribution, substituting in any changed keyword args"""names = 'project_name version py_version platform location precedence'for attr in names.split():kw.setdefault(attr, getattr(self, attr, None))kw.setdefault('metadata', self._provider)return self.__class__(**kw)@propertydef extras(self):return [dep for dep in self._dep_map if dep]class EggInfoDistribution(Distribution):def _reload_version(self):"""Packages installed by distutils (e.g. numpy or scipy),which uses an old safe_version, and sotheir version numbers can get mangled whenconverted to filenames (e.g., 1.11.0.dev0+2329eae to1.11.0.dev0_2329eae). These distributions will not beparsed properlydownstream by Distribution and safe_version, sotake an extra step and try to get the version number fromthe metadata file itself instead of the filename."""md_version = _version_from_file(self._get_metadata(self.PKG_INFO))if md_version:self._version = md_versionreturn selfclass DistInfoDistribution(Distribution):"""Wrap an actual or potential sys.path entryw/metadata, .dist-info style."""PKG_INFO = 'METADATA'EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])")@propertydef _parsed_pkg_info(self):"""Parse and cache metadata"""try:return self._pkg_infoexcept AttributeError:metadata = self.get_metadata(self.PKG_INFO)self._pkg_info = email.parser.Parser().parsestr(metadata)return self._pkg_info@propertydef _dep_map(self):try:return self.__dep_mapexcept AttributeError:self.__dep_map = self._compute_dependencies()return self.__dep_mapdef _compute_dependencies(self):"""Recompute this distribution's dependencies."""dm = self.__dep_map = {None: []}reqs = []# Including any condition expressionsfor req in self._parsed_pkg_info.get_all('Requires-Dist') or []:reqs.extend(parse_requirements(req))def reqs_for_extra(extra):for req in reqs:if not req.marker or 
req.marker.evaluate({'extra': extra}):yield reqcommon = frozenset(reqs_for_extra(None))dm[None].extend(common)for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []:s_extra = safe_extra(extra.strip())dm[s_extra] = list(frozenset(reqs_for_extra(extra)) - common)return dm_distributionImpl = {'.egg': Distribution,'.egg-info': EggInfoDistribution,'.dist-info': DistInfoDistribution,}def issue_warning(*args, **kw):level = 1g = globals()try:# find the first stack frame that is *not* code in# the pkg_resources module, to use for the warningwhile sys._getframe(level).f_globals is g:level += 1except ValueError:passwarnings.warn(stacklevel=level + 1, *args, **kw)class RequirementParseError(ValueError):def __str__(self):return ' '.join(self.args)def parse_requirements(strs):"""Yield ``Requirement`` objects for each specification in `strs``strs` must be a string, or a (possibly-nested) iterable thereof."""# create a steppable iterator, so we can handle \-continuationslines = iter(yield_lines(strs))for line in lines:# Drop comments -- a hash without a space may be in a URL.if ' #' in line:line = line[:line.find(' #')]# If there is a line continuation, drop it, and append the next line.if line.endswith('\\'):line = line[:-2].strip()line += next(lines)yield Requirement(line)class Requirement(packaging.requirements.Requirement):def __init__(self, requirement_string):"""DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""try:super(Requirement, self).__init__(requirement_string)except packaging.requirements.InvalidRequirement as e:raise RequirementParseError(str(e))self.unsafe_name = self.nameproject_name = safe_name(self.name)self.project_name, self.key = project_name, project_name.lower()self.specs = [(spec.operator, spec.version) for spec in self.specifier]self.extras = tuple(map(safe_extra, self.extras))self.hashCmp = (self.key,self.specifier,frozenset(self.extras),str(self.marker) if self.marker else None,)self.__hash = hash(self.hashCmp)def __eq__(self, 
other):return (isinstance(other, Requirement) andself.hashCmp == other.hashCmp)def __ne__(self, other):return not self == otherdef __contains__(self, item):if isinstance(item, Distribution):if item.key != self.key:return Falseitem = item.version# Allow prereleases always in order to match the previous behavior of# this method. In the future this should be smarter and follow PEP 440# more accurately.return self.specifier.contains(item, prereleases=True)def __hash__(self):return self.__hashdef __repr__(self):return "Requirement.parse(%r)" % str(self)@staticmethoddef parse(s):req, = parse_requirements(s)return reqdef _always_object(classes):"""Ensure object appears in the mro evenfor old-style classes."""if object not in classes:return classes + (object,)return classesdef _find_adapter(registry, ob):"""Return an adapter factory for `ob` from `registry`"""types = _always_object(inspect.getmro(getattr(ob, '__class__', type(ob))))for t in types:if t in registry:return registry[t]def ensure_directory(path):"""Ensure that the parent directory of `path` exists"""dirname = os.path.dirname(path)py31compat.makedirs(dirname, exist_ok=True)def _bypass_ensure_directory(path):"""Sandbox-bypassing version of ensure_directory()"""if not WRITE_SUPPORT:raise IOError('"os.mkdir" not supported on this platform.')dirname, filename = split(path)if dirname and filename and not isdir(dirname):_bypass_ensure_directory(dirname)mkdir(dirname, 0o755)def split_sections(s):"""Split a string or iterable thereof into (section, content) pairsEach ``section`` is a stripped version of the section header ("[section]")and each ``content`` is a list of stripped lines excluding blank lines andcomment-only lines. 
If there are any such lines before the first sectionheader, they're returned in a first ``section`` of ``None``."""section = Nonecontent = []for line in yield_lines(s):if line.startswith("["):if line.endswith("]"):if section or content:yield section, contentsection = line[1:-1].strip()content = []else:raise ValueError("Invalid section heading", line)else:content.append(line)# wrap up last segmentyield section, contentdef _mkstemp(*args, **kw):old_open = os.opentry:# temporarily bypass sandboxingos.open = os_openreturn tempfile.mkstemp(*args, **kw)finally:# and then put it backos.open = old_open# Silence the PEP440Warning by default, so that end users don't get hit by it# randomly just because they use pkg_resources. We want to append the rule# because we want earlier uses of filterwarnings to take precedence over this# one.warnings.filterwarnings("ignore", category=PEP440Warning, append=True)# from jaraco.functools 1.3def _call_aside(f, *args, **kwargs):f(*args, **kwargs)return f@_call_asidedef _initialize(g=globals()):"Set up global resource manager (deliberately not state-saved)"manager = ResourceManager()g['_manager'] = managerg.update((name, getattr(manager, name))for name in dir(manager)if not name.startswith('_'))@_call_asidedef _initialize_master_working_set():"""Prepare the master working set and make the ``require()``API available.This function has explicit effects on the global stateof pkg_resources. 
It is intended to be invoked once atthe initialization of this module.Invocation by other packages is unsupported and doneat their own risk."""working_set = WorkingSet._build_master()_declare_state('object', working_set=working_set)require = working_set.requireiter_entry_points = working_set.iter_entry_pointsadd_activation_listener = working_set.subscriberun_script = working_set.run_script# backward compatibilityrun_main = run_script# Activate all distributions already on sys.path with replace=False and# ensure that all distributions added to the working set in the future# (e.g. by calling ``require()``) will get activated as well,# with higher priority (replace=True).tuple(dist.activate(replace=False)for dist in working_set)add_activation_listener(lambda dist: dist.activate(replace=True),existing=False,)working_set.entries = []# match orderlist(map(working_set.add_entry, sys.path))globals().update(locals())
pip
{"classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Topic :: Software Development :: Build Tools", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: Implementation :: PyPy"], "extensions": {"python.commands": {"wrap_console": {"pip": "pip:main", "pip3": "pip:main", "pip3.5": "pip:main"}}, "python.details": {"contacts": [{"email": "python-virtualenv@groups.google.com", "name": "The pip developers", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://pip.pypa.io/"}}, "python.exports": {"console_scripts": {"pip": "pip:main", "pip3": "pip:main", "pip3.5": "pip:main"}}}, "extras": ["testing"], "generator": "bdist_wheel (0.29.0)", "keywords": ["easy_install", "distutils", "setuptools", "egg", "virtualenv"], "license": "MIT", "metadata_version": "2.0", "name": "pip", "requires_python": ">=2.6,!=3.0.*,!=3.1.*,!=3.2.*", "run_requires": [{"extra": "testing", "requires": ["mock", "pretend", "pytest", "scripttest (>=1.3)", "virtualenv (>=1.10)"]}], "summary": "The PyPA recommended tool for installing Python packages.", "test_requires": [{"requires": ["mock", "pretend", "pytest", "scripttest (>=1.3)", "virtualenv (>=1.10)"]}], "version": "9.0.1"}
[console_scripts]pip = pip:mainpip3 = pip:mainpip3.5 = pip:main
Wheel-Version: 1.0Generator: bdist_wheel (0.29.0)Root-Is-Purelib: trueTag: py2-none-anyTag: py3-none-any
pip/__init__.py,sha256=00QWSreEBjb8Y8sPs8HeqgLXSB-3UrONJxo4J5APxEc,11348pip/__main__.py,sha256=V6Kh-IEDEFpt1cahRE6MajUF_14qJR_Qsvn4MjWZXzE,584pip/basecommand.py,sha256=TTlmZesQ4Vuxcto2KqwZGmgmN5ioHEl_DeFev9ie_SA,11910pip/baseparser.py,sha256=AKMOeF3fTrRroiv0DmTQbdiLW0DQux2KqGC_dJJB9d0,10465pip/cmdoptions.py,sha256=8JCcF2kKAF2cFnV77oW-3DsHJifr9jF2WuChzzwgcwg,16474pip/download.py,sha256=rA0wbmqC2n9ejX481YJSidmKgQqQDjdaxkHkHlAN68k,32171pip/exceptions.py,sha256=BvqH-Jw3tP2b-2IJ2kjrQemOAPMqKrQMLRIZHZQpJXk,8121pip/index.py,sha256=L6UhtAEZc2qw7BqfQrkPQcw2gCgEw3GukLRSA95BNyI,39950pip/locations.py,sha256=9rJRlgonC6QC2zGDIn_7mXaoZ9_tF_IHM2BQhWVRgbo,5626pip/pep425tags.py,sha256=q3kec4f6NHszuGYIhGIbVvs896D06uJAnKFgJ_wce44,10980pip/status_codes.py,sha256=F6uDG6Gj7RNKQJUDnd87QKqI16Us-t-B0wPF_4QMpWc,156pip/wheel.py,sha256=QSWmGs2ui-n4UMWm0JUY6aMCcwNKungVzbWsxI9KlJQ,32010pip/_vendor/__init__.py,sha256=WaaSJ3roSSJ_Uv4yKAxlGohKEH9YUA3aIh1Xg2IjfgU,4670pip/_vendor/appdirs.py,sha256=-9UOIZy62ahCQVY9-b7Nn6_5_4Y6ooHnv72tM8iHi9Y,22368pip/_vendor/distro.py,sha256=A4Douw9pcqdYxDTp5b-OR02fxVXnfWs-wC1wA89rhRk,38349pip/_vendor/ipaddress.py,sha256=wimbqcE7rwwETlucn8A_4Qd_-NKXPOBcNxJHarUoXng,80176pip/_vendor/ordereddict.py,sha256=4KsFuc6V8IgHROCHUu-4vCrr21ZPPea7Z0cvX9AjQ7w,4094pip/_vendor/pyparsing.py,sha256=7vAuUVbh6txUKQR2IzJ8_9DKmD5vtm5MDssWkI0ka8o,224171pip/_vendor/re-vendor.py,sha256=PcdZ40d0ohMsdJmA4t0AeAWbPXi1tFsvAwA5KE5FGeY,773pip/_vendor/retrying.py,sha256=k3fflf5_Mm0XcIJYhB7Tj34bqCCPhUDkYbx1NvW2FPE,9972pip/_vendor/six.py,sha256=A6hdJZVjI3t_geebZ9BzUvwRrIXo0lfwzQlM2LcKyas,30098pip/_vendor/cachecontrol/__init__.py,sha256=UPyFlz0dIjxusu5ITig9UDFJdSY5LTwijhldn0AfyzU,302pip/_vendor/cachecontrol/_cmd.py,sha256=MPxZfZd2LKDzVrs55X3wA1rsI2YuP8evLZSwQj0dIk0,1320pip/_vendor/cachecontrol/adapter.py,sha256=RaGYyRA-RA1J0AnE67GzEYFPBu4YH4EQUvQqTKa57iM,4608pip/_vendor/cachecontrol/cache.py,sha256=xtl-V-pr9KSt9VvFDRCB9yrHPEvqvbk-5M1vAInZb5k,790pip/_vendor/cachecontrol/compat.py,sha256=uyovOpd1ehI3J1XeBq
JvcsIp6fvkjBpoQmu_0J2st8c,416pip/_vendor/cachecontrol/controller.py,sha256=elDsLcaYA15ncodRmHnWQp6ekU_ocEGtDeGLbsnTjzo,13024pip/_vendor/cachecontrol/filewrapper.py,sha256=_K8cStmXqD33m15PfsQ8rlpo6FfXjVbKmjvLXyICRgI,2531pip/_vendor/cachecontrol/heuristics.py,sha256=WtJrVsyWjpP9WoUiDVdTZZRNBCz5ZVptaQpYnqofDQU,4141pip/_vendor/cachecontrol/serialize.py,sha256=XM6elG9DSNexwaOCgMjUtfrHHW5NAB6TSbIf3x235xs,6536pip/_vendor/cachecontrol/wrapper.py,sha256=Kqyu_3TW_54XDudha4-HF21vyEOAJ4ZnRXFysTiLmXA,498pip/_vendor/cachecontrol/caches/__init__.py,sha256=uWnUtyMvHY_LULaL_4_IR1F_xPgK5zHfJyRnBq4DnPE,369pip/_vendor/cachecontrol/caches/file_cache.py,sha256=FsDug3bwUAQ3okjjfGzxlDaBf2fwVSn1iBKMTL6SyGU,3532pip/_vendor/cachecontrol/caches/redis_cache.py,sha256=XywqxkS9MkCaflTOY_wjrE02neKdywB9YwlOBbP7Ywc,973pip/_vendor/colorama/__init__.py,sha256=9xByrTvk9upkL5NGV5It2Eje4-kzNLwa_1lGPWpXoNU,240pip/_vendor/colorama/ansi.py,sha256=Fi0un-QLqRm-v7o_nKiOqyC8PapBJK7DLV_q9LKtTO0,2524pip/_vendor/colorama/ansitowin32.py,sha256=gJZB35Lbdjatykd2zrUUnokMzkvcFgscyn_tNxxMFHA,9668pip/_vendor/colorama/initialise.py,sha256=cHqVJtb82OG7HUCxvQ2joG7N_CoxbIKbI_fgryZkj20,1917pip/_vendor/colorama/win32.py,sha256=_SCEoTK_GA2tU1nhbayKKac-v9Jn98lCPIFOeFMGCHQ,5365pip/_vendor/colorama/winterm.py,sha256=V7U7ojwG1q4n6PKripjEvW_htYQi5ueXSM3LUUoqqDY,6290pip/_vendor/distlib/__init__.py,sha256=-aUeNNCfiIG_1Tqf19BH0xLNuBKGX1I7lNhcLYgFUEA,581pip/_vendor/distlib/compat.py,sha256=FzKlP9dNUMH-j_1LCVnjgx6KgUbpnRjTjYkTkDYRPlI,40801pip/_vendor/distlib/database.py,sha256=jniJmYk0Mj2t6gZYbnn68TvQwnVZ0kXyeuf_3AxFclk,49672pip/_vendor/distlib/index.py,sha256=Cw8gxFq_7xXvdgExL3efjLAY3EAPDMSL3VA42RkbQBs,21085pip/_vendor/distlib/locators.py,sha256=hD_Hm3aSL9DklY9Cxyct2n_74gZ0xNFFGB5L7M6ds14,51013pip/_vendor/distlib/manifest.py,sha256=3qEuZhHlDbvyYZ1BZbdapDAivgMgUwWpZ00cmXqcn18,14810pip/_vendor/distlib/markers.py,sha256=iRrVWwpyVwjkKJSX8NEQ92_MRMwpROcfNGKCD-Ch1QM,6282pip/_vendor/distlib/metadata.py,sha256=hUsf7Qh2Ae4CCkL33qK8ppwC8ZTzT7ep6H
j9RKpijKU,38833pip/_vendor/distlib/resources.py,sha256=VFBVbFqLVqDBSQDXcFQHrX1KEcuoDxTK699Ydi_beyc,10766pip/_vendor/distlib/scripts.py,sha256=xpehNfISGPTNxQZu02K9Rw2QbNx_2Q4emePv3W5X0iw,15224pip/_vendor/distlib/t32.exe,sha256=cp0UAUDDr1tGAx8adlKxWbCHIa-oB3bxev5zYzgAr8E,89088pip/_vendor/distlib/t64.exe,sha256=FiljDPcX9qvoe9FYE_9pNEHqbqMnhcCOuI_oLJ4F9F8,97792pip/_vendor/distlib/util.py,sha256=E2wU-RZShPMFUMJr9kPmemTULinM4qDzosNPihCuKE0,52991pip/_vendor/distlib/version.py,sha256=CgghOUylxGD7dEA2S3MvWjx7mY_2bWsluF0Of3Yxl4Y,23711pip/_vendor/distlib/w32.exe,sha256=LItrBJesEqt2QTQuB-yha2YbMegURHmHmdSxhjBqmnc,85504pip/_vendor/distlib/w64.exe,sha256=n_PioBC7ltz7sAk1WLbLzZJgS4R2axSy_0HPf8ZCsEg,94208pip/_vendor/distlib/wheel.py,sha256=UP53cKxOM5r7bHSS-n5prF6hwJEVsMW9ZNJutOuC26c,39115pip/_vendor/distlib/_backport/__init__.py,sha256=bqS_dTOH6uW9iGgd0uzfpPjo6vZ4xpPZ7kyfZJ2vNaw,274pip/_vendor/distlib/_backport/misc.py,sha256=KWecINdbFNOxSOP1fGF680CJnaC6S4fBRgEtaYTw0ig,971pip/_vendor/distlib/_backport/shutil.py,sha256=VW1t3uYqUjWZH7jV-6QiimLhnldoV5uIpH4EuiT1jfw,25647pip/_vendor/distlib/_backport/sysconfig.cfg,sha256=swZKxq9RY5e9r3PXCrlvQPMsvOdiWZBTHLEbqS8LJLU,2617pip/_vendor/distlib/_backport/sysconfig.py,sha256=eSEyJg7jxF_eHlHG8IOtl93kb07UoMIRp1wYsPeGi9k,26955pip/_vendor/distlib/_backport/tarfile.py,sha256=Ihp7rXRcjbIKw8COm9wSePV9ARGXbSF9gGXAMn2Q-KU,92628pip/_vendor/html5lib/__init__.py,sha256=JsIwmFldk-9raBadPSTS74JrfmJvozc-3aekMi7Hr9s,780pip/_vendor/html5lib/_ihatexml.py,sha256=tzXygYmisUmiEUt2v7E1Ab50AKQsrD-SglPRnY75vME,16705pip/_vendor/html5lib/_inputstream.py,sha256=C4lX5gUBwebOWy41hYP2ZBpkPVNvxk_hZBm3OVyPZM4,32532pip/_vendor/html5lib/_tokenizer.py,sha256=YAaOEBD6qc5ISq9Xt9Nif1OFgcybTTfMdwqBkZhpAq4,76580pip/_vendor/html5lib/_utils.py,sha256=bS6THVlL8ZyTcI6CIxiM6xxuHsE8i1j5Ogd3Ha1G84U,4096pip/_vendor/html5lib/constants.py,sha256=Dfc1Fv3_9frktgWjg4tbj-CjMMp02Ko9qMe4il1BVdo,83387pip/_vendor/html5lib/html5parser.py,sha256=Dmlu9hlq5w_id6mBZyY_sE5LukIACgvG4kpgIsded8Q,117170pip/_ven
dor/html5lib/serializer.py,sha256=Urrsa0cPPLqNX-UbJWS2gUhs_06qVbNxZvUnrmGZK6E,14177pip/_vendor/html5lib/_trie/__init__.py,sha256=8VR1bcgD2OpeS2XExpu5yBhP_Q1K-lwKbBKICBPf1kU,289pip/_vendor/html5lib/_trie/_base.py,sha256=6P_AcIoGjtwB2qAlhV8H4VP-ztQxoXFGwt4NyMqG_Kw,979pip/_vendor/html5lib/_trie/datrie.py,sha256=EQpqSfkZRuTbE-DuhW7xMdVDxdZNZ0CfmnYfHA_3zxM,1178pip/_vendor/html5lib/_trie/py.py,sha256=wXmQLrZRf4MyWNyg0m3h81m9InhLR7GJ002mIIZh-8o,1775pip/_vendor/html5lib/filters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0pip/_vendor/html5lib/filters/alphabeticalattributes.py,sha256=DXv-P2vdQ5F3OTWM6QZ6KhyDlAWm90pbfrD1Bk9D_l0,621pip/_vendor/html5lib/filters/base.py,sha256=z-IU9ZAYjpsVsqmVt7kuWC63jR11hDMr6CVrvuao8W0,286pip/_vendor/html5lib/filters/inject_meta_charset.py,sha256=2Q_JnMscn_tNbV_qpgYN_5M3PnBGfmuvECMKDExHUcY,2742pip/_vendor/html5lib/filters/lint.py,sha256=qf5cLrT6xXd8V7GH1R_3lKxIjuJSfpbWTpSwaglYdDw,3365pip/_vendor/html5lib/filters/optionaltags.py,sha256=EHig4kM-QiLjuxVJ3FAAFNy-10k4aV6HJbQzHKZ_3u8,10534pip/_vendor/html5lib/filters/sanitizer.py,sha256=7PqJrhm6mo3JvaHk2IQW7i74Or7Qtd-FV8UftJIyDys,25112pip/_vendor/html5lib/filters/whitespace.py,sha256=KPt067nYTqqi8KLTClyynn4eVzNDC_-MApXNVHRXVX0,1139pip/_vendor/html5lib/treeadapters/__init__.py,sha256=l3LcqMSEyoh99Jh_eWjGexHnIvKhLAXoP-LDz88whuM,208pip/_vendor/html5lib/treeadapters/genshi.py,sha256=6VIuHDNoExv1JWv3ePj6V5CM-tcyiUSWe5_Hd2ejbwY,1555pip/_vendor/html5lib/treeadapters/sax.py,sha256=3of4vvaUYIAic7pngebwJV24hpOS7Zg9ggJa_WQegy4,1661pip/_vendor/html5lib/treebuilders/__init__.py,sha256=UlB4orkTgZhFIKQdXrtiWn9cpKSsuhnOQOIHeD0Fv4k,3406pip/_vendor/html5lib/treebuilders/base.py,sha256=4vdjm_Z2f_GTQBwKnWlrzVcctTb-K5sfN8pXDaWODiA,13942pip/_vendor/html5lib/treebuilders/dom.py,sha256=SY3MsijXyzdNPc8aK5IQsupBoM8J67y56DgNtGvsb9g,8835pip/_vendor/html5lib/treebuilders/etree.py,sha256=aqIBOGj_dFYqBURIcTegGNBhAIJOw5iFDHb4jrkYH-8,12764pip/_vendor/html5lib/treebuilders/etree_lxml.py,sha256=CEgwHMIQZvIDFAqct4kqP
kVtyKIm9efHFq_VeExEPCA,14161pip/_vendor/html5lib/treewalkers/__init__.py,sha256=CFpUOCfLuhAgVJ8NYk9wviCu1khYnv7XRStvyzU1Fws,5544pip/_vendor/html5lib/treewalkers/base.py,sha256=ei-2cFbNFd0gRjyaFmxnxZGLNID4o0bHFCH9bMyZ5Bk,4939pip/_vendor/html5lib/treewalkers/dom.py,sha256=EHyFR8D8lYNnyDU9lx_IKigVJRyecUGua0mOi7HBukc,1413pip/_vendor/html5lib/treewalkers/etree.py,sha256=8jVLEY2FjgN4RFugwhAh44l9ScVYoDStQFCnlPwvafI,4684pip/_vendor/html5lib/treewalkers/etree_lxml.py,sha256=sY6wfRshWTllu6n48TPWpKsQRPp-0CQrT0hj_AdzHSU,6309pip/_vendor/html5lib/treewalkers/genshi.py,sha256=4D2PECZ5n3ZN3qu3jMl9yY7B81jnQApBQSVlfaIuYbA,2309pip/_vendor/lockfile/__init__.py,sha256=Tqpz90DwKYfhPsfzVOJl84TL87pdFE5ePNHdXAxs4Tk,9371pip/_vendor/lockfile/linklockfile.py,sha256=C7OH3H4GdK68u4FQgp8fkP2kO4fyUTSyj3X6blgfobc,2652pip/_vendor/lockfile/mkdirlockfile.py,sha256=e3qgIL-etZMLsS-3ft19iW_8IQ360HNkGOqE3yBKsUw,3096pip/_vendor/lockfile/pidlockfile.py,sha256=ukH9uk6NFuxyVmG5QiWw4iKq3fT7MjqUguX95avYPIY,6090pip/_vendor/lockfile/sqlitelockfile.py,sha256=o2TMkMRY0iwn-iL1XMRRIFStMUkS4i3ajceeYNntKFg,5506pip/_vendor/lockfile/symlinklockfile.py,sha256=ABwXXmvTHvCl5viPblShL3PG-gGsLiT1roAMfDRwhi8,2616pip/_vendor/packaging/__about__.py,sha256=zkcCPTN_6TcLW0Nrlg0176-R1QQ_WVPTm8sz1R4-HjM,720pip/_vendor/packaging/__init__.py,sha256=_vNac5TrzwsrzbOFIbF-5cHqc_Y2aPT2D7zrIR06BOo,513pip/_vendor/packaging/_compat.py,sha256=Vi_A0rAQeHbU-a9X0tt1yQm9RqkgQbDSxzRw8WlU9kA,860pip/_vendor/packaging/_structures.py,sha256=RImECJ4c_wTlaTYYwZYLHEiebDMaAJmK1oPARhw1T5o,1416pip/_vendor/packaging/markers.py,sha256=mtg2nphJE1oQO39g1DgsdPsMO-guBBClpR-AEYFrbMg,8230pip/_vendor/packaging/requirements.py,sha256=SD7dVJGjdPUqtoHb47qwK6wWJTQd-ZXWjxpJg83UcBA,4327pip/_vendor/packaging/specifiers.py,sha256=SAMRerzO3fK2IkFZCaZkuwZaL_EGqHNOz4pni4vhnN0,28025pip/_vendor/packaging/utils.py,sha256=3m6WvPm6NNxE8rkTGmn0r75B_GZSGg7ikafxHsBN1WA,421pip/_vendor/packaging/version.py,sha256=OwGnxYfr2ghNzYx59qWIBkrK3SnB6n-Zfd1XaLpnnM0,11556pip/_vendor/pkg_resources/__
init__.py,sha256=CcwuHtCBZn9OTkmgF9cFpadIAMhlrnZTVKTOo4V2p58,103230pip/_vendor/progress/__init__.py,sha256=Wn1074LUDZovd4zfoVYojnPBgOc6ctHbQX7rp_p8lRA,3023pip/_vendor/progress/bar.py,sha256=YNPJeRrwYVKFO2nyaEwsQjYByamMWTgJMvQO1NpD-AY,2685pip/_vendor/progress/counter.py,sha256=kEqA8jWEdwrc6P_9VaRx7bjOHwk9gxl-Q9oVbQ08v5c,1502pip/_vendor/progress/helpers.py,sha256=FehfwZTv-5cCfsbcMlvlUkm3xZ0cRhsev6XVpmeTF4c,2854pip/_vendor/progress/spinner.py,sha256=iCVtUQbaJUFHTjn1ZLPQLPYeao4lC9aXAa_HxIeUK6k,1314pip/_vendor/requests/__init__.py,sha256=Cde-qxOWcslaEcPvKAJQPFbY8_va8PMbU7Rssr7vViI,2326pip/_vendor/requests/adapters.py,sha256=DJdgax91PyS2s6_oZPELbuLWNlM2xGguNu62sqcOUik,19740pip/_vendor/requests/api.py,sha256=PgminOpD8hLLKLNs0RWLKr1HpNc4Qxr_6uen8q2c9CI,5794pip/_vendor/requests/auth.py,sha256=eBLtJlcTZxRG7xKXCvGQBLO9a-PxFgMf2qTUbtZwMJM,8175pip/_vendor/requests/cacert.pem,sha256=5xzWFRrSP0ZsXiW6emg8UQ_w497lT4qWCv32OO8R1ME,344712pip/_vendor/requests/certs.py,sha256=Aa-oStu9f2lVi8VM9Aw1xaAtTIz7bhu5CGKNPEW1waM,625pip/_vendor/requests/compat.py,sha256=0cgWB43LEX5OrX1O4k-bPbFlIbWXgEd412DSDJtF1Y8,1687pip/_vendor/requests/cookies.py,sha256=awMI0hm3SKheMEDTqO8AIadc2XmnCGKPCTNw_4hlM3Q,18208pip/_vendor/requests/exceptions.py,sha256=x-MGvDASYKSstuCNYTA5IT_EAcxTp5knE3WPMrgkrlI,2860pip/_vendor/requests/hooks.py,sha256=HXAHoC1FNTFRZX6-lNdvPM7Tst4kvGwYTN-AOKRxoRU,767pip/_vendor/requests/models.py,sha256=YHuL2khGDFxeWc-NMJIcfFqvYJ0dKs1mXfj1Fuff1J8,30532pip/_vendor/requests/sessions.py,sha256=H7HpKRLKeu1MSH5W1-PI2GMCFLN4bz5i3OFqjjgzE5k,25609pip/_vendor/requests/status_codes.py,sha256=uwVHcMPkHV3FElDLlnDTH3KULZIAGxaovbBxrjWm8N0,3316pip/_vendor/requests/structures.py,sha256=yexCvWbX40M6E8mLQOpAGZZ-ZoAnyaT2dni-Bp-b42g,3012pip/_vendor/requests/utils.py,sha256=9d3jqnA8avsF9N1QPmsk2pJgo2pxuExrN2hoIhtLggY,24163pip/_vendor/requests/packages/__init__.py,sha256=CVheqNRcXIkAi5037RhxeqbAqd0QhrK1o9R9kS2xvuI,1384pip/_vendor/requests/packages/chardet/__init__.py,sha256=XuTKCYOR7JwsoHxqZTYH86LVyMDbDI3s1s0W_q
oGEBM,1295pip/_vendor/requests/packages/chardet/big5freq.py,sha256=D8oTdz-GM7Jg8TsaWJDm65vM_OLHC3xub6qUJ3rOgsQ,82594pip/_vendor/requests/packages/chardet/big5prober.py,sha256=XX96C--6WKYW36mL-z7pJSAtc169Z8ZImByCP4pEN9A,1684pip/_vendor/requests/packages/chardet/chardetect.py,sha256=f4299UZG6uWd3i3r_N0OdrFj2sA9JFI54PAmDLAFmWA,2504pip/_vendor/requests/packages/chardet/chardistribution.py,sha256=cUARQFr1oTLXeJCDQrDRkUP778AvSMzhSCnG8VLCV58,9226pip/_vendor/requests/packages/chardet/charsetgroupprober.py,sha256=0lKk7VE516fgMw119tNefFqLOxKfIE9WfdkpIT69OKU,3791pip/_vendor/requests/packages/chardet/charsetprober.py,sha256=Z48o2KiOj23FNqYH8FqzhH5m1qdm3rI8DcTm2Yqtklg,1902pip/_vendor/requests/packages/chardet/codingstatemachine.py,sha256=E85rYhHVMw9xDEJVgiQhp0OnLGr6i2r8_7QOWMKTH08,2318pip/_vendor/requests/packages/chardet/compat.py,sha256=5mm6yrHwef1JEG5OxkPJlSq5lkjLVpEGh3iPgFBkpkM,1157pip/_vendor/requests/packages/chardet/constants.py,sha256=-UnY8U7EP7z9fTyd09yq35BEkSFEAUAiv9ohd1DW1s4,1335pip/_vendor/requests/packages/chardet/cp949prober.py,sha256=FMvdLyB7fejPXRsTbca7LK1P3RUvvssmjUNyaEfz8zY,1782pip/_vendor/requests/packages/chardet/escprober.py,sha256=q5TcQKeVq31WxrW7Sv8yjpZkjEoaHO8S92EJZ9hodys,3187pip/_vendor/requests/packages/chardet/escsm.py,sha256=7iljEKN8lXTh8JFXPUSwlibMno6R6ksq4evLxbkzfro,7839pip/_vendor/requests/packages/chardet/eucjpprober.py,sha256=5IpfSEjAb7h3hcGMd6dkU80O900C2N6xku28rdYFKuc,3678pip/_vendor/requests/packages/chardet/euckrfreq.py,sha256=T5saK5mImySG5ygQPtsp6o2uKulouCwYm2ElOyFkJqU,45978pip/_vendor/requests/packages/chardet/euckrprober.py,sha256=Wo7dnZ5Erw_nB4H-m5alMiOxOuJUmGHlwCSaGqExDZA,1675pip/_vendor/requests/packages/chardet/euctwfreq.py,sha256=G_I0BW9i1w0ONeeUwIYqV7_U09buIHdqh-wNHVaql7I,34872pip/_vendor/requests/packages/chardet/euctwprober.py,sha256=upS2P6GuT5ujOxXYw-RJLcT7A4PTuo27KGUKU4UZpIQ,1676pip/_vendor/requests/packages/chardet/gb2312freq.py,sha256=M2gFdo_qQ_BslStEchrPW5CrPEZEacC0uyDLw4ok-kY,36011pip/_vendor/requests/packages/chardet/gb2312pr
ober.py,sha256=VWnjoRa83Y6V6oczMaxyUr0uy48iCnC2nzk9zfEIRHc,1681pip/_vendor/requests/packages/chardet/hebrewprober.py,sha256=8pdoUfsVXf_L4BnJde_BewS6H2yInV5688eu0nFhLHY,13359pip/_vendor/requests/packages/chardet/jisfreq.py,sha256=ZcL4R5ekHHbP2KCYGakVMBsiKqZZZAABzhwi-uRkOps,47315pip/_vendor/requests/packages/chardet/jpcntx.py,sha256=yftmp0QaF6RJO5SJs8I7LU5AF4rwP23ebeCQL4BM1OY,19348pip/_vendor/requests/packages/chardet/langbulgarianmodel.py,sha256=ZyPsA796MSVhYdfWhMCgKWckupAKAnKqWcE3Cl3ej6o,12784pip/_vendor/requests/packages/chardet/langcyrillicmodel.py,sha256=fkcd5OvogUp-GrNDWAZPgkYsSRCD2omotAEvqjlmLKE,17725pip/_vendor/requests/packages/chardet/langgreekmodel.py,sha256=QHMy31CH_ot67UCtmurCEKqKx2WwoaKrw2YCYYBK2Lw,12628pip/_vendor/requests/packages/chardet/langhebrewmodel.py,sha256=4ASl5vzKJPng4H278VHKtRYC03TpQpenlHTcsmZH1rE,11318pip/_vendor/requests/packages/chardet/langhungarianmodel.py,sha256=SXwuUzh49_cBeMXhshRHdrhlkz0T8_pZWV_pdqBKNFk,12536pip/_vendor/requests/packages/chardet/langthaimodel.py,sha256=-k7djh3dGKngAGnt3WfuoJN7acDcWcmHAPojhaUd7q4,11275pip/_vendor/requests/packages/chardet/latin1prober.py,sha256=238JHOxH8aRudJY2NmeSv5s7i0Qe3GuklIU3HlYybvg,5232pip/_vendor/requests/packages/chardet/mbcharsetprober.py,sha256=9rOCjDVsmSMp6e7q2syqak22j7lrbUZhJhMee2gbVL0,3268pip/_vendor/requests/packages/chardet/mbcsgroupprober.py,sha256=SHRzNPLpDXfMJLA8phCHVU0WgqbgDCNxDQMolGX_7yk,1967pip/_vendor/requests/packages/chardet/mbcssm.py,sha256=IKwJXyxu34n6NojmxVxC60MLFtJKm-hIfxaFEnb3uBA,19590pip/_vendor/requests/packages/chardet/sbcharsetprober.py,sha256=Xq0lODqJnDgxglBiQI4BqTFiPbn63-0a5XNA5-hVu7U,4793pip/_vendor/requests/packages/chardet/sbcsgroupprober.py,sha256=8hLyH8RAG-aohBo7o_KciWVgRo42ZE_zEtuNG1JMRYI,3291pip/_vendor/requests/packages/chardet/sjisprober.py,sha256=UYOmiMDzttYIkSDoOB08UEagivJpUXz4tuWiWzTiOr8,3764pip/_vendor/requests/packages/chardet/universaldetector.py,sha256=h-E2x6XSCzlNjycYWG0Fe4Cf1SGdaIzUNu2HCphpMZA,6840pip/_vendor/requests/packages/chardet/utf8prober.py,s
ha256=7tdNZGrJY7jZUBD483GGMkiP0Tx8Fp-cGvWHoAsilHg,2652pip/_vendor/requests/packages/urllib3/__init__.py,sha256=EF9pbHgMzqQek2Y6EZ82A8B6wETFeW7bK0K-HoZ3Ffo,2852pip/_vendor/requests/packages/urllib3/_collections.py,sha256=RP-cHyTx4AgYwvoETK8q1IVRbWFJnE0VV692ZHSbU68,10553pip/_vendor/requests/packages/urllib3/connection.py,sha256=QCmkelYgtbc06DfJtgs22na78kRTLCTbLb-OSWLbt-A,11617pip/_vendor/requests/packages/urllib3/connectionpool.py,sha256=fls19n1Y4jnwOBsZz_9F01i08xH2gZXEIyyDmWd-mKU,33591pip/_vendor/requests/packages/urllib3/exceptions.py,sha256=zGjhZCR1wefEnCN5b7WouQ3UhXesJ2bRKYIeWusaFJs,5599pip/_vendor/requests/packages/urllib3/fields.py,sha256=WUMvCLvnw7XemBq6AmCgNPJwyIJL_vWaMHaA2FLlscM,5931pip/_vendor/requests/packages/urllib3/filepost.py,sha256=NvLlFsdt8ih_Q4S2ekQF3CJG0nOXs32YI-G04_AdT2g,2320pip/_vendor/requests/packages/urllib3/poolmanager.py,sha256=9Uf0fUk0aR_s1auXgwceoN2gbaIQ08lrum_cGEA9-_U,13092pip/_vendor/requests/packages/urllib3/request.py,sha256=jET7OvA3FSjxABBRGhCyMdPvM9XuJA6df9gRhkJiJiY,5988pip/_vendor/requests/packages/urllib3/response.py,sha256=wxJSV_6pyh6Cgx7XFVGpNhpZCbh4eL7lCSFaU4ixXXc,18615pip/_vendor/requests/packages/urllib3/contrib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0pip/_vendor/requests/packages/urllib3/contrib/appengine.py,sha256=NdN_xOgDLMadUPe_dN3wdan_DH9-fxVNqFgq19tbqQs,7937pip/_vendor/requests/packages/urllib3/contrib/ntlmpool.py,sha256=r-vMDMXAGbix9a7-IhbKVTATmAst-5g4hKYOLf8Kd5M,4531pip/_vendor/requests/packages/urllib3/contrib/pyopenssl.py,sha256=JsdAh0gL4XvQzhOEBRoFtJN91qLf1LFIDEFZs95445I,11778pip/_vendor/requests/packages/urllib3/contrib/socks.py,sha256=uPHtE6R8uyUbD9R8l2wO80c87WDGZ9rou3kNOwV74eA,5668pip/_vendor/requests/packages/urllib3/packages/__init__.py,sha256=nlChrGzkjCkmhCX9HrF_qHPUgosfsPQkVIJxiiLhk9g,109pip/_vendor/requests/packages/urllib3/packages/ordered_dict.py,sha256=VQaPONfhVMsb8B63Xg7ZOydJqIE_jzeMhVN3Pec6ogw,8935pip/_vendor/requests/packages/urllib3/packages/six.py,sha256=A6hdJZVjI3t_geebZ9BzUvw
RrIXo0lfwzQlM2LcKyas,30098pip/_vendor/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py,sha256=cOWMIn1orgJoA35p6pSzO_-Dc6iOX9Dhl6D2sL9b_2o,460pip/_vendor/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.py,sha256=fK28k37hL7-D79v9iM2fHgNK9Q1Pw0M7qVRL4rkfFjQ,3778pip/_vendor/requests/packages/urllib3/util/__init__.py,sha256=n2QE9_0Bb6u8tf7LUc4qKe8V-Hz9G8lEOc9j_30Q8d0,892pip/_vendor/requests/packages/urllib3/util/connection.py,sha256=7B5Mmepg5Xd399VKE__VHxD2ObapYFrB3mWJ_EnIebs,4744pip/_vendor/requests/packages/urllib3/util/request.py,sha256=ZMDewRK-mjlK72szGIIjzYnLIn-zPP0WgJUMjKeZ6Tg,2128pip/_vendor/requests/packages/urllib3/util/response.py,sha256=1UFd5TIp9MyBp4xgnZoyQZscZVPPr0tWRaXNR5w_vds,2165pip/_vendor/requests/packages/urllib3/util/retry.py,sha256=5eA3GHR_L14qz66NU6gr-v5VbKYsvdEqOvCcsx1oLKo,10664pip/_vendor/requests/packages/urllib3/util/ssl_.py,sha256=7xR_jvQLTQA1U006wJ1bl2KuLGnD1qQvUcFM2uysedw,11622pip/_vendor/requests/packages/urllib3/util/timeout.py,sha256=ioAIYptFyBG7eU_r8_ZmO45hpj1dJE6WCvrGR9dNFjs,9596pip/_vendor/requests/packages/urllib3/util/url.py,sha256=EcX4ZfmgKWcqM4sY9FlC-yN4y_snuURPV0TpUPHNjnc,5879pip/_vendor/webencodings/__init__.py,sha256=t7rAQQxXwalY-ak9hTl73qHjhia9UH-sL-e00qQrBpo,10576pip/_vendor/webencodings/labels.py,sha256=4AO_KxTddqGtrL9ns7kAPjb0CcN6xsCIxbK37HY9r3E,8979pip/_vendor/webencodings/mklabels.py,sha256=GYIeywnpaLnP0GSic8LFWgd0UVvO_l1Nc6YoF-87R_4,1305pip/_vendor/webencodings/tests.py,sha256=7vTk7LgOJn_t1XtT_viofZlEJ7cJCzPe_hvVHOkcQl8,6562pip/_vendor/webencodings/x_user_defined.py,sha256=72cfPRhbfkRCGkkA8ZnvVV7UnoiLb5uPMhXwhrXiLPk,4306pip/commands/__init__.py,sha256=2Uq3HCdjchJD9FL1LB7rd5v6UySVAVizX0W3EX3hIoE,2244pip/commands/check.py,sha256=-A7GI1-WZBh9a4P6UoH_aR-J7I8Lz8ly7m3wnCjmevs,1382pip/commands/completion.py,sha256=kkPgVX7SUcJ_8Juw5GkgWaxHN9_45wmAr9mGs1zXEEs,2453pip/commands/download.py,sha256=8RuuPmSYgAq3iEDTqZY_1PDXRqREdUULHNjWJeAv7Mo,7810pip/commands/freeze.py,sha256=h6-yFMpjCjbNj8-gOm5Uuo
F6cg14N5rPV4TCi3_CeuI,2835pip/commands/hash.py,sha256=MCt4jEFyfoce0lVeNEz1x49uaTY-VDkKiBvvxrVcHkw,1597pip/commands/help.py,sha256=84HWkEdnGP_AEBHnn8gJP2Te0XTXRKFoXqXopbOZTNo,982pip/commands/install.py,sha256=ovG9p9n1X2NPqMgFVtSuT9kMbLAdx1r3YSSiXSvgOKI,17412pip/commands/list.py,sha256=93bCiFyt2Qut_YHkYHJMZHpXladmxsjS-yOtZeb3uqI,11369pip/commands/search.py,sha256=oTs9QNdefnrmCV_JeftG0PGiMuYVmiEDF1OUaYsmDao,4502pip/commands/show.py,sha256=ZYM57_7U8KP9MQIIyHKQdZxmiEZByy-DRzB697VFoTY,5891pip/commands/uninstall.py,sha256=tz8cXz4WdpUdnt3RvpdQwH6_SNMB50egBIZWa1dwfcc,2884pip/commands/wheel.py,sha256=z5SEhws2YRMb0Ml1IEkg6jFZMLRpLl86bHCrQbYt5zo,7729pip/compat/__init__.py,sha256=2Xs_IpsmdRgHbQgQO0c8_lPvHJnQXHyGWxPbLbYJL4c,4672pip/compat/dictconfig.py,sha256=dRrelPDWrceDSzFT51RTEVY2GuM7UDyc5Igh_tn4Fvk,23096pip/models/__init__.py,sha256=0Rs7_RA4DxeOkWT5Cq4CQzDrSEhvYcN3TH2cazr72PE,71pip/models/index.py,sha256=pUfbO__v3mD9j-2n_ClwPS8pVyx4l2wIwyvWt8GMCRA,487pip/operations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0pip/operations/check.py,sha256=uwUN9cs1sPo7c0Sj6pRrSv7b22Pk29SXUImTelVchMQ,1590pip/operations/freeze.py,sha256=k-7w7LsM-RpPv7ERBzHiPpYkH-GuYfHLyR-Cp_1VPL0,5194pip/req/__init__.py,sha256=vFwZY8_Vc1WU1zFAespg1My_r_AT3n7cN0W9eX0EFqk,276pip/req/req_file.py,sha256=fG9MDsXUNPhmGwxUiwrIXEynyD8Q7s3L47-hLZPDXq0,11926pip/req/req_install.py,sha256=gYrH-lwQMmt55VVbav_EtRIPu94cQbHFHm_Kq6AeHbg,46487pip/req/req_set.py,sha256=jHspXqcA2FxcF05dgUIAZ5huYPv6bn0wRUX0Z7PKmaA,34462pip/req/req_uninstall.py,sha256=fdH2VgCjEC8NRYDS7fRu3ZJaBBUEy-N5muwxDX5MBNM,6897pip/utils/__init__.py,sha256=HX_wYS15oiYOz-H3qG1Kbi1CY7AGWCNK5jloiD0fauc,27187pip/utils/appdirs.py,sha256=kj2LK-I2fC5QnEh_A_v-ev_IQMcXaWWF5DE39sNvCLQ,8811pip/utils/build.py,sha256=4smLRrfSCmXmjEnVnMFh2tBEpNcSLRe6J0ejZJ-wWJE,1312pip/utils/deprecation.py,sha256=X_FMjtDbMJqfqEkdRrki-mYyIdPB6I6DHUTCA_ChY6M,2232pip/utils/encoding.py,sha256=NQxGiFS5GbeAveLZTnx92t5r0PYqvt0iRnP2u9SGG1w,971pip/utils/filesystem.py,sha256=ZEVBuYM3
fqr2_lgOESh4Y7fPFszGD474zVm_M3Mb5Tk,899pip/utils/glibc.py,sha256=jcQYjt_oJLPKVZB28Kauy4Sw70zS-wawxoU1HHX36_0,2939pip/utils/hashes.py,sha256=oMk7cd3PbJgzpSQyXq1MytMud5f6H5Oa2YY5hYuCq6I,2866pip/utils/logging.py,sha256=7yWu4gZw-Qclj7X80QVdpGWkdTWGKT4LiUVKcE04pro,3327pip/utils/outdated.py,sha256=fNwOCL5r2EftPGhgCYGMKu032HC8cV-JAr9lp0HmToM,5455pip/utils/packaging.py,sha256=qhmli14odw6DIhWJgQYS2Q0RrSbr8nXNcG48f5yTRms,2080pip/utils/setuptools_build.py,sha256=0blfscmNJW_iZ5DcswJeDB_PbtTEjfK9RL1R1WEDW2E,278pip/utils/ui.py,sha256=pbDkSAeumZ6jdZcOJ2yAbx8iBgeP2zfpqNnLJK1gskQ,11597pip/vcs/__init__.py,sha256=WafFliUTHMmsSISV8PHp1M5EXDNSWyJr78zKaQmPLdY,12374pip/vcs/bazaar.py,sha256=tYTwc4b4off8mr0O2o8SiGejqBDJxcbDBMSMd9-ISYc,3803pip/vcs/git.py,sha256=5LfWryi78A-2ULjEZJvCTarJ_3l8venwXASlwm8hiug,11197pip/vcs/mercurial.py,sha256=xG6rDiwHCRytJEs23SIHBXl_SwQo2jkkdD_6rVVP5h4,3472pip/vcs/subversion.py,sha256=GAuX2Sk7IZvJyEzENKcVld_wGBrQ3fpXDlXjapZEYdI,9350pip-9.0.1.dist-info/DESCRIPTION.rst,sha256=Va8Wj1XBpTbVQ2Z41mZRJdALEeziiS_ZewWn1H2ecY4,1287pip-9.0.1.dist-info/METADATA,sha256=mvs_tLoKAbECXY_6QHiVWQsagSL-1UjolQTpScT8JSk,2529pip-9.0.1.dist-info/RECORD,,pip-9.0.1.dist-info/WHEEL,sha256=o2k-Qa-RMNIJmUdIc7KU6VWR_ErNRbWNlxDIpl7lm34,110pip-9.0.1.dist-info/entry_points.txt,sha256=GWc-Wb9WUKZ1EuVWNz-G0l3BeIpbNJLx0OJbZ61AAV0,68pip-9.0.1.dist-info/metadata.json,sha256=aqvkETDy4mHUBob-2Fn5WWlXORi_M2OSfQ2HQCUU_Fk,1565pip-9.0.1.dist-info/top_level.txt,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4../../../bin/pip,sha256=DHsfGOLi18h1PCKR6z7X2uKE5s8Wd5CKrPoDTiD_WSQ,279../../../bin/pip2,sha256=DHsfGOLi18h1PCKR6z7X2uKE5s8Wd5CKrPoDTiD_WSQ,279../../../bin/pip2.7,sha256=DHsfGOLi18h1PCKR6z7X2uKE5s8Wd5CKrPoDTiD_WSQ,279pip-9.0.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4pip/_vendor/requests/compat.pyc,,pip/_vendor/cachecontrol/cache.pyc,,pip/_vendor/requests/certs.pyc,,pip/_vendor/distlib/util.pyc,,pip/_vendor/progress/spinner.pyc,,pip/_vendor/html5lib/_tokenizer.pyc,,pip
/__init__.pyc,,pip/_vendor/requests/packages/chardet/latin1prober.pyc,,pip/_vendor/distlib/_backport/__init__.pyc,,pip/operations/__init__.pyc,,pip/_vendor/distlib/metadata.pyc,,pip/compat/__init__.pyc,,pip/_vendor/webencodings/tests.pyc,,pip/_vendor/html5lib/treebuilders/__init__.pyc,,pip/compat/dictconfig.pyc,,pip/_vendor/html5lib/_trie/datrie.pyc,,pip/_vendor/requests/packages/urllib3/request.pyc,,pip/_vendor/requests/packages/chardet/escprober.pyc,,pip/_vendor/requests/adapters.pyc,,pip/_vendor/requests/packages/chardet/langbulgarianmodel.pyc,,pip/commands/completion.pyc,,pip/_vendor/html5lib/html5parser.pyc,,pip/_vendor/html5lib/filters/lint.pyc,,pip/pep425tags.pyc,,pip/_vendor/packaging/__init__.pyc,,pip/_vendor/lockfile/linklockfile.pyc,,pip/_vendor/requests/packages/chardet/__init__.pyc,,pip/_vendor/packaging/utils.pyc,,pip/_vendor/html5lib/filters/whitespace.pyc,,pip/_vendor/requests/packages/chardet/euckrprober.pyc,,pip/_vendor/html5lib/treebuilders/etree_lxml.pyc,,pip/_vendor/distlib/database.pyc,,pip/_vendor/html5lib/treeadapters/sax.pyc,,pip/_vendor/requests/packages/chardet/euckrfreq.pyc,,pip/_vendor/requests/auth.pyc,,pip/_vendor/packaging/__about__.pyc,,pip/_vendor/progress/__init__.pyc,,pip/_vendor/cachecontrol/_cmd.pyc,,pip/_vendor/webencodings/__init__.pyc,,pip/commands/list.pyc,,pip/_vendor/distlib/scripts.pyc,,pip/_vendor/html5lib/filters/sanitizer.pyc,,pip/utils/packaging.pyc,,pip/vcs/git.pyc,,pip/cmdoptions.pyc,,pip/_vendor/lockfile/symlinklockfile.pyc,,pip/commands/hash.pyc,,pip/commands/check.pyc,,pip/_vendor/requests/packages/urllib3/util/ssl_.pyc,,pip/_vendor/html5lib/_trie/_base.pyc,,pip/req/req_file.pyc,,pip/_vendor/requests/packages/urllib3/poolmanager.pyc,,pip/req/req_set.pyc,,pip/req/__init__.pyc,,pip/_vendor/ordereddict.pyc,,pip/_vendor/colorama/win32.pyc,,pip/_vendor/distlib/resources.pyc,,pip/_vendor/requests/packages/chardet/hebrewprober.pyc,,pip/utils/__init__.pyc,,pip/_vendor/requests/packages/urllib3/packages/ssl_match_hostname
/_implementation.pyc,,pip/_vendor/lockfile/__init__.pyc,,pip/_vendor/requests/packages/urllib3/packages/six.pyc,,pip/_vendor/progress/helpers.pyc,,pip/_vendor/ipaddress.pyc,,pip/vcs/bazaar.pyc,,pip/_vendor/pyparsing.pyc,,pip/_vendor/html5lib/__init__.pyc,,pip/_vendor/requests/packages/chardet/charsetgroupprober.pyc,,pip/_vendor/cachecontrol/serialize.pyc,,pip/_vendor/requests/packages/urllib3/packages/__init__.pyc,,pip/_vendor/distlib/_backport/misc.pyc,,pip/_vendor/requests/packages/chardet/codingstatemachine.pyc,,pip/_vendor/requests/packages/urllib3/util/url.pyc,,pip/_vendor/requests/packages/urllib3/packages/ssl_match_hostname/__init__.pyc,,pip/_vendor/cachecontrol/controller.pyc,,pip/utils/deprecation.pyc,,pip/_vendor/distlib/_backport/sysconfig.pyc,,pip/_vendor/requests/packages/chardet/langgreekmodel.pyc,,pip/_vendor/requests/packages/__init__.pyc,,pip/commands/show.pyc,,pip/_vendor/requests/packages/urllib3/util/__init__.pyc,,pip/_vendor/html5lib/_utils.pyc,,pip/_vendor/html5lib/filters/alphabeticalattributes.pyc,,pip/_vendor/requests/packages/chardet/jpcntx.pyc,,pip/_vendor/colorama/initialise.pyc,,pip/_vendor/requests/packages/urllib3/contrib/appengine.pyc,,pip/_vendor/html5lib/treeadapters/genshi.pyc,,pip/commands/help.pyc,,pip/_vendor/requests/utils.pyc,,pip/_vendor/colorama/__init__.pyc,,pip/_vendor/packaging/_compat.pyc,,pip/_vendor/distlib/version.pyc,,pip/utils/ui.pyc,,pip/_vendor/requests/packages/urllib3/exceptions.pyc,,pip/commands/uninstall.pyc,,pip/_vendor/distlib/index.pyc,,pip/_vendor/cachecontrol/heuristics.pyc,,pip/_vendor/requests/sessions.pyc,,pip/_vendor/html5lib/treewalkers/__init__.pyc,,pip/_vendor/requests/packages/chardet/big5prober.pyc,,pip/_vendor/requests/packages/chardet/langthaimodel.pyc,,pip/utils/glibc.pyc,,pip/utils/hashes.pyc,,pip/_vendor/distlib/_backport/tarfile.pyc,,pip/_vendor/html5lib/treewalkers/etree_lxml.pyc,,pip/_vendor/requests/packages/urllib3/util/response.pyc,,pip/_vendor/requests/packages/chardet/mbcsgroupprober
.pyc,,pip/_vendor/html5lib/filters/__init__.pyc,,pip/baseparser.pyc,,pip/status_codes.pyc,,pip/_vendor/distlib/__init__.pyc,,pip/_vendor/pkg_resources/__init__.pyc,,pip/commands/search.pyc,,pip/_vendor/html5lib/_ihatexml.pyc,,pip/_vendor/requests/packages/chardet/langhungarianmodel.pyc,,pip/_vendor/requests/models.pyc,,pip/_vendor/requests/structures.pyc,,pip/_vendor/packaging/version.pyc,,pip/_vendor/cachecontrol/adapter.pyc,,pip/_vendor/requests/packages/urllib3/fields.pyc,,pip/_vendor/requests/packages/urllib3/contrib/ntlmpool.pyc,,pip/vcs/__init__.pyc,,pip/vcs/mercurial.pyc,,pip/_vendor/html5lib/filters/base.pyc,,pip/models/__init__.pyc,,pip/_vendor/html5lib/constants.pyc,,pip/_vendor/packaging/specifiers.pyc,,pip/_vendor/requests/packages/chardet/gb2312freq.pyc,,pip/_vendor/html5lib/treewalkers/genshi.pyc,,pip/download.pyc,,pip/commands/__init__.pyc,,pip/_vendor/requests/packages/urllib3/__init__.pyc,,pip/_vendor/html5lib/treebuilders/base.pyc,,pip/_vendor/distlib/compat.pyc,,pip/models/index.pyc,,pip/_vendor/lockfile/mkdirlockfile.pyc,,pip/req/req_uninstall.pyc,,pip/_vendor/distlib/wheel.pyc,,pip/_vendor/requests/packages/chardet/euctwprober.pyc,,pip/utils/build.pyc,,pip/_vendor/requests/packages/chardet/escsm.pyc,,pip/_vendor/requests/status_codes.pyc,,pip/_vendor/requests/exceptions.pyc,,pip/_vendor/appdirs.pyc,,pip/_vendor/distlib/markers.pyc,,pip/index.pyc,,pip/utils/logging.pyc,,pip/_vendor/packaging/requirements.pyc,,pip/_vendor/requests/packages/chardet/cp949prober.pyc,,pip/utils/outdated.pyc,,pip/_vendor/requests/api.pyc,,pip/_vendor/requests/packages/urllib3/filepost.pyc,,pip/_vendor/requests/packages/chardet/big5freq.pyc,,pip/_vendor/html5lib/treebuilders/etree.pyc,,pip/_vendor/cachecontrol/caches/file_cache.pyc,,pip/_vendor/html5lib/treebuilders/dom.pyc,,pip/_vendor/requests/packages/chardet/mbcssm.pyc,,pip/_vendor/distlib/_backport/shutil.pyc,,pip/_vendor/requests/packages/chardet/sbcsgroupprober.pyc,,pip/utils/encoding.pyc,,pip/__main__.pyc,,pip/_
vendor/cachecontrol/compat.pyc,,pip/operations/freeze.pyc,,pip/_vendor/lockfile/sqlitelockfile.pyc,,pip/_vendor/requests/packages/chardet/utf8prober.pyc,,pip/_vendor/requests/packages/chardet/langhebrewmodel.pyc,,pip/_vendor/requests/packages/chardet/compat.pyc,,pip/_vendor/progress/counter.pyc,,pip/commands/install.pyc,,pip/utils/filesystem.pyc,,pip/_vendor/six.pyc,,pip/_vendor/requests/packages/chardet/langcyrillicmodel.pyc,,pip/_vendor/requests/packages/chardet/constants.pyc,,pip/_vendor/requests/packages/urllib3/packages/ordered_dict.pyc,,pip/_vendor/requests/packages/chardet/eucjpprober.pyc,,pip/_vendor/retrying.pyc,,pip/_vendor/html5lib/treewalkers/etree.pyc,,pip/_vendor/cachecontrol/wrapper.pyc,,pip/_vendor/requests/hooks.pyc,,pip/_vendor/packaging/_structures.pyc,,pip/_vendor/requests/packages/urllib3/connectionpool.pyc,,pip/_vendor/requests/packages/chardet/mbcharsetprober.pyc,,pip/locations.pyc,,pip/_vendor/html5lib/_inputstream.pyc,,pip/_vendor/requests/packages/chardet/jisfreq.pyc,,pip/utils/setuptools_build.pyc,,pip/vcs/subversion.pyc,,pip/_vendor/requests/packages/urllib3/util/connection.pyc,,pip/exceptions.pyc,,pip/basecommand.pyc,,pip/_vendor/html5lib/_trie/py.pyc,,pip/_vendor/distlib/locators.pyc,,pip/_vendor/re-vendor.pyc,,pip/_vendor/html5lib/treewalkers/dom.pyc,,pip/_vendor/requests/packages/urllib3/contrib/__init__.pyc,,pip/_vendor/requests/packages/chardet/euctwfreq.pyc,,pip/commands/download.pyc,,pip/_vendor/requests/packages/chardet/chardistribution.pyc,,pip/_vendor/cachecontrol/caches/__init__.pyc,,pip/_vendor/webencodings/labels.pyc,,pip/_vendor/webencodings/x_user_defined.pyc,,pip/_vendor/html5lib/serializer.pyc,,pip/commands/wheel.pyc,,pip/_vendor/requests/packages/chardet/sbcharsetprober.pyc,,pip/_vendor/colorama/ansitowin32.pyc,,pip/commands/freeze.pyc,,pip/_vendor/cachecontrol/filewrapper.pyc,,pip/_vendor/requests/packages/chardet/sjisprober.pyc,,pip/_vendor/html5lib/_trie/__init__.pyc,,pip/_vendor/requests/packages/urllib3/util/timeou
t.pyc,,pip/_vendor/requests/cookies.pyc,,pip/_vendor/requests/packages/urllib3/_collections.pyc,,pip/_vendor/webencodings/mklabels.pyc,,pip/_vendor/html5lib/treewalkers/base.pyc,,pip/_vendor/requests/packages/urllib3/util/request.pyc,,pip/_vendor/distlib/manifest.pyc,,pip/_vendor/requests/packages/urllib3/response.pyc,,pip/req/req_install.pyc,,pip/_vendor/html5lib/treeadapters/__init__.pyc,,pip/_vendor/cachecontrol/caches/redis_cache.pyc,,pip/_vendor/html5lib/filters/inject_meta_charset.pyc,,pip/_vendor/requests/packages/chardet/charsetprober.pyc,,pip/_vendor/requests/packages/urllib3/util/retry.pyc,,pip/_vendor/cachecontrol/__init__.pyc,,pip/_vendor/__init__.pyc,,pip/_vendor/requests/packages/chardet/universaldetector.pyc,,pip/_vendor/colorama/ansi.pyc,,pip/_vendor/packaging/markers.pyc,,pip/_vendor/requests/packages/chardet/gb2312prober.pyc,,pip/_vendor/distro.pyc,,pip/_vendor/requests/packages/urllib3/contrib/pyopenssl.pyc,,pip/_vendor/progress/bar.pyc,,pip/wheel.pyc,,pip/_vendor/lockfile/pidlockfile.pyc,,pip/_vendor/requests/packages/urllib3/contrib/socks.pyc,,pip/_vendor/requests/__init__.pyc,,pip/_vendor/requests/packages/chardet/chardetect.pyc,,pip/_vendor/html5lib/filters/optionaltags.pyc,,pip/_vendor/requests/packages/urllib3/connection.pyc,,pip/utils/appdirs.pyc,,pip/_vendor/colorama/winterm.pyc,,pip/operations/check.pyc,,
Metadata-Version: 2.0Name: pipVersion: 9.0.1Summary: The PyPA recommended tool for installing Python packages.Home-page: https://pip.pypa.io/Author: The pip developersAuthor-email: python-virtualenv@groups.google.comLicense: MITKeywords: easy_install distutils setuptools egg virtualenvPlatform: UNKNOWNClassifier: Development Status :: 5 - Production/StableClassifier: Intended Audience :: DevelopersClassifier: License :: OSI Approved :: MIT LicenseClassifier: Topic :: Software Development :: Build ToolsClassifier: Programming Language :: Python :: 2Classifier: Programming Language :: Python :: 2.6Classifier: Programming Language :: Python :: 2.7Classifier: Programming Language :: Python :: 3Classifier: Programming Language :: Python :: 3.3Classifier: Programming Language :: Python :: 3.4Classifier: Programming Language :: Python :: 3.5Classifier: Programming Language :: Python :: Implementation :: PyPyRequires-Python: >=2.6,!=3.0.*,!=3.1.*,!=3.2.*Provides-Extra: testingRequires-Dist: mock; extra == 'testing'Requires-Dist: pretend; extra == 'testing'Requires-Dist: pytest; extra == 'testing'Requires-Dist: scripttest (>=1.3); extra == 'testing'Requires-Dist: virtualenv (>=1.10); extra == 'testing'pip===The `PyPA recommended<https://packaging.python.org/en/latest/current/>`_tool for installing Python packages.* `Installation <https://pip.pypa.io/en/stable/installing.html>`_* `Documentation <https://pip.pypa.io/>`_* `Changelog <https://pip.pypa.io/en/stable/news.html>`_* `Github Page <https://github.com/pypa/pip>`_* `Issue Tracking <https://github.com/pypa/pip/issues>`_* `User mailing list <http://groups.google.com/group/python-virtualenv>`_* `Dev mailing list <http://groups.google.com/group/pypa-dev>`_* User IRC: #pypa on Freenode.* Dev IRC: #pypa-dev on Freenode... image:: https://img.shields.io/pypi/v/pip.svg:target: https://pypi.python.org/pypi/pip.. image:: https://img.shields.io/travis/pypa/pip/master.svg:target: http://travis-ci.org/pypa/pip.. 
image:: https://img.shields.io/appveyor/ci/pypa/pip.svg:target: https://ci.appveyor.com/project/pypa/pip/history.. image:: https://readthedocs.org/projects/pip/badge/?version=stable:target: https://pip.pypa.io/en/stableCode of Conduct---------------Everyone interacting in the pip project's codebases, issue trackers, chatrooms, and mailing lists is expected to follow the `PyPA Code of Conduct`_... _PyPA Code of Conduct: https://www.pypa.io/en/latest/code-of-conduct/
pip
pip===The `PyPA recommended<https://packaging.python.org/en/latest/current/>`_tool for installing Python packages.* `Installation <https://pip.pypa.io/en/stable/installing.html>`_* `Documentation <https://pip.pypa.io/>`_* `Changelog <https://pip.pypa.io/en/stable/news.html>`_* `Github Page <https://github.com/pypa/pip>`_* `Issue Tracking <https://github.com/pypa/pip/issues>`_* `User mailing list <http://groups.google.com/group/python-virtualenv>`_* `Dev mailing list <http://groups.google.com/group/pypa-dev>`_* User IRC: #pypa on Freenode.* Dev IRC: #pypa-dev on Freenode... image:: https://img.shields.io/pypi/v/pip.svg:target: https://pypi.python.org/pypi/pip.. image:: https://img.shields.io/travis/pypa/pip/master.svg:target: http://travis-ci.org/pypa/pip.. image:: https://img.shields.io/appveyor/ci/pypa/pip.svg:target: https://ci.appveyor.com/project/pypa/pip/history.. image:: https://readthedocs.org/projects/pip/badge/?version=stable:target: https://pip.pypa.io/en/stableCode of Conduct---------------Everyone interacting in the pip project's codebases, issue trackers, chatrooms, and mailing lists is expected to follow the `PyPA Code of Conduct`_... _PyPA Code of Conduct: https://www.pypa.io/en/latest/code-of-conduct/
"""Support for installing and building the "wheel" binary package format."""from __future__ import absolute_importimport compileallimport csvimport errnoimport functoolsimport hashlibimport loggingimport osimport os.pathimport reimport shutilimport statimport sysimport tempfileimport warningsfrom base64 import urlsafe_b64encodefrom email.parser import Parserfrom pip._vendor.six import StringIOimport pipfrom pip.compat import expanduserfrom pip.download import path_to_url, unpack_urlfrom pip.exceptions import (InstallationError, InvalidWheelFilename, UnsupportedWheel)from pip.locations import distutils_scheme, PIP_DELETE_MARKER_FILENAMEfrom pip import pep425tagsfrom pip.utils import (call_subprocess, ensure_dir, captured_stdout, rmtree, read_chunks,)from pip.utils.ui import open_spinnerfrom pip.utils.logging import indent_logfrom pip.utils.setuptools_build import SETUPTOOLS_SHIMfrom pip._vendor.distlib.scripts import ScriptMakerfrom pip._vendor import pkg_resourcesfrom pip._vendor.packaging.utils import canonicalize_namefrom pip._vendor.six.moves import configparserwheel_ext = '.whl'VERSION_COMPATIBLE = (1, 0)logger = logging.getLogger(__name__)class WheelCache(object):"""A cache of wheels for future installs."""def __init__(self, cache_dir, format_control):"""Create a wheel cache.:param cache_dir: The root of the cache.:param format_control: A pip.index.FormatControl object to limitbinaries being read from the cache."""self._cache_dir = expanduser(cache_dir) if cache_dir else Noneself._format_control = format_controldef cached_wheel(self, link, package_name):return cached_wheel(self._cache_dir, link, self._format_control, package_name)def _cache_for_link(cache_dir, link):"""Return a directory to store cached wheels in for link.Because there are M wheels for any one sdist, we provide a directoryto cache them in, and then consult that directory when looking upcache hits.We only insert things into the cache if they have plausible versionnumbers, so that we don't 
contaminate the cache with things that were notunique. E.g. ./package might have dozens of installs done for it and builda version of 0.0...and if we built and cached a wheel, we'd end up usingthe same wheel even if the source has been edited.:param cache_dir: The cache_dir being used by pip.:param link: The link of the sdist for which this will cache wheels."""# We want to generate an url to use as our cache key, we don't want to just# re-use the URL because it might have other items in the fragment and we# don't care about those.key_parts = [link.url_without_fragment]if link.hash_name is not None and link.hash is not None:key_parts.append("=".join([link.hash_name, link.hash]))key_url = "#".join(key_parts)# Encode our key url with sha224, we'll use this because it has similar# security properties to sha256, but with a shorter total output (and thus# less secure). However the differences don't make a lot of difference for# our use case here.hashed = hashlib.sha224(key_url.encode()).hexdigest()# We want to nest the directories some to prevent having a ton of top level# directories where we might run out of sub directories on some FS.parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]# Inside of the base location for cached wheels, expand our parts and join# them all together.return os.path.join(cache_dir, "wheels", *parts)def cached_wheel(cache_dir, link, format_control, package_name):if not cache_dir:return linkif not link:return linkif link.is_wheel:return linkif not link.is_artifact:return linkif not package_name:return linkcanonical_name = canonicalize_name(package_name)formats = pip.index.fmt_ctl_formats(format_control, canonical_name)if "binary" not in formats:return linkroot = _cache_for_link(cache_dir, link)try:wheel_names = os.listdir(root)except OSError as e:if e.errno in (errno.ENOENT, errno.ENOTDIR):return linkraisecandidates = []for wheel_name in wheel_names:try:wheel = Wheel(wheel_name)except InvalidWheelFilename:continueif not 
wheel.supported():# Built for a different python/arch/etccontinuecandidates.append((wheel.support_index_min(), wheel_name))if not candidates:return linkcandidates.sort()path = os.path.join(root, candidates[0][1])return pip.index.Link(path_to_url(path))def rehash(path, algo='sha256', blocksize=1 << 20):"""Return (hash, length) for path using hashlib.new(algo)"""h = hashlib.new(algo)length = 0with open(path, 'rb') as f:for block in read_chunks(f, size=blocksize):length += len(block)h.update(block)digest = 'sha256=' + urlsafe_b64encode(h.digest()).decode('latin1').rstrip('=')return (digest, length)def open_for_csv(name, mode):if sys.version_info[0] < 3:nl = {}bin = 'b'else:nl = {'newline': ''}bin = ''return open(name, mode + bin, **nl)def fix_script(path):"""Replace #!python with #!/path/to/pythonReturn True if file was changed."""# XXX RECORD hashes will need to be updatedif os.path.isfile(path):with open(path, 'rb') as script:firstline = script.readline()if not firstline.startswith(b'#!python'):return Falseexename = sys.executable.encode(sys.getfilesystemencoding())firstline = b'#!' + exename + os.linesep.encode("ascii")rest = script.read()with open(path, 'wb') as script:script.write(firstline)script.write(rest)return Truedist_info_re = re.compile(r"""^(?P<namever>(?P<name>.+?)(-(?P<ver>\d.+?))?)\.dist-info$""", re.VERBOSE)def root_is_purelib(name, wheeldir):"""Return True if the extracted wheel in wheeldir should go into purelib."""name_folded = name.replace("-", "_")for item in os.listdir(wheeldir):match = dist_info_re.match(item)if match and match.group('name') == name_folded:with open(os.path.join(wheeldir, item, 'WHEEL')) as wheel:for line in wheel:line = line.lower().rstrip()if line == "root-is-purelib: true":return Truereturn Falsedef get_entrypoints(filename):if not os.path.exists(filename):return {}, {}# This is done because you can pass a string to entry_points wrappers which# means that they may or may not be valid INI files. 
The attempt here is to# strip leading and trailing whitespace in order to make them valid INI# files.with open(filename) as fp:data = StringIO()for line in fp:data.write(line.strip())data.write("\n")data.seek(0)cp = configparser.RawConfigParser()cp.optionxform = lambda option: optioncp.readfp(data)console = {}gui = {}if cp.has_section('console_scripts'):console = dict(cp.items('console_scripts'))if cp.has_section('gui_scripts'):gui = dict(cp.items('gui_scripts'))return console, guidef move_wheel_files(name, req, wheeldir, user=False, home=None, root=None,pycompile=True, scheme=None, isolated=False, prefix=None):"""Install a wheel"""if not scheme:scheme = distutils_scheme(name, user=user, home=home, root=root, isolated=isolated,prefix=prefix,)if root_is_purelib(name, wheeldir):lib_dir = scheme['purelib']else:lib_dir = scheme['platlib']info_dir = []data_dirs = []source = wheeldir.rstrip(os.path.sep) + os.path.sep# Record details of the files moved# installed = files copied from the wheel to the destination# changed = files changed while installing (scripts #! 
line typically)# generated = files newly generated during the install (script wrappers)installed = {}changed = set()generated = []# Compile all of the pyc files that we're going to be installingif pycompile:with captured_stdout() as stdout:with warnings.catch_warnings():warnings.filterwarnings('ignore')compileall.compile_dir(source, force=True, quiet=True)logger.debug(stdout.getvalue())def normpath(src, p):return os.path.relpath(src, p).replace(os.path.sep, '/')def record_installed(srcfile, destfile, modified=False):"""Map archive RECORD paths to installation RECORD paths."""oldpath = normpath(srcfile, wheeldir)newpath = normpath(destfile, lib_dir)installed[oldpath] = newpathif modified:changed.add(destfile)def clobber(source, dest, is_base, fixer=None, filter=None):ensure_dir(dest) # common for the 'include' pathfor dir, subdirs, files in os.walk(source):basedir = dir[len(source):].lstrip(os.path.sep)destdir = os.path.join(dest, basedir)if is_base and basedir.split(os.path.sep, 1)[0].endswith('.data'):continuefor s in subdirs:destsubdir = os.path.join(dest, basedir, s)if is_base and basedir == '' and destsubdir.endswith('.data'):data_dirs.append(s)continueelif (is_base ands.endswith('.dist-info') andcanonicalize_name(s).startswith(canonicalize_name(req.name))):assert not info_dir, ('Multiple .dist-info directories: ' +destsubdir + ', ' +', '.join(info_dir))info_dir.append(destsubdir)for f in files:# Skip unwanted filesif filter and filter(f):continuesrcfile = os.path.join(dir, f)destfile = os.path.join(dest, basedir, f)# directory creation is lazy and after the file filtering above# to ensure we don't install empty dirs; empty dirs can't be# uninstalled.ensure_dir(destdir)# We use copyfile (not move, copy, or copy2) to be extra sure# that we are not moving directories over (copyfile fails for# directories) as well as to ensure that we are not copying# over any metadata because we want more control over what# metadata we actually copy over.shutil.copyfile(srcfile, 
destfile)# Copy over the metadata for the file, currently this only# includes the atime and mtime.st = os.stat(srcfile)if hasattr(os, "utime"):os.utime(destfile, (st.st_atime, st.st_mtime))# If our file is executable, then make our destination file# executable.if os.access(srcfile, os.X_OK):st = os.stat(srcfile)permissions = (st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)os.chmod(destfile, permissions)changed = Falseif fixer:changed = fixer(destfile)record_installed(srcfile, destfile, changed)clobber(source, lib_dir, True)assert info_dir, "%s .dist-info directory not found" % req# Get the defined entry pointsep_file = os.path.join(info_dir[0], 'entry_points.txt')console, gui = get_entrypoints(ep_file)def is_entrypoint_wrapper(name):# EP, EP.exe and EP-script.py are scripts generated for# entry point EP by setuptoolsif name.lower().endswith('.exe'):matchname = name[:-4]elif name.lower().endswith('-script.py'):matchname = name[:-10]elif name.lower().endswith(".pya"):matchname = name[:-4]else:matchname = name# Ignore setuptools-generated scriptsreturn (matchname in console or matchname in gui)for datadir in data_dirs:fixer = Nonefilter = Nonefor subdir in os.listdir(os.path.join(wheeldir, datadir)):fixer = Noneif subdir == 'scripts':fixer = fix_scriptfilter = is_entrypoint_wrappersource = os.path.join(wheeldir, datadir, subdir)dest = scheme[subdir]clobber(source, dest, False, fixer=fixer, filter=filter)maker = ScriptMaker(None, scheme['scripts'])# Ensure old scripts are overwritten.# See https://github.com/pypa/pip/issues/1800maker.clobber = True# Ensure we don't generate any variants for scripts because this is almost# never what somebody wants.# See https://bitbucket.org/pypa/distlib/issue/35/maker.variants = set(('', ))# This is required because otherwise distlib creates scripts that are not# executable.# See https://bitbucket.org/pypa/distlib/issue/32/maker.set_mode = True# Simplify the script and fix the fact that the default script swallows# every 
single stack trace.# See https://bitbucket.org/pypa/distlib/issue/34/# See https://bitbucket.org/pypa/distlib/issue/33/def _get_script_text(entry):if entry.suffix is None:raise InstallationError("Invalid script entry point: %s for req: %s - A callable ""suffix is required. Cf https://packaging.python.org/en/""latest/distributing.html#console-scripts for more ""information." % (entry, req))return maker.script_template % {"module": entry.prefix,"import_name": entry.suffix.split(".")[0],"func": entry.suffix,}maker._get_script_text = _get_script_textmaker.script_template = """# -*- coding: utf-8 -*-import reimport sysfrom %(module)s import %(import_name)sif __name__ == '__main__':sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])sys.exit(%(func)s())"""# Special case pip and setuptools to generate versioned wrappers## The issue is that some projects (specifically, pip and setuptools) use# code in setup.py to create "versioned" entry points - pip2.7 on Python# 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into# the wheel metadata at build time, and so if the wheel is installed with# a *different* version of Python the entry points will be wrong. The# correct fix for this is to enhance the metadata to be able to describe# such versioned entry points, but that won't happen till Metadata 2.0 is# available.# In the meantime, projects using versioned entry points will either have# incorrect versioned entry points, or they will not be able to distribute# "universal" wheels (i.e., they will need a wheel per Python version).## Because setuptools and pip are bundled with _ensurepip and virtualenv,# we need to use universal wheels. So, as a stopgap until Metadata 2.0, we# override the versioned entry points in the wheel and generate the# correct ones. 
This code is purely a short-term measure until Metadata 2.0# is available.## To add the level of hack in this section of code, in order to support# ensurepip this code will look for an ``ENSUREPIP_OPTIONS`` environment# variable which will control which version scripts get installed.## ENSUREPIP_OPTIONS=altinstall# - Only pipX.Y and easy_install-X.Y will be generated and installed# ENSUREPIP_OPTIONS=install# - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note# that this option is technically if ENSUREPIP_OPTIONS is set and is# not altinstall# DEFAULT# - The default behavior is to install pip, pipX, pipX.Y, easy_install# and easy_install-X.Y.pip_script = console.pop('pip', None)if pip_script:if "ENSUREPIP_OPTIONS" not in os.environ:spec = 'pip = ' + pip_scriptgenerated.extend(maker.make(spec))if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":spec = 'pip%s = %s' % (sys.version[:1], pip_script)generated.extend(maker.make(spec))spec = 'pip%s = %s' % (sys.version[:3], pip_script)generated.extend(maker.make(spec))# Delete any other versioned pip entry pointspip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)]for k in pip_ep:del console[k]easy_install_script = console.pop('easy_install', None)if easy_install_script:if "ENSUREPIP_OPTIONS" not in os.environ:spec = 'easy_install = ' + easy_install_scriptgenerated.extend(maker.make(spec))spec = 'easy_install-%s = %s' % (sys.version[:3], easy_install_script)generated.extend(maker.make(spec))# Delete any other versioned easy_install entry pointseasy_install_ep = [k for k in console if re.match(r'easy_install(-\d\.\d)?$', k)]for k in easy_install_ep:del console[k]# Generate the console and GUI entry points specified in the wheelif len(console) > 0:generated.extend(maker.make_multiple(['%s = %s' % kv for kv in console.items()]))if len(gui) > 0:generated.extend(maker.make_multiple(['%s = %s' % kv for kv in gui.items()],{'gui': True}))# Record pip as the installerinstaller = 
os.path.join(info_dir[0], 'INSTALLER')temp_installer = os.path.join(info_dir[0], 'INSTALLER.pip')with open(temp_installer, 'wb') as installer_file:installer_file.write(b'pip\n')shutil.move(temp_installer, installer)generated.append(installer)# Record details of all files installedrecord = os.path.join(info_dir[0], 'RECORD')temp_record = os.path.join(info_dir[0], 'RECORD.pip')with open_for_csv(record, 'r') as record_in:with open_for_csv(temp_record, 'w+') as record_out:reader = csv.reader(record_in)writer = csv.writer(record_out)for row in reader:row[0] = installed.pop(row[0], row[0])if row[0] in changed:row[1], row[2] = rehash(row[0])writer.writerow(row)for f in generated:h, l = rehash(f)writer.writerow((normpath(f, lib_dir), h, l))for f in installed:writer.writerow((installed[f], '', ''))shutil.move(temp_record, record)def _unique(fn):@functools.wraps(fn)def unique(*args, **kw):seen = set()for item in fn(*args, **kw):if item not in seen:seen.add(item)yield itemreturn unique# TODO: this goes somewhere besides the wheel module@_uniquedef uninstallation_paths(dist):"""Yield all the uninstallation paths for dist based on RECORD-without-.pycYield paths to all the files in RECORD. 
For each .py file in RECORD, addthe .pyc in the same directory.UninstallPathSet.add() takes care of the __pycache__ .pyc."""from pip.utils import FakeFile # circular importr = csv.reader(FakeFile(dist.get_metadata_lines('RECORD')))for row in r:path = os.path.join(dist.location, row[0])yield pathif path.endswith('.py'):dn, fn = os.path.split(path)base = fn[:-3]path = os.path.join(dn, base + '.pyc')yield pathdef wheel_version(source_dir):"""Return the Wheel-Version of an extracted wheel, if possible.Otherwise, return False if we couldn't parse / extract it."""try:dist = [d for d in pkg_resources.find_on_path(None, source_dir)][0]wheel_data = dist.get_metadata('WHEEL')wheel_data = Parser().parsestr(wheel_data)version = wheel_data['Wheel-Version'].strip()version = tuple(map(int, version.split('.')))return versionexcept:return Falsedef check_compatibility(version, name):"""Raises errors or warns if called with an incompatible Wheel-Version.Pip should refuse to install a Wheel-Version that's a major seriesahead of what it's compatible with (e.g 2.0 > 1.1); and warn wheninstalling a version only minor version ahead (e.g 1.2 > 1.1).version: a 2-tuple representing a Wheel-Version (Major, Minor)name: name of wheel or package to raise exception about:raises UnsupportedWheel: when an incompatible Wheel-Version is given"""if not version:raise UnsupportedWheel("%s is in an unsupported or invalid wheel" % name)if version[0] > VERSION_COMPATIBLE[0]:raise UnsupportedWheel("%s's Wheel-Version (%s) is not compatible with this version ""of pip" % (name, '.'.join(map(str, version))))elif version > VERSION_COMPATIBLE:logger.warning('Installing from a newer Wheel-Version (%s)','.'.join(map(str, version)),)class Wheel(object):"""A wheel file"""# TODO: maybe move the install code into this classwheel_file_re = re.compile(r"""^(?P<namever>(?P<name>.+?)-(?P<ver>\d.*?))((-(?P<build>\d.*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?)\.whl|\.dist-info)$""",re.VERBOSE)def __init__(self, 
filename):""":raises InvalidWheelFilename: when the filename is invalid for a wheel"""wheel_info = self.wheel_file_re.match(filename)if not wheel_info:raise InvalidWheelFilename("%s is not a valid wheel filename." % filename)self.filename = filenameself.name = wheel_info.group('name').replace('_', '-')# we'll assume "_" means "-" due to wheel naming scheme# (https://github.com/pypa/pip/issues/1150)self.version = wheel_info.group('ver').replace('_', '-')self.pyversions = wheel_info.group('pyver').split('.')self.abis = wheel_info.group('abi').split('.')self.plats = wheel_info.group('plat').split('.')# All the tag combinations from this fileself.file_tags = set((x, y, z) for x in self.pyversionsfor y in self.abis for z in self.plats)def support_index_min(self, tags=None):"""Return the lowest index that one of the wheel's file_tag combinationsachieves in the supported_tags list e.g. if there are 8 supported tags,and one of the file tags is first in the list, then return 0. ReturnsNone is the wheel is not supported."""if tags is None: # for mocktags = pep425tags.supported_tagsindexes = [tags.index(c) for c in self.file_tags if c in tags]return min(indexes) if indexes else Nonedef supported(self, tags=None):"""Is this wheel supported on this system?"""if tags is None: # for mocktags = pep425tags.supported_tagsreturn bool(set(tags).intersection(self.file_tags))class WheelBuilder(object):"""Build wheels from a RequirementSet."""def __init__(self, requirement_set, finder, build_options=None,global_options=None):self.requirement_set = requirement_setself.finder = finderself._cache_root = requirement_set._wheel_cache._cache_dirself._wheel_dir = requirement_set.wheel_download_dirself.build_options = build_options or []self.global_options = global_options or []def _build_one(self, req, output_dir, python_tag=None):"""Build one wheel.:return: The filename of the built wheel, or None if the build failed."""tempd = tempfile.mkdtemp('pip-wheel-')try:if self.__build_one(req, tempd, 
python_tag=python_tag):try:wheel_name = os.listdir(tempd)[0]wheel_path = os.path.join(output_dir, wheel_name)shutil.move(os.path.join(tempd, wheel_name), wheel_path)logger.info('Stored in directory: %s', output_dir)return wheel_pathexcept:pass# Ignore return, we can't do anything else useful.self._clean_one(req)return Nonefinally:rmtree(tempd)def _base_setup_args(self, req):return [sys.executable, "-u", '-c',SETUPTOOLS_SHIM % req.setup_py] + list(self.global_options)def __build_one(self, req, tempd, python_tag=None):base_args = self._base_setup_args(req)spin_message = 'Running setup.py bdist_wheel for %s' % (req.name,)with open_spinner(spin_message) as spinner:logger.debug('Destination directory: %s', tempd)wheel_args = base_args + ['bdist_wheel', '-d', tempd] \+ self.build_optionsif python_tag is not None:wheel_args += ["--python-tag", python_tag]try:call_subprocess(wheel_args, cwd=req.setup_py_dir,show_stdout=False, spinner=spinner)return Trueexcept:spinner.finish("error")logger.error('Failed building wheel for %s', req.name)return Falsedef _clean_one(self, req):base_args = self._base_setup_args(req)logger.info('Running setup.py clean for %s', req.name)clean_args = base_args + ['clean', '--all']try:call_subprocess(clean_args, cwd=req.source_dir, show_stdout=False)return Trueexcept:logger.error('Failed cleaning build dir for %s', req.name)return Falsedef build(self, autobuilding=False):"""Build wheels.:param unpack: If True, replace the sdist we built from with thenewly built wheel, in preparation for installation.:return: True if all the wheels built correctly."""assert self._wheel_dir or (autobuilding and self._cache_root)# unpack sdists and constructs req setself.requirement_set.prepare_files(self.finder)reqset = self.requirement_set.requirements.values()buildset = []for req in reqset:if req.constraint:continueif req.is_wheel:if not autobuilding:logger.info('Skipping %s, due to already being wheel.', req.name)elif autobuilding and req.editable:passelif 
autobuilding and req.link and not req.link.is_artifact:passelif autobuilding and not req.source_dir:passelse:if autobuilding:link = req.linkbase, ext = link.splitext()if pip.index.egg_info_matches(base, None, link) is None:# Doesn't look like a package - don't autobuild a wheel# because we'll have no way to lookup the result sanelycontinueif "binary" not in pip.index.fmt_ctl_formats(self.finder.format_control,canonicalize_name(req.name)):logger.info("Skipping bdist_wheel for %s, due to binaries ""being disabled for it.", req.name)continuebuildset.append(req)if not buildset:return True# Build the wheels.logger.info('Building wheels for collected packages: %s',', '.join([req.name for req in buildset]),)with indent_log():build_success, build_failure = [], []for req in buildset:python_tag = Noneif autobuilding:python_tag = pep425tags.implementation_tagoutput_dir = _cache_for_link(self._cache_root, req.link)try:ensure_dir(output_dir)except OSError as e:logger.warning("Building wheel for %s failed: %s",req.name, e)build_failure.append(req)continueelse:output_dir = self._wheel_dirwheel_file = self._build_one(req, output_dir,python_tag=python_tag,)if wheel_file:build_success.append(req)if autobuilding:# XXX: This is mildly duplicative with prepare_files,# but not close enough to pull out to a single common# method.# The code below assumes temporary source dirs -# prevent it doing bad things.if req.source_dir and not os.path.exists(os.path.join(req.source_dir, PIP_DELETE_MARKER_FILENAME)):raise AssertionError("bad source dir - missing marker")# Delete the source we built the wheel fromreq.remove_temporary_source()# set the build directory again - name is known from# the work prepare_files did.req.source_dir = req.build_location(self.requirement_set.build_dir)# Update the link for this.req.link = pip.index.Link(path_to_url(wheel_file))assert req.link.is_wheel# extract the wheel into the dirunpack_url(req.link, req.source_dir, None, 
False,session=self.requirement_set.session)else:build_failure.append(req)# notify success/failureif build_success:logger.info('Successfully built %s',' '.join([req.name for req in build_success]),)if build_failure:logger.info('Failed to build %s',' '.join([req.name for req in build_failure]),)# Return True if all builds were successfulreturn len(build_failure) == 0
from __future__ import absolute_importimport loggingimport osimport refrom pip._vendor.six.moves.urllib import parse as urllib_parsefrom pip.index import Linkfrom pip.utils import rmtree, display_pathfrom pip.utils.logging import indent_logfrom pip.vcs import vcs, VersionControl_svn_xml_url_re = re.compile('url="([^"]+)"')_svn_rev_re = re.compile('committed-rev="(\d+)"')_svn_url_re = re.compile(r'URL: (.+)')_svn_revision_re = re.compile(r'Revision: (.+)')_svn_info_xml_rev_re = re.compile(r'\s*revision="(\d+)"')_svn_info_xml_url_re = re.compile(r'<url>(.*)</url>')logger = logging.getLogger(__name__)class Subversion(VersionControl):name = 'svn'dirname = '.svn'repo_name = 'checkout'schemes = ('svn', 'svn+ssh', 'svn+http', 'svn+https', 'svn+svn')def get_info(self, location):"""Returns (url, revision), where both are strings"""assert not location.rstrip('/').endswith(self.dirname), \'Bad directory: %s' % locationoutput = self.run_command(['info', location],show_stdout=False,extra_environ={'LANG': 'C'},)match = _svn_url_re.search(output)if not match:logger.warning('Cannot determine URL of svn checkout %s',display_path(location),)logger.debug('Output that cannot be parsed: \n%s', output)return None, Noneurl = match.group(1).strip()match = _svn_revision_re.search(output)if not match:logger.warning('Cannot determine revision of svn checkout %s',display_path(location),)logger.debug('Output that cannot be parsed: \n%s', output)return url, Nonereturn url, match.group(1)def export(self, location):"""Export the svn repository at the url to the destination location"""url, rev = self.get_url_rev()rev_options = get_rev_options(url, rev)url = self.remove_auth_from_url(url)logger.info('Exporting svn repository %s to %s', url, location)with indent_log():if os.path.exists(location):# Subversion doesn't like to check out over an existing# directory --force fixes this, but was only added in svn 1.5rmtree(location)self.run_command(['export'] + rev_options + [url, 
location],show_stdout=False)def switch(self, dest, url, rev_options):self.run_command(['switch'] + rev_options + [url, dest])def update(self, dest, rev_options):self.run_command(['update'] + rev_options + [dest])def obtain(self, dest):url, rev = self.get_url_rev()rev_options = get_rev_options(url, rev)url = self.remove_auth_from_url(url)if rev:rev_display = ' (to revision %s)' % revelse:rev_display = ''if self.check_destination(dest, url, rev_options, rev_display):logger.info('Checking out %s%s to %s',url,rev_display,display_path(dest),)self.run_command(['checkout', '-q'] + rev_options + [url, dest])def get_location(self, dist, dependency_links):for url in dependency_links:egg_fragment = Link(url).egg_fragmentif not egg_fragment:continueif '-' in egg_fragment:# FIXME: will this work when a package has - in the name?key = '-'.join(egg_fragment.split('-')[:-1]).lower()else:key = egg_fragmentif key == dist.key:return url.split('#', 1)[0]return Nonedef get_revision(self, location):"""Return the maximum revision for all files under a given location"""# Note: taken from setuptools.command.egg_inforevision = 0for base, dirs, files in os.walk(location):if self.dirname not in dirs:dirs[:] = []continue # no sense walking uncontrolled subdirsdirs.remove(self.dirname)entries_fn = os.path.join(base, self.dirname, 'entries')if not os.path.exists(entries_fn):# FIXME: should we warn?continuedirurl, localrev = self._get_svn_url_rev(base)if base == location:base_url = dirurl + '/' # save the root urlelif not dirurl or not dirurl.startswith(base_url):dirs[:] = []continue # not part of the same svn tree, skip itrevision = max(revision, localrev)return revisiondef get_url_rev(self):# hotfix the URL scheme after removing svn+ from svn+ssh:// readd iturl, rev = super(Subversion, self).get_url_rev()if url.startswith('ssh://'):url = 'svn+' + urlreturn url, revdef get_url(self, location):# In cases where the source is in a subdirectory, not alongside# setup.py we have to look up in the 
location until we find a real# setup.pyorig_location = locationwhile not os.path.exists(os.path.join(location, 'setup.py')):last_location = locationlocation = os.path.dirname(location)if location == last_location:# We've traversed up to the root of the filesystem without# finding setup.pylogger.warning("Could not find setup.py for directory %s (tried all ""parent directories)",orig_location,)return Nonereturn self._get_svn_url_rev(location)[0]def _get_svn_url_rev(self, location):from pip.exceptions import InstallationErrorentries_path = os.path.join(location, self.dirname, 'entries')if os.path.exists(entries_path):with open(entries_path) as f:data = f.read()else: # subversion >= 1.7 does not have the 'entries' filedata = ''if (data.startswith('8') ordata.startswith('9') ordata.startswith('10')):data = list(map(str.splitlines, data.split('\n\x0c\n')))del data[0][0] # get rid of the '8'url = data[0][3]revs = [int(d[9]) for d in data if len(d) > 9 and d[9]] + [0]elif data.startswith('<?xml'):match = _svn_xml_url_re.search(data)if not match:raise ValueError('Badly formatted data: %r' % data)url = match.group(1) # get repository URLrevs = [int(m.group(1)) for m in _svn_rev_re.finditer(data)] + [0]else:try:# subversion >= 1.7xml = self.run_command(['info', '--xml', location],show_stdout=False,)url = _svn_info_xml_url_re.search(xml).group(1)revs = [int(m.group(1)) for m in _svn_info_xml_rev_re.finditer(xml)]except InstallationError:url, revs = None, []if revs:rev = max(revs)else:rev = 0return url, revdef get_src_requirement(self, dist, location):repo = self.get_url(location)if repo is None:return None# FIXME: why not project name?egg_project_name = dist.egg_name().split('-', 1)[0]rev = self.get_revision(location)return 'svn+%s@%s#egg=%s' % (repo, rev, egg_project_name)def check_version(self, dest, rev_options):"""Always assume the versions don't match"""return False@staticmethoddef remove_auth_from_url(url):# Return a copy of url with 'username:password@' removed.# 
username/pass params are passed to subversion through flags# and are not recognized in the url.# parsed urlpurl = urllib_parse.urlsplit(url)stripped_netloc = \purl.netloc.split('@')[-1]# stripped urlurl_pieces = (purl.scheme, stripped_netloc, purl.path, purl.query, purl.fragment)surl = urllib_parse.urlunsplit(url_pieces)return surldef get_rev_options(url, rev):if rev:rev_options = ['-r', rev]else:rev_options = []r = urllib_parse.urlsplit(url)if hasattr(r, 'username'):# >= Python-2.5username, password = r.username, r.passwordelse:netloc = r[1]if '@' in netloc:auth = netloc.split('@')[0]if ':' in auth:username, password = auth.split(':', 1)else:username, password = auth, Noneelse:username, password = None, Noneif username:rev_options += ['--username', username]if password:rev_options += ['--password', password]return rev_optionsvcs.register(Subversion)
from __future__ import absolute_importimport loggingimport osimport tempfilefrom pip.utils import display_path, rmtreefrom pip.vcs import vcs, VersionControlfrom pip.download import path_to_urlfrom pip._vendor.six.moves import configparserlogger = logging.getLogger(__name__)class Mercurial(VersionControl):name = 'hg'dirname = '.hg'repo_name = 'clone'schemes = ('hg', 'hg+http', 'hg+https', 'hg+ssh', 'hg+static-http')def export(self, location):"""Export the Hg repository at the url to the destination location"""temp_dir = tempfile.mkdtemp('-export', 'pip-')self.unpack(temp_dir)try:self.run_command(['archive', location], show_stdout=False, cwd=temp_dir)finally:rmtree(temp_dir)def switch(self, dest, url, rev_options):repo_config = os.path.join(dest, self.dirname, 'hgrc')config = configparser.SafeConfigParser()try:config.read(repo_config)config.set('paths', 'default', url)with open(repo_config, 'w') as config_file:config.write(config_file)except (OSError, configparser.NoSectionError) as exc:logger.warning('Could not switch Mercurial repository to %s: %s', url, exc,)else:self.run_command(['update', '-q'] + rev_options, cwd=dest)def update(self, dest, rev_options):self.run_command(['pull', '-q'], cwd=dest)self.run_command(['update', '-q'] + rev_options, cwd=dest)def obtain(self, dest):url, rev = self.get_url_rev()if rev:rev_options = [rev]rev_display = ' (to revision %s)' % revelse:rev_options = []rev_display = ''if self.check_destination(dest, url, rev_options, rev_display):logger.info('Cloning hg %s%s to %s',url,rev_display,display_path(dest),)self.run_command(['clone', '--noupdate', '-q', url, dest])self.run_command(['update', '-q'] + rev_options, cwd=dest)def get_url(self, location):url = self.run_command(['showconfig', 'paths.default'],show_stdout=False, cwd=location).strip()if self._is_local_repository(url):url = path_to_url(url)return url.strip()def get_revision(self, location):current_revision = self.run_command(['parents', '--template={rev}'],show_stdout=False, 
cwd=location).strip()return current_revisiondef get_revision_hash(self, location):current_rev_hash = self.run_command(['parents', '--template={node}'],show_stdout=False, cwd=location).strip()return current_rev_hashdef get_src_requirement(self, dist, location):repo = self.get_url(location)if not repo.lower().startswith('hg:'):repo = 'hg+' + repoegg_project_name = dist.egg_name().split('-', 1)[0]if not repo:return Nonecurrent_rev_hash = self.get_revision_hash(location)return '%s@%s#egg=%s' % (repo, current_rev_hash, egg_project_name)def check_version(self, dest, rev_options):"""Always assume the versions don't match"""return Falsevcs.register(Mercurial)
from __future__ import absolute_importimport loggingimport tempfileimport os.pathfrom pip.compat import samefilefrom pip.exceptions import BadCommandfrom pip._vendor.six.moves.urllib import parse as urllib_parsefrom pip._vendor.six.moves.urllib import request as urllib_requestfrom pip._vendor.packaging.version import parse as parse_versionfrom pip.utils import display_path, rmtreefrom pip.vcs import vcs, VersionControlurlsplit = urllib_parse.urlspliturlunsplit = urllib_parse.urlunsplitlogger = logging.getLogger(__name__)class Git(VersionControl):name = 'git'dirname = '.git'repo_name = 'clone'schemes = ('git', 'git+http', 'git+https', 'git+ssh', 'git+git', 'git+file',)def __init__(self, url=None, *args, **kwargs):# Works around an apparent Git bug# (see http://article.gmane.org/gmane.comp.version-control.git/146500)if url:scheme, netloc, path, query, fragment = urlsplit(url)if scheme.endswith('file'):initial_slashes = path[:-len(path.lstrip('/'))]newpath = (initial_slashes +urllib_request.url2pathname(path).replace('\\', '/').lstrip('/'))url = urlunsplit((scheme, netloc, newpath, query, fragment))after_plus = scheme.find('+') + 1url = scheme[:after_plus] + urlunsplit((scheme[after_plus:], netloc, newpath, query, fragment),)super(Git, self).__init__(url, *args, **kwargs)def get_git_version(self):VERSION_PFX = 'git version 'version = self.run_command(['version'], show_stdout=False)if version.startswith(VERSION_PFX):version = version[len(VERSION_PFX):]else:version = ''# get first 3 positions of the git version becasue# on windows it is x.y.z.windows.t, and this parses as# LegacyVersion which always smaller than a Version.version = '.'.join(version.split('.')[:3])return parse_version(version)def export(self, location):"""Export the Git repository at the url to the destination location"""temp_dir = tempfile.mkdtemp('-export', 'pip-')self.unpack(temp_dir)try:if not location.endswith('/'):location = location + '/'self.run_command(['checkout-index', '-a', '-f', '--prefix', 
location],show_stdout=False, cwd=temp_dir)finally:rmtree(temp_dir)def check_rev_options(self, rev, dest, rev_options):"""Check the revision options before checkout to compensate that tagsand branches may need origin/ as a prefix.Returns the SHA1 of the branch or tag if found."""revisions = self.get_short_refs(dest)origin_rev = 'origin/%s' % revif origin_rev in revisions:# remote branchreturn [revisions[origin_rev]]elif rev in revisions:# a local tag or branch namereturn [revisions[rev]]else:logger.warning("Could not find a tag or branch '%s', assuming commit.", rev,)return rev_optionsdef check_version(self, dest, rev_options):"""Compare the current sha to the ref. ref may be a branch or tag name,but current rev will always point to a sha. This means that a branchor tag will never compare as True. So this ultimately only matchesagainst exact shas."""return self.get_revision(dest).startswith(rev_options[0])def switch(self, dest, url, rev_options):self.run_command(['config', 'remote.origin.url', url], cwd=dest)self.run_command(['checkout', '-q'] + rev_options, cwd=dest)self.update_submodules(dest)def update(self, dest, rev_options):# First fetch changes from the default remoteif self.get_git_version() >= parse_version('1.9.0'):# fetch tags in addition to everything elseself.run_command(['fetch', '-q', '--tags'], cwd=dest)else:self.run_command(['fetch', '-q'], cwd=dest)# Then reset to wanted revision (maybe even origin/master)if rev_options:rev_options = self.check_rev_options(rev_options[0], dest, rev_options,)self.run_command(['reset', '--hard', '-q'] + rev_options, cwd=dest)#: update submodulesself.update_submodules(dest)def obtain(self, dest):url, rev = self.get_url_rev()if rev:rev_options = [rev]rev_display = ' (to %s)' % revelse:rev_options = ['origin/master']rev_display = ''if self.check_destination(dest, url, rev_options, rev_display):logger.info('Cloning %s%s to %s', url, rev_display, display_path(dest),)self.run_command(['clone', '-q', url, dest])if 
rev:rev_options = self.check_rev_options(rev, dest, rev_options)# Only do a checkout if rev_options differs from HEADif not self.check_version(dest, rev_options):self.run_command(['checkout', '-q'] + rev_options,cwd=dest,)#: repo may contain submodulesself.update_submodules(dest)def get_url(self, location):"""Return URL of the first remote encountered."""remotes = self.run_command(['config', '--get-regexp', 'remote\..*\.url'],show_stdout=False, cwd=location)remotes = remotes.splitlines()found_remote = remotes[0]for remote in remotes:if remote.startswith('remote.origin.url '):found_remote = remotebreakurl = found_remote.split(' ')[1]return url.strip()def get_revision(self, location):current_rev = self.run_command(['rev-parse', 'HEAD'], show_stdout=False, cwd=location)return current_rev.strip()def get_full_refs(self, location):"""Yields tuples of (commit, ref) for branches and tags"""output = self.run_command(['show-ref'],show_stdout=False, cwd=location)for line in output.strip().splitlines():commit, ref = line.split(' ', 1)yield commit.strip(), ref.strip()def is_ref_remote(self, ref):return ref.startswith('refs/remotes/')def is_ref_branch(self, ref):return ref.startswith('refs/heads/')def is_ref_tag(self, ref):return ref.startswith('refs/tags/')def is_ref_commit(self, ref):"""A ref is a commit sha if it is not anything else"""return not any((self.is_ref_remote(ref),self.is_ref_branch(ref),self.is_ref_tag(ref),))# Should deprecate `get_refs` since it's ambiguousdef get_refs(self, location):return self.get_short_refs(location)def get_short_refs(self, location):"""Return map of named refs (branches or tags) to commit hashes."""rv = {}for commit, ref in self.get_full_refs(location):ref_name = Noneif self.is_ref_remote(ref):ref_name = ref[len('refs/remotes/'):]elif self.is_ref_branch(ref):ref_name = ref[len('refs/heads/'):]elif self.is_ref_tag(ref):ref_name = ref[len('refs/tags/'):]if ref_name is not None:rv[ref_name] = commitreturn rvdef _get_subdirectory(self, 
location):"""Return the relative path of setup.py to the git repo root."""# find the repo rootgit_dir = self.run_command(['rev-parse', '--git-dir'],show_stdout=False, cwd=location).strip()if not os.path.isabs(git_dir):git_dir = os.path.join(location, git_dir)root_dir = os.path.join(git_dir, '..')# find setup.pyorig_location = locationwhile not os.path.exists(os.path.join(location, 'setup.py')):last_location = locationlocation = os.path.dirname(location)if location == last_location:# We've traversed up to the root of the filesystem without# finding setup.pylogger.warning("Could not find setup.py for directory %s (tried all ""parent directories)",orig_location,)return None# relative path of setup.py to repo rootif samefile(root_dir, location):return Nonereturn os.path.relpath(location, root_dir)def get_src_requirement(self, dist, location):repo = self.get_url(location)if not repo.lower().startswith('git:'):repo = 'git+' + repoegg_project_name = dist.egg_name().split('-', 1)[0]if not repo:return Nonecurrent_rev = self.get_revision(location)req = '%s@%s#egg=%s' % (repo, current_rev, egg_project_name)subdirectory = self._get_subdirectory(location)if subdirectory:req += '&subdirectory=' + subdirectoryreturn reqdef get_url_rev(self):"""Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'.That's required because although they use SSH they sometimes doesn'twork with a ssh:// scheme (e.g. Github). But we need a scheme forparsing. 
Hence we remove it again afterwards and return it as a stub."""if '://' not in self.url:assert 'file:' not in self.urlself.url = self.url.replace('git+', 'git+ssh://')url, rev = super(Git, self).get_url_rev()url = url.replace('ssh://', '')else:url, rev = super(Git, self).get_url_rev()return url, revdef update_submodules(self, location):if not os.path.exists(os.path.join(location, '.gitmodules')):returnself.run_command(['submodule', 'update', '--init', '--recursive', '-q'],cwd=location,)@classmethoddef controls_location(cls, location):if super(Git, cls).controls_location(location):return Truetry:r = cls().run_command(['rev-parse'],cwd=location,show_stdout=False,on_returncode='ignore')return not rexcept BadCommand:logger.debug("could not determine if %s is under git control ""because git is not available", location)return Falsevcs.register(Git)
from __future__ import absolute_importimport loggingimport osimport tempfile# TODO: Get this into six.moves.urllib.parsetry:from urllib import parse as urllib_parseexcept ImportError:import urlparse as urllib_parsefrom pip.utils import rmtree, display_pathfrom pip.vcs import vcs, VersionControlfrom pip.download import path_to_urllogger = logging.getLogger(__name__)class Bazaar(VersionControl):name = 'bzr'dirname = '.bzr'repo_name = 'branch'schemes = ('bzr', 'bzr+http', 'bzr+https', 'bzr+ssh', 'bzr+sftp', 'bzr+ftp','bzr+lp',)def __init__(self, url=None, *args, **kwargs):super(Bazaar, self).__init__(url, *args, **kwargs)# Python >= 2.7.4, 3.3 doesn't have uses_fragment or non_hierarchical# Register lp but do not expose as a scheme to support bzr+lp.if getattr(urllib_parse, 'uses_fragment', None):urllib_parse.uses_fragment.extend(['lp'])urllib_parse.non_hierarchical.extend(['lp'])def export(self, location):"""Export the Bazaar repository at the url to the destination location"""temp_dir = tempfile.mkdtemp('-export', 'pip-')self.unpack(temp_dir)if os.path.exists(location):# Remove the location to make sure Bazaar can export it correctlyrmtree(location)try:self.run_command(['export', location], cwd=temp_dir,show_stdout=False)finally:rmtree(temp_dir)def switch(self, dest, url, rev_options):self.run_command(['switch', url], cwd=dest)def update(self, dest, rev_options):self.run_command(['pull', '-q'] + rev_options, cwd=dest)def obtain(self, dest):url, rev = self.get_url_rev()if rev:rev_options = ['-r', rev]rev_display = ' (to revision %s)' % revelse:rev_options = []rev_display = ''if self.check_destination(dest, url, rev_options, rev_display):logger.info('Checking out %s%s to %s',url,rev_display,display_path(dest),)self.run_command(['branch', '-q'] + rev_options + [url, dest])def get_url_rev(self):# hotfix the URL scheme after removing bzr+ from bzr+ssh:// readd iturl, rev = super(Bazaar, self).get_url_rev()if url.startswith('ssh://'):url = 'bzr+' + urlreturn url, revdef 
get_url(self, location):urls = self.run_command(['info'], show_stdout=False, cwd=location)for line in urls.splitlines():line = line.strip()for x in ('checkout of branch: ','parent branch: '):if line.startswith(x):repo = line.split(x)[1]if self._is_local_repository(repo):return path_to_url(repo)return reporeturn Nonedef get_revision(self, location):revision = self.run_command(['revno'], show_stdout=False, cwd=location)return revision.splitlines()[-1]def get_src_requirement(self, dist, location):repo = self.get_url(location)if not repo:return Noneif not repo.lower().startswith('bzr:'):repo = 'bzr+' + repoegg_project_name = dist.egg_name().split('-', 1)[0]current_rev = self.get_revision(location)return '%s@%s#egg=%s' % (repo, current_rev, egg_project_name)def check_version(self, dest, rev_options):"""Always assume the versions don't match"""return Falsevcs.register(Bazaar)
"""Handles all VCS (version control) support"""from __future__ import absolute_importimport errnoimport loggingimport osimport shutilimport sysfrom pip._vendor.six.moves.urllib import parse as urllib_parsefrom pip.exceptions import BadCommandfrom pip.utils import (display_path, backup_dir, call_subprocess,rmtree, ask_path_exists)__all__ = ['vcs', 'get_src_requirement']logger = logging.getLogger(__name__)class VcsSupport(object):_registry = {}schemes = ['ssh', 'git', 'hg', 'bzr', 'sftp', 'svn']def __init__(self):# Register more schemes with urlparse for various version control# systemsurllib_parse.uses_netloc.extend(self.schemes)# Python >= 2.7.4, 3.3 doesn't have uses_fragmentif getattr(urllib_parse, 'uses_fragment', None):urllib_parse.uses_fragment.extend(self.schemes)super(VcsSupport, self).__init__()def __iter__(self):return self._registry.__iter__()@propertydef backends(self):return list(self._registry.values())@propertydef dirnames(self):return [backend.dirname for backend in self.backends]@propertydef all_schemes(self):schemes = []for backend in self.backends:schemes.extend(backend.schemes)return schemesdef register(self, cls):if not hasattr(cls, 'name'):logger.warning('Cannot register VCS %s', cls.__name__)returnif cls.name not in self._registry:self._registry[cls.name] = clslogger.debug('Registered VCS backend: %s', cls.name)def unregister(self, cls=None, name=None):if name in self._registry:del self._registry[name]elif cls in self._registry.values():del self._registry[cls.name]else:logger.warning('Cannot unregister because no class or name given')def get_backend_name(self, location):"""Return the name of the version control backend if found at givenlocation, e.g. 
vcs.get_backend_name('/path/to/vcs/checkout')"""for vc_type in self._registry.values():if vc_type.controls_location(location):logger.debug('Determine that %s uses VCS: %s',location, vc_type.name)return vc_type.namereturn Nonedef get_backend(self, name):name = name.lower()if name in self._registry:return self._registry[name]def get_backend_from_location(self, location):vc_type = self.get_backend_name(location)if vc_type:return self.get_backend(vc_type)return Nonevcs = VcsSupport()class VersionControl(object):name = ''dirname = ''# List of supported schemes for this Version Controlschemes = ()def __init__(self, url=None, *args, **kwargs):self.url = urlsuper(VersionControl, self).__init__(*args, **kwargs)def _is_local_repository(self, repo):"""posix absolute paths start with os.path.sep,win32 ones start with drive (like c:\\folder)"""drive, tail = os.path.splitdrive(repo)return repo.startswith(os.path.sep) or drive# See issue #1083 for why this method was introduced:# https://github.com/pypa/pip/issues/1083def translate_egg_surname(self, surname):# For example, Django has branches of the form "stable/1.7.x".return surname.replace('/', '_')def export(self, location):"""Export the repository at the url to the destination locationi.e. only download the files, without vcs informations"""raise NotImplementedErrordef get_url_rev(self):"""Returns the correct repository URL and revision by parsing the givenrepository URL"""error_message = ("Sorry, '%s' is a malformed VCS url. ""The format is <vcs>+<protocol>://<url>, ""e.g. 
svn+http://myrepo/svn/MyApp#egg=MyApp")assert '+' in self.url, error_message % self.urlurl = self.url.split('+', 1)[1]scheme, netloc, path, query, frag = urllib_parse.urlsplit(url)rev = Noneif '@' in path:path, rev = path.rsplit('@', 1)url = urllib_parse.urlunsplit((scheme, netloc, path, query, ''))return url, revdef get_info(self, location):"""Returns (url, revision), where both are strings"""assert not location.rstrip('/').endswith(self.dirname), \'Bad directory: %s' % locationreturn self.get_url(location), self.get_revision(location)def normalize_url(self, url):"""Normalize a URL for comparison by unquoting it and removing anytrailing slash."""return urllib_parse.unquote(url).rstrip('/')def compare_urls(self, url1, url2):"""Compare two repo URLs for identity, ignoring incidental differences."""return (self.normalize_url(url1) == self.normalize_url(url2))def obtain(self, dest):"""Called when installing or updating an editable package, takes thesource path of the checkout."""raise NotImplementedErrordef switch(self, dest, url, rev_options):"""Switch the repo at ``dest`` to point to ``URL``."""raise NotImplementedErrordef update(self, dest, rev_options):"""Update an already-existing repo to the given ``rev_options``."""raise NotImplementedErrordef check_version(self, dest, rev_options):"""Return True if the version is identical to what exists anddoesn't need to be updated."""raise NotImplementedErrordef check_destination(self, dest, url, rev_options, rev_display):"""Prepare a location to receive a checkout/clone.Return True if the location is ready for (and requires) acheckout/clone, False otherwise."""checkout = Trueprompt = Falseif os.path.exists(dest):checkout = Falseif os.path.exists(os.path.join(dest, self.dirname)):existing_url = self.get_url(dest)if self.compare_urls(existing_url, url):logger.debug('%s in %s exists, and has correct URL (%s)',self.repo_name.title(),display_path(dest),url,)if not self.check_version(dest, rev_options):logger.info('Updating %s 
%s%s',display_path(dest),self.repo_name,rev_display,)self.update(dest, rev_options)else:logger.info('Skipping because already up-to-date.')else:logger.warning('%s %s in %s exists with URL %s',self.name,self.repo_name,display_path(dest),existing_url,)prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ',('s', 'i', 'w', 'b'))else:logger.warning('Directory %s already exists, and is not a %s %s.',dest,self.name,self.repo_name,)prompt = ('(i)gnore, (w)ipe, (b)ackup ', ('i', 'w', 'b'))if prompt:logger.warning('The plan is to install the %s repository %s',self.name,url,)response = ask_path_exists('What to do? %s' % prompt[0],prompt[1])if response == 's':logger.info('Switching %s %s to %s%s',self.repo_name,display_path(dest),url,rev_display,)self.switch(dest, url, rev_options)elif response == 'i':# do nothingpasselif response == 'w':logger.warning('Deleting %s', display_path(dest))rmtree(dest)checkout = Trueelif response == 'b':dest_dir = backup_dir(dest)logger.warning('Backing up %s to %s', display_path(dest), dest_dir,)shutil.move(dest, dest_dir)checkout = Trueelif response == 'a':sys.exit(-1)return checkoutdef unpack(self, location):"""Clean up current location and download the url repository(and vcs infos) into location"""if os.path.exists(location):rmtree(location)self.obtain(location)def get_src_requirement(self, dist, location):"""Return a string representing the requirement needed toredownload the files currently present in location, somethinglike:{repository_url}@{revision}#egg={project_name}-{version_identifier}"""raise NotImplementedErrordef get_url(self, location):"""Return the url used at locationUsed in get_info or check_destination"""raise NotImplementedErrordef get_revision(self, location):"""Return the current revision of the files at locationUsed in get_info"""raise NotImplementedErrordef run_command(self, cmd, show_stdout=True, cwd=None,on_returncode='raise',command_desc=None,extra_environ=None, spinner=None):"""Run a VCS subcommandThis is simply a wrapper 
around call_subprocess that adds the VCScommand name, and checks that the VCS is available"""cmd = [self.name] + cmdtry:return call_subprocess(cmd, show_stdout, cwd,on_returncode,command_desc, extra_environ,spinner)except OSError as e:# errno.ENOENT = no such file or directory# In other words, the VCS executable isn't availableif e.errno == errno.ENOENT:raise BadCommand('Cannot find command %r' % self.name)else:raise # re-raise exception if a different error occurred@classmethoddef controls_location(cls, location):"""Check if a location is controlled by the vcs.It is meant to be overridden to implement smarter detectionmechanisms for specific vcs."""logger.debug('Checking in %s for %s (%s)...',location, cls.dirname, cls.name)path = os.path.join(location, cls.dirname)return os.path.exists(path)def get_src_requirement(dist, location):version_control = vcs.get_backend_from_location(location)if version_control:try:return version_control().get_src_requirement(dist,location)except BadCommand:logger.warning('cannot determine version of editable source in %s ''(%s command not found in path)',location,version_control.name,)return dist.as_requirement()logger.warning('cannot determine version of editable source in %s (is not SVN ''checkout, Git clone, Mercurial clone or Bazaar branch)',location,)return dist.as_requirement()
"""Progress bars and spinners for pip's console output.

Builds download progress bars/spinners on top of the vendored ``progress``
library, with mixins that handle SIGINT restoration, Windows/colorama
quirks, and pip's log indentation.
"""
from __future__ import absolute_import
from __future__ import division

import itertools
import sys
from signal import signal, SIGINT, default_int_handler
import time
import contextlib
import logging

from pip.compat import WINDOWS
from pip.utils import format_size
from pip.utils.logging import get_indentation
from pip._vendor import six
from pip._vendor.progress.bar import Bar, IncrementalBar
from pip._vendor.progress.helpers import (WritelnMixin,
                                          HIDE_CURSOR, SHOW_CURSOR)
from pip._vendor.progress.spinner import Spinner

try:
    from pip._vendor import colorama
# Lots of different errors can come from this, including SystemError and
# ImportError.
except Exception:
    colorama = None


logger = logging.getLogger(__name__)


def _select_progress_class(preferred, fallback):
    """Return *preferred* if its bar characters are representable in the
    encoding of ``preferred.file``, otherwise return *fallback*.
    """
    encoding = getattr(preferred.file, "encoding", None)

    # If we don't know what encoding this file is in, then we'll just assume
    # that it doesn't support unicode and use the ASCII bar.
    if not encoding:
        return fallback

    # Collect all of the possible characters we want to use with the preferred
    # bar.
    characters = [
        getattr(preferred, "empty_fill", six.text_type()),
        getattr(preferred, "fill", six.text_type()),
    ]
    characters += list(getattr(preferred, "phases", []))

    # Try to decode the characters we're using for the bar using the encoding
    # of the given file, if this works then we'll assume that we can use the
    # fancier bar and if not we'll fall back to the plaintext bar.
    try:
        six.text_type().join(characters).encode(encoding)
    except UnicodeEncodeError:
        return fallback
    else:
        return preferred


# Fancy incremental bar when the output encoding can render it, plain ASCII
# bar otherwise.
_BaseBar = _select_progress_class(IncrementalBar, Bar)


class InterruptibleMixin(object):
    """
    Helper to ensure that self.finish() gets called on keyboard interrupt.

    This allows downloads to be interrupted without leaving temporary state
    (like hidden cursors) behind.

    This class is similar to the progress library's existing SigIntMixin
    helper, but as of version 1.2, that helper has the following problems:

    1. It calls sys.exit().
    2. It discards the existing SIGINT handler completely.
    3. It leaves its own handler in place even after an uninterrupted finish,
       which will have unexpected delayed effects if the user triggers an
       unrelated keyboard interrupt some time after a progress-displaying
       download has already completed, for example.
    """

    def __init__(self, *args, **kwargs):
        """
        Save the original SIGINT handler for later.
        """
        super(InterruptibleMixin, self).__init__(*args, **kwargs)

        self.original_handler = signal(SIGINT, self.handle_sigint)

        # If signal() returns None, the previous handler was not installed from
        # Python, and we cannot restore it. This probably should not happen,
        # but if it does, we must restore something sensible instead, at least.
        # The least bad option should be Python's default SIGINT handler, which
        # just raises KeyboardInterrupt.
        if self.original_handler is None:
            self.original_handler = default_int_handler

    def finish(self):
        """
        Restore the original SIGINT handler after finishing.

        This should happen regardless of whether the progress display finishes
        normally, or gets interrupted.
        """
        super(InterruptibleMixin, self).finish()
        signal(SIGINT, self.original_handler)

    def handle_sigint(self, signum, frame):
        """
        Call self.finish() before delegating to the original SIGINT handler.

        This handler should only be in place while the progress display is
        active.
        """
        self.finish()
        self.original_handler(signum, frame)


class DownloadProgressMixin(object):
    """Adds download-oriented fields (size, speed, ETA) to a progress
    display, formatted via the ``%(downloaded)s`` style template keys.
    """

    def __init__(self, *args, **kwargs):
        super(DownloadProgressMixin, self).__init__(*args, **kwargs)
        # Indent the message to line up with pip's indented log output.
        self.message = (" " * (get_indentation() + 2)) + self.message

    @property
    def downloaded(self):
        # self.index is the byte count consumed so far.
        return format_size(self.index)

    @property
    def download_speed(self):
        # Avoid zero division errors...
        if self.avg == 0.0:
            return "..."
        return format_size(1 / self.avg) + "/s"

    @property
    def pretty_eta(self):
        if self.eta:
            return "eta %s" % self.eta_td
        return ""

    def iter(self, it, n=1):
        # Wrap an iterable so each yielded item advances the display by n.
        for x in it:
            yield x
            self.next(n)
        self.finish()


class WindowsMixin(object):
    """Work around Windows console limitations (no cursor-hiding ANSI codes;
    colorama wrapping for ANSI color support).
    """

    def __init__(self, *args, **kwargs):
        # The Windows terminal does not support the hide/show cursor ANSI codes
        # even with colorama. So we'll ensure that hide_cursor is False on
        # Windows.
        # This call needs to go before the super() call, so that hide_cursor
        # is set in time. The base progress bar class writes the "hide cursor"
        # code to the terminal in its init, so if we don't set this soon
        # enough, we get a "hide" with no corresponding "show"...
        if WINDOWS and self.hide_cursor:
            self.hide_cursor = False

        super(WindowsMixin, self).__init__(*args, **kwargs)

        # Check if we are running on Windows and we have the colorama module,
        # if we do then wrap our file with it.
        if WINDOWS and colorama:
            self.file = colorama.AnsiToWin32(self.file)
            # The progress code expects to be able to call self.file.isatty()
            # but the colorama.AnsiToWin32() object doesn't have that, so we'll
            # add it.
            self.file.isatty = lambda: self.file.wrapped.isatty()
            # The progress code expects to be able to call self.file.flush()
            # but the colorama.AnsiToWin32() object doesn't have that, so we'll
            # add it.
            self.file.flush = lambda: self.file.wrapped.flush()


class DownloadProgressBar(WindowsMixin, InterruptibleMixin,
                          DownloadProgressMixin, _BaseBar):
    """Incremental download bar: percentage plus size/speed/ETA suffix."""

    file = sys.stdout
    message = "%(percent)d%%"
    suffix = "%(downloaded)s %(download_speed)s %(pretty_eta)s"


class DownloadProgressSpinner(WindowsMixin, InterruptibleMixin,
                              DownloadProgressMixin, WritelnMixin, Spinner):
    """Spinner used when the total download size is unknown."""

    file = sys.stdout
    suffix = "%(downloaded)s %(download_speed)s"

    def next_phase(self):
        # Lazily create the phase cycler on first use.
        if not hasattr(self, "_phaser"):
            self._phaser = itertools.cycle(self.phases)
        return next(self._phaser)

    def update(self):
        message = self.message % self
        phase = self.next_phase()
        suffix = self.suffix % self
        line = ''.join([
            message,
            " " if message else "",
            phase,
            " " if suffix else "",
            suffix,
        ])

        self.writeln(line)


################################################################
# Generic "something is happening" spinners
#
# We don't even try using progress.spinner.Spinner here because it's actually
# simpler to reimplement from scratch than to coerce their code into doing
# what we need.
################################################################

@contextlib.contextmanager
def hidden_cursor(file):
    """Context manager that hides the terminal cursor on *file* for the
    duration of the block, when it is safe and useful to do so.
    """
    # The Windows terminal does not support the hide/show cursor ANSI codes,
    # even via colorama. So don't even try.
    if WINDOWS:
        yield
    # We don't want to clutter the output with control characters if we're
    # writing to a file, or if the user is running with --quiet.
    # See https://github.com/pypa/pip/issues/3418
    elif not file.isatty() or logger.getEffectiveLevel() > logging.INFO:
        yield
    else:
        file.write(HIDE_CURSOR)
        try:
            yield
        finally:
            file.write(SHOW_CURSOR)


class RateLimiter(object):
    """Simple wall-clock throttle: ready() is True once at least the
    configured interval has elapsed since the last reset().
    """

    def __init__(self, min_update_interval_seconds):
        self._min_update_interval_seconds = min_update_interval_seconds
        self._last_update = 0

    def ready(self):
        now = time.time()
        delta = now - self._last_update
        return delta >= self._min_update_interval_seconds

    def reset(self):
        self._last_update = time.time()


class InteractiveSpinner(object):
    """Spinner for interactive (tty) output: rewrites the status in place
    using backspaces, throttled by a RateLimiter.
    """

    def __init__(self, message, file=None, spin_chars="-\\|/",
                 # Empirically, 8 updates/second looks nice
                 min_update_interval_seconds=0.125):
        self._message = message
        if file is None:
            file = sys.stdout
        self._file = file
        self._rate_limiter = RateLimiter(min_update_interval_seconds)
        self._finished = False

        self._spin_cycle = itertools.cycle(spin_chars)

        self._file.write(" " * get_indentation() + self._message + " ... ")
        # Width of the last status written, so it can be erased in _write().
        self._width = 0

    def _write(self, status):
        assert not self._finished
        # Erase what we wrote before by backspacing to the beginning, writing
        # spaces to overwrite the old text, and then backspacing again
        backup = "\b" * self._width
        self._file.write(backup + " " * self._width + backup)
        # Now we have a blank slate to add our status
        self._file.write(status)
        self._width = len(status)
        self._file.flush()
        self._rate_limiter.reset()

    def spin(self):
        if self._finished:
            return
        if not self._rate_limiter.ready():
            return
        self._write(next(self._spin_cycle))

    def finish(self, final_status):
        if self._finished:
            return
        self._write(final_status)
        self._file.write("\n")
        self._file.flush()
        self._finished = True


# Used for dumb terminals, non-interactive installs (no tty), etc.
# We still print updates occasionally (once every 60 seconds by default) to
# act as a keep-alive for systems like Travis-CI that take lack-of-output as
# an indication that a task has frozen.
class NonInteractiveSpinner(object):
    def __init__(self, message, min_update_interval_seconds=60):
        self._message = message
        self._finished = False
        self._rate_limiter = RateLimiter(min_update_interval_seconds)
        self._update("started")

    def _update(self, status):
        assert not self._finished
        self._rate_limiter.reset()
        logger.info("%s: %s", self._message, status)

    def spin(self):
        if self._finished:
            return
        if not self._rate_limiter.ready():
            return
        self._update("still running...")

    def finish(self, final_status):
        if self._finished:
            return
        self._update("finished with status '%s'" % (final_status,))
        self._finished = True


@contextlib.contextmanager
def open_spinner(message):
    """Yield a spinner appropriate for the current output (interactive or
    logged), and finish it with "done"/"error"/"canceled" on exit.
    """
    # Interactive spinner goes directly to sys.stdout rather than being routed
    # through the logging system, but it acts like it has level INFO,
    # i.e. it's only displayed if we're at level INFO or better.
    # Non-interactive spinner goes through the logging system, so it is always
    # in sync with logging configuration.
    if sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO:
        spinner = InteractiveSpinner(message)
    else:
        spinner = NonInteractiveSpinner(message)
    try:
        with hidden_cursor(sys.stdout):
            yield spinner
    except KeyboardInterrupt:
        spinner.finish("canceled")
        raise
    except Exception:
        spinner.finish("error")
        raise
    else:
        spinner.finish("done")
# Shim to wrap setup.py invocation with setuptoolsSETUPTOOLS_SHIM = ("import setuptools, tokenize;__file__=%r;""f=getattr(tokenize, 'open', open)(__file__);""code=f.read().replace('\\r\\n', '\\n');""f.close();""exec(compile(code, __file__, 'exec'))")
from __future__ import absolute_importfrom email.parser import FeedParserimport loggingimport sysfrom pip._vendor.packaging import specifiersfrom pip._vendor.packaging import versionfrom pip._vendor import pkg_resourcesfrom pip import exceptionslogger = logging.getLogger(__name__)def check_requires_python(requires_python):"""Check if the python version in use match the `requires_python` specifier.Returns `True` if the version of python in use matches the requirement.Returns `False` if the version of python in use does not matches therequirement.Raises an InvalidSpecifier if `requires_python` have an invalid format."""if requires_python is None:# The package provides no informationreturn Truerequires_python_specifier = specifiers.SpecifierSet(requires_python)# We only use major.minor.micropython_version = version.parse('.'.join(map(str, sys.version_info[:3])))return python_version in requires_python_specifierdef get_metadata(dist):if (isinstance(dist, pkg_resources.DistInfoDistribution) anddist.has_metadata('METADATA')):return dist.get_metadata('METADATA')elif dist.has_metadata('PKG-INFO'):return dist.get_metadata('PKG-INFO')def check_dist_requires_python(dist):metadata = get_metadata(dist)feed_parser = FeedParser()feed_parser.feed(metadata)pkg_info_dict = feed_parser.close()requires_python = pkg_info_dict.get('Requires-Python')try:if not check_requires_python(requires_python):raise exceptions.UnsupportedPythonVersion("%s requires Python '%s' but the running Python is %s" % (dist.project_name,requires_python,'.'.join(map(str, sys.version_info[:3])),))except specifiers.InvalidSpecifier as e:logger.warning("Package %s has an invalid Requires-Python entry %s - %s" % (dist.project_name, requires_python, e))return
"""Self-check support: periodically ask PyPI whether a newer pip exists."""
from __future__ import absolute_import

import datetime
import json
import logging
import os.path
import sys

from pip._vendor import lockfile
from pip._vendor.packaging import version as packaging_version

from pip.compat import total_seconds, WINDOWS
from pip.models import PyPI
from pip.locations import USER_CACHE_DIR, running_under_virtualenv
from pip.utils import ensure_dir, get_installed_version
from pip.utils.filesystem import check_path_owner


SELFCHECK_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ"


logger = logging.getLogger(__name__)


class VirtualenvSelfCheckState(object):
    """Self-check state stored as pip-selfcheck.json inside the virtualenv
    prefix; one flat {"last_check", "pypi_version"} dict per environment.
    """

    def __init__(self):
        self.statefile_path = os.path.join(sys.prefix, "pip-selfcheck.json")

        # Load the existing state
        try:
            with open(self.statefile_path) as statefile:
                self.state = json.load(statefile)
        except (IOError, ValueError):
            # Missing or corrupt state file: start fresh.
            self.state = {}

    def save(self, pypi_version, current_time):
        # Attempt to write out our version check file
        with open(self.statefile_path, "w") as statefile:
            json.dump(
                {
                    "last_check": current_time.strftime(SELFCHECK_DATE_FMT),
                    "pypi_version": pypi_version,
                },
                statefile,
                sort_keys=True,
                separators=(",", ":")
            )


class GlobalSelfCheckState(object):
    """Self-check state stored in the shared user cache file selfcheck.json,
    keyed by sys.prefix so multiple installations can share one file.
    """

    def __init__(self):
        self.statefile_path = os.path.join(USER_CACHE_DIR, "selfcheck.json")

        # Load the existing state
        try:
            with open(self.statefile_path) as statefile:
                self.state = json.load(statefile)[sys.prefix]
        except (IOError, ValueError, KeyError):
            # Missing file, corrupt JSON, or no entry for this prefix.
            self.state = {}

    def save(self, pypi_version, current_time):
        # Check to make sure that we own the directory
        if not check_path_owner(os.path.dirname(self.statefile_path)):
            return

        # Now that we've ensured the directory is owned by this user, we'll go
        # ahead and make sure that all our directories are created.
        ensure_dir(os.path.dirname(self.statefile_path))

        # Attempt to write out our version check file, under a lock since
        # other pip processes may be updating the same shared file.
        with lockfile.LockFile(self.statefile_path):
            # Re-read inside the lock so entries for other prefixes written
            # meanwhile are preserved.
            if os.path.exists(self.statefile_path):
                with open(self.statefile_path) as statefile:
                    state = json.load(statefile)
            else:
                state = {}

            state[sys.prefix] = {
                "last_check": current_time.strftime(SELFCHECK_DATE_FMT),
                "pypi_version": pypi_version,
            }

            with open(self.statefile_path, "w") as statefile:
                json.dump(state, statefile, sort_keys=True,
                          separators=(",", ":"))


def load_selfcheck_statefile():
    """Return the state store appropriate for this environment."""
    if running_under_virtualenv():
        return VirtualenvSelfCheckState()
    else:
        return GlobalSelfCheckState()


def pip_version_check(session):
    """Check for an update for pip.

    Limit the frequency of checks to once per week. State is stored either in
    the active virtualenv or in the user's USER_CACHE_DIR keyed off the prefix
    of the pip script path.
    """
    installed_version = get_installed_version("pip")
    if installed_version is None:
        return

    pip_version = packaging_version.parse(installed_version)
    pypi_version = None

    try:
        state = load_selfcheck_statefile()

        current_time = datetime.datetime.utcnow()
        # Determine if we need to refresh the state
        if "last_check" in state.state and "pypi_version" in state.state:
            last_check = datetime.datetime.strptime(
                state.state["last_check"],
                SELFCHECK_DATE_FMT
            )
            # Reuse the cached answer if it is younger than one week.
            if total_seconds(current_time - last_check) < 7 * 24 * 60 * 60:
                pypi_version = state.state["pypi_version"]

        # Refresh the version if we need to or just see if we need to warn
        if pypi_version is None:
            resp = session.get(
                PyPI.pip_json_url,
                headers={"Accept": "application/json"},
            )
            resp.raise_for_status()
            # Latest non-prerelease version, by version ordering.
            pypi_version = [
                v for v in sorted(
                    list(resp.json()["releases"]),
                    key=packaging_version.parse,
                )
                if not packaging_version.parse(v).is_prerelease
            ][-1]

            # save that we've performed a check
            state.save(pypi_version, current_time)

        remote_version = packaging_version.parse(pypi_version)

        # Determine if our pypi_version is older
        if (pip_version < remote_version and
                pip_version.base_version != remote_version.base_version):
            # Advise "python -m pip" on Windows to avoid issues
            # with overwriting pip.exe.
            if WINDOWS:
                pip_cmd = "python -m pip"
            else:
                pip_cmd = "pip"
            logger.warning(
                "You are using pip version %s, however version %s is "
                "available.\nYou should consider upgrading via the "
                "'%s install --upgrade pip' command.",
                pip_version, pypi_version, pip_cmd
            )
    except Exception:
        # Best-effort check: never let the self-check break a pip run.
        logger.debug(
            "There was an error checking the latest version of pip",
            exc_info=True,
        )
"""Logging helpers: indentation-aware formatting, colorized console output,
and small filter/handler utilities used by pip's logging configuration.
"""
from __future__ import absolute_import

import contextlib
import logging
import logging.handlers
import os

try:
    import threading
except ImportError:
    import dummy_threading as threading

from pip.compat import WINDOWS
from pip.utils import ensure_dir

try:
    from pip._vendor import colorama
# Lots of different errors can come from this, including SystemError and
# ImportError.
except Exception:
    colorama = None


# Per-thread indentation level; NOTE(review): only the importing thread gets
# the explicit 0 here — other threads rely on the getattr default in
# get_indentation().
_log_state = threading.local()
_log_state.indentation = 0


@contextlib.contextmanager
def indent_log(num=2):
    """
    A context manager which will cause the log output to be indented for any
    log messages emitted inside it.
    """
    _log_state.indentation += num
    try:
        yield
    finally:
        _log_state.indentation -= num


def get_indentation():
    """Return the current thread's indentation level (0 when unset)."""
    return getattr(_log_state, 'indentation', 0)


class IndentingFormatter(logging.Formatter):

    def format(self, record):
        """
        Calls the standard formatter, but will indent all of the log messages
        by our current indentation level.
        """
        formatted = logging.Formatter.format(self, record)
        # Prefix every line (splitlines(True) keeps the newlines intact).
        formatted = "".join([
            (" " * get_indentation()) + line
            for line in formatted.splitlines(True)
        ])
        return formatted


def _color_wrap(*colors):
    """Return a function that wraps its input in *colors* + a style reset."""
    def wrapped(inp):
        return "".join(list(colors) + [inp, colorama.Style.RESET_ALL])
    return wrapped


class ColorizedStreamHandler(logging.StreamHandler):
    """StreamHandler that colors WARNING/ERROR records when supported."""

    # Don't build up a list of colors if we don't have colorama
    if colorama:
        COLORS = [
            # This needs to be in order from highest logging level to lowest.
            (logging.ERROR, _color_wrap(colorama.Fore.RED)),
            (logging.WARNING, _color_wrap(colorama.Fore.YELLOW)),
        ]
    else:
        COLORS = []

    def __init__(self, stream=None):
        logging.StreamHandler.__init__(self, stream)

        # On Windows, route ANSI codes through colorama's converter.
        if WINDOWS and colorama:
            self.stream = colorama.AnsiToWin32(self.stream)

    def should_color(self):
        # Don't colorize things if we do not have colorama
        if not colorama:
            return False

        # Unwrap AnsiToWin32 so we inspect the underlying stream.
        real_stream = (
            self.stream if not isinstance(self.stream, colorama.AnsiToWin32)
            else self.stream.wrapped
        )

        # If the stream is a tty we should color it
        if hasattr(real_stream, "isatty") and real_stream.isatty():
            return True

        # If we have an ANSI term we should color it
        if os.environ.get("TERM") == "ANSI":
            return True

        # If anything else we should not color it
        return False

    def format(self, record):
        msg = logging.StreamHandler.format(self, record)

        if self.should_color():
            # Apply the first (highest) color whose level the record meets.
            for level, color in self.COLORS:
                if record.levelno >= level:
                    msg = color(msg)
                    break

        return msg


class BetterRotatingFileHandler(logging.handlers.RotatingFileHandler):
    """RotatingFileHandler that creates the log directory on demand."""

    def _open(self):
        ensure_dir(os.path.dirname(self.baseFilename))
        return logging.handlers.RotatingFileHandler._open(self)


class MaxLevelFilter(logging.Filter):
    """Pass only records strictly below a given level (e.g. to keep
    INFO-and-below on stdout while errors go elsewhere).
    """

    def __init__(self, level):
        self.level = level

    def filter(self, record):
        return record.levelno < self.level
"""Hash-checking support for pip's --hash / ``pip hash`` features."""
from __future__ import absolute_import

import hashlib

from pip.exceptions import HashMismatch, HashMissing, InstallationError
from pip.utils import read_chunks
from pip._vendor.six import iteritems, iterkeys, itervalues


# The recommended hash algo of the moment. Change this whenever the state of
# the art changes; it won't hurt backward compatibility.
FAVORITE_HASH = 'sha256'


# Names of hashlib algorithms allowed by the --hash option and ``pip hash``
# Currently, those are the ones at least as collision-resistant as sha256.
STRONG_HASHES = ['sha256', 'sha384', 'sha512']


class Hashes(object):
    """A wrapper that builds multiple hashes at once and checks them against
    known-good values

    """
    def __init__(self, hashes=None):
        """
        :param hashes: A dict of algorithm names pointing to lists of allowed
            hex digests
        """
        self._allowed = {} if hashes is None else hashes

    def check_against_chunks(self, chunks):
        """Check good hashes against ones built from iterable of chunks of
        data.

        Raise HashMismatch if none match.

        """
        gots = {}
        for hash_name in iterkeys(self._allowed):
            try:
                gots[hash_name] = hashlib.new(hash_name)
            except (ValueError, TypeError):
                raise InstallationError('Unknown hash name: %s' % hash_name)

        # Feed every chunk to every live hasher in one pass over the data.
        # NOTE(review): the loop variable shadows the builtin ``hash``.
        for chunk in chunks:
            for hash in itervalues(gots):
                hash.update(chunk)

        # A single matching digest for any algorithm is sufficient.
        for hash_name, got in iteritems(gots):
            if got.hexdigest() in self._allowed[hash_name]:
                return
        self._raise(gots)

    def _raise(self, gots):
        raise HashMismatch(self._allowed, gots)

    def check_against_file(self, file):
        """Check good hashes against a file-like object

        Raise HashMismatch if none match.

        """
        return self.check_against_chunks(read_chunks(file))

    def check_against_path(self, path):
        # Convenience wrapper: open the path in binary mode and check it.
        with open(path, 'rb') as file:
            return self.check_against_file(file)

    def __nonzero__(self):
        """Return whether I know any known-good hashes."""
        return bool(self._allowed)

    def __bool__(self):
        # Python 3 spelling of __nonzero__.
        return self.__nonzero__()


class MissingHashes(Hashes):
    """A workalike for Hashes used when we're missing a hash for a requirement

    It computes the actual hash of the requirement and raises a HashMissing
    exception showing it to the user.

    """
    def __init__(self):
        """Don't offer the ``hashes`` kwarg."""
        # Pass our favorite hash in to generate a "gotten hash". With the
        # empty list, it will never match, so an error will always raise.
        super(MissingHashes, self).__init__(hashes={FAVORITE_HASH: []})

    def _raise(self, gots):
        raise HashMissing(gots[FAVORITE_HASH].hexdigest())
from __future__ import absolute_importimport reimport ctypesimport platformimport warningsdef glibc_version_string():"Returns glibc version string, or None if not using glibc."# ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen# manpage says, "If filename is NULL, then the returned handle is for the# main program". This way we can let the linker do the work to figure out# which libc our process is actually using.process_namespace = ctypes.CDLL(None)try:gnu_get_libc_version = process_namespace.gnu_get_libc_versionexcept AttributeError:# Symbol doesn't exist -> therefore, we are not linked to# glibc.return None# Call gnu_get_libc_version, which returns a string like "2.5"gnu_get_libc_version.restype = ctypes.c_char_pversion_str = gnu_get_libc_version()# py2 / py3 compatibility:if not isinstance(version_str, str):version_str = version_str.decode("ascii")return version_str# Separated out from have_compatible_glibc for easier unit testingdef check_glibc_version(version_str, required_major, minimum_minor):# Parse string and check against requested version.## We use a regexp instead of str.split because we want to discard any# random junk that might come after the minor version -- this might happen# in patched/forked versions of glibc (e.g. Linaro's version of glibc# uses version strings like "2.20-2014.11"). See gh-3588.m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)if not m:warnings.warn("Expected glibc version with 2 components major.minor,"" got: %s" % version_str, RuntimeWarning)return Falsereturn (int(m.group("major")) == required_major andint(m.group("minor")) >= minimum_minor)def have_compatible_glibc(required_major, minimum_minor):version_str = glibc_version_string()if version_str is None:return Falsereturn check_glibc_version(version_str, required_major, minimum_minor)# platform.libc_ver regularly returns completely nonsensical glibc# versions. E.g. 
on my computer, platform says:## ~$ python2.7 -c 'import platform; print(platform.libc_ver())'# ('glibc', '2.7')# ~$ python3.5 -c 'import platform; print(platform.libc_ver())'# ('glibc', '2.9')## But the truth is:## ~$ ldd --version# ldd (Debian GLIBC 2.22-11) 2.22## This is unfortunate, because it means that the linehaul data on libc# versions that was generated by pip 8.1.2 and earlier is useless and# misleading. Solution: instead of using platform, use our code that actually# works.def libc_ver():glibc_version = glibc_version_string()if glibc_version is None:# For non-glibc platforms, fall back on platform.libc_verreturn platform.libc_ver()else:return ("glibc", glibc_version)
import os
import os.path

from pip.compat import get_path_uid


def check_path_owner(path):
    """Return True when the current effective user can be considered the
    owner of *path* (walking up to the nearest existing ancestor).
    """
    # Without os.geteuid() (e.g. no effective-uid concept on this platform)
    # there is no way to check, so just assume that we own the directory.
    if not hasattr(os, "geteuid"):
        return True

    previous_component = None
    while path != previous_component:
        if not os.path.lexists(path):
            # Component doesn't exist yet; climb to its parent and retry.
            previous_component, path = path, os.path.dirname(path)
            continue

        if os.geteuid() != 0:
            # Check if path is writable by current user.
            return os.access(path, os.W_OK)

        # Special handling for root user in order to handle properly
        # cases where users use sudo without -H flag.
        try:
            return get_path_uid(path) == 0
        except OSError:
            return False
import codecsimport localeimport reBOMS = [(codecs.BOM_UTF8, 'utf8'),(codecs.BOM_UTF16, 'utf16'),(codecs.BOM_UTF16_BE, 'utf16-be'),(codecs.BOM_UTF16_LE, 'utf16-le'),(codecs.BOM_UTF32, 'utf32'),(codecs.BOM_UTF32_BE, 'utf32-be'),(codecs.BOM_UTF32_LE, 'utf32-le'),]ENCODING_RE = re.compile(b'coding[:=]\s*([-\w.]+)')def auto_decode(data):"""Check a bytes string for a BOM to correctly detect the encodingFallback to locale.getpreferredencoding(False) like open() on Python3"""for bom, encoding in BOMS:if data.startswith(bom):return data[len(bom):].decode(encoding)# Lets check the first two lines as in PEP263for line in data.split(b'\n')[:2]:if line[0:1] == b'#' and ENCODING_RE.search(line):encoding = ENCODING_RE.search(line).groups()[0].decode('ascii')return data.decode(encoding)return data.decode(locale.getpreferredencoding(False))
"""A module that implements tooling to enable easy warnings about deprecations."""from __future__ import absolute_importimport loggingimport warningsclass PipDeprecationWarning(Warning):passclass Pending(object):passclass RemovedInPip10Warning(PipDeprecationWarning):passclass RemovedInPip11Warning(PipDeprecationWarning, Pending):passclass Python26DeprecationWarning(PipDeprecationWarning):pass# Warnings <-> Logging Integration_warnings_showwarning = Nonedef _showwarning(message, category, filename, lineno, file=None, line=None):if file is not None:if _warnings_showwarning is not None:_warnings_showwarning(message, category, filename, lineno, file, line,)else:if issubclass(category, PipDeprecationWarning):# We use a specially named logger which will handle all of the# deprecation messages for pip.logger = logging.getLogger("pip.deprecations")# This is purposely using the % formatter here instead of letting# the logging module handle the interpolation. This is because we# want it to appear as if someone typed this entire message out.log_message = "DEPRECATION: %s" % message# PipDeprecationWarnings that are Pending still have at least 2# versions to go until they are removed so they can just be# warnings. Otherwise, they will be removed in the very next# version of pip. We want these to be more obvious so we use the# ERROR logging level.if issubclass(category, Pending):logger.warning(log_message)else:logger.error(log_message)else:_warnings_showwarning(message, category, filename, lineno, file, line,)def install_warning_logger():# Enable our Deprecation Warningswarnings.simplefilter("default", PipDeprecationWarning, append=True)global _warnings_showwarningif _warnings_showwarning is None:_warnings_showwarning = warnings.showwarningwarnings.showwarning = _showwarning
from __future__ import absolute_importimport os.pathimport tempfilefrom pip.utils import rmtreeclass BuildDirectory(object):def __init__(self, name=None, delete=None):# If we were not given an explicit directory, and we were not given an# explicit delete option, then we'll default to deleting.if name is None and delete is None:delete = Trueif name is None:# We realpath here because some systems have their default tmpdir# symlinked to another directory. This tends to confuse build# scripts, so we canonicalize the path by traversing potential# symlinks here.name = os.path.realpath(tempfile.mkdtemp(prefix="pip-build-"))# If we were not given an explicit directory, and we were not given# an explicit delete option, then we'll default to deleting.if delete is None:delete = Trueself.name = nameself.delete = deletedef __repr__(self):return "<{} {!r}>".format(self.__class__.__name__, self.name)def __enter__(self):return self.namedef __exit__(self, exc, value, tb):self.cleanup()def cleanup(self):if self.delete:rmtree(self.name)
"""This code was taken from https://github.com/ActiveState/appdirs and modifiedto suit our purposes."""from __future__ import absolute_importimport osimport sysfrom pip.compat import WINDOWS, expanduserfrom pip._vendor.six import PY2, text_typedef user_cache_dir(appname):r"""Return full path to the user-specific cache dir for this application."appname" is the name of application.Typical user cache directories are:macOS: ~/Library/Caches/<AppName>Unix: ~/.cache/<AppName> (XDG default)Windows: C:\Users\<username>\AppData\Local\<AppName>\CacheOn Windows the only suggestion in the MSDN docs is that local settings goin the `CSIDL_LOCAL_APPDATA` directory. This is identical to thenon-roaming app data dir (the default returned by `user_data_dir`). Appstypically put cache data somewhere *under* the given dir here. Someexamples:...\Mozilla\Firefox\Profiles\<ProfileName>\Cache...\Acme\SuperApp\Cache\1.0OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value."""if WINDOWS:# Get the base pathpath = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA"))# When using Python 2, return paths as bytes on Windows like we do on# other operating systems. See helper function docs for more details.if PY2 and isinstance(path, text_type):path = _win_path_to_bytes(path)# Add our app name and Cache directory to itpath = os.path.join(path, appname, "Cache")elif sys.platform == "darwin":# Get the base pathpath = expanduser("~/Library/Caches")# Add our app name to itpath = os.path.join(path, appname)else:# Get the base pathpath = os.getenv("XDG_CACHE_HOME", expanduser("~/.cache"))# Add our app name to itpath = os.path.join(path, appname)return pathdef user_data_dir(appname, roaming=False):"""Return full path to the user-specific data dir for this application."appname" is the name of application.If None, just the system directory is returned."roaming" (boolean, default False) can be set True to use the Windowsroaming appdata directory. 
That means that for users on a Windowsnetwork setup for roaming profiles, this user data will besync'd on login. See<http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>for a discussion of issues.Typical user data directories are:macOS: ~/Library/Application Support/<AppName>Unix: ~/.local/share/<AppName> # or in$XDG_DATA_HOME, if definedWin XP (not roaming): C:\Documents and Settings\<username>\ ......Application Data\<AppName>Win XP (roaming): C:\Documents and Settings\<username>\Local ......Settings\Application Data\<AppName>Win 7 (not roaming): C:\\Users\<username>\AppData\Local\<AppName>Win 7 (roaming): C:\\Users\<username>\AppData\Roaming\<AppName>For Unix, we follow the XDG spec and support $XDG_DATA_HOME.That means, by default "~/.local/share/<AppName>"."""if WINDOWS:const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA"path = os.path.join(os.path.normpath(_get_win_folder(const)), appname)elif sys.platform == "darwin":path = os.path.join(expanduser('~/Library/Application Support/'),appname,)else:path = os.path.join(os.getenv('XDG_DATA_HOME', expanduser("~/.local/share")),appname,)return pathdef user_config_dir(appname, roaming=True):"""Return full path to the user-specific config dir for this application."appname" is the name of application.If None, just the system directory is returned."roaming" (boolean, default True) can be set False to not use theWindows roaming appdata directory. That means that for users on aWindows network setup for roaming profiles, this user data will besync'd on login. 
See<http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>for a discussion of issues.Typical user data directories are:macOS: same as user_data_dirUnix: ~/.config/<AppName>Win *: same as user_data_dirFor Unix, we follow the XDG spec and support $XDG_CONFIG_HOME.That means, by default "~/.config/<AppName>"."""if WINDOWS:path = user_data_dir(appname, roaming=roaming)elif sys.platform == "darwin":path = user_data_dir(appname)else:path = os.getenv('XDG_CONFIG_HOME', expanduser("~/.config"))path = os.path.join(path, appname)return path# for the discussion regarding site_config_dirs locations# see <https://github.com/pypa/pip/issues/1733>def site_config_dirs(appname):"""Return a list of potential user-shared config dirs for this application."appname" is the name of application.Typical user config directories are:macOS: /Library/Application Support/<AppName>/Unix: /etc or $XDG_CONFIG_DIRS[i]/<AppName>/ for each value in$XDG_CONFIG_DIRSWin XP: C:\Documents and Settings\All Users\Application ......Data\<AppName>\Vista: (Fail! "C:\ProgramData" is a hidden *system* directoryon Vista.)Win 7: Hidden, but writeable on Win 7:C:\ProgramData\<AppName>\"""if WINDOWS:path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA"))pathlist = [os.path.join(path, appname)]elif sys.platform == 'darwin':pathlist = [os.path.join('/Library/Application Support', appname)]else:# try looking in $XDG_CONFIG_DIRSxdg_config_dirs = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg')if xdg_config_dirs:pathlist = [os.path.join(expanduser(x), appname)for x in xdg_config_dirs.split(os.pathsep)]else:pathlist = []# always look in /etc directly as wellpathlist.append('/etc')return pathlist# -- Windows support functions --def _get_win_folder_from_registry(csidl_name):"""This is a fallback technique at best. 
I'm not sure if using theregistry for this guarantees us the correct answer for all CSIDL_*names."""import _winregshell_folder_name = {"CSIDL_APPDATA": "AppData","CSIDL_COMMON_APPDATA": "Common AppData","CSIDL_LOCAL_APPDATA": "Local AppData",}[csidl_name]key = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER,r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders")directory, _type = _winreg.QueryValueEx(key, shell_folder_name)return directorydef _get_win_folder_with_ctypes(csidl_name):csidl_const = {"CSIDL_APPDATA": 26,"CSIDL_COMMON_APPDATA": 35,"CSIDL_LOCAL_APPDATA": 28,}[csidl_name]buf = ctypes.create_unicode_buffer(1024)ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)# Downgrade to short path name if have highbit chars. See# <http://bugs.activestate.com/show_bug.cgi?id=85099>.has_high_char = Falsefor c in buf:if ord(c) > 255:has_high_char = Truebreakif has_high_char:buf2 = ctypes.create_unicode_buffer(1024)if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):buf = buf2return buf.valueif WINDOWS:try:import ctypes_get_win_folder = _get_win_folder_with_ctypesexcept ImportError:_get_win_folder = _get_win_folder_from_registrydef _win_path_to_bytes(path):"""Encode Windows paths to bytes. Only used on Python 2.Motivation is to be consistent with other operating systems where pathsare also returned as bytes. This avoids problems mixing bytes and Unicodeelsewhere in the codebase. For more details and discussion see<https://github.com/pypa/pip/issues/3463>.If encoding using ASCII and MBCS fails, return the original Unicode path."""for encoding in ('ASCII', 'MBCS'):try:return path.encode(encoding)except (UnicodeEncodeError, LookupError):passreturn path
from __future__ import absolute_import

from collections import deque
import contextlib
import errno
import io
import locale
# we have a submodule named 'logging' which would shadow this if we used the
# regular name:
import logging as std_logging
import re
import os
import posixpath
import shutil
import stat
import subprocess
import sys
import tarfile
import zipfile

from pip.exceptions import InstallationError
from pip.compat import console_to_str, expanduser, stdlib_pkgs
from pip.locations import (
    site_packages, user_site, running_under_virtualenv, virtualenv_no_global,
    write_delete_marker_file,
)
from pip._vendor import pkg_resources
from pip._vendor.six.moves import input
from pip._vendor.six import PY2
from pip._vendor.retrying import retry

if PY2:
    # On Python 2 the captured streams carry bytes, so StringIO must be a
    # bytes buffer there.
    from io import BytesIO as StringIO
else:
    from io import StringIO

__all__ = ['rmtree', 'display_path', 'backup_dir',
           'ask', 'splitext',
           'format_size', 'is_installable_dir',
           'is_svn_page', 'file_contents',
           'split_leading_dir', 'has_leading_dir',
           'normalize_path',
           'renames', 'get_terminal_size', 'get_prog',
           'unzip_file', 'untar_file', 'unpack_file', 'call_subprocess',
           'captured_stdout', 'ensure_dir',
           'ARCHIVE_EXTENSIONS', 'SUPPORTED_EXTENSIONS',
           'get_installed_version']


logger = std_logging.getLogger(__name__)

# Recognised archive suffixes, grouped by container/compression type.
BZ2_EXTENSIONS = ('.tar.bz2', '.tbz')
XZ_EXTENSIONS = ('.tar.xz', '.txz', '.tlz', '.tar.lz', '.tar.lzma')
ZIP_EXTENSIONS = ('.zip', '.whl')
TAR_EXTENSIONS = ('.tar.gz', '.tgz', '.tar')
ARCHIVE_EXTENSIONS = (
    ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS)
# SUPPORTED_EXTENSIONS grows below depending on which optional compression
# modules this interpreter actually has.
SUPPORTED_EXTENSIONS = ZIP_EXTENSIONS + TAR_EXTENSIONS

try:
    import bz2  # noqa
    SUPPORTED_EXTENSIONS += BZ2_EXTENSIONS
except ImportError:
    logger.debug('bz2 module is not available')

try:
    # Only for Python 3.3+
    import lzma  # noqa
    SUPPORTED_EXTENSIONS += XZ_EXTENSIONS
except ImportError:
    logger.debug('lzma module is not available')


def import_or_raise(pkg_or_module_string, ExceptionType, *args, **kwargs):
    """Import and return the named module, raising ExceptionType(*args,
    **kwargs) instead of ImportError when the import fails."""
    try:
        return __import__(pkg_or_module_string)
    except ImportError:
        raise ExceptionType(*args, **kwargs)


def ensure_dir(path):
    """os.makedirs without raising on EEXIST (directory already present)."""
    try:
        os.makedirs(path)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise


def get_prog():
    """Return the program name to show in usage messages: "<python> -m pip"
    when pip was invoked as a module, plain 'pip' otherwise."""
    try:
        if os.path.basename(sys.argv[0]) in ('__main__.py', '-c'):
            return "%s -m pip" % sys.executable
    except (AttributeError, TypeError, IndexError):
        pass
    return 'pip'


# Retry every half second for up to 3 seconds
@retry(stop_max_delay=3000, wait_fixed=500)
def rmtree(dir, ignore_errors=False):
    shutil.rmtree(dir, ignore_errors=ignore_errors,
                  onerror=rmtree_errorhandler)


def rmtree_errorhandler(func, path, exc_info):
    """On Windows, the files in .svn are read-only, so when rmtree() tries to
    remove them, an exception is thrown.  We catch that here, remove the
    read-only attribute, and hopefully continue without problems."""
    # if file type currently read only
    if os.stat(path).st_mode & stat.S_IREAD:
        # convert to read/write
        os.chmod(path, stat.S_IWRITE)
        # use the original function to repeat the operation
        func(path)
        return
    else:
        # NOTE(review): bare `raise` is only valid because shutil.rmtree
        # invokes this handler from inside its own `except` block, so there
        # is an active exception to re-raise.
        raise


def display_path(path):
    """Gives the display value for a given path, making it relative to cwd
    if possible."""
    path = os.path.normcase(os.path.abspath(path))
    if sys.version_info[0] == 2:
        # Py2: round-trip through the filesystem encoding so undecodable
        # bytes become replacement characters instead of raising.
        path = path.decode(sys.getfilesystemencoding(), 'replace')
        path = path.encode(sys.getdefaultencoding(), 'replace')
    if path.startswith(os.getcwd() + os.path.sep):
        path = '.' + path[len(os.getcwd()):]
    return path


def backup_dir(dir, ext='.bak'):
    """Figure out the name of a directory to back up the given dir to
    (adding .bak, .bak2, etc)"""
    n = 1
    extension = ext
    while os.path.exists(dir + extension):
        n += 1
        extension = ext + str(n)
    return dir + extension


def ask_path_exists(message, options):
    # $PIP_EXISTS_ACTION lets the user pre-answer this prompt from the
    # environment; only fall through to an interactive ask() otherwise.
    for action in os.environ.get('PIP_EXISTS_ACTION', '').split():
        if action in options:
            return action
    return ask(message, options)


def ask(message, options):
    """Ask the message interactively, with the given possible responses"""
    while 1:
        if os.environ.get('PIP_NO_INPUT'):
            raise Exception(
                'No input was expected ($PIP_NO_INPUT set); question: %s' %
                message
            )
        response = input(message)
        response = response.strip().lower()
        if response not in options:
            print(
                'Your response (%r) was not one of the expected responses: '
                '%s' % (response, ', '.join(options))
            )
        else:
            return response


def format_size(bytes):
    # Decimal (SI) units, one decimal place above 1 kB / 1 MB.
    if bytes > 1000 * 1000:
        return '%.1fMB' % (bytes / 1000.0 / 1000)
    elif bytes > 10 * 1000:
        return '%ikB' % (bytes / 1000)
    elif bytes > 1000:
        return '%.1fkB' % (bytes / 1000.0)
    else:
        return '%ibytes' % bytes


def is_installable_dir(path):
    """Return True if `path` is a directory containing a setup.py file."""
    if not os.path.isdir(path):
        return False
    setup_py = os.path.join(path, 'setup.py')
    if os.path.isfile(setup_py):
        return True
    return False


def is_svn_page(html):
    """
    Returns true if the page appears to be the index page of an svn repository
    """
    return (re.search(r'<title>[^<]*Revision \d+:', html) and
            re.search(r'Powered by (?:<a[^>]*?>)?Subversion', html, re.I))


def file_contents(filename):
    # NOTE(review): assumes the file is UTF-8 encoded; a decode error here
    # propagates to the caller.
    with open(filename, 'rb') as fp:
        return fp.read().decode('utf-8')


def read_chunks(file, size=io.DEFAULT_BUFFER_SIZE):
    """Yield pieces of data from a file-like object until EOF."""
    while True:
        chunk = file.read(size)
        if not chunk:
            break
        yield chunk


def split_leading_dir(path):
    """Split off the first path component, honouring whichever of '/' or
    '\\' appears first; returns (leading, rest) or (path, '')."""
    path = path.lstrip('/').lstrip('\\')
    if '/' in path and (('\\' in path and path.find('/') < path.find('\\')) or
                        '\\' not in path):
        return path.split('/', 1)
    elif '\\' in path:
        return path.split('\\', 1)
    else:
        return path, ''


def has_leading_dir(paths):
    """Returns true if all the paths have the same leading path name
    (i.e., everything is in one subdirectory in an archive)"""
    common_prefix = None
    for path in paths:
        prefix, rest = split_leading_dir(path)
        if not prefix:
            return False
        elif common_prefix is None:
            common_prefix = prefix
        elif prefix != common_prefix:
            return False
    return True


def normalize_path(path, resolve_symlinks=True):
    """
    Convert a path to its canonical, case-normalized, absolute version.

    """
    path = expanduser(path)
    if resolve_symlinks:
        path = os.path.realpath(path)
    else:
        path = os.path.abspath(path)
    return os.path.normcase(path)


def splitext(path):
    """Like os.path.splitext, but take off .tar too"""
    base, ext = posixpath.splitext(path)
    if base.lower().endswith('.tar'):
        ext = base[-4:] + ext
        base = base[:-4]
    return base, ext


def renames(old, new):
    """Like os.renames(), but handles renaming across devices."""
    # Implementation borrowed from os.renames().
    head, tail = os.path.split(new)
    if head and tail and not os.path.exists(head):
        os.makedirs(head)

    # shutil.move falls back to copy+delete when a plain rename would fail
    # across filesystems.
    shutil.move(old, new)

    head, tail = os.path.split(old)
    if head and tail:
        try:
            os.removedirs(head)
        except OSError:
            pass


def is_local(path):
    """
    Return True if path is within sys.prefix, if we're running in a virtualenv.

    If we're not in a virtualenv, all paths are considered "local."

    """
    if not running_under_virtualenv():
        return True
    return normalize_path(path).startswith(normalize_path(sys.prefix))


def dist_is_local(dist):
    """
    Return True if given Distribution object is installed locally
    (i.e. within current virtualenv).

    Always True if we're not in a virtualenv.

    """
    return is_local(dist_location(dist))


def dist_in_usersite(dist):
    """
    Return True if given Distribution is installed in user site.
    """
    norm_path = normalize_path(dist_location(dist))
    return norm_path.startswith(normalize_path(user_site))


def dist_in_site_packages(dist):
    """
    Return True if given Distribution is installed in
    distutils.sysconfig.get_python_lib().
    """
    return normalize_path(
        dist_location(dist)
    ).startswith(normalize_path(site_packages))


def dist_is_editable(dist):
    """Is distribution an editable install?"""
    # Editable installs are marked by a <project>.egg-link file somewhere
    # on sys.path.
    for path_item in sys.path:
        egg_link = os.path.join(path_item, dist.project_name + '.egg-link')
        if os.path.isfile(egg_link):
            return True
    return False


def get_installed_distributions(local_only=True,
                                skip=stdlib_pkgs,
                                include_editables=True,
                                editables_only=False,
                                user_only=False):
    """
    Return a list of installed Distribution objects.

    If ``local_only`` is True (default), only return installations
    local to the current virtualenv, if in a virtualenv.

    ``skip`` argument is an iterable of lower-case project names to
    ignore; defaults to stdlib_pkgs

    If ``editables`` is False, don't report editables.

    If ``editables_only`` is True , only report editables.

    If ``user_only`` is True , only report installations in the user
    site directory.

    """
    # Each filter option is turned into a predicate; a distribution must
    # pass all of them to be included.
    if local_only:
        local_test = dist_is_local
    else:
        def local_test(d):
            return True

    if include_editables:
        def editable_test(d):
            return True
    else:
        def editable_test(d):
            return not dist_is_editable(d)

    if editables_only:
        def editables_only_test(d):
            return dist_is_editable(d)
    else:
        def editables_only_test(d):
            return True

    if user_only:
        user_test = dist_in_usersite
    else:
        def user_test(d):
            return True

    return [d for d in pkg_resources.working_set
            if local_test(d) and
            d.key not in skip and
            editable_test(d) and
            editables_only_test(d) and
            user_test(d)
            ]


def egg_link_path(dist):
    """
    Return the path for the .egg-link file if it exists, otherwise, None.

    There's 3 scenarios:
    1) not in a virtualenv
       try to find in site.USER_SITE, then site_packages
    2) in a no-global virtualenv
       try to find in site_packages
    3) in a yes-global virtualenv
       try to find in site_packages, then site.USER_SITE
       (don't look in global location)

    For #1 and #3, there could be odd cases, where there's an egg-link in 2
    locations.

    This method will just return the first one found.
    """
    sites = []
    if running_under_virtualenv():
        if virtualenv_no_global():
            sites.append(site_packages)
        else:
            sites.append(site_packages)
            if user_site:
                sites.append(user_site)
    else:
        if user_site:
            sites.append(user_site)
        sites.append(site_packages)

    for site in sites:
        egglink = os.path.join(site, dist.project_name) + '.egg-link'
        if os.path.isfile(egglink):
            return egglink


def dist_location(dist):
    """
    Get the site-packages location of this distribution. Generally
    this is dist.location, except in the case of develop-installed
    packages, where dist.location is the source code location, and we
    want to know where the egg-link file is.

    """
    egg_link = egg_link_path(dist)
    if egg_link:
        return egg_link
    return dist.location


def get_terminal_size():
    """Returns a tuple (x, y) representing the width(x) and the height(x)
    in characters of the terminal window."""
    def ioctl_GWINSZ(fd):
        try:
            import fcntl
            import termios
            import struct
            # TIOCGWINSZ returns (rows, cols) packed as two shorts.
            cr = struct.unpack(
                'hh',
                fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234')
            )
        # NOTE(review): bare `except:` also swallows KeyboardInterrupt /
        # SystemExit; a narrower clause would be safer if this is revisited.
        except:
            return None
        if cr == (0, 0):
            return None
        return cr
    # Try stdin, stdout, stderr, then the controlling terminal.
    cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
    if not cr:
        try:
            fd = os.open(os.ctermid(), os.O_RDONLY)
            cr = ioctl_GWINSZ(fd)
            os.close(fd)
        except:
            pass
    if not cr:
        # Final fallback: environment variables, then conventional defaults.
        cr = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80))
    return int(cr[1]), int(cr[0])


def current_umask():
    """Get the current umask which involves having to set it temporarily."""
    mask = os.umask(0)
    os.umask(mask)
    return mask


def unzip_file(filename, location, flatten=True):
    """
    Unzip the file (with path `filename`) to the destination `location`.  All
    files are written based on system defaults and umask (i.e. permissions are
    not preserved), except that regular file members with any execute
    permissions (user, group, or world) have "chmod +x" applied after being
    written. Note that for windows, any execute changes using os.chmod are
    no-ops per the python docs.
    """
    ensure_dir(location)
    zipfp = open(filename, 'rb')
    try:
        zip = zipfile.ZipFile(zipfp, allowZip64=True)
        # With flatten, strip the single top-level directory that most
        # archives wrap their contents in.
        leading = has_leading_dir(zip.namelist()) and flatten
        for info in zip.infolist():
            name = info.filename
            data = zip.read(name)
            fn = name
            if leading:
                fn = split_leading_dir(name)[1]
            fn = os.path.join(location, fn)
            dir = os.path.dirname(fn)
            if fn.endswith('/') or fn.endswith('\\'):
                # A directory
                ensure_dir(fn)
            else:
                ensure_dir(dir)
                fp = open(fn, 'wb')
                try:
                    fp.write(data)
                finally:
                    fp.close()
                # Upper 16 bits of external_attr hold the Unix mode.
                mode = info.external_attr >> 16
                # if mode and regular file and any execute permissions for
                # user/group/world?
                if mode and stat.S_ISREG(mode) and mode & 0o111:
                    # make dest file have execute for user/group/world
                    # (chmod +x) no-op on windows per python docs
                    os.chmod(fn, (0o777 - current_umask() | 0o111))
    finally:
        zipfp.close()


def untar_file(filename, location):
    """
    Untar the file (with path `filename`) to the destination `location`.
    All files are written based on system defaults and umask (i.e. permissions
    are not preserved), except that regular file members with any execute
    permissions (user, group, or world) have "chmod +x" applied after being
    written.  Note that for windows, any execute changes using os.chmod are
    no-ops per the python docs.
    """
    ensure_dir(location)
    # Pick the tarfile mode from the suffix; 'r:*' lets tarfile sniff.
    if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'):
        mode = 'r:gz'
    elif filename.lower().endswith(BZ2_EXTENSIONS):
        mode = 'r:bz2'
    elif filename.lower().endswith(XZ_EXTENSIONS):
        mode = 'r:xz'
    elif filename.lower().endswith('.tar'):
        mode = 'r'
    else:
        logger.warning(
            'Cannot determine compression type for file %s', filename,
        )
        mode = 'r:*'
    tar = tarfile.open(filename, mode)
    try:
        # note: python<=2.5 doesn't seem to know about pax headers, filter them
        leading = has_leading_dir([
            member.name for member in tar.getmembers()
            if member.name != 'pax_global_header'
        ])
        for member in tar.getmembers():
            fn = member.name
            if fn == 'pax_global_header':
                continue
            if leading:
                fn = split_leading_dir(fn)[1]
            path = os.path.join(location, fn)
            if member.isdir():
                ensure_dir(path)
            elif member.issym():
                try:
                    # NOTE(review): uses the private TarFile._extract_member
                    # to create the symlink itself rather than its target.
                    tar._extract_member(member, path)
                except Exception as exc:
                    # Some corrupt tar files seem to produce this
                    # (specifically bad symlinks)
                    logger.warning(
                        'In the tar file %s the member %s is invalid: %s',
                        filename, member.name, exc,
                    )
                    continue
            else:
                try:
                    fp = tar.extractfile(member)
                except (KeyError, AttributeError) as exc:
                    # Some corrupt tar files seem to produce this
                    # (specifically bad symlinks)
                    logger.warning(
                        'In the tar file %s the member %s is invalid: %s',
                        filename, member.name, exc,
                    )
                    continue
                ensure_dir(os.path.dirname(path))
                with open(path, 'wb') as destfp:
                    shutil.copyfileobj(fp, destfp)
                fp.close()
                # Update the timestamp (useful for cython compiled files)
                tar.utime(member, path)
                # member have any execute permissions for user/group/world?
                if member.mode & 0o111:
                    # make dest file have execute for user/group/world
                    # no-op on windows per python docs
                    os.chmod(path, (0o777 - current_umask() | 0o111))
    finally:
        tar.close()


def unpack_file(filename, location, content_type, link):
    """Dispatch to the right extractor (zip/tar/svn) based on content type,
    file suffix, and archive sniffing; raise InstallationError otherwise."""
    filename = os.path.realpath(filename)
    if (content_type == 'application/zip' or
            filename.lower().endswith(ZIP_EXTENSIONS) or
            zipfile.is_zipfile(filename)):
        # Wheels must keep their internal directory layout, so don't flatten.
        unzip_file(
            filename,
            location,
            flatten=not filename.endswith('.whl')
        )
    elif (content_type == 'application/x-gzip' or
            tarfile.is_tarfile(filename) or
            filename.lower().endswith(
                TAR_EXTENSIONS + BZ2_EXTENSIONS + XZ_EXTENSIONS)):
        untar_file(filename, location)
    elif (content_type and content_type.startswith('text/html') and
            is_svn_page(file_contents(filename))):
        # We don't really care about this
        from pip.vcs.subversion import Subversion
        Subversion('svn+' + link.url).unpack(location)
    else:
        # FIXME: handle?
        # FIXME: magic signatures?
        logger.critical(
            'Cannot unpack file %s (downloaded from %s, content-type: %s); '
            'cannot detect archive format',
            filename, location, content_type,
        )
        raise InstallationError(
            'Cannot determine archive format of %s' % location
        )


def call_subprocess(cmd, show_stdout=True, cwd=None,
                    on_returncode='raise',
                    command_desc=None,
                    extra_environ=None, spinner=None):
    # This function's handling of subprocess output is confusing and I
    # previously broke it terribly, so as penance I will write a long comment
    # explaining things.
    #
    # The obvious thing that affects output is the show_stdout=
    # kwarg. show_stdout=True means, let the subprocess write directly to our
    # stdout. Even though it is nominally the default, it is almost never used
    # inside pip (and should not be used in new code without a very good
    # reason); as of 2016-02-22 it is only used in a few places inside the VCS
    # wrapper code. Ideally we should get rid of it entirely, because it
    # creates a lot of complexity here for a rarely used feature.
    #
    # Most places in pip set show_stdout=False. What this means is:
    # - We connect the child stdout to a pipe, which we read.
    # - By default, we hide the output but show a spinner -- unless the
    #   subprocess exits with an error, in which case we show the output.
    # - If the --verbose option was passed (= loglevel is DEBUG), then we show
    #   the output unconditionally. (But in this case we don't want to show
    #   the output a second time if it turns out that there was an error.)
    #
    # stderr is always merged with stdout (even if show_stdout=True).
    if show_stdout:
        stdout = None
    else:
        stdout = subprocess.PIPE
    if command_desc is None:
        # Build a shell-ish, human-readable rendering of the command for
        # log messages (not for execution).
        cmd_parts = []
        for part in cmd:
            if ' ' in part or '\n' in part or '"' in part or "'" in part:
                part = '"%s"' % part.replace('"', '\\"')
            cmd_parts.append(part)
        command_desc = ' '.join(cmd_parts)
    logger.debug("Running command %s", command_desc)
    env = os.environ.copy()
    if extra_environ:
        env.update(extra_environ)
    try:
        proc = subprocess.Popen(
            cmd, stderr=subprocess.STDOUT, stdin=None, stdout=stdout,
            cwd=cwd, env=env)
    except Exception as exc:
        logger.critical(
            "Error %s while executing command %s", exc, command_desc,
        )
        raise
    if stdout is not None:
        all_output = []
        while True:
            line = console_to_str(proc.stdout.readline())
            if not line:
                break
            line = line.rstrip()
            all_output.append(line + '\n')
            if logger.getEffectiveLevel() <= std_logging.DEBUG:
                # Show the line immediately
                logger.debug(line)
            else:
                # Update the spinner
                if spinner is not None:
                    spinner.spin()
    proc.wait()
    if spinner is not None:
        if proc.returncode:
            spinner.finish("error")
        else:
            spinner.finish("done")
    if proc.returncode:
        if on_returncode == 'raise':
            if (logger.getEffectiveLevel() > std_logging.DEBUG and
                    not show_stdout):
                logger.info(
                    'Complete output from command %s:', command_desc,
                )
                logger.info(
                    ''.join(all_output) +
                    '\n----------------------------------------'
                )
            raise InstallationError(
                'Command "%s" failed with error code %s in %s'
                % (command_desc, proc.returncode, cwd))
        elif on_returncode == 'warn':
            logger.warning(
                'Command "%s" had error code %s in %s',
                command_desc, proc.returncode, cwd,
            )
        elif on_returncode == 'ignore':
            pass
        else:
            raise ValueError('Invalid value: on_returncode=%s' %
                             repr(on_returncode))
    if not show_stdout:
        return ''.join(all_output)


def read_text_file(filename):
    """Return the contents of *filename*.

    Try to decode the file contents with utf-8, the preferred system encoding
    (e.g., cp1252 on some Windows machines), and latin1, in that order.
    Decoding a byte string with latin1 will never raise an error. In the worst
    case, the returned string will contain some garbage characters.

    """
    with open(filename, 'rb') as fp:
        data = fp.read()

    encodings = ['utf-8', locale.getpreferredencoding(False), 'latin1']
    for enc in encodings:
        try:
            data = data.decode(enc)
        except UnicodeDecodeError:
            continue
        break

    # NOTE(review): `assert` is stripped under -O; latin1 can't fail, so this
    # is a sanity check rather than validation.
    assert type(data) != bytes  # Latin1 should have worked.
    return data


def _make_build_dir(build_dir):
    os.makedirs(build_dir)
    write_delete_marker_file(build_dir)


class FakeFile(object):
    """Wrap a list of lines in an object with readline() to make
    ConfigParser happy."""
    def __init__(self, lines):
        self._gen = (l for l in lines)

    def readline(self):
        try:
            try:
                return next(self._gen)
            except NameError:
                # Py2 fallback where next() may be missing: use .next().
                return self._gen.next()
        except StopIteration:
            return ''

    def __iter__(self):
        return self._gen


class StreamWrapper(StringIO):

    @classmethod
    def from_stream(cls, orig_stream):
        cls.orig_stream = orig_stream
        return cls()

    # compileall.compile_dir() needs stdout.encoding to print to stdout
    @property
    def encoding(self):
        return self.orig_stream.encoding


@contextlib.contextmanager
def captured_output(stream_name):
    """Return a context manager used by captured_stdout/stdin/stderr
    that temporarily replaces the sys stream *stream_name* with a StringIO.

    Taken from Lib/support/__init__.py in the CPython repo.
    """
    orig_stdout = getattr(sys, stream_name)
    setattr(sys, stream_name, StreamWrapper.from_stream(orig_stdout))
    try:
        yield getattr(sys, stream_name)
    finally:
        setattr(sys, stream_name, orig_stdout)


def captured_stdout():
    """Capture the output of sys.stdout:

       with captured_stdout() as stdout:
           print('hello')
       self.assertEqual(stdout.getvalue(), 'hello\n')

    Taken from Lib/support/__init__.py in the CPython repo.
    """
    return captured_output('stdout')


class cached_property(object):
    """A property that is only computed once per instance and then replaces
       itself with an ordinary attribute. Deleting the attribute resets the
       property.

       Source: https://github.com/bottlepy/bottle/blob/0.11.5/bottle.py#L175
    """

    def __init__(self, func):
        self.__doc__ = getattr(func, '__doc__')
        self.func = func

    def __get__(self, obj, cls):
        if obj is None:
            # We're being accessed from the class itself, not from an object
            return self
        # Cache the computed value on the instance; the instance attribute
        # then shadows this descriptor on subsequent lookups.
        value = obj.__dict__[self.func.__name__] = self.func(obj)
        return value


def get_installed_version(dist_name, lookup_dirs=None):
    """Get the installed version of dist_name avoiding pkg_resources cache"""
    # Create a requirement that we'll look for inside of setuptools.
    req = pkg_resources.Requirement.parse(dist_name)

    # We want to avoid having this cached, so we need to construct a new
    # working set each time.
    if lookup_dirs is None:
        working_set = pkg_resources.WorkingSet()
    else:
        working_set = pkg_resources.WorkingSet(lookup_dirs)

    # Get the installed distribution from our working set
    dist = working_set.find(req)

    # Check to see if we got an installed distribution or not, if we did
    # we want to return it's version.
    return dist.version if dist else None


def consume(iterator):
    """Consume an iterable at C speed."""
    deque(iterator, maxlen=0)
from __future__ import absolute_import

# Process exit codes returned by pip commands.
SUCCESS = 0
ERROR = 1
UNKNOWN_ERROR = 2
VIRTUALENV_NOT_FOUND = 3
PREVIOUS_BUILD_DIR_ERROR = 4
# Mirrors grep's convention of 23... NOTE(review): used by `pip list/search`
# style commands when nothing matched — confirm against callers.
NO_MATCHES_FOUND = 23
from __future__ import absolute_import

import logging
import os
import tempfile

from pip.compat import uses_pycache, WINDOWS, cache_from_source
from pip.exceptions import UninstallationError
from pip.utils import rmtree, ask, is_local, renames, normalize_path
from pip.utils.logging import indent_log


logger = logging.getLogger(__name__)


class UninstallPathSet(object):
    """A set of file paths to be removed in the uninstallation of a
    requirement."""
    def __init__(self, dist):
        # paths:        files/dirs we will remove
        # _refuse:      paths outside our prefix that we refuse to touch
        # pth:          .pth file path -> UninstallPthEntries to edit
        # save_dir:     temp dir where removed paths are stashed for rollback
        # _moved_paths: original locations of everything moved to save_dir
        self.paths = set()
        self._refuse = set()
        self.pth = {}
        self.dist = dist
        self.save_dir = None
        self._moved_paths = []

    def _permitted(self, path):
        """
        Return True if the given path is one we are permitted to
        remove/modify, False otherwise.

        """
        return is_local(path)

    def add(self, path):
        head, tail = os.path.split(path)

        # we normalize the head to resolve parent directory symlinks, but not
        # the tail, since we only want to uninstall symlinks, not their targets
        path = os.path.join(normalize_path(head), os.path.normcase(tail))

        if not os.path.exists(path):
            return
        if self._permitted(path):
            self.paths.add(path)
        else:
            self._refuse.add(path)

        # __pycache__ files can show up after 'installed-files.txt' is created,
        # due to imports
        if os.path.splitext(path)[1] == '.py' and uses_pycache:
            self.add(cache_from_source(path))

    def add_pth(self, pth_file, entry):
        pth_file = normalize_path(pth_file)
        if self._permitted(pth_file):
            if pth_file not in self.pth:
                self.pth[pth_file] = UninstallPthEntries(pth_file)
            self.pth[pth_file].add(entry)
        else:
            self._refuse.add(pth_file)

    def compact(self, paths):
        """Compact a path set to contain the minimal number of paths
        necessary to contain all paths in the set. If /a/path/ and
        /a/path/to/a/file.txt are both in the set, leave only the
        shorter path."""
        short_paths = set()
        # Sorting by length guarantees a parent is seen before any child.
        for path in sorted(paths, key=len):
            if not any([
                    (path.startswith(shortpath) and
                     path[len(shortpath.rstrip(os.path.sep))] == os.path.sep)
                    for shortpath in short_paths]):
                short_paths.add(path)
        return short_paths

    def _stash(self, path):
        # Mirror the path's drive-less layout under save_dir so rollback can
        # reconstruct the original location.
        return os.path.join(
            self.save_dir, os.path.splitdrive(path)[1].lstrip(os.path.sep))

    def remove(self, auto_confirm=False):
        """Remove paths in ``self.paths`` with confirmation (unless
        ``auto_confirm`` is True)."""
        if not self.paths:
            logger.info(
                "Can't uninstall '%s'. No files were found to uninstall.",
                self.dist.project_name,
            )
            return
        logger.info(
            'Uninstalling %s-%s:',
            self.dist.project_name, self.dist.version
        )

        with indent_log():
            paths = sorted(self.compact(self.paths))

            if auto_confirm:
                response = 'y'
            else:
                for path in paths:
                    logger.info(path)
                response = ask('Proceed (y/n)? ', ('y', 'n'))
            if self._refuse:
                logger.info('Not removing or modifying (outside of prefix):')
                for path in self.compact(self._refuse):
                    logger.info(path)
            if response == 'y':
                # Files are moved (not deleted) into save_dir so that
                # rollback() can restore them if a later step fails.
                self.save_dir = tempfile.mkdtemp(suffix='-uninstall',
                                                 prefix='pip-')
                for path in paths:
                    new_path = self._stash(path)
                    logger.debug('Removing file or directory %s', path)
                    self._moved_paths.append(path)
                    renames(path, new_path)
                for pth in self.pth.values():
                    pth.remove()
                logger.info(
                    'Successfully uninstalled %s-%s',
                    self.dist.project_name, self.dist.version
                )

    def rollback(self):
        """Rollback the changes previously made by remove()."""
        if self.save_dir is None:
            logger.error(
                "Can't roll back %s; was not uninstalled",
                self.dist.project_name,
            )
            return False
        logger.info('Rolling back uninstall of %s', self.dist.project_name)
        for path in self._moved_paths:
            tmp_path = self._stash(path)
            logger.debug('Replacing %s', path)
            renames(tmp_path, path)
        for pth in self.pth.values():
            pth.rollback()

    def commit(self):
        """Remove temporary save dir: rollback will no longer be possible."""
        if self.save_dir is not None:
            rmtree(self.save_dir)
            self.save_dir = None
            self._moved_paths = []


class UninstallPthEntries(object):
    def __init__(self, pth_file):
        if not os.path.isfile(pth_file):
            raise UninstallationError(
                "Cannot remove entries from nonexistent file %s" % pth_file
            )
        self.file = pth_file
        self.entries = set()
        # _saved_lines holds the file's original content for rollback().
        self._saved_lines = None

    def add(self, entry):
        entry = os.path.normcase(entry)
        # On Windows, os.path.normcase converts the entry to use
        # backslashes.  This is correct for entries that describe absolute
        # paths outside of site-packages, but all the others use forward
        # slashes.
        if WINDOWS and not os.path.splitdrive(entry)[0]:
            entry = entry.replace('\\', '/')
        self.entries.add(entry)

    def remove(self):
        logger.debug('Removing pth entries from %s:', self.file)
        with open(self.file, 'rb') as fh:
            # windows uses '\r\n' with py3k, but uses '\n' with py2.x
            lines = fh.readlines()
            self._saved_lines = lines
        if any(b'\r\n' in line for line in lines):
            endline = '\r\n'
        else:
            endline = '\n'
        for entry in self.entries:
            try:
                logger.debug('Removing entry: %s', entry)
                lines.remove((entry + endline).encode("utf-8"))
            except ValueError:
                # Entry not present in the file; nothing to remove.
                pass
        with open(self.file, 'wb') as fh:
            fh.writelines(lines)

    def rollback(self):
        if self._saved_lines is None:
            logger.error(
                'Cannot roll back changes to %s, none were made', self.file
            )
            return False
        logger.debug('Rolling %s back to previous state', self.file)
        with open(self.file, 'wb') as fh:
            fh.writelines(self._saved_lines)
        return True
from __future__ import absolute_importfrom collections import defaultdictfrom itertools import chainimport loggingimport osfrom pip._vendor import pkg_resourcesfrom pip._vendor import requestsfrom pip.compat import expanduserfrom pip.download import (is_file_url, is_dir_url, is_vcs_url, url_to_path,unpack_url)from pip.exceptions import (InstallationError, BestVersionAlreadyInstalled,DistributionNotFound, PreviousBuildDirError,HashError, HashErrors, HashUnpinned,DirectoryUrlHashUnsupported, VcsHashUnsupported,UnsupportedPythonVersion)from pip.req.req_install import InstallRequirementfrom pip.utils import (display_path, dist_in_usersite, ensure_dir, normalize_path)from pip.utils.hashes import MissingHashesfrom pip.utils.logging import indent_logfrom pip.utils.packaging import check_dist_requires_pythonfrom pip.vcs import vcsfrom pip.wheel import Wheellogger = logging.getLogger(__name__)class Requirements(object):def __init__(self):self._keys = []self._dict = {}def keys(self):return self._keysdef values(self):return [self._dict[key] for key in self._keys]def __contains__(self, item):return item in self._keysdef __setitem__(self, key, value):if key not in self._keys:self._keys.append(key)self._dict[key] = valuedef __getitem__(self, key):return self._dict[key]def __repr__(self):values = ['%s: %s' % (repr(k), repr(self[k])) for k in self.keys()]return 'Requirements({%s})' % ', '.join(values)class DistAbstraction(object):"""Abstracts out the wheel vs non-wheel prepare_files logic.The requirements for anything installable are as follows:- we must be able to determine the requirement name(or we can't correctly handle the non-upgrade case).- we must be able to generate a list of run-time dependencieswithout installing any additional packages (or we wouldhave to either burn time by doing temporary isolated installsor alternatively violate pips 'don't start installing unlessall requirements are available' rule - neither of which aredesirable).- for packages with setup 
requirements, we must also be ableto determine their requirements without installing additionalpackages (for the same reason as run-time dependencies)- we must be able to create a Distribution object exposing theabove metadata."""def __init__(self, req_to_install):self.req_to_install = req_to_installdef dist(self, finder):"""Return a setuptools Dist object."""raise NotImplementedError(self.dist)def prep_for_dist(self):"""Ensure that we can get a Dist for this requirement."""raise NotImplementedError(self.dist)def make_abstract_dist(req_to_install):"""Factory to make an abstract dist object.Preconditions: Either an editable req with a source_dir, or satisfied_by ora wheel link, or a non-editable req with a source_dir.:return: A concrete DistAbstraction."""if req_to_install.editable:return IsSDist(req_to_install)elif req_to_install.link and req_to_install.link.is_wheel:return IsWheel(req_to_install)else:return IsSDist(req_to_install)class IsWheel(DistAbstraction):def dist(self, finder):return list(pkg_resources.find_distributions(self.req_to_install.source_dir))[0]def prep_for_dist(self):# FIXME:https://github.com/pypa/pip/issues/1112passclass IsSDist(DistAbstraction):def dist(self, finder):dist = self.req_to_install.get_dist()# FIXME: shouldn't be globally added:if dist.has_metadata('dependency_links.txt'):finder.add_dependency_links(dist.get_metadata_lines('dependency_links.txt'))return distdef prep_for_dist(self):self.req_to_install.run_egg_info()self.req_to_install.assert_source_matches_version()class Installed(DistAbstraction):def dist(self, finder):return self.req_to_install.satisfied_bydef prep_for_dist(self):passclass RequirementSet(object):def __init__(self, build_dir, src_dir, download_dir, upgrade=False,upgrade_strategy=None, ignore_installed=False, as_egg=False,target_dir=None, ignore_dependencies=False,force_reinstall=False, use_user_site=False, session=None,pycompile=True, isolated=False, wheel_download_dir=None,wheel_cache=None, 
require_hashes=False,ignore_requires_python=False):"""Create a RequirementSet.:param wheel_download_dir: Where still-packed .whl files should bewritten to. If None they are written to the download_dir parameter.Separate to download_dir to permit only keeping wheel archives forpip wheel.:param download_dir: Where still packed archives should be written to.If None they are not saved, and are deleted immediately afterunpacking.:param wheel_cache: The pip wheel cache, for passing toInstallRequirement."""if session is None:raise TypeError("RequirementSet() missing 1 required keyword argument: ""'session'")self.build_dir = build_dirself.src_dir = src_dir# XXX: download_dir and wheel_download_dir overlap semantically and may# be combined if we're willing to have non-wheel archives present in# the wheelhouse output by 'pip wheel'.self.download_dir = download_dirself.upgrade = upgradeself.upgrade_strategy = upgrade_strategyself.ignore_installed = ignore_installedself.force_reinstall = force_reinstallself.requirements = Requirements()# Mapping of alias: real_nameself.requirement_aliases = {}self.unnamed_requirements = []self.ignore_dependencies = ignore_dependenciesself.ignore_requires_python = ignore_requires_pythonself.successfully_downloaded = []self.successfully_installed = []self.reqs_to_cleanup = []self.as_egg = as_eggself.use_user_site = use_user_siteself.target_dir = target_dir # set from --target optionself.session = sessionself.pycompile = pycompileself.isolated = isolatedif wheel_download_dir:wheel_download_dir = normalize_path(wheel_download_dir)self.wheel_download_dir = wheel_download_dirself._wheel_cache = wheel_cacheself.require_hashes = require_hashes# Maps from install_req -> dependencies_of_install_reqself._dependencies = defaultdict(list)def __str__(self):reqs = [req for req in self.requirements.values()if not req.comes_from]reqs.sort(key=lambda req: req.name.lower())return ' '.join([str(req.req) for req in reqs])def __repr__(self):reqs = [req for req in 
self.requirements.values()]reqs.sort(key=lambda req: req.name.lower())reqs_str = ', '.join([str(req.req) for req in reqs])return ('<%s object; %d requirement(s): %s>'% (self.__class__.__name__, len(reqs), reqs_str))def add_requirement(self, install_req, parent_req_name=None,extras_requested=None):"""Add install_req as a requirement to install.:param parent_req_name: The name of the requirement that needed thisadded. The name is used because when multiple unnamed requirementsresolve to the same name, we could otherwise end up with dependencylinks that point outside the Requirements set. parent_req mustalready be added. Note that None implies that this is a usersupplied requirement, vs an inferred one.:param extras_requested: an iterable of extras used to evaluate theenvironement markers.:return: Additional requirements to scan. That is either [] ifthe requirement is not applicable, or [install_req] if therequirement is applicable and has just been added."""name = install_req.nameif not install_req.match_markers(extras_requested):logger.warning("Ignoring %s: markers '%s' don't match your ""environment", install_req.name,install_req.markers)return []# This check has to come after we filter requirements with the# environment markers.if install_req.link and install_req.link.is_wheel:wheel = Wheel(install_req.link.filename)if not wheel.supported():raise InstallationError("%s is not a supported wheel on this platform." 
%wheel.filename)install_req.as_egg = self.as_egginstall_req.use_user_site = self.use_user_siteinstall_req.target_dir = self.target_dirinstall_req.pycompile = self.pycompileinstall_req.is_direct = (parent_req_name is None)if not name:# url or path requirement w/o an egg fragmentself.unnamed_requirements.append(install_req)return [install_req]else:try:existing_req = self.get_requirement(name)except KeyError:existing_req = Noneif (parent_req_name is None and existing_req and notexisting_req.constraint andexisting_req.extras == install_req.extras and notexisting_req.req.specifier == install_req.req.specifier):raise InstallationError('Double requirement given: %s (already in %s, name=%r)'% (install_req, existing_req, name))if not existing_req:# Add requirementself.requirements[name] = install_req# FIXME: what about other normalizations? E.g., _ vs. -?if name.lower() != name:self.requirement_aliases[name.lower()] = nameresult = [install_req]else:# Assume there's no need to scan, and that we've already# encountered this for scanning.result = []if not install_req.constraint and existing_req.constraint:if (install_req.link and not (existing_req.link andinstall_req.link.path == existing_req.link.path)):self.reqs_to_cleanup.append(install_req)raise InstallationError("Could not satisfy constraints for '%s': ""installation from path or url cannot be ""constrained to a version" % name)# If we're now installing a constraint, mark the existing# object for real installation.existing_req.constraint = Falseexisting_req.extras = tuple(sorted(set(existing_req.extras).union(set(install_req.extras))))logger.debug("Setting %s extras to: %s",existing_req, existing_req.extras)# And now we need to scan this.result = [existing_req]# Canonicalise to the already-added object for the backref# check below.install_req = existing_reqif parent_req_name:parent_req = self.get_requirement(parent_req_name)self._dependencies[parent_req].append(install_req)return resultdef has_requirement(self, 
project_name):name = project_name.lower()if (name in self.requirements andnot self.requirements[name].constraint orname in self.requirement_aliases andnot self.requirements[self.requirement_aliases[name]].constraint):return Truereturn False@propertydef has_requirements(self):return list(req for req in self.requirements.values() if notreq.constraint) or self.unnamed_requirements@propertydef is_download(self):if self.download_dir:self.download_dir = expanduser(self.download_dir)if os.path.exists(self.download_dir):return Trueelse:logger.critical('Could not find download directory')raise InstallationError("Could not find or access download directory '%s'"% display_path(self.download_dir))return Falsedef get_requirement(self, project_name):for name in project_name, project_name.lower():if name in self.requirements:return self.requirements[name]if name in self.requirement_aliases:return self.requirements[self.requirement_aliases[name]]raise KeyError("No project with the name %r" % project_name)def uninstall(self, auto_confirm=False):for req in self.requirements.values():if req.constraint:continuereq.uninstall(auto_confirm=auto_confirm)req.commit_uninstall()def prepare_files(self, finder):"""Prepare process. Create temp directories, download and/or unpack files."""# make the wheelhouseif self.wheel_download_dir:ensure_dir(self.wheel_download_dir)# If any top-level requirement has a hash specified, enter# hash-checking mode, which requires hashes from all.root_reqs = self.unnamed_requirements + self.requirements.values()require_hashes = (self.require_hashes orany(req.has_hash_options for req in root_reqs))if require_hashes and self.as_egg:raise InstallationError('--egg is not allowed with --require-hashes mode, since it ''delegates dependency resolution to setuptools and could thus ''result in installation of unhashed packages.')# Actually prepare the files, and collect any exceptions. 
Most hash# exceptions cannot be checked ahead of time, because# req.populate_link() needs to be called before we can make decisions# based on link type.discovered_reqs = []hash_errors = HashErrors()for req in chain(root_reqs, discovered_reqs):try:discovered_reqs.extend(self._prepare_file(finder,req,require_hashes=require_hashes,ignore_dependencies=self.ignore_dependencies))except HashError as exc:exc.req = reqhash_errors.append(exc)if hash_errors:raise hash_errorsdef _is_upgrade_allowed(self, req):return self.upgrade and (self.upgrade_strategy == "eager" or (self.upgrade_strategy == "only-if-needed" and req.is_direct))def _check_skip_installed(self, req_to_install, finder):"""Check if req_to_install should be skipped.This will check if the req is installed, and whether we should upgradeor reinstall it, taking into account all the relevant user options.After calling this req_to_install will only have satisfied_by set toNone if the req_to_install is to be upgraded/reinstalled etc. Anyother value will be a dist recording the current thing installed thatsatisfies the requirement.Note that for vcs urls and the like we can't assess skipping in thisroutine - we simply identify that we need to pull the thing down,then later on it is pulled down and introspected to assess upgrade/reinstalls etc.:return: A text reason for why it was skipped, or None."""# Check whether to upgrade/reinstall this req or not.req_to_install.check_if_exists()if req_to_install.satisfied_by:upgrade_allowed = self._is_upgrade_allowed(req_to_install)# Is the best version is installed.best_installed = Falseif upgrade_allowed:# For link based requirements we have to pull the# tree down and inspect to assess the version #, so# its handled way down.if not (self.force_reinstall or req_to_install.link):try:finder.find_requirement(req_to_install, upgrade_allowed)except BestVersionAlreadyInstalled:best_installed = Trueexcept DistributionNotFound:# No distribution found, so we squash the# error - it will be 
raised later when we# re-try later to do the install.# Why don't we just raise here?passif not best_installed:# don't uninstall conflict if user install and# conflict is not user installif not (self.use_user_site and notdist_in_usersite(req_to_install.satisfied_by)):req_to_install.conflicts_with = \req_to_install.satisfied_byreq_to_install.satisfied_by = None# Figure out a nice message to say why we're skipping this.if best_installed:skip_reason = 'already up-to-date'elif self.upgrade_strategy == "only-if-needed":skip_reason = 'not upgraded as not directly required'else:skip_reason = 'already satisfied'return skip_reasonelse:return Nonedef _prepare_file(self,finder,req_to_install,require_hashes=False,ignore_dependencies=False):"""Prepare a single requirements file.:return: A list of additional InstallRequirements to also install."""# Tell user what we are doing for this requirement:# obtain (editable), skipping, processing (local url), collecting# (remote url or package name)if req_to_install.constraint or req_to_install.prepared:return []req_to_install.prepared = True# ###################### ## # print log messages # ## ###################### #if req_to_install.editable:logger.info('Obtaining %s', req_to_install)else:# satisfied_by is only evaluated by calling _check_skip_installed,# so it must be None here.assert req_to_install.satisfied_by is Noneif not self.ignore_installed:skip_reason = self._check_skip_installed(req_to_install, finder)if req_to_install.satisfied_by:assert skip_reason is not None, ('_check_skip_installed returned None but ''req_to_install.satisfied_by is set to %r'% (req_to_install.satisfied_by,))logger.info('Requirement %s: %s', skip_reason,req_to_install)else:if (req_to_install.link andreq_to_install.link.scheme == 'file'):path = url_to_path(req_to_install.link.url)logger.info('Processing %s', display_path(path))else:logger.info('Collecting %s', req_to_install)with indent_log():# ################################ ## # vcs update or unpack 
archive # ## ################################ #if req_to_install.editable:if require_hashes:raise InstallationError('The editable requirement %s cannot be installed when ''requiring hashes, because there is no single file to ''hash.' % req_to_install)req_to_install.ensure_has_source_dir(self.src_dir)req_to_install.update_editable(not self.is_download)abstract_dist = make_abstract_dist(req_to_install)abstract_dist.prep_for_dist()if self.is_download:req_to_install.archive(self.download_dir)req_to_install.check_if_exists()elif req_to_install.satisfied_by:if require_hashes:logger.debug('Since it is already installed, we are trusting this ''package without checking its hash. To ensure a ''completely repeatable environment, install into an ''empty virtualenv.')abstract_dist = Installed(req_to_install)else:# @@ if filesystem packages are not marked# editable in a req, a non deterministic error# occurs when the script attempts to unpack the# build directoryreq_to_install.ensure_has_source_dir(self.build_dir)# If a checkout exists, it's unwise to keep going. version# inconsistencies are logged later, but do not fail the# installation.# FIXME: this won't upgrade when there's an existing# package unpacked in `req_to_install.source_dir`if os.path.exists(os.path.join(req_to_install.source_dir, 'setup.py')):raise PreviousBuildDirError("pip can't proceed with requirements '%s' due to a"" pre-existing build directory (%s). This is ""likely due to a previous installation that failed"". pip is being responsible and not assuming it ""can delete this. 
Please delete it and try again."% (req_to_install, req_to_install.source_dir))req_to_install.populate_link(finder,self._is_upgrade_allowed(req_to_install),require_hashes)# We can't hit this spot and have populate_link return None.# req_to_install.satisfied_by is None here (because we're# guarded) and upgrade has no impact except when satisfied_by# is not None.# Then inside find_requirement existing_applicable -> False# If no new versions are found, DistributionNotFound is raised,# otherwise a result is guaranteed.assert req_to_install.linklink = req_to_install.link# Now that we have the real link, we can tell what kind of# requirements we have and raise some more informative errors# than otherwise. (For example, we can raise VcsHashUnsupported# for a VCS URL rather than HashMissing.)if require_hashes:# We could check these first 2 conditions inside# unpack_url and save repetition of conditions, but then# we would report less-useful error messages for# unhashable requirements, complaining that there's no# hash provided.if is_vcs_url(link):raise VcsHashUnsupported()elif is_file_url(link) and is_dir_url(link):raise DirectoryUrlHashUnsupported()if (not req_to_install.original_link andnot req_to_install.is_pinned):# Unpinned packages are asking for trouble when a new# version is uploaded. 
This isn't a security check, but# it saves users a surprising hash mismatch in the# future.## file:/// URLs aren't pinnable, so don't complain# about them not being pinned.raise HashUnpinned()hashes = req_to_install.hashes(trust_internet=not require_hashes)if require_hashes and not hashes:# Known-good hashes are missing for this requirement, so# shim it with a facade object that will provoke hash# computation and then raise a HashMissing exception# showing the user what the hash should be.hashes = MissingHashes()try:download_dir = self.download_dir# We always delete unpacked sdists after pip ran.autodelete_unpacked = Trueif req_to_install.link.is_wheel \and self.wheel_download_dir:# when doing 'pip wheel` we download wheels to a# dedicated dir.download_dir = self.wheel_download_dirif req_to_install.link.is_wheel:if download_dir:# When downloading, we only unpack wheels to get# metadata.autodelete_unpacked = Trueelse:# When installing a wheel, we use the unpacked# wheel.autodelete_unpacked = Falseunpack_url(req_to_install.link, req_to_install.source_dir,download_dir, autodelete_unpacked,session=self.session, hashes=hashes)except requests.HTTPError as exc:logger.critical('Could not install requirement %s because ''of error %s',req_to_install,exc,)raise InstallationError('Could not install requirement %s because ''of HTTP error %s for URL %s' %(req_to_install, exc, req_to_install.link))abstract_dist = make_abstract_dist(req_to_install)abstract_dist.prep_for_dist()if self.is_download:# Make a .zip of the source_dir we already created.if req_to_install.link.scheme in vcs.all_schemes:req_to_install.archive(self.download_dir)# req_to_install.req is only avail after unpack for URL# pkgs repeat check_if_exists to uninstall-on-upgrade# (#14)if not self.ignore_installed:req_to_install.check_if_exists()if req_to_install.satisfied_by:if self.upgrade or self.ignore_installed:# don't uninstall conflict if user install and# conflict is not user installif not (self.use_user_site 
and notdist_in_usersite(req_to_install.satisfied_by)):req_to_install.conflicts_with = \req_to_install.satisfied_byreq_to_install.satisfied_by = Noneelse:logger.info('Requirement already satisfied (use ''--upgrade to upgrade): %s',req_to_install,)# ###################### ## # parse dependencies # ## ###################### #dist = abstract_dist.dist(finder)try:check_dist_requires_python(dist)except UnsupportedPythonVersion as e:if self.ignore_requires_python:logger.warning(e.args[0])else:req_to_install.remove_temporary_source()raisemore_reqs = []def add_req(subreq, extras_requested):sub_install_req = InstallRequirement(str(subreq),req_to_install,isolated=self.isolated,wheel_cache=self._wheel_cache,)more_reqs.extend(self.add_requirement(sub_install_req, req_to_install.name,extras_requested=extras_requested))# We add req_to_install before its dependencies, so that we# can refer to it when adding dependencies.if not self.has_requirement(req_to_install.name):# 'unnamed' requirements will get added hereself.add_requirement(req_to_install, None)if not ignore_dependencies:if (req_to_install.extras):logger.debug("Installing extra requirements: %r",','.join(req_to_install.extras),)missing_requested = sorted(set(req_to_install.extras) - set(dist.extras))for missing in missing_requested:logger.warning('%s does not provide the extra \'%s\'',dist, missing)available_requested = sorted(set(dist.extras) & set(req_to_install.extras))for subreq in dist.requires(available_requested):add_req(subreq, extras_requested=available_requested)# cleanup tmp srcself.reqs_to_cleanup.append(req_to_install)if not req_to_install.editable and not req_to_install.satisfied_by:# XXX: --no-install leads this to report 'Successfully# downloaded' for only non-editable reqs, even though we took# action on them.self.successfully_downloaded.append(req_to_install)return more_reqsdef cleanup_files(self):"""Clean up files, remove builds."""logger.debug('Cleaning up...')with indent_log():for req in 
self.reqs_to_cleanup:req.remove_temporary_source()def _to_install(self):"""Create the installation order.The installation order is topological - requirements are installedbefore the requiring thing. We break cycles at an arbitrary point,and make no other guarantees."""# The current implementation, which we may change at any point# installs the user specified things in the order given, except when# dependencies must come earlier to achieve topological order.order = []ordered_reqs = set()def schedule(req):if req.satisfied_by or req in ordered_reqs:returnif req.constraint:returnordered_reqs.add(req)for dep in self._dependencies[req]:schedule(dep)order.append(req)for install_req in self.requirements.values():schedule(install_req)return orderdef install(self, install_options, global_options=(), *args, **kwargs):"""Install everything in this set (after having downloaded and unpackedthe packages)"""to_install = self._to_install()if to_install:logger.info('Installing collected packages: %s',', '.join([req.name for req in to_install]),)with indent_log():for requirement in to_install:if requirement.conflicts_with:logger.info('Found existing installation: %s',requirement.conflicts_with,)with indent_log():requirement.uninstall(auto_confirm=True)try:requirement.install(install_options,global_options,*args,**kwargs)except:# if install did not succeed, rollback previous uninstallif (requirement.conflicts_with and notrequirement.install_succeeded):requirement.rollback_uninstall()raiseelse:if (requirement.conflicts_with andrequirement.install_succeeded):requirement.commit_uninstall()requirement.remove_temporary_source()self.successfully_installed = to_install
from __future__ import absolute_importimport loggingimport osimport reimport shutilimport sysimport tempfileimport tracebackimport warningsimport zipfilefrom distutils import sysconfigfrom distutils.util import change_rootfrom email.parser import FeedParserfrom pip._vendor import pkg_resources, sixfrom pip._vendor.packaging import specifiersfrom pip._vendor.packaging.markers import Markerfrom pip._vendor.packaging.requirements import InvalidRequirement, Requirementfrom pip._vendor.packaging.utils import canonicalize_namefrom pip._vendor.packaging.version import Version, parse as parse_versionfrom pip._vendor.six.moves import configparserimport pip.wheelfrom pip.compat import native_str, get_stdlib, WINDOWSfrom pip.download import is_url, url_to_path, path_to_url, is_archive_filefrom pip.exceptions import (InstallationError, UninstallationError,)from pip.locations import (bin_py, running_under_virtualenv, PIP_DELETE_MARKER_FILENAME, bin_user,)from pip.utils import (display_path, rmtree, ask_path_exists, backup_dir, is_installable_dir,dist_in_usersite, dist_in_site_packages, egg_link_path,call_subprocess, read_text_file, FakeFile, _make_build_dir, ensure_dir,get_installed_version, normalize_path, dist_is_local,)from pip.utils.hashes import Hashesfrom pip.utils.deprecation import RemovedInPip10Warningfrom pip.utils.logging import indent_logfrom pip.utils.setuptools_build import SETUPTOOLS_SHIMfrom pip.utils.ui import open_spinnerfrom pip.req.req_uninstall import UninstallPathSetfrom pip.vcs import vcsfrom pip.wheel import move_wheel_files, Wheellogger = logging.getLogger(__name__)operators = specifiers.Specifier._operators.keys()def _strip_extras(path):m = re.match(r'^(.+)(\[[^\]]+\])$', path)extras = Noneif m:path_no_extras = m.group(1)extras = m.group(2)else:path_no_extras = pathreturn path_no_extras, extrasdef _safe_extras(extras):return set(pkg_resources.safe_extra(extra) for extra in extras)class InstallRequirement(object):def __init__(self, req, comes_from, 
source_dir=None, editable=False,link=None, as_egg=False, update=True,pycompile=True, markers=None, isolated=False, options=None,wheel_cache=None, constraint=False):self.extras = ()if isinstance(req, six.string_types):try:req = Requirement(req)except InvalidRequirement:if os.path.sep in req:add_msg = "It looks like a path. Does it exist ?"elif '=' in req and not any(op in req for op in operators):add_msg = "= is not a valid operator. Did you mean == ?"else:add_msg = traceback.format_exc()raise InstallationError("Invalid requirement: '%s'\n%s" % (req, add_msg))self.extras = _safe_extras(req.extras)self.req = reqself.comes_from = comes_fromself.constraint = constraintself.source_dir = source_dirself.editable = editableself._wheel_cache = wheel_cacheself.link = self.original_link = linkself.as_egg = as_eggif markers is not None:self.markers = markerselse:self.markers = req and req.markerself._egg_info_path = None# This holds the pkg_resources.Distribution object if this requirement# is already available:self.satisfied_by = None# This hold the pkg_resources.Distribution object if this requirement# conflicts with another installed distribution:self.conflicts_with = None# Temporary build locationself._temp_build_dir = None# Used to store the global directory where the _temp_build_dir should# have been created. 
Cf _correct_build_location method.self._ideal_build_dir = None# True if the editable should be updated:self.update = update# Set to True after successful installationself.install_succeeded = None# UninstallPathSet of uninstalled distribution (for possible rollback)self.uninstalled = None# Set True if a legitimate do-nothing-on-uninstall has happened - e.g.# system site packages, stdlib packages.self.nothing_to_uninstall = Falseself.use_user_site = Falseself.target_dir = Noneself.options = options if options else {}self.pycompile = pycompile# Set to True after successful preparation of this requirementself.prepared = Falseself.isolated = isolated@classmethoddef from_editable(cls, editable_req, comes_from=None, default_vcs=None,isolated=False, options=None, wheel_cache=None,constraint=False):from pip.index import Linkname, url, extras_override = parse_editable(editable_req, default_vcs)if url.startswith('file:'):source_dir = url_to_path(url)else:source_dir = Noneres = cls(name, comes_from, source_dir=source_dir,editable=True,link=Link(url),constraint=constraint,isolated=isolated,options=options if options else {},wheel_cache=wheel_cache)if extras_override is not None:res.extras = _safe_extras(extras_override)return res@classmethoddef from_line(cls, name, comes_from=None, isolated=False, options=None,wheel_cache=None, constraint=False):"""Creates an InstallRequirement from a name, which might be arequirement, directory containing 'setup.py', filename, or URL."""from pip.index import Linkif is_url(name):marker_sep = '; 'else:marker_sep = ';'if marker_sep in name:name, markers = name.split(marker_sep, 1)markers = markers.strip()if not markers:markers = Noneelse:markers = Marker(markers)else:markers = Nonename = name.strip()req = Nonepath = os.path.normpath(os.path.abspath(name))link = Noneextras = Noneif is_url(name):link = Link(name)else:p, extras = _strip_extras(path)if (os.path.isdir(p) and(os.path.sep in name or name.startswith('.'))):if not 
is_installable_dir(p):raise InstallationError("Directory %r is not installable. File 'setup.py' ""not found." % name)link = Link(path_to_url(p))elif is_archive_file(p):if not os.path.isfile(p):logger.warning('Requirement %r looks like a filename, but the ''file does not exist',name)link = Link(path_to_url(p))# it's a local file, dir, or urlif link:# Handle relative file URLsif link.scheme == 'file' and re.search(r'\.\./', link.url):link = Link(path_to_url(os.path.normpath(os.path.abspath(link.path))))# wheel fileif link.is_wheel:wheel = Wheel(link.filename) # can raise InvalidWheelFilenamereq = "%s==%s" % (wheel.name, wheel.version)else:# set the req to the egg fragment. when it's not there, this# will become an 'unnamed' requirementreq = link.egg_fragment# a requirement specifierelse:req = nameoptions = options if options else {}res = cls(req, comes_from, link=link, markers=markers,isolated=isolated, options=options,wheel_cache=wheel_cache, constraint=constraint)if extras:res.extras = _safe_extras(Requirement('placeholder' + extras).extras)return resdef __str__(self):if self.req:s = str(self.req)if self.link:s += ' from %s' % self.link.urlelse:s = self.link.url if self.link else Noneif self.satisfied_by is not None:s += ' in %s' % display_path(self.satisfied_by.location)if self.comes_from:if isinstance(self.comes_from, six.string_types):comes_from = self.comes_fromelse:comes_from = self.comes_from.from_path()if comes_from:s += ' (from %s)' % comes_fromreturn sdef __repr__(self):return '<%s object: %s editable=%r>' % (self.__class__.__name__, str(self), self.editable)def populate_link(self, finder, upgrade, require_hashes):"""Ensure that if a link can be found for this, that it is found.Note that self.link may still be None - if Upgrade is False and therequirement is already installed.If require_hashes is True, don't use the wheel cache, because cachedwheels, always built locally, have different hashes than the filesdownloaded from the index server and thus throw 
false hash mismatches.Furthermore, cached wheels at present have undeterministic contents dueto file modification times."""if self.link is None:self.link = finder.find_requirement(self, upgrade)if self._wheel_cache is not None and not require_hashes:old_link = self.linkself.link = self._wheel_cache.cached_wheel(self.link, self.name)if old_link != self.link:logger.debug('Using cached wheel link: %s', self.link)@propertydef specifier(self):return self.req.specifier@propertydef is_pinned(self):"""Return whether I am pinned to an exact version.For example, some-package==1.2 is pinned; some-package>1.2 is not."""specifiers = self.specifierreturn (len(specifiers) == 1 andnext(iter(specifiers)).operator in ('==', '==='))def from_path(self):if self.req is None:return Nones = str(self.req)if self.comes_from:if isinstance(self.comes_from, six.string_types):comes_from = self.comes_fromelse:comes_from = self.comes_from.from_path()if comes_from:s += '->' + comes_fromreturn sdef build_location(self, build_dir):if self._temp_build_dir is not None:return self._temp_build_dirif self.req is None:# for requirement via a path to a directory: the name of the# package is not available yet so we create a temp directory# Once run_egg_info will have run, we'll be able# to fix it via _correct_build_location# Some systems have /tmp as a symlink which confuses custom# builds (such as numpy). Thus, we ensure that the real path# is returned.self._temp_build_dir = os.path.realpath(tempfile.mkdtemp('-build', 'pip-'))self._ideal_build_dir = build_dirreturn self._temp_build_dirif self.editable:name = self.name.lower()else:name = self.name# FIXME: Is there a better place to create the build_dir? (hg and bzr# need this)if not os.path.exists(build_dir):logger.debug('Creating directory %s', build_dir)_make_build_dir(build_dir)return os.path.join(build_dir, name)def _correct_build_location(self):"""Move self._temp_build_dir to self._ideal_build_dir/self.req.nameFor some requirements (e.g. 
a path to a directory), the name of thepackage is not available until we run egg_info, so the build_locationwill return a temporary directory and store the _ideal_build_dir.This is only called by self.egg_info_path to fix the temporary builddirectory."""if self.source_dir is not None:returnassert self.req is not Noneassert self._temp_build_dirassert self._ideal_build_dirold_location = self._temp_build_dirself._temp_build_dir = Nonenew_location = self.build_location(self._ideal_build_dir)if os.path.exists(new_location):raise InstallationError('A package already exists in %s; please remove it to continue'% display_path(new_location))logger.debug('Moving package %s from %s to new location %s',self, display_path(old_location), display_path(new_location),)shutil.move(old_location, new_location)self._temp_build_dir = new_locationself._ideal_build_dir = Noneself.source_dir = new_locationself._egg_info_path = None@propertydef name(self):if self.req is None:return Nonereturn native_str(pkg_resources.safe_name(self.req.name))@propertydef setup_py_dir(self):return os.path.join(self.source_dir,self.link and self.link.subdirectory_fragment or '')@propertydef setup_py(self):assert self.source_dir, "No source dir for %s" % selftry:import setuptools # noqaexcept ImportError:if get_installed_version('setuptools') is None:add_msg = "Please install setuptools."else:add_msg = traceback.format_exc()# Setuptools is not availableraise InstallationError("Could not import setuptools which is required to ""install from a source distribution.\n%s" % add_msg)setup_py = os.path.join(self.setup_py_dir, 'setup.py')# Python2 __file__ should not be unicodeif six.PY2 and isinstance(setup_py, six.text_type):setup_py = setup_py.encode(sys.getfilesystemencoding())return setup_pydef run_egg_info(self):assert self.source_dirif self.name:logger.debug('Running setup.py (path:%s) egg_info for package %s',self.setup_py, self.name,)else:logger.debug('Running setup.py (path:%s) egg_info for package from 
%s',self.setup_py, self.link,)with indent_log():script = SETUPTOOLS_SHIM % self.setup_pybase_cmd = [sys.executable, '-c', script]if self.isolated:base_cmd += ["--no-user-cfg"]egg_info_cmd = base_cmd + ['egg_info']# We can't put the .egg-info files at the root, because then the# source code will be mistaken for an installed egg, causing# problemsif self.editable:egg_base_option = []else:egg_info_dir = os.path.join(self.setup_py_dir, 'pip-egg-info')ensure_dir(egg_info_dir)egg_base_option = ['--egg-base', 'pip-egg-info']call_subprocess(egg_info_cmd + egg_base_option,cwd=self.setup_py_dir,show_stdout=False,command_desc='python setup.py egg_info')if not self.req:if isinstance(parse_version(self.pkg_info()["Version"]), Version):op = "=="else:op = "==="self.req = Requirement("".join([self.pkg_info()["Name"],op,self.pkg_info()["Version"],]))self._correct_build_location()else:metadata_name = canonicalize_name(self.pkg_info()["Name"])if canonicalize_name(self.req.name) != metadata_name:logger.warning('Running setup.py (path:%s) egg_info for package %s ''produced metadata for project name %s. 
Fix your ''#egg=%s fragments.',self.setup_py, self.name, metadata_name, self.name)self.req = Requirement(metadata_name)def egg_info_data(self, filename):if self.satisfied_by is not None:if not self.satisfied_by.has_metadata(filename):return Nonereturn self.satisfied_by.get_metadata(filename)assert self.source_dirfilename = self.egg_info_path(filename)if not os.path.exists(filename):return Nonedata = read_text_file(filename)return datadef egg_info_path(self, filename):if self._egg_info_path is None:if self.editable:base = self.source_direlse:base = os.path.join(self.setup_py_dir, 'pip-egg-info')filenames = os.listdir(base)if self.editable:filenames = []for root, dirs, files in os.walk(base):for dir in vcs.dirnames:if dir in dirs:dirs.remove(dir)# Iterate over a copy of ``dirs``, since mutating# a list while iterating over it can cause trouble.# (See https://github.com/pypa/pip/pull/462.)for dir in list(dirs):# Don't search in anything that looks like a virtualenv# environmentif (os.path.lexists(os.path.join(root, dir, 'bin', 'python')) oros.path.exists(os.path.join(root, dir, 'Scripts', 'Python.exe'))):dirs.remove(dir)# Also don't search through testselif dir == 'test' or dir == 'tests':dirs.remove(dir)filenames.extend([os.path.join(root, dir)for dir in dirs])filenames = [f for f in filenames if f.endswith('.egg-info')]if not filenames:raise InstallationError('No files/directories in %s (from %s)' % (base, filename))assert filenames, \"No files/directories in %s (from %s)" % (base, filename)# if we have more than one match, we pick the toplevel one. 
This# can easily be the case if there is a dist folder which contains# an extracted tarball for testing purposes.if len(filenames) > 1:filenames.sort(key=lambda x: x.count(os.path.sep) +(os.path.altsep and x.count(os.path.altsep) or 0))self._egg_info_path = os.path.join(base, filenames[0])return os.path.join(self._egg_info_path, filename)def pkg_info(self):p = FeedParser()data = self.egg_info_data('PKG-INFO')if not data:logger.warning('No PKG-INFO file found in %s',display_path(self.egg_info_path('PKG-INFO')),)p.feed(data or '')return p.close()_requirements_section_re = re.compile(r'\[(.*?)\]')@propertydef installed_version(self):return get_installed_version(self.name)def assert_source_matches_version(self):assert self.source_dirversion = self.pkg_info()['version']if self.req.specifier and version not in self.req.specifier:logger.warning('Requested %s, but installing version %s',self,self.installed_version,)else:logger.debug('Source in %s has version %s, which satisfies requirement %s',display_path(self.source_dir),version,self,)def update_editable(self, obtain=True):if not self.link:logger.debug("Cannot update repository at %s; repository location is ""unknown",self.source_dir,)returnassert self.editableassert self.source_dirif self.link.scheme == 'file':# Static paths don't get updatedreturnassert '+' in self.link.url, "bad url: %r" % self.link.urlif not self.update:returnvc_type, url = self.link.url.split('+', 1)backend = vcs.get_backend(vc_type)if backend:vcs_backend = backend(self.link.url)if obtain:vcs_backend.obtain(self.source_dir)else:vcs_backend.export(self.source_dir)else:assert 0, ('Unexpected version control type (in %s): %s'% (self.link, vc_type))def uninstall(self, auto_confirm=False):"""Uninstall the distribution currently satisfying this requirement.Prompts before removing or modifying files unless``auto_confirm`` is True.Refuses to delete or modify files outside of ``sys.prefix`` -thus uninstallation within a virtual environment can onlymodify 
    def uninstall(self, auto_confirm=False):
        """Uninstall the distribution currently satisfying this requirement.

        Prompts before removing or modifying files unless
        ``auto_confirm`` is True.

        Refuses to delete or modify files outside of ``sys.prefix`` -
        thus uninstallation within a virtual environment can only
        modify that virtual environment, even if the virtualenv is
        linked to global site-packages.
        """
        if not self.check_if_exists():
            raise UninstallationError(
                "Cannot uninstall requirement %s, not installed" %
                (self.name,)
            )
        dist = self.satisfied_by or self.conflicts_with
        dist_path = normalize_path(dist.location)
        if not dist_is_local(dist):
            logger.info(
                "Not uninstalling %s at %s, outside environment %s",
                dist.key,
                dist_path,
                sys.prefix,
            )
            self.nothing_to_uninstall = True
            return
        if dist_path in get_stdlib():
            logger.info(
                "Not uninstalling %s at %s, as it is in the standard library.",
                dist.key,
                dist_path,
            )
            self.nothing_to_uninstall = True
            return
        paths_to_remove = UninstallPathSet(dist)
        develop_egg_link = egg_link_path(dist)
        develop_egg_link_egg_info = '{0}.egg-info'.format(
            pkg_resources.to_filename(dist.project_name))
        egg_info_exists = dist.egg_info and os.path.exists(dist.egg_info)
        # Special case for distutils installed package
        distutils_egg_info = getattr(dist._provider, 'path', None)
        # Uninstall cases order do matter as in the case of 2 installs of the
        # same package, pip needs to uninstall the currently detected version
        if (egg_info_exists and dist.egg_info.endswith('.egg-info') and
                not dist.egg_info.endswith(develop_egg_link_egg_info)):
            # if dist.egg_info.endswith(develop_egg_link_egg_info), we
            # are in fact in the develop_egg_link case
            paths_to_remove.add(dist.egg_info)
            if dist.has_metadata('installed-files.txt'):
                for installed_file in dist.get_metadata(
                        'installed-files.txt').splitlines():
                    path = os.path.normpath(
                        os.path.join(dist.egg_info, installed_file)
                    )
                    paths_to_remove.add(path)
            # FIXME: need a test for this elif block
            # occurs with --single-version-externally-managed/--record outside
            # of pip
            elif dist.has_metadata('top_level.txt'):
                if dist.has_metadata('namespace_packages.txt'):
                    namespaces = dist.get_metadata('namespace_packages.txt')
                else:
                    namespaces = []
                for top_level_pkg in [
                        p for p
                        in dist.get_metadata('top_level.txt').splitlines()
                        if p and p not in namespaces]:
                    path = os.path.join(dist.location, top_level_pkg)
                    paths_to_remove.add(path)
                    paths_to_remove.add(path + '.py')
                    paths_to_remove.add(path + '.pyc')
                    paths_to_remove.add(path + '.pyo')
        elif distutils_egg_info:
            warnings.warn(
                "Uninstalling a distutils installed project ({0}) has been "
                "deprecated and will be removed in a future version. This is "
                "due to the fact that uninstalling a distutils project will "
                "only partially uninstall the project.".format(self.name),
                RemovedInPip10Warning,
            )
            paths_to_remove.add(distutils_egg_info)
        elif dist.location.endswith('.egg'):
            # package installed by easy_install
            # We cannot match on dist.egg_name because it can slightly vary
            # i.e. setuptools-0.6c11-py2.6.egg vs setuptools-0.6rc11-py2.6.egg
            paths_to_remove.add(dist.location)
            easy_install_egg = os.path.split(dist.location)[1]
            easy_install_pth = os.path.join(os.path.dirname(dist.location),
                                            'easy-install.pth')
            paths_to_remove.add_pth(easy_install_pth, './' + easy_install_egg)
        elif egg_info_exists and dist.egg_info.endswith('.dist-info'):
            # Wheel-installed package: paths come from RECORD.
            for path in pip.wheel.uninstallation_paths(dist):
                paths_to_remove.add(path)
        elif develop_egg_link:
            # develop egg
            with open(develop_egg_link, 'r') as fh:
                link_pointer = os.path.normcase(fh.readline().strip())
            assert (link_pointer == dist.location), (
                'Egg-link %s does not match installed location of %s '
                '(at %s)' % (link_pointer, self.name, dist.location))
            paths_to_remove.add(develop_egg_link)
            easy_install_pth = os.path.join(os.path.dirname(develop_egg_link),
                                            'easy-install.pth')
            paths_to_remove.add_pth(easy_install_pth, dist.location)
        else:
            logger.debug(
                'Not sure how to uninstall: %s - Check: %s',
                dist, dist.location)
        # find distutils scripts (setup(scripts=...))
        if dist.has_metadata('scripts') and dist.metadata_isdir('scripts'):
            for script in dist.metadata_listdir('scripts'):
                if dist_in_usersite(dist):
                    bin_dir = bin_user
                else:
                    bin_dir = bin_py
                paths_to_remove.add(os.path.join(bin_dir, script))
                if WINDOWS:
                    paths_to_remove.add(
                        os.path.join(bin_dir, script) + '.bat')
        # find console_scripts
        if dist.has_metadata('entry_points.txt'):
            if six.PY2:
                options = {}
            else:
                options = {"delimiters": ('=', )}
            config = configparser.SafeConfigParser(**options)
            config.readfp(
                FakeFile(dist.get_metadata_lines('entry_points.txt'))
            )
            if config.has_section('console_scripts'):
                for name, value in config.items('console_scripts'):
                    if dist_in_usersite(dist):
                        bin_dir = bin_user
                    else:
                        bin_dir = bin_py
                    paths_to_remove.add(os.path.join(bin_dir, name))
                    if WINDOWS:
                        paths_to_remove.add(
                            os.path.join(bin_dir, name) + '.exe')
                        paths_to_remove.add(
                            os.path.join(bin_dir, name) + '.exe.manifest')
                        paths_to_remove.add(
                            os.path.join(bin_dir, name) + '-script.py')
        # UninstallPathSet.remove performs the actual (prompted) removal.
        paths_to_remove.remove(auto_confirm)
        self.uninstalled = paths_to_remove

    def rollback_uninstall(self):
        # Restore files stashed by a previous uninstall().
        if self.uninstalled:
            self.uninstalled.rollback()
        else:
            logger.error(
                "Can't rollback %s, nothing uninstalled.", self.name,
            )

    def commit_uninstall(self):
        # Make a previous uninstall() permanent.
        if self.uninstalled:
            self.uninstalled.commit()
        elif not self.nothing_to_uninstall:
            logger.error(
                "Can't commit %s, nothing uninstalled.", self.name,
            )
(i)gnore, (w)ipe, (b)ackup, (a)bort ' %display_path(archive_path), ('i', 'w', 'b', 'a'))if response == 'i':create_archive = Falseelif response == 'w':logger.warning('Deleting %s', display_path(archive_path))os.remove(archive_path)elif response == 'b':dest_file = backup_dir(archive_path)logger.warning('Backing up %s to %s',display_path(archive_path),display_path(dest_file),)shutil.move(archive_path, dest_file)elif response == 'a':sys.exit(-1)if create_archive:zip = zipfile.ZipFile(archive_path, 'w', zipfile.ZIP_DEFLATED,allowZip64=True)dir = os.path.normcase(os.path.abspath(self.setup_py_dir))for dirpath, dirnames, filenames in os.walk(dir):if 'pip-egg-info' in dirnames:dirnames.remove('pip-egg-info')for dirname in dirnames:dirname = os.path.join(dirpath, dirname)name = self._clean_zip_name(dirname, dir)zipdir = zipfile.ZipInfo(self.name + '/' + name + '/')zipdir.external_attr = 0x1ED << 16 # 0o755zip.writestr(zipdir, '')for filename in filenames:if filename == PIP_DELETE_MARKER_FILENAME:continuefilename = os.path.join(dirpath, filename)name = self._clean_zip_name(filename, dir)zip.write(filename, self.name + '/' + name)zip.close()logger.info('Saved %s', display_path(archive_path))def _clean_zip_name(self, name, prefix):assert name.startswith(prefix + os.path.sep), ("name %r doesn't start with prefix %r" % (name, prefix))name = name[len(prefix) + 1:]name = name.replace(os.path.sep, '/')return namedef match_markers(self, extras_requested=None):if not extras_requested:# Provide an extra to safely evaluate the markers# without matching any extraextras_requested = ('',)if self.markers is not None:return any(self.markers.evaluate({'extra': extra})for extra in extras_requested)else:return Truedef install(self, install_options, global_options=[], root=None,prefix=None):if self.editable:self.install_editable(install_options, global_options, prefix=prefix)returnif self.is_wheel:version = pip.wheel.wheel_version(self.source_dir)pip.wheel.check_compatibility(version, 
self.name)self.move_wheel_files(self.source_dir, root=root, prefix=prefix)self.install_succeeded = Truereturn# Extend the list of global and install options passed on to# the setup.py call with the ones from the requirements file.# Options specified in requirements file override those# specified on the command line, since the last option given# to setup.py is the one that is used.global_options += self.options.get('global_options', [])install_options += self.options.get('install_options', [])if self.isolated:global_options = list(global_options) + ["--no-user-cfg"]temp_location = tempfile.mkdtemp('-record', 'pip-')record_filename = os.path.join(temp_location, 'install-record.txt')try:install_args = self.get_install_args(global_options, record_filename, root, prefix)msg = 'Running setup.py install for %s' % (self.name,)with open_spinner(msg) as spinner:with indent_log():call_subprocess(install_args + install_options,cwd=self.setup_py_dir,show_stdout=False,spinner=spinner,)if not os.path.exists(record_filename):logger.debug('Record file %s not found', record_filename)returnself.install_succeeded = Trueif self.as_egg:# there's no --always-unzip option we can pass to install# command so we unable to save the installed-files.txtreturndef prepend_root(path):if root is None or not os.path.isabs(path):return pathelse:return change_root(root, path)with open(record_filename) as f:for line in f:directory = os.path.dirname(line)if directory.endswith('.egg-info'):egg_info_dir = prepend_root(directory)breakelse:logger.warning('Could not find .egg-info directory in install record'' for %s',self,)# FIXME: put the record somewhere# FIXME: should this be an error?returnnew_lines = []with open(record_filename) as f:for line in f:filename = line.strip()if os.path.isdir(filename):filename += os.path.sepnew_lines.append(os.path.relpath(prepend_root(filename), egg_info_dir))inst_files_path = os.path.join(egg_info_dir, 'installed-files.txt')with open(inst_files_path, 'w') as 
f:f.write('\n'.join(new_lines) + '\n')finally:if os.path.exists(record_filename):os.remove(record_filename)rmtree(temp_location)def ensure_has_source_dir(self, parent_dir):"""Ensure that a source_dir is set.This will create a temporary build dir if the name of the requirementisn't known yet.:param parent_dir: The ideal pip parent_dir for the source_dir.Generally src_dir for editables and build_dir for sdists.:return: self.source_dir"""if self.source_dir is None:self.source_dir = self.build_location(parent_dir)return self.source_dirdef get_install_args(self, global_options, record_filename, root, prefix):install_args = [sys.executable, "-u"]install_args.append('-c')install_args.append(SETUPTOOLS_SHIM % self.setup_py)install_args += list(global_options) + \['install', '--record', record_filename]if not self.as_egg:install_args += ['--single-version-externally-managed']if root is not None:install_args += ['--root', root]if prefix is not None:install_args += ['--prefix', prefix]if self.pycompile:install_args += ["--compile"]else:install_args += ["--no-compile"]if running_under_virtualenv():py_ver_str = 'python' + sysconfig.get_python_version()install_args += ['--install-headers',os.path.join(sys.prefix, 'include', 'site',py_ver_str, self.name)]return install_argsdef remove_temporary_source(self):"""Remove the source files from this requirement, if they are markedfor deletion"""if self.source_dir and os.path.exists(os.path.join(self.source_dir, PIP_DELETE_MARKER_FILENAME)):logger.debug('Removing source in %s', self.source_dir)rmtree(self.source_dir)self.source_dir = Noneif self._temp_build_dir and os.path.exists(self._temp_build_dir):rmtree(self._temp_build_dir)self._temp_build_dir = Nonedef install_editable(self, install_options,global_options=(), prefix=None):logger.info('Running setup.py develop for %s', self.name)if self.isolated:global_options = list(global_options) + ["--no-user-cfg"]if prefix:prefix_param = ['--prefix={0}'.format(prefix)]install_options = 
    def install_editable(self, install_options,
                         global_options=(), prefix=None):
        # Run ``setup.py develop --no-deps`` for this requirement.
        logger.info('Running setup.py develop for %s', self.name)
        if self.isolated:
            global_options = list(global_options) + ["--no-user-cfg"]
        if prefix:
            prefix_param = ['--prefix={0}'.format(prefix)]
            install_options = list(install_options) + prefix_param
        with indent_log():
            # FIXME: should we do --install-headers here too?
            call_subprocess(
                [
                    sys.executable,
                    '-c',
                    SETUPTOOLS_SHIM % self.setup_py
                ] +
                list(global_options) +
                ['develop', '--no-deps'] +
                list(install_options),
                cwd=self.setup_py_dir,
                show_stdout=False)
        self.install_succeeded = True

    def check_if_exists(self):
        """Find an installed distribution that satisfies or conflicts
        with this requirement, and set self.satisfied_by or
        self.conflicts_with appropriately.

        Returns False when the requirement is unnamed or not installed.
        """
        if self.req is None:
            return False
        try:
            # get_distribution() will resolve the entire list of requirements
            # anyway, and we've already determined that we need the
            # requirement in question, so strip the marker so that we don't
            # try to evaluate it.
            no_marker = Requirement(str(self.req))
            no_marker.marker = None
            self.satisfied_by = pkg_resources.get_distribution(str(no_marker))
            if self.editable and self.satisfied_by:
                self.conflicts_with = self.satisfied_by
                # when installing editables, nothing pre-existing should ever
                # satisfy
                self.satisfied_by = None
            return True
        except pkg_resources.DistributionNotFound:
            return False
        except pkg_resources.VersionConflict:
            existing_dist = pkg_resources.get_distribution(
                self.req.name
            )
            if self.use_user_site:
                if dist_in_usersite(existing_dist):
                    self.conflicts_with = existing_dist
                elif (running_under_virtualenv() and
                        dist_in_site_packages(existing_dist)):
                    raise InstallationError(
                        "Will not install to the user site because it will "
                        "lack sys.path precedence to %s in %s" %
                        (existing_dist.project_name, existing_dist.location)
                    )
            else:
                self.conflicts_with = existing_dist
        return True

    @property
    def is_wheel(self):
        # Truthy when this requirement points at a wheel file.
        return self.link and self.link.is_wheel

    def move_wheel_files(self, wheeldir, root=None, prefix=None):
        # Delegate to the module-level move_wheel_files with this
        # requirement's install options.
        move_wheel_files(
            self.name, self.req, wheeldir,
            user=self.use_user_site,
            home=self.target_dir,
            root=root,
            prefix=prefix,
            pycompile=self.pycompile,
            isolated=self.isolated,
        )

    def get_dist(self):
        """Return a pkg_resources.Distribution built from
        self.egg_info_path"""
        egg_info = self.egg_info_path('').rstrip('/')
        base_dir = os.path.dirname(egg_info)
        metadata = pkg_resources.PathMetadata(base_dir, egg_info)
        dist_name = os.path.splitext(os.path.basename(egg_info))[0]
        return pkg_resources.Distribution(
            os.path.dirname(egg_info),
            project_name=dist_name,
            metadata=metadata)

    @property
    def has_hash_options(self):
        """Return whether any known-good hashes are specified as options.

        These activate --require-hashes mode; hashes specified as part of a
        URL do not.
        """
        return bool(self.options.get('hashes', {}))

    def hashes(self, trust_internet=True):
        """Return a hash-comparer that considers my option- and URL-based
        hashes to be known-good.

        Hashes in URLs--ones embedded in the requirements file, not ones
        downloaded from an index server--are almost peers with ones from
        flags. They satisfy --require-hashes (whether it was implicitly or
        explicitly activated) but do not activate it. md5 and sha224 are not
        allowed in flags, which should nudge people toward good algos. We
        always OR all hashes together, even ones from URLs.

        :param trust_internet: Whether to trust URL-based (#md5=...) hashes
            downloaded from the internet, as by populate_link()
        """
        good_hashes = self.options.get('hashes', {}).copy()
        link = self.link if trust_internet else self.original_link
        if link and link.hash:
            good_hashes.setdefault(link.hash_name, []).append(link.hash)
        return Hashes(good_hashes)
hashesdownloaded from the internet, as by populate_link()"""good_hashes = self.options.get('hashes', {}).copy()link = self.link if trust_internet else self.original_linkif link and link.hash:good_hashes.setdefault(link.hash_name, []).append(link.hash)return Hashes(good_hashes)def _strip_postfix(req):"""Strip req postfix ( -dev, 0.2, etc )"""# FIXME: use package_to_requirement?match = re.search(r'^(.*?)(?:-dev|-\d.*)$', req)if match:# Strip off -dev, -0.2, etc.req = match.group(1)return reqdef parse_editable(editable_req, default_vcs=None):"""Parses an editable requirement into:- a requirement name- an URL- extras- editable optionsAccepted requirements:svn+http://blahblah@rev#egg=Foobar[baz]&subdirectory=version_subdir.[some_extra]"""from pip.index import Linkurl = editable_reqextras = None# If a file path is specified with extras, strip off the extras.m = re.match(r'^(.+)(\[[^\]]+\])$', url)if m:url_no_extras = m.group(1)extras = m.group(2)else:url_no_extras = urlif os.path.isdir(url_no_extras):if not os.path.exists(os.path.join(url_no_extras, 'setup.py')):raise InstallationError("Directory %r is not installable. File 'setup.py' not found." 
%url_no_extras)# Treating it as code that has already been checked outurl_no_extras = path_to_url(url_no_extras)if url_no_extras.lower().startswith('file:'):package_name = Link(url_no_extras).egg_fragmentif extras:return (package_name,url_no_extras,Requirement("placeholder" + extras.lower()).extras,)else:return package_name, url_no_extras, Nonefor version_control in vcs:if url.lower().startswith('%s:' % version_control):url = '%s+%s' % (version_control, url)breakif '+' not in url:if default_vcs:warnings.warn("--default-vcs has been deprecated and will be removed in ""the future.",RemovedInPip10Warning,)url = default_vcs + '+' + urlelse:raise InstallationError('%s should either be a path to a local project or a VCS url ''beginning with svn+, git+, hg+, or bzr+' %editable_req)vc_type = url.split('+', 1)[0].lower()if not vcs.get_backend(vc_type):error_message = 'For --editable=%s only ' % editable_req + \', '.join([backend.name + '+URL' for backend in vcs.backends]) + \' is currently supported'raise InstallationError(error_message)package_name = Link(url).egg_fragmentif not package_name:raise InstallationError("Could not detect requirement name, please specify one with #egg=")if not package_name:raise InstallationError('--editable=%s is not the right format; it must have ''#egg=Package' % editable_req)return _strip_postfix(package_name), url, None
"""Requirements file parsing"""from __future__ import absolute_importimport osimport reimport shleximport sysimport optparseimport warningsfrom pip._vendor.six.moves.urllib import parse as urllib_parsefrom pip._vendor.six.moves import filterfalseimport pipfrom pip.download import get_file_contentfrom pip.req.req_install import InstallRequirementfrom pip.exceptions import (RequirementsFileParseError)from pip.utils.deprecation import RemovedInPip10Warningfrom pip import cmdoptions__all__ = ['parse_requirements']SCHEME_RE = re.compile(r'^(http|https|file):', re.I)COMMENT_RE = re.compile(r'(^|\s)+#.*$')SUPPORTED_OPTIONS = [cmdoptions.constraints,cmdoptions.editable,cmdoptions.requirements,cmdoptions.no_index,cmdoptions.index_url,cmdoptions.find_links,cmdoptions.extra_index_url,cmdoptions.allow_external,cmdoptions.allow_all_external,cmdoptions.no_allow_external,cmdoptions.allow_unsafe,cmdoptions.no_allow_unsafe,cmdoptions.use_wheel,cmdoptions.no_use_wheel,cmdoptions.always_unzip,cmdoptions.no_binary,cmdoptions.only_binary,cmdoptions.pre,cmdoptions.process_dependency_links,cmdoptions.trusted_host,cmdoptions.require_hashes,]# options to be passed to requirementsSUPPORTED_OPTIONS_REQ = [cmdoptions.install_options,cmdoptions.global_options,cmdoptions.hash,]# the 'dest' string valuesSUPPORTED_OPTIONS_REQ_DEST = [o().dest for o in SUPPORTED_OPTIONS_REQ]def parse_requirements(filename, finder=None, comes_from=None, options=None,session=None, constraint=False, wheel_cache=None):"""Parse a requirements file and yield InstallRequirement instances.:param filename: Path or url of requirements file.:param finder: Instance of pip.index.PackageFinder.:param comes_from: Origin description of requirements.:param options: cli options.:param session: Instance of pip.download.PipSession.:param constraint: If true, parsing a constraint file rather thanrequirements file.:param wheel_cache: Instance of pip.wheel.WheelCache"""if session is None:raise TypeError("parse_requirements() missing 1 
required keyword argument: ""'session'")_, content = get_file_content(filename, comes_from=comes_from, session=session)lines_enum = preprocess(content, options)for line_number, line in lines_enum:req_iter = process_line(line, filename, line_number, finder,comes_from, options, session, wheel_cache,constraint=constraint)for req in req_iter:yield reqdef preprocess(content, options):"""Split, filter, and join lines, and return a line iterator:param content: the content of the requirements file:param options: cli options"""lines_enum = enumerate(content.splitlines(), start=1)lines_enum = join_lines(lines_enum)lines_enum = ignore_comments(lines_enum)lines_enum = skip_regex(lines_enum, options)return lines_enumdef process_line(line, filename, line_number, finder=None, comes_from=None,options=None, session=None, wheel_cache=None,constraint=False):"""Process a single requirements line; This can result in creating/yieldingrequirements, or updating the finder.For lines that contain requirements, the only options that have an effectare from SUPPORTED_OPTIONS_REQ, and they are scoped to therequirement. Other options from SUPPORTED_OPTIONS may be present, but areignored.For lines that do not contain requirements, the only options that have aneffect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ maybe present, but are ignored. 
These lines may contain multiple options(although our docs imply only one is supported), and all our parsed andaffect the finder.:param constraint: If True, parsing a constraints file.:param options: OptionParser options that we may update"""parser = build_parser()defaults = parser.get_default_values()defaults.index_url = Noneif finder:# `finder.format_control` will be updated during parsingdefaults.format_control = finder.format_controlargs_str, options_str = break_args_options(line)if sys.version_info < (2, 7, 3):# Prior to 2.7.3, shlex cannot deal with unicode entriesoptions_str = options_str.encode('utf8')opts, _ = parser.parse_args(shlex.split(options_str), defaults)# preserve for the nested code pathline_comes_from = '%s %s (line %s)' % ('-c' if constraint else '-r', filename, line_number)# yield a line requirementif args_str:isolated = options.isolated_mode if options else Falseif options:cmdoptions.check_install_build_global(options, opts)# get the options that apply to requirementsreq_options = {}for dest in SUPPORTED_OPTIONS_REQ_DEST:if dest in opts.__dict__ and opts.__dict__[dest]:req_options[dest] = opts.__dict__[dest]yield InstallRequirement.from_line(args_str, line_comes_from, constraint=constraint,isolated=isolated, options=req_options, wheel_cache=wheel_cache)# yield an editable requirementelif opts.editables:isolated = options.isolated_mode if options else Falsedefault_vcs = options.default_vcs if options else Noneyield InstallRequirement.from_editable(opts.editables[0], comes_from=line_comes_from,constraint=constraint, default_vcs=default_vcs, isolated=isolated,wheel_cache=wheel_cache)# parse a nested requirements fileelif opts.requirements or opts.constraints:if opts.requirements:req_path = opts.requirements[0]nested_constraint = Falseelse:req_path = opts.constraints[0]nested_constraint = True# original file is over httpif SCHEME_RE.search(filename):# do a url join so relative paths workreq_path = urllib_parse.urljoin(filename, req_path)# 
original file and nested file are pathselif not SCHEME_RE.search(req_path):# do a join so relative paths workreq_path = os.path.join(os.path.dirname(filename), req_path)# TODO: Why not use `comes_from='-r {} (line {})'` here as well?parser = parse_requirements(req_path, finder, comes_from, options, session,constraint=nested_constraint, wheel_cache=wheel_cache)for req in parser:yield req# percolate hash-checking option upwardelif opts.require_hashes:options.require_hashes = opts.require_hashes# set finder optionselif finder:if opts.allow_external:warnings.warn("--allow-external has been deprecated and will be removed in ""the future. Due to changes in the repository protocol, it no ""longer has any effect.",RemovedInPip10Warning,)if opts.allow_all_external:warnings.warn("--allow-all-external has been deprecated and will be removed ""in the future. Due to changes in the repository protocol, it ""no longer has any effect.",RemovedInPip10Warning,)if opts.allow_unverified:warnings.warn("--allow-unverified has been deprecated and will be removed ""in the future. 
Due to changes in the repository protocol, it ""no longer has any effect.",RemovedInPip10Warning,)if opts.index_url:finder.index_urls = [opts.index_url]if opts.use_wheel is False:finder.use_wheel = Falsepip.index.fmt_ctl_no_use_wheel(finder.format_control)if opts.no_index is True:finder.index_urls = []if opts.extra_index_urls:finder.index_urls.extend(opts.extra_index_urls)if opts.find_links:# FIXME: it would be nice to keep track of the source# of the find_links: support a find-links local path# relative to a requirements file.value = opts.find_links[0]req_dir = os.path.dirname(os.path.abspath(filename))relative_to_reqs_file = os.path.join(req_dir, value)if os.path.exists(relative_to_reqs_file):value = relative_to_reqs_filefinder.find_links.append(value)if opts.pre:finder.allow_all_prereleases = Trueif opts.process_dependency_links:finder.process_dependency_links = Trueif opts.trusted_hosts:finder.secure_origins.extend(("*", host, "*") for host in opts.trusted_hosts)def break_args_options(line):"""Break up the line into an args and options string. We only want to shlex(and then optparse) the options, not the args. args can contain markerswhich are corrupted by shlex."""tokens = line.split(' ')args = []options = tokens[:]for token in tokens:if token.startswith('-') or token.startswith('--'):breakelse:args.append(token)options.pop(0)return ' '.join(args), ' '.join(options)def build_parser():"""Return a parser for parsing requirement lines"""parser = optparse.OptionParser(add_help_option=False)option_factories = SUPPORTED_OPTIONS + SUPPORTED_OPTIONS_REQfor option_factory in option_factories:option = option_factory()parser.add_option(option)# By default optparse sys.exits on parsing errors. We want to wrap# that in our own exception.def parser_exit(self, msg):raise RequirementsFileParseError(msg)parser.exit = parser_exitreturn parserdef join_lines(lines_enum):"""Joins a line ending in '\' with the previous line (except when followingcomments). 
The joined line takes on the index of the first line."""primary_line_number = Nonenew_line = []for line_number, line in lines_enum:if not line.endswith('\\') or COMMENT_RE.match(line):if COMMENT_RE.match(line):# this ensures comments are always matched laterline = ' ' + lineif new_line:new_line.append(line)yield primary_line_number, ''.join(new_line)new_line = []else:yield line_number, lineelse:if not new_line:primary_line_number = line_numbernew_line.append(line.strip('\\'))# last line contains \if new_line:yield primary_line_number, ''.join(new_line)# TODO: handle space after '\'.def ignore_comments(lines_enum):"""Strips comments and filter empty lines."""for line_number, line in lines_enum:line = COMMENT_RE.sub('', line)line = line.strip()if line:yield line_number, linedef skip_regex(lines_enum, options):"""Skip lines that match '--skip-requirements-regex' patternNote: the regex pattern is only built once"""skip_regex = options.skip_requirements_regex if options else Noneif skip_regex:pattern = re.compile(skip_regex)lines_enum = filterfalse(lambda e: pattern.search(e[1]),lines_enum)return lines_enum
from __future__ import absolute_importfrom .req_install import InstallRequirementfrom .req_set import RequirementSet, Requirementsfrom .req_file import parse_requirements__all__ = ["RequirementSet", "Requirements", "InstallRequirement","parse_requirements",]
"""Generate and work with PEP 425 Compatibility Tags."""from __future__ import absolute_importimport reimport sysimport warningsimport platformimport loggingtry:import sysconfigexcept ImportError: # pragma nocover# Python < 2.7import distutils.sysconfig as sysconfigimport distutils.utilfrom pip.compat import OrderedDictimport pip.utils.glibclogger = logging.getLogger(__name__)_osx_arch_pat = re.compile(r'(.+)_(\d+)_(\d+)_(.+)')def get_config_var(var):try:return sysconfig.get_config_var(var)except IOError as e: # Issue #1074warnings.warn("{0}".format(e), RuntimeWarning)return Nonedef get_abbr_impl():"""Return abbreviated implementation name."""if hasattr(sys, 'pypy_version_info'):pyimpl = 'pp'elif sys.platform.startswith('java'):pyimpl = 'jy'elif sys.platform == 'cli':pyimpl = 'ip'else:pyimpl = 'cp'return pyimpldef get_impl_ver():"""Return implementation version."""impl_ver = get_config_var("py_version_nodot")if not impl_ver or get_abbr_impl() == 'pp':impl_ver = ''.join(map(str, get_impl_version_info()))return impl_verdef get_impl_version_info():"""Return sys.version_info-like tuple for use in decrementing the minorversion."""if get_abbr_impl() == 'pp':# as per https://github.com/pypa/pip/issues/2882return (sys.version_info[0], sys.pypy_version_info.major,sys.pypy_version_info.minor)else:return sys.version_info[0], sys.version_info[1]def get_impl_tag():"""Returns the Tag for this specific implementation."""return "{0}{1}".format(get_abbr_impl(), get_impl_ver())def get_flag(var, fallback, expected=True, warn=True):"""Use a fallback method for determining SOABI flags if the needed configvar is unset or unavailable."""val = get_config_var(var)if val is None:if warn:logger.debug("Config variable '%s' is unset, Python ABI tag may ""be incorrect", var)return fallback()return val == expecteddef get_abi_tag():"""Return the ABI tag based on SOABI (if available) or emulate SOABI(CPython 2, PyPy)."""soabi = get_config_var('SOABI')impl = get_abbr_impl()if not soabi and impl in 
('cp', 'pp') and hasattr(sys, 'maxunicode'):d = ''m = ''u = ''if get_flag('Py_DEBUG',lambda: hasattr(sys, 'gettotalrefcount'),warn=(impl == 'cp')):d = 'd'if get_flag('WITH_PYMALLOC',lambda: impl == 'cp',warn=(impl == 'cp')):m = 'm'if get_flag('Py_UNICODE_SIZE',lambda: sys.maxunicode == 0x10ffff,expected=4,warn=(impl == 'cp' andsys.version_info < (3, 3))) \and sys.version_info < (3, 3):u = 'u'abi = '%s%s%s%s%s' % (impl, get_impl_ver(), d, m, u)elif soabi and soabi.startswith('cpython-'):abi = 'cp' + soabi.split('-')[1]elif soabi:abi = soabi.replace('.', '_').replace('-', '_')else:abi = Nonereturn abidef _is_running_32bit():return sys.maxsize == 2147483647def get_platform():"""Return our platform name 'win32', 'linux_x86_64'"""if sys.platform == 'darwin':# distutils.util.get_platform() returns the release based on the value# of MACOSX_DEPLOYMENT_TARGET on which Python was built, which may# be significantly older than the user's current machine.release, _, machine = platform.mac_ver()split_ver = release.split('.')if machine == "x86_64" and _is_running_32bit():machine = "i386"elif machine == "ppc64" and _is_running_32bit():machine = "ppc"return 'macosx_{0}_{1}_{2}'.format(split_ver[0], split_ver[1], machine)# XXX remove distutils dependencyresult = distutils.util.get_platform().replace('.', '_').replace('-', '_')if result == "linux_x86_64" and _is_running_32bit():# 32 bit Python program (running on a 64 bit Linux): pip should only# install and run 32 bit compiled extensions in that case.result = "linux_i686"return resultdef is_manylinux1_compatible():# Only Linux, and only x86-64 / i686if get_platform() not in ("linux_x86_64", "linux_i686"):return False# Check for presence of _manylinux moduletry:import _manylinuxreturn bool(_manylinux.manylinux1_compatible)except (ImportError, AttributeError):# Fall through to heuristic check belowpass# Check glibc version. 
CentOS 5 uses glibc 2.5.return pip.utils.glibc.have_compatible_glibc(2, 5)def get_darwin_arches(major, minor, machine):"""Return a list of supported arches (including group arches) forthe given major, minor and machine architecture of an macOS machine."""arches = []def _supports_arch(major, minor, arch):# Looking at the application support for macOS versions in the chart# provided by https://en.wikipedia.org/wiki/OS_X#Versions it appears# our timeline looks roughly like:## 10.0 - Introduces ppc support.# 10.4 - Introduces ppc64, i386, and x86_64 support, however the ppc64# and x86_64 support is CLI only, and cannot be used for GUI# applications.# 10.5 - Extends ppc64 and x86_64 support to cover GUI applications.# 10.6 - Drops support for ppc64# 10.7 - Drops support for ppc## Given that we do not know if we're installing a CLI or a GUI# application, we must be conservative and assume it might be a GUI# application and behave as if ppc64 and x86_64 support did not occur# until 10.5.## Note: The above information is taken from the "Application support"# column in the chart not the "Processor support" since I believe# that we care about what instruction sets an application can use# not which processors the OS supports.if arch == 'ppc':return (major, minor) <= (10, 5)if arch == 'ppc64':return (major, minor) == (10, 5)if arch == 'i386':return (major, minor) >= (10, 4)if arch == 'x86_64':return (major, minor) >= (10, 5)if arch in groups:for garch in groups[arch]:if _supports_arch(major, minor, garch):return Truereturn Falsegroups = OrderedDict([("fat", ("i386", "ppc")),("intel", ("x86_64", "i386")),("fat64", ("x86_64", "ppc64")),("fat32", ("x86_64", "i386", "ppc")),])if _supports_arch(major, minor, machine):arches.append(machine)for garch in groups:if machine in groups[garch] and _supports_arch(major, minor, garch):arches.append(garch)arches.append('universal')return archesdef get_supported(versions=None, noarch=False, platform=None,impl=None, abi=None):"""Return a list 
of supported tags for each version specified in`versions`.:param versions: a list of string versions, of the form ["33", "32"],or None. The first version will be assumed to support our ABI.:param platform: specify the exact platform you want validtags for, or None. If None, use the local system platform.:param impl: specify the exact implementation you want validtags for, or None. If None, use the local interpreter impl.:param abi: specify the exact abi you want validtags for, or None. If None, use the local interpreter abi."""supported = []# Versions must be given with respect to the preferenceif versions is None:versions = []version_info = get_impl_version_info()major = version_info[:-1]# Support all previous minor Python versions.for minor in range(version_info[-1], -1, -1):versions.append(''.join(map(str, major + (minor,))))impl = impl or get_abbr_impl()abis = []abi = abi or get_abi_tag()if abi:abis[0:0] = [abi]abi3s = set()import impfor suffix in imp.get_suffixes():if suffix[0].startswith('.abi'):abi3s.add(suffix[0].split('.', 2)[1])abis.extend(sorted(list(abi3s)))abis.append('none')if not noarch:arch = platform or get_platform()if arch.startswith('macosx'):# support macosx-10.6-intel on macosx-10.9-x86_64match = _osx_arch_pat.match(arch)if match:name, major, minor, actual_arch = match.groups()tpl = '{0}_{1}_%i_%s'.format(name, major)arches = []for m in reversed(range(int(minor) + 1)):for a in get_darwin_arches(int(major), m, actual_arch):arches.append(tpl % (m, a))else:# arch pattern didn't match (?!)arches = [arch]elif platform is None and is_manylinux1_compatible():arches = [arch.replace('linux', 'manylinux1'), arch]else:arches = [arch]# Current version, current API (built specifically for our Python):for abi in abis:for arch in arches:supported.append(('%s%s' % (impl, versions[0]), abi, arch))# abi3 modules compatible with older version of Pythonfor version in versions[1:]:# abi3 was introduced in Python 3.2if version in ('31', '30'):breakfor abi in abi3s: 
# empty set if not Python 3for arch in arches:supported.append(("%s%s" % (impl, version), abi, arch))# Has binaries, does not use the Python API:for arch in arches:supported.append(('py%s' % (versions[0][0]), 'none', arch))# No abi / arch, but requires our implementation:supported.append(('%s%s' % (impl, versions[0]), 'none', 'any'))# Tagged specifically as being cross-version compatible# (with just the major version specified)supported.append(('%s%s' % (impl, versions[0][0]), 'none', 'any'))# No abi / arch, generic Pythonfor i, version in enumerate(versions):supported.append(('py%s' % (version,), 'none', 'any'))if i == 0:supported.append(('py%s' % (version[0]), 'none', 'any'))return supportedsupported_tags = get_supported()supported_tags_noarch = get_supported(noarch=True)implementation_tag = get_impl_tag()
from __future__ import absolute_importimport loggingimport reimport pipfrom pip.req import InstallRequirementfrom pip.req.req_file import COMMENT_REfrom pip.utils import get_installed_distributionsfrom pip._vendor import pkg_resourcesfrom pip._vendor.packaging.utils import canonicalize_namefrom pip._vendor.pkg_resources import RequirementParseErrorlogger = logging.getLogger(__name__)def freeze(requirement=None,find_links=None, local_only=None, user_only=None, skip_regex=None,default_vcs=None,isolated=False,wheel_cache=None,skip=()):find_links = find_links or []skip_match = Noneif skip_regex:skip_match = re.compile(skip_regex).searchdependency_links = []for dist in pkg_resources.working_set:if dist.has_metadata('dependency_links.txt'):dependency_links.extend(dist.get_metadata_lines('dependency_links.txt'))for link in find_links:if '#egg=' in link:dependency_links.append(link)for link in find_links:yield '-f %s' % linkinstallations = {}for dist in get_installed_distributions(local_only=local_only,skip=(),user_only=user_only):try:req = pip.FrozenRequirement.from_dist(dist,dependency_links)except RequirementParseError:logger.warning("Could not parse requirement: %s",dist.project_name)continueinstallations[req.name] = reqif requirement:# the options that don't get turned into an InstallRequirement# should only be emitted once, even if the same option is in multiple# requirements files, so we need to keep track of what has been emitted# so that we don't emit it again if it's seen againemitted_options = set()for req_file_path in requirement:with open(req_file_path) as req_file:for line in req_file:if (not line.strip() orline.strip().startswith('#') or(skip_match and skip_match(line)) orline.startswith(('-r', '--requirement','-Z', '--always-unzip','-f', '--find-links','-i', '--index-url','--pre','--trusted-host','--process-dependency-links','--extra-index-url'))):line = line.rstrip()if line not in emitted_options:emitted_options.add(line)yield linecontinueif 
line.startswith('-e') or line.startswith('--editable'):if line.startswith('-e'):line = line[2:].strip()else:line = line[len('--editable'):].strip().lstrip('=')line_req = InstallRequirement.from_editable(line,default_vcs=default_vcs,isolated=isolated,wheel_cache=wheel_cache,)else:line_req = InstallRequirement.from_line(COMMENT_RE.sub('', line).strip(),isolated=isolated,wheel_cache=wheel_cache,)if not line_req.name:logger.info("Skipping line in requirement file [%s] because ""it's not clear what it would install: %s",req_file_path, line.strip(),)logger.info(" (add #egg=PackageName to the URL to avoid"" this warning)")elif line_req.name not in installations:logger.warning("Requirement file [%s] contains %s, but that ""package is not installed",req_file_path, COMMENT_RE.sub('', line).strip(),)else:yield str(installations[line_req.name]).rstrip()del installations[line_req.name]yield('## The following requirements were added by ''pip freeze:')for installation in sorted(installations.values(), key=lambda x: x.name.lower()):if canonicalize_name(installation.name) not in skip:yield str(installation).rstrip()
def check_requirements(installed_dists):
    """Examine `installed_dists` for missing and incompatible requirements.

    :param installed_dists: an iterable of distribution objects exposing
        ``project_name``, ``version`` and ``requires()``.
    :return: a pair ``(missing_reqs_dict, incompatible_reqs_dict)``, each
        keyed by ``"<project_name>==<version>"``.  Missing values are lists
        of requirement objects; incompatible values are lists of
        ``(requirement, present_dist)`` pairs.
    """
    missing_reqs_dict = {}
    incompatible_reqs_dict = {}

    for dist in installed_dists:
        key = '%s==%s' % (dist.project_name, dist.version)

        missing_reqs = list(get_missing_reqs(dist, installed_dists))
        if missing_reqs:
            missing_reqs_dict[key] = missing_reqs

        incompatible_reqs = list(get_incompatible_reqs(
            dist, installed_dists))
        if incompatible_reqs:
            incompatible_reqs_dict[key] = incompatible_reqs

    return (missing_reqs_dict, incompatible_reqs_dict)


def get_missing_reqs(dist, installed_dists):
    """Return all of the requirements of `dist` that aren't present in
    `installed_dists`.

    Name comparison is case-insensitive on both sides.
    """
    installed_names = set(d.project_name.lower() for d in installed_dists)

    # FIX: dropped the unused `missing_requirements` accumulator set the
    # original built alongside the generator; it was written to but never
    # read, so it only wasted memory.
    for requirement in dist.requires():
        if requirement.project_name.lower() not in installed_names:
            yield requirement


def get_incompatible_reqs(dist, installed_dists):
    """Return all of the requirements of `dist` that are present in
    `installed_dists`, but have incompatible versions.

    Yields ``(requirement, present_dist)`` pairs.  Note: unlike
    get_missing_reqs, the lookup here is case-sensitive on project name.
    """
    installed_dists_by_name = {}
    for installed_dist in installed_dists:
        installed_dists_by_name[installed_dist.project_name] = installed_dist

    for requirement in dist.requires():
        present_dist = installed_dists_by_name.get(requirement.project_name)

        # `dist in requirement` is the version-compatibility test provided
        # by the requirement object (pkg_resources semantics).
        if present_dist and present_dist not in requirement:
            yield (requirement, present_dist)
from pip._vendor.six.moves.urllib import parse as urllib_parse


class Index(object):
    """A package index and the well-known URLs derived from its base URL."""

    def __init__(self, url):
        self.url = url
        # Host (and port) portion of the index URL, e.g. "pypi.python.org".
        self.netloc = urllib_parse.urlsplit(url).netloc
        # Pre-computed endpoints that pip consults.
        self.simple_url = self.url_to_path('simple')
        self.pypi_url = self.url_to_path('pypi')
        self.pip_json_url = self.url_to_path('pypi/pip/json')

    def url_to_path(self, path):
        # NOTE(review): despite the name, this joins a URL path segment onto
        # the index URL -- it does not produce a filesystem path.
        return urllib_parse.urljoin(self.url, path)


PyPI = Index('https://pypi.python.org/')
from pip.models.index import Index, PyPI__all__ = ["Index", "PyPI"]
"""Locations where we look for configs, install stuff, etc"""from __future__ import absolute_importimport osimport os.pathimport siteimport sysfrom distutils import sysconfigfrom distutils.command.install import install, SCHEME_KEYS # noqafrom pip.compat import WINDOWS, expanduserfrom pip.utils import appdirs# Application DirectoriesUSER_CACHE_DIR = appdirs.user_cache_dir("pip")DELETE_MARKER_MESSAGE = '''\This file is placed here by pip to indicate the source was puthere by pip.Once this package is successfully installed this source code will bedeleted (unless you remove this file).'''PIP_DELETE_MARKER_FILENAME = 'pip-delete-this-directory.txt'def write_delete_marker_file(directory):"""Write the pip delete marker file into this directory."""filepath = os.path.join(directory, PIP_DELETE_MARKER_FILENAME)with open(filepath, 'w') as marker_fp:marker_fp.write(DELETE_MARKER_MESSAGE)def running_under_virtualenv():"""Return True if we're running inside a virtualenv, False otherwise."""if hasattr(sys, 'real_prefix'):return Trueelif sys.prefix != getattr(sys, "base_prefix", sys.prefix):return Truereturn Falsedef virtualenv_no_global():"""Return True if in a venv and no system site packages."""# this mirrors the logic in virtualenv.py for locating the# no-global-site-packages.txt filesite_mod_dir = os.path.dirname(os.path.abspath(site.__file__))no_global_file = os.path.join(site_mod_dir, 'no-global-site-packages.txt')if running_under_virtualenv() and os.path.isfile(no_global_file):return Trueif running_under_virtualenv():src_prefix = os.path.join(sys.prefix, 'src')else:# FIXME: keep src in cwd for now (it is not a temporary folder)try:src_prefix = os.path.join(os.getcwd(), 'src')except OSError:# In case the current working directory has been renamed or deletedsys.exit("The folder you are executing pip from can no longer be found.")# under macOS + virtualenv sys.prefix is not properly resolved# it is something like /path/to/python/bin/..# Note: using realpath due to tmp dirs 
on OSX being symlinkssrc_prefix = os.path.abspath(src_prefix)# FIXME doesn't account for venv linked to global site-packagessite_packages = sysconfig.get_python_lib()user_site = site.USER_SITEuser_dir = expanduser('~')if WINDOWS:bin_py = os.path.join(sys.prefix, 'Scripts')bin_user = os.path.join(user_site, 'Scripts')# buildout uses 'bin' on Windows too?if not os.path.exists(bin_py):bin_py = os.path.join(sys.prefix, 'bin')bin_user = os.path.join(user_site, 'bin')config_basename = 'pip.ini'legacy_storage_dir = os.path.join(user_dir, 'pip')legacy_config_file = os.path.join(legacy_storage_dir,config_basename,)else:bin_py = os.path.join(sys.prefix, 'bin')bin_user = os.path.join(user_site, 'bin')config_basename = 'pip.conf'legacy_storage_dir = os.path.join(user_dir, '.pip')legacy_config_file = os.path.join(legacy_storage_dir,config_basename,)# Forcing to use /usr/local/bin for standard macOS framework installs# Also log to ~/Library/Logs/ for use with the Console.app log viewerif sys.platform[:6] == 'darwin' and sys.prefix[:16] == '/System/Library/':bin_py = '/usr/local/bin'site_config_files = [os.path.join(path, config_basename)for path in appdirs.site_config_dirs('pip')]def distutils_scheme(dist_name, user=False, home=None, root=None,isolated=False, prefix=None):"""Return a distutils install scheme"""from distutils.dist import Distributionscheme = {}if isolated:extra_dist_args = {"script_args": ["--no-user-cfg"]}else:extra_dist_args = {}dist_args = {'name': dist_name}dist_args.update(extra_dist_args)d = Distribution(dist_args)d.parse_config_files()i = d.get_command_obj('install', create=True)# NOTE: setting user or home has the side-effect of creating the home dir# or user base for installations during finalize_options()# ideally, we'd prefer a scheme class that has no side-effects.assert not (user and prefix), "user={0} prefix={1}".format(user, prefix)i.user = user or i.userif user:i.prefix = ""i.prefix = prefix or i.prefixi.home = home or i.homei.root = root or 
i.rooti.finalize_options()for key in SCHEME_KEYS:scheme[key] = getattr(i, 'install_' + key)# install_lib specified in setup.cfg should install *everything*# into there (i.e. it takes precedence over both purelib and# platlib). Note, i.install_lib is *always* set after# finalize_options(); we only want to override here if the user# has explicitly requested it hence going back to the configif 'install_lib' in d.get_option_dict('install'):scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib))if running_under_virtualenv():scheme['headers'] = os.path.join(sys.prefix,'include','site','python' + sys.version[:3],dist_name,)if root is not None:path_no_drive = os.path.splitdrive(os.path.abspath(scheme["headers"]))[1]scheme["headers"] = os.path.join(root,path_no_drive[1:],)return scheme
"""Routines related to PyPI, indexes"""from __future__ import absolute_importimport loggingimport cgifrom collections import namedtupleimport itertoolsimport sysimport osimport reimport mimetypesimport posixpathimport warningsfrom pip._vendor.six.moves.urllib import parse as urllib_parsefrom pip._vendor.six.moves.urllib import request as urllib_requestfrom pip.compat import ipaddressfrom pip.utils import (cached_property, splitext, normalize_path,ARCHIVE_EXTENSIONS, SUPPORTED_EXTENSIONS,)from pip.utils.deprecation import RemovedInPip10Warningfrom pip.utils.logging import indent_logfrom pip.utils.packaging import check_requires_pythonfrom pip.exceptions import (DistributionNotFound, BestVersionAlreadyInstalled, InvalidWheelFilename,UnsupportedWheel,)from pip.download import HAS_TLS, is_url, path_to_url, url_to_pathfrom pip.wheel import Wheel, wheel_extfrom pip.pep425tags import get_supportedfrom pip._vendor import html5lib, requests, sixfrom pip._vendor.packaging.version import parse as parse_versionfrom pip._vendor.packaging.utils import canonicalize_namefrom pip._vendor.packaging import specifiersfrom pip._vendor.requests.exceptions import SSLErrorfrom pip._vendor.distlib.compat import unescape__all__ = ['FormatControl', 'fmt_ctl_handle_mutual_exclude', 'PackageFinder']SECURE_ORIGINS = [# protocol, hostname, port# Taken from Chrome's list of secure origins (See: http://bit.ly/1qrySKC)("https", "*", "*"),("*", "localhost", "*"),("*", "127.0.0.0/8", "*"),("*", "::1/128", "*"),("file", "*", None),# ssh is always secure.("ssh", "*", "*"),]logger = logging.getLogger(__name__)class InstallationCandidate(object):def __init__(self, project, version, location):self.project = projectself.version = parse_version(version)self.location = locationself._key = (self.project, self.version, self.location)def __repr__(self):return "<InstallationCandidate({0!r}, {1!r}, {2!r})>".format(self.project, self.version, self.location,)def __hash__(self):return hash(self._key)def __lt__(self, 
other):return self._compare(other, lambda s, o: s < o)def __le__(self, other):return self._compare(other, lambda s, o: s <= o)def __eq__(self, other):return self._compare(other, lambda s, o: s == o)def __ge__(self, other):return self._compare(other, lambda s, o: s >= o)def __gt__(self, other):return self._compare(other, lambda s, o: s > o)def __ne__(self, other):return self._compare(other, lambda s, o: s != o)def _compare(self, other, method):if not isinstance(other, InstallationCandidate):return NotImplementedreturn method(self._key, other._key)class PackageFinder(object):"""This finds packages.This is meant to match easy_install's technique for looking forpackages, by reading pages and looking for appropriate links."""def __init__(self, find_links, index_urls, allow_all_prereleases=False,trusted_hosts=None, process_dependency_links=False,session=None, format_control=None, platform=None,versions=None, abi=None, implementation=None):"""Create a PackageFinder.:param format_control: A FormatControl object or None. Used to controlthe selection of source packages / binary packages when consultingthe index and links.:param platform: A string or None. If None, searches for packagesthat are supported by the current system. Otherwise, will findpackages that can be built on the platform passed in. Thesepackages will only be downloaded for distribution: they willnot be built locally.:param versions: A list of strings or None. This is passed directlyto pep425tags.py in the get_supported() method.:param abi: A string or None. This is passed directlyto pep425tags.py in the get_supported() method.:param implementation: A string or None. This is passed directlyto pep425tags.py in the get_supported() method."""if session is None:raise TypeError("PackageFinder() missing 1 required keyword argument: ""'session'")# Build find_links. If an argument starts with ~, it may be# a local file relative to a home directory. 
So try normalizing# it and if it exists, use the normalized version.# This is deliberately conservative - it might be fine just to# blindly normalize anything starting with a ~...self.find_links = []for link in find_links:if link.startswith('~'):new_link = normalize_path(link)if os.path.exists(new_link):link = new_linkself.find_links.append(link)self.index_urls = index_urlsself.dependency_links = []# These are boring links that have already been logged somehow:self.logged_links = set()self.format_control = format_control or FormatControl(set(), set())# Domains that we won't emit warnings for when not using HTTPSself.secure_origins = [("*", host, "*")for host in (trusted_hosts if trusted_hosts else [])]# Do we want to allow _all_ pre-releases?self.allow_all_prereleases = allow_all_prereleases# Do we process dependency links?self.process_dependency_links = process_dependency_links# The Session we'll use to make requestsself.session = session# The valid tags to check potential found wheel candidates againstself.valid_tags = get_supported(versions=versions,platform=platform,abi=abi,impl=implementation,)# If we don't have TLS enabled, then WARN if anyplace we're looking# relies on TLS.if not HAS_TLS:for link in itertools.chain(self.index_urls, self.find_links):parsed = urllib_parse.urlparse(link)if parsed.scheme == "https":logger.warning("pip is configured with locations that require ""TLS/SSL, however the ssl module in Python is not ""available.")breakdef add_dependency_links(self, links):# # FIXME: this shouldn't be global list this, it should only# # apply to requirements of the package that specifies the# # dependency_links value# # FIXME: also, we should track comes_from (i.e., use Link)if self.process_dependency_links:warnings.warn("Dependency Links processing has been deprecated and will be ""removed in a future release.",RemovedInPip10Warning,)self.dependency_links.extend(links)@staticmethoddef _sort_locations(locations, expand_dir=False):"""Sort locations into 
"files" (archives) and "urls", and returna pair of lists (files,urls)"""files = []urls = []# puts the url for the given file path into the appropriate listdef sort_path(path):url = path_to_url(path)if mimetypes.guess_type(url, strict=False)[0] == 'text/html':urls.append(url)else:files.append(url)for url in locations:is_local_path = os.path.exists(url)is_file_url = url.startswith('file:')if is_local_path or is_file_url:if is_local_path:path = urlelse:path = url_to_path(url)if os.path.isdir(path):if expand_dir:path = os.path.realpath(path)for item in os.listdir(path):sort_path(os.path.join(path, item))elif is_file_url:urls.append(url)elif os.path.isfile(path):sort_path(path)else:logger.warning("Url '%s' is ignored: it is neither a file ""nor a directory.", url)elif is_url(url):# Only add url with clear schemeurls.append(url)else:logger.warning("Url '%s' is ignored. It is either a non-existing ""path or lacks a specific scheme.", url)return files, urlsdef _candidate_sort_key(self, candidate):"""Function used to generate link sort key for link tuples.The greater the return value, the more preferred it is.If not finding wheels, then sorted by version only.If finding wheels, then the sort order is by version, then:1. existing installs2. wheels ordered via Wheel.support_index_min(self.valid_tags)3. source archivesNote: it was considered to embed this logic into the Linkcomparison operators, but then different sdist linkswith the same version, would have to be considered equal"""support_num = len(self.valid_tags)if candidate.location.is_wheel:# can raise InvalidWheelFilenamewheel = Wheel(candidate.location.filename)if not wheel.supported(self.valid_tags):raise UnsupportedWheel("%s is not a supported wheel for this platform. It ""can't be sorted." 
% wheel.filename)pri = -(wheel.support_index_min(self.valid_tags))else: # sdistpri = -(support_num)return (candidate.version, pri)def _validate_secure_origin(self, logger, location):# Determine if this url used a secure transport mechanismparsed = urllib_parse.urlparse(str(location))origin = (parsed.scheme, parsed.hostname, parsed.port)# The protocol to use to see if the protocol matches.# Don't count the repository type as part of the protocol: in# cases such as "git+ssh", only use "ssh". (I.e., Only verify against# the last scheme.)protocol = origin[0].rsplit('+', 1)[-1]# Determine if our origin is a secure origin by looking through our# hardcoded list of secure origins, as well as any additional ones# configured on this PackageFinder instance.for secure_origin in (SECURE_ORIGINS + self.secure_origins):if protocol != secure_origin[0] and secure_origin[0] != "*":continuetry:# We need to do this decode dance to ensure that we have a# unicode object, even on Python 2.x.addr = ipaddress.ip_address(origin[1]if (isinstance(origin[1], six.text_type) ororigin[1] is None)else origin[1].decode("utf8"))network = ipaddress.ip_network(secure_origin[1]if isinstance(secure_origin[1], six.text_type)else secure_origin[1].decode("utf8"))except ValueError:# We don't have both a valid address or a valid network, so# we'll check this origin against hostnames.if (origin[1] andorigin[1].lower() != secure_origin[1].lower() andsecure_origin[1] != "*"):continueelse:# We have a valid address and network, so see if the address# is contained within the network.if addr not in network:continue# Check to see if the port patchesif (origin[2] != secure_origin[2] andsecure_origin[2] != "*" andsecure_origin[2] is not None):continue# If we've gotten here, then this origin matches the current# secure origin and we should return Truereturn True# If we've gotten to this point, then the origin isn't secure and we# will not accept it as a valid location to search. 
We will however# log a warning that we are ignoring it.logger.warning("The repository located at %s is not a trusted or secure host and ""is being ignored. If this repository is available via HTTPS it ""is recommended to use HTTPS instead, otherwise you may silence ""this warning and allow it anyways with '--trusted-host %s'.",parsed.hostname,parsed.hostname,)return Falsedef _get_index_urls_locations(self, project_name):"""Returns the locations found via self.index_urlsChecks the url_name on the main (first in the list) index anduse this url_name to produce all locations"""def mkurl_pypi_url(url):loc = posixpath.join(url,urllib_parse.quote(canonicalize_name(project_name)))# For maximum compatibility with easy_install, ensure the path# ends in a trailing slash. Although this isn't in the spec# (and PyPI can handle it without the slash) some other index# implementations might break if they relied on easy_install's# behavior.if not loc.endswith('/'):loc = loc + '/'return locreturn [mkurl_pypi_url(url) for url in self.index_urls]def find_all_candidates(self, project_name):"""Find all available InstallationCandidate for project_nameThis checks index_urls, find_links and dependency_links.All versions found are returned as an InstallationCandidate list.See _link_package_versions for details on which files are accepted"""index_locations = self._get_index_urls_locations(project_name)index_file_loc, index_url_loc = self._sort_locations(index_locations)fl_file_loc, fl_url_loc = self._sort_locations(self.find_links, expand_dir=True)dep_file_loc, dep_url_loc = self._sort_locations(self.dependency_links)file_locations = (Link(url) for url in itertools.chain(index_file_loc, fl_file_loc, dep_file_loc))# We trust every url that the user has given us whether it was given# via --index-url or --find-links# We explicitly do not trust links that came from dependency_links# We want to filter out any thing which does not have a secure origin.url_locations = [link for link in 
itertools.chain((Link(url) for url in index_url_loc),(Link(url) for url in fl_url_loc),(Link(url) for url in dep_url_loc),)if self._validate_secure_origin(logger, link)]logger.debug('%d location(s) to search for versions of %s:',len(url_locations), project_name)for location in url_locations:logger.debug('* %s', location)canonical_name = canonicalize_name(project_name)formats = fmt_ctl_formats(self.format_control, canonical_name)search = Search(project_name, canonical_name, formats)find_links_versions = self._package_versions(# We trust every directly linked archive in find_links(Link(url, '-f') for url in self.find_links),search)page_versions = []for page in self._get_pages(url_locations, project_name):logger.debug('Analyzing links from page %s', page.url)with indent_log():page_versions.extend(self._package_versions(page.links, search))dependency_versions = self._package_versions((Link(url) for url in self.dependency_links), search)if dependency_versions:logger.debug('dependency_links found: %s',', '.join([version.location.url for version in dependency_versions]))file_versions = self._package_versions(file_locations, search)if file_versions:file_versions.sort(reverse=True)logger.debug('Local files found: %s',', '.join([url_to_path(candidate.location.url)for candidate in file_versions]))# This is an intentional priority orderingreturn (file_versions + find_links_versions + page_versions +dependency_versions)def find_requirement(self, req, upgrade):"""Try to find a Link matching reqExpects req, an InstallRequirement and upgrade, a booleanReturns a Link if found,Raises DistributionNotFound or BestVersionAlreadyInstalled otherwise"""all_candidates = self.find_all_candidates(req.name)# Filter out anything which doesn't match our specifiercompatible_versions = set(req.specifier.filter(# We turn the version object into a str here because otherwise# when we're debundled but setuptools isn't, Python will see# packaging.version.Version and# 
pkg_resources._vendor.packaging.version.Version as different# types. This way we'll use a str as a common data interchange# format. If we stop using the pkg_resources provided specifier# and start using our own, we can drop the cast to str().[str(c.version) for c in all_candidates],prereleases=(self.allow_all_prereleasesif self.allow_all_prereleases else None),))applicable_candidates = [# Again, converting to str to deal with debundling.c for c in all_candidates if str(c.version) in compatible_versions]if applicable_candidates:best_candidate = max(applicable_candidates,key=self._candidate_sort_key)else:best_candidate = Noneif req.satisfied_by is not None:installed_version = parse_version(req.satisfied_by.version)else:installed_version = Noneif installed_version is None and best_candidate is None:logger.critical('Could not find a version that satisfies the requirement %s ''(from versions: %s)',req,', '.join(sorted(set(str(c.version) for c in all_candidates),key=parse_version,)))raise DistributionNotFound('No matching distribution found for %s' % req)best_installed = Falseif installed_version and (best_candidate is None orbest_candidate.version <= installed_version):best_installed = Trueif not upgrade and installed_version is not None:if best_installed:logger.debug('Existing installed version (%s) is most up-to-date and ''satisfies requirement',installed_version,)else:logger.debug('Existing installed version (%s) satisfies requirement ''(most up-to-date version is %s)',installed_version,best_candidate.version,)return Noneif best_installed:# We have an existing version, and its the best versionlogger.debug('Installed version (%s) is most up-to-date (past versions: ''%s)',installed_version,', '.join(sorted(compatible_versions, key=parse_version)) or"none",)raise BestVersionAlreadyInstalledlogger.debug('Using version %s (newest of versions: %s)',best_candidate.version,', '.join(sorted(compatible_versions, key=parse_version)))return best_candidate.locationdef 
_get_pages(self, locations, project_name):"""Yields (page, page_url) from the given locations, skippinglocations that have errors."""seen = set()for location in locations:if location in seen:continueseen.add(location)page = self._get_page(location)if page is None:continueyield page_py_version_re = re.compile(r'-py([123]\.?[0-9]?)$')def _sort_links(self, links):"""Returns elements of links in order, non-egg links first, egg linkssecond, while eliminating duplicates"""eggs, no_eggs = [], []seen = set()for link in links:if link not in seen:seen.add(link)if link.egg_fragment:eggs.append(link)else:no_eggs.append(link)return no_eggs + eggsdef _package_versions(self, links, search):result = []for link in self._sort_links(links):v = self._link_package_versions(link, search)if v is not None:result.append(v)return resultdef _log_skipped_link(self, link, reason):if link not in self.logged_links:logger.debug('Skipping link %s; %s', link, reason)self.logged_links.add(link)def _link_package_versions(self, link, search):"""Return an InstallationCandidate or None"""version = Noneif link.egg_fragment:egg_info = link.egg_fragmentext = link.extelse:egg_info, ext = link.splitext()if not ext:self._log_skipped_link(link, 'not a file')returnif ext not in SUPPORTED_EXTENSIONS:self._log_skipped_link(link, 'unsupported archive format: %s' % ext)returnif "binary" not in search.formats and ext == wheel_ext:self._log_skipped_link(link, 'No binaries permitted for %s' % search.supplied)returnif "macosx10" in link.path and ext == '.zip':self._log_skipped_link(link, 'macosx10 one')returnif ext == wheel_ext:try:wheel = Wheel(link.filename)except InvalidWheelFilename:self._log_skipped_link(link, 'invalid wheel filename')returnif canonicalize_name(wheel.name) != search.canonical:self._log_skipped_link(link, 'wrong project name (not %s)' % search.supplied)returnif not wheel.supported(self.valid_tags):self._log_skipped_link(link, 'it is not compatible with this Python')returnversion = wheel.version# 
This should be up by the search.ok_binary check, but see issue 2700.if "source" not in search.formats and ext != wheel_ext:self._log_skipped_link(link, 'No sources permitted for %s' % search.supplied)returnif not version:version = egg_info_matches(egg_info, search.supplied, link)if version is None:self._log_skipped_link(link, 'wrong project name (not %s)' % search.supplied)returnmatch = self._py_version_re.search(version)if match:version = version[:match.start()]py_version = match.group(1)if py_version != sys.version[:3]:self._log_skipped_link(link, 'Python version is incorrect')returntry:support_this_python = check_requires_python(link.requires_python)except specifiers.InvalidSpecifier:logger.debug("Package %s has an invalid Requires-Python entry: %s",link.filename, link.requires_python)support_this_python = Trueif not support_this_python:logger.debug("The package %s is incompatible with the python""version in use. Acceptable python versions are:%s",link, link.requires_python)returnlogger.debug('Found link %s, version: %s', link, version)return InstallationCandidate(search.supplied, version, link)def _get_page(self, link):return HTMLPage.get_page(link, session=self.session)def egg_info_matches(egg_info, search_name, link,_egg_info_re=re.compile(r'([a-z0-9_.]+)-([a-z0-9_.!+-]+)', re.I)):"""Pull the version part out of a string.:param egg_info: The string to parse. E.g. foo-2.1:param search_name: The name of the package this belongs to. None toinfer the name. 
Note that this cannot unambiguously parse stringslike foo-2-2 which might be foo, 2-2 or foo-2, 2.:param link: The link the string came from, for logging on failure."""match = _egg_info_re.search(egg_info)if not match:logger.debug('Could not parse version from link: %s', link)return Noneif search_name is None:full_match = match.group(0)return full_match[full_match.index('-'):]name = match.group(0).lower()# To match the "safe" name that pkg_resources creates:name = name.replace('_', '-')# project name and version must be separated by a dashlook_for = search_name.lower() + "-"if name.startswith(look_for):return match.group(0)[len(look_for):]else:return Noneclass HTMLPage(object):"""Represents one page, along with its URL"""def __init__(self, content, url, headers=None):# Determine if we have any encoding information in our headersencoding = Noneif headers and "Content-Type" in headers:content_type, params = cgi.parse_header(headers["Content-Type"])if "charset" in params:encoding = params['charset']self.content = contentself.parsed = html5lib.parse(self.content,transport_encoding=encoding,namespaceHTMLElements=False,)self.url = urlself.headers = headersdef __str__(self):return self.url@classmethoddef get_page(cls, link, skip_archives=True, session=None):if session is None:raise TypeError("get_page() missing 1 required keyword argument: 'session'")url = link.urlurl = url.split('#', 1)[0]# Check for VCS schemes that do not support lookup as web pages.from pip.vcs import VcsSupportfor scheme in VcsSupport.schemes:if url.lower().startswith(scheme) and url[len(scheme)] in '+:':logger.debug('Cannot look at %s URL %s', scheme, link)return Nonetry:if skip_archives:filename = link.filenamefor bad_ext in ARCHIVE_EXTENSIONS:if filename.endswith(bad_ext):content_type = cls._get_content_type(url, session=session,)if content_type.lower().startswith('text/html'):breakelse:logger.debug('Skipping page %s because of Content-Type: %s',link,content_type,)returnlogger.debug('Getting page 
%s', url)# Tack index.html onto file:// URLs that point to directories(scheme, netloc, path, params, query, fragment) = \urllib_parse.urlparse(url)if (scheme == 'file' andos.path.isdir(urllib_request.url2pathname(path))):# add trailing slash if not present so urljoin doesn't trim# final segmentif not url.endswith('/'):url += '/'url = urllib_parse.urljoin(url, 'index.html')logger.debug(' file: URL is directory, getting %s', url)resp = session.get(url,headers={"Accept": "text/html","Cache-Control": "max-age=600",},)resp.raise_for_status()# The check for archives above only works if the url ends with# something that looks like an archive. However that is not a# requirement of an url. Unless we issue a HEAD request on every# url we cannot know ahead of time for sure if something is HTML# or not. However we can check after we've downloaded it.content_type = resp.headers.get('Content-Type', 'unknown')if not content_type.lower().startswith("text/html"):logger.debug('Skipping page %s because of Content-Type: %s',link,content_type,)returninst = cls(resp.content, resp.url, resp.headers)except requests.HTTPError as exc:cls._handle_fail(link, exc, url)except SSLError as exc:reason = ("There was a problem confirming the ssl certificate: ""%s" % exc)cls._handle_fail(link, reason, url, meth=logger.info)except requests.ConnectionError as exc:cls._handle_fail(link, "connection error: %s" % exc, url)except requests.Timeout:cls._handle_fail(link, "timed out", url)else:return inst@staticmethoddef _handle_fail(link, reason, url, meth=None):if meth is None:meth = logger.debugmeth("Could not fetch URL %s: %s - skipping", link, reason)@staticmethoddef _get_content_type(url, session):"""Get the Content-Type of the given url, using a HEAD request"""scheme, netloc, path, query, fragment = urllib_parse.urlsplit(url)if scheme not in ('http', 'https'):# FIXME: some warning or something?# assertion error?return ''resp = session.head(url, allow_redirects=True)resp.raise_for_status()return 
resp.headers.get("Content-Type", "")@cached_propertydef base_url(self):bases = [x for x in self.parsed.findall(".//base")if x.get("href") is not None]if bases and bases[0].get("href"):return bases[0].get("href")else:return self.url@propertydef links(self):"""Yields all links in the page"""for anchor in self.parsed.findall(".//a"):if anchor.get("href"):href = anchor.get("href")url = self.clean_link(urllib_parse.urljoin(self.base_url, href))pyrequire = anchor.get('data-requires-python')pyrequire = unescape(pyrequire) if pyrequire else Noneyield Link(url, self, requires_python=pyrequire)_clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I)def clean_link(self, url):"""Makes sure a link is fully encoded. That is, if a ' ' shows up inthe link, it will be rewritten to %20 (while not over-quoting% or other characters)."""return self._clean_re.sub(lambda match: '%%%2x' % ord(match.group(0)), url)class Link(object):def __init__(self, url, comes_from=None, requires_python=None):"""Object representing a parsed link from https://pypi.python.org/simple/*url:url of the resource pointed to (href of the link)comes_from:instance of HTMLPage where the link was found, or string.requires_python:String containing the `Requires-Python` metadata field, specifiedin PEP 345. 
This may be specified by a data-requires-pythonattribute in the HTML link tag, as described in PEP 503."""# url can be a UNC windows shareif url.startswith('\\\\'):url = path_to_url(url)self.url = urlself.comes_from = comes_fromself.requires_python = requires_python if requires_python else Nonedef __str__(self):if self.requires_python:rp = ' (requires-python:%s)' % self.requires_pythonelse:rp = ''if self.comes_from:return '%s (from %s)%s' % (self.url, self.comes_from, rp)else:return str(self.url)def __repr__(self):return '<Link %s>' % selfdef __eq__(self, other):if not isinstance(other, Link):return NotImplementedreturn self.url == other.urldef __ne__(self, other):if not isinstance(other, Link):return NotImplementedreturn self.url != other.urldef __lt__(self, other):if not isinstance(other, Link):return NotImplementedreturn self.url < other.urldef __le__(self, other):if not isinstance(other, Link):return NotImplementedreturn self.url <= other.urldef __gt__(self, other):if not isinstance(other, Link):return NotImplementedreturn self.url > other.urldef __ge__(self, other):if not isinstance(other, Link):return NotImplementedreturn self.url >= other.urldef __hash__(self):return hash(self.url)@propertydef filename(self):_, netloc, path, _, _ = urllib_parse.urlsplit(self.url)name = posixpath.basename(path.rstrip('/')) or netlocname = urllib_parse.unquote(name)assert name, ('URL %r produced no filename' % self.url)return name@propertydef scheme(self):return urllib_parse.urlsplit(self.url)[0]@propertydef netloc(self):return urllib_parse.urlsplit(self.url)[1]@propertydef path(self):return urllib_parse.unquote(urllib_parse.urlsplit(self.url)[2])def splitext(self):return splitext(posixpath.basename(self.path.rstrip('/')))@propertydef ext(self):return self.splitext()[1]@propertydef url_without_fragment(self):scheme, netloc, path, query, fragment = urllib_parse.urlsplit(self.url)return urllib_parse.urlunsplit((scheme, netloc, path, query, None))_egg_fragment_re = 
re.compile(r'[#&]egg=([^&]*)')@propertydef egg_fragment(self):match = self._egg_fragment_re.search(self.url)if not match:return Nonereturn match.group(1)_subdirectory_fragment_re = re.compile(r'[#&]subdirectory=([^&]*)')@propertydef subdirectory_fragment(self):match = self._subdirectory_fragment_re.search(self.url)if not match:return Nonereturn match.group(1)_hash_re = re.compile(r'(sha1|sha224|sha384|sha256|sha512|md5)=([a-f0-9]+)')@propertydef hash(self):match = self._hash_re.search(self.url)if match:return match.group(2)return None@propertydef hash_name(self):match = self._hash_re.search(self.url)if match:return match.group(1)return None@propertydef show_url(self):return posixpath.basename(self.url.split('#', 1)[0].split('?', 1)[0])@propertydef is_wheel(self):return self.ext == wheel_ext@propertydef is_artifact(self):"""Determines if this points to an actual artifact (e.g. a tarball) or ifit points to an "abstract" thing like a path or a VCS location."""from pip.vcs import vcsif self.scheme in vcs.all_schemes:return Falsereturn TrueFormatControl = namedtuple('FormatControl', 'no_binary only_binary')"""This object has two fields, no_binary and only_binary.If a field is falsy, it isn't set. If it is {':all:'}, it should match allpackages except those listed in the other field. Only one field can be setto {':all:'} at a time. 
The rest of the time exact package name matchesare listed, with any given package only showing up in one field at a time."""def fmt_ctl_handle_mutual_exclude(value, target, other):new = value.split(',')while ':all:' in new:other.clear()target.clear()target.add(':all:')del new[:new.index(':all:') + 1]if ':none:' not in new:# Without a none, we want to discard everything as :all: covers itreturnfor name in new:if name == ':none:':target.clear()continuename = canonicalize_name(name)other.discard(name)target.add(name)def fmt_ctl_formats(fmt_ctl, canonical_name):result = set(["binary", "source"])if canonical_name in fmt_ctl.only_binary:result.discard('source')elif canonical_name in fmt_ctl.no_binary:result.discard('binary')elif ':all:' in fmt_ctl.only_binary:result.discard('source')elif ':all:' in fmt_ctl.no_binary:result.discard('binary')return frozenset(result)def fmt_ctl_no_binary(fmt_ctl):fmt_ctl_handle_mutual_exclude(':all:', fmt_ctl.no_binary, fmt_ctl.only_binary)def fmt_ctl_no_use_wheel(fmt_ctl):fmt_ctl_no_binary(fmt_ctl)warnings.warn('--no-use-wheel is deprecated and will be removed in the future. '' Please use --no-binary :all: instead.', RemovedInPip10Warning,stacklevel=2)Search = namedtuple('Search', 'supplied canonical formats')"""Capture key aspects of a search.:attribute supplied: The user supplied package.:attribute canonical: The canonical package name.:attribute formats: The formats allowed for this package. Should be a setwith 'binary' or 'source' or both in it."""
"""Exceptions used throughout package"""from __future__ import absolute_importfrom itertools import chain, groupby, repeatfrom pip._vendor.six import iteritemsclass PipError(Exception):"""Base pip exception"""class InstallationError(PipError):"""General exception during installation"""class UninstallationError(PipError):"""General exception during uninstallation"""class DistributionNotFound(InstallationError):"""Raised when a distribution cannot be found to satisfy a requirement"""class RequirementsFileParseError(InstallationError):"""Raised when a general error occurs parsing a requirements file line."""class BestVersionAlreadyInstalled(PipError):"""Raised when the most up-to-date version of a package is alreadyinstalled."""class BadCommand(PipError):"""Raised when virtualenv or a command is not found"""class CommandError(PipError):"""Raised when there is an error in command-line arguments"""class PreviousBuildDirError(PipError):"""Raised when there's a previous conflicting build directory"""class InvalidWheelFilename(InstallationError):"""Invalid wheel filename."""class UnsupportedWheel(InstallationError):"""Unsupported wheel."""class HashErrors(InstallationError):"""Multiple HashError instances rolled into one for reporting"""def __init__(self):self.errors = []def append(self, error):self.errors.append(error)def __str__(self):lines = []self.errors.sort(key=lambda e: e.order)for cls, errors_of_cls in groupby(self.errors, lambda e: e.__class__):lines.append(cls.head)lines.extend(e.body() for e in errors_of_cls)if lines:return '\n'.join(lines)def __nonzero__(self):return bool(self.errors)def __bool__(self):return self.__nonzero__()class HashError(InstallationError):"""A failure to verify a package against known-good hashes:cvar order: An int sorting hash exception classes by difficulty ofrecovery (lower being harder), so the user doesn't bother frettingabout unpinned packages when he has deeper issues, like VCSdependencies, to deal with. 
Also keeps error reports in adeterministic order.:cvar head: A section heading for display above potentially manyexceptions of this kind:ivar req: The InstallRequirement that triggered this error. This ispasted on after the exception is instantiated, because it's nottypically available earlier."""req = Nonehead = ''def body(self):"""Return a summary of me for display under the heading.This default implementation simply prints a description of thetriggering requirement.:param req: The InstallRequirement that provoked this error, withpopulate_link() having already been called"""return ' %s' % self._requirement_name()def __str__(self):return '%s\n%s' % (self.head, self.body())def _requirement_name(self):"""Return a description of the requirement that triggered me.This default implementation returns long description of the req, withline numbers"""return str(self.req) if self.req else 'unknown package'class VcsHashUnsupported(HashError):"""A hash was provided for a version-control-system-based requirement, butwe don't have a method for hashing those."""order = 0head = ("Can't verify hashes for these requirements because we don't ""have a way to hash version control repositories:")class DirectoryUrlHashUnsupported(HashError):"""A hash was provided for a version-control-system-based requirement, butwe don't have a method for hashing those."""order = 1head = ("Can't verify hashes for these file:// requirements because they ""point to directories:")class HashMissing(HashError):"""A hash was needed for a requirement but is absent."""order = 2head = ('Hashes are required in --require-hashes mode, but they are ''missing from some requirements. Here is a list of those ''requirements along with the hashes their downloaded archives ''actually had. Add lines like these to your requirements files to ''prevent tampering. 
(If you did not enable --require-hashes ''manually, note that it turns on automatically when any package ''has a hash.)')def __init__(self, gotten_hash):""":param gotten_hash: The hash of the (possibly malicious) archive wejust downloaded"""self.gotten_hash = gotten_hashdef body(self):from pip.utils.hashes import FAVORITE_HASH # Dodge circular import.package = Noneif self.req:# In the case of URL-based requirements, display the original URL# seen in the requirements file rather than the package name,# so the output can be directly copied into the requirements file.package = (self.req.original_link if self.req.original_link# In case someone feeds something downright stupid# to InstallRequirement's constructor.else getattr(self.req, 'req', None))return ' %s --hash=%s:%s' % (package or 'unknown package',FAVORITE_HASH,self.gotten_hash)class HashUnpinned(HashError):"""A requirement had a hash specified but was not pinned to a specificversion."""order = 3head = ('In --require-hashes mode, all requirements must have their ''versions pinned with ==. These do not:')class HashMismatch(HashError):"""Distribution file hash values don't match.:ivar package_name: The name of the package that triggered the hashmismatch. Feel free to write to this after the exception is raise toimprove its error message."""order = 4head = ('THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS ''FILE. If you have updated the package versions, please update ''the hashes. 
Otherwise, examine the package contents carefully; ''someone may have tampered with them.')def __init__(self, allowed, gots):""":param allowed: A dict of algorithm names pointing to lists of allowedhex digests:param gots: A dict of algorithm names pointing to hashes weactually got from the files under suspicion"""self.allowed = allowedself.gots = gotsdef body(self):return ' %s:\n%s' % (self._requirement_name(),self._hash_comparison())def _hash_comparison(self):"""Return a comparison of actual and expected hash values.Example::Expected sha256 abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeor 123451234512345123451234512345123451234512345Got bcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdef"""def hash_then_or(hash_name):# For now, all the decent hashes have 6-char names, so we can get# away with hard-coding space literals.return chain([hash_name], repeat(' or'))lines = []for hash_name, expecteds in iteritems(self.allowed):prefix = hash_then_or(hash_name)lines.extend((' Expected %s %s' % (next(prefix), e))for e in expecteds)lines.append(' Got %s\n' %self.gots[hash_name].hexdigest())prefix = ' or'return '\n'.join(lines)class UnsupportedPythonVersion(InstallationError):"""Unsupported python version according to Requires-Python packagemetadata."""
from __future__ import absolute_importimport cgiimport email.utilsimport getpassimport jsonimport loggingimport mimetypesimport osimport platformimport reimport shutilimport sysimport tempfiletry:import ssl # noqaHAS_TLS = Trueexcept ImportError:HAS_TLS = Falsefrom pip._vendor.six.moves.urllib import parse as urllib_parsefrom pip._vendor.six.moves.urllib import request as urllib_requestimport pipfrom pip.exceptions import InstallationError, HashMismatchfrom pip.models import PyPIfrom pip.utils import (splitext, rmtree, format_size, display_path,backup_dir, ask_path_exists, unpack_file,ARCHIVE_EXTENSIONS, consume, call_subprocess)from pip.utils.encoding import auto_decodefrom pip.utils.filesystem import check_path_ownerfrom pip.utils.logging import indent_logfrom pip.utils.setuptools_build import SETUPTOOLS_SHIMfrom pip.utils.glibc import libc_verfrom pip.utils.ui import DownloadProgressBar, DownloadProgressSpinnerfrom pip.locations import write_delete_marker_filefrom pip.vcs import vcsfrom pip._vendor import requests, sixfrom pip._vendor.requests.adapters import BaseAdapter, HTTPAdapterfrom pip._vendor.requests.auth import AuthBase, HTTPBasicAuthfrom pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Responsefrom pip._vendor.requests.utils import get_netrc_authfrom pip._vendor.requests.structures import CaseInsensitiveDictfrom pip._vendor.requests.packages import urllib3from pip._vendor.cachecontrol import CacheControlAdapterfrom pip._vendor.cachecontrol.caches import FileCachefrom pip._vendor.lockfile import LockErrorfrom pip._vendor.six.moves import xmlrpc_client__all__ = ['get_file_content','is_url', 'url_to_path', 'path_to_url','is_archive_file', 'unpack_vcs_link','unpack_file_url', 'is_vcs_url', 'is_file_url','unpack_http_url', 'unpack_url']logger = logging.getLogger(__name__)def user_agent():"""Return a string representing the user agent."""data = {"installer": {"name": "pip", "version": pip.__version__},"python": 
platform.python_version(),"implementation": {"name": platform.python_implementation(),},}if data["implementation"]["name"] == 'CPython':data["implementation"]["version"] = platform.python_version()elif data["implementation"]["name"] == 'PyPy':if sys.pypy_version_info.releaselevel == 'final':pypy_version_info = sys.pypy_version_info[:3]else:pypy_version_info = sys.pypy_version_infodata["implementation"]["version"] = ".".join([str(x) for x in pypy_version_info])elif data["implementation"]["name"] == 'Jython':# Complete Guessdata["implementation"]["version"] = platform.python_version()elif data["implementation"]["name"] == 'IronPython':# Complete Guessdata["implementation"]["version"] = platform.python_version()if sys.platform.startswith("linux"):from pip._vendor import distrodistro_infos = dict(filter(lambda x: x[1],zip(["name", "version", "id"], distro.linux_distribution()),))libc = dict(filter(lambda x: x[1],zip(["lib", "version"], libc_ver()),))if libc:distro_infos["libc"] = libcif distro_infos:data["distro"] = distro_infosif sys.platform.startswith("darwin") and platform.mac_ver()[0]:data["distro"] = {"name": "macOS", "version": platform.mac_ver()[0]}if platform.system():data.setdefault("system", {})["name"] = platform.system()if platform.release():data.setdefault("system", {})["release"] = platform.release()if platform.machine():data["cpu"] = platform.machine()# Python 2.6 doesn't have ssl.OPENSSL_VERSION.if HAS_TLS and sys.version_info[:2] > (2, 6):data["openssl_version"] = ssl.OPENSSL_VERSIONreturn "{data[installer][name]}/{data[installer][version]} {json}".format(data=data,json=json.dumps(data, separators=(",", ":"), sort_keys=True),)class MultiDomainBasicAuth(AuthBase):def __init__(self, prompting=True):self.prompting = promptingself.passwords = {}def __call__(self, req):parsed = urllib_parse.urlparse(req.url)# Get the netloc without any embedded credentialsnetloc = parsed.netloc.rsplit("@", 1)[-1]# Set the url of the request to the url without any 
credentialsreq.url = urllib_parse.urlunparse(parsed[:1] + (netloc,) + parsed[2:])# Use any stored credentials that we have for this netlocusername, password = self.passwords.get(netloc, (None, None))# Extract credentials embedded in the url if we have none storedif username is None:username, password = self.parse_credentials(parsed.netloc)# Get creds from netrc if we still don't have themif username is None and password is None:netrc_auth = get_netrc_auth(req.url)username, password = netrc_auth if netrc_auth else (None, None)if username or password:# Store the username and passwordself.passwords[netloc] = (username, password)# Send the basic auth with this requestreq = HTTPBasicAuth(username or "", password or "")(req)# Attach a hook to handle 401 responsesreq.register_hook("response", self.handle_401)return reqdef handle_401(self, resp, **kwargs):# We only care about 401 responses, anything else we want to just# pass through the actual responseif resp.status_code != 401:return resp# We are not able to prompt the user so simply return the responseif not self.prompting:return respparsed = urllib_parse.urlparse(resp.url)# Prompt the user for a new username and passwordusername = six.moves.input("User for %s: " % parsed.netloc)password = getpass.getpass("Password: ")# Store the new username and password to use for future requestsif username or password:self.passwords[parsed.netloc] = (username, password)# Consume content and release the original connection to allow our new# request to reuse the same one.resp.contentresp.raw.release_conn()# Add our new username and password to the requestreq = HTTPBasicAuth(username or "", password or "")(resp.request)# Send our new requestnew_resp = resp.connection.send(req, **kwargs)new_resp.history.append(resp)return new_respdef parse_credentials(self, netloc):if "@" in netloc:userinfo = netloc.rsplit("@", 1)[0]if ":" in userinfo:return userinfo.split(":", 1)return userinfo, Nonereturn None, Noneclass LocalFSAdapter(BaseAdapter):def 
send(self, request, stream=None, timeout=None, verify=None, cert=None,proxies=None):pathname = url_to_path(request.url)resp = Response()resp.status_code = 200resp.url = request.urltry:stats = os.stat(pathname)except OSError as exc:resp.status_code = 404resp.raw = excelse:modified = email.utils.formatdate(stats.st_mtime, usegmt=True)content_type = mimetypes.guess_type(pathname)[0] or "text/plain"resp.headers = CaseInsensitiveDict({"Content-Type": content_type,"Content-Length": stats.st_size,"Last-Modified": modified,})resp.raw = open(pathname, "rb")resp.close = resp.raw.closereturn respdef close(self):passclass SafeFileCache(FileCache):"""A file based cache which is safe to use even when the target directory maynot be accessible or writable."""def __init__(self, *args, **kwargs):super(SafeFileCache, self).__init__(*args, **kwargs)# Check to ensure that the directory containing our cache directory# is owned by the user current executing pip. If it does not exist# we will check the parent directory until we find one that does exist.# If it is not owned by the user executing pip then we will disable# the cache and log a warning.if not check_path_owner(self.directory):logger.warning("The directory '%s' or its parent directory is not owned by ""the current user and the cache has been disabled. Please ""check the permissions and owner of that directory. 
If ""executing pip with sudo, you may want sudo's -H flag.",self.directory,)
            # NOTE(review): the fragment above is the tail of a
            # logger.warning(...) call whose opening lines lie before this
            # chunk; it is preserved verbatim.

            # Set our directory to None to disable the Cache
            self.directory = None

    def get(self, *args, **kwargs):
        """Read from the cache, silently degrading to a no-op on failure."""
        # If we don't have a directory, then the cache should be a no-op.
        if self.directory is None:
            return

        try:
            return super(SafeFileCache, self).get(*args, **kwargs)
        except (LockError, OSError, IOError):
            # We intentionally silence this error, if we can't access the cache
            # then we can just skip caching and process the request as if
            # caching wasn't enabled.
            pass

    def set(self, *args, **kwargs):
        """Write to the cache, silently degrading to a no-op on failure."""
        # If we don't have a directory, then the cache should be a no-op.
        if self.directory is None:
            return

        try:
            return super(SafeFileCache, self).set(*args, **kwargs)
        except (LockError, OSError, IOError):
            # We intentionally silence this error, if we can't access the cache
            # then we can just skip caching and process the request as if
            # caching wasn't enabled.
            pass

    def delete(self, *args, **kwargs):
        """Delete from the cache, silently degrading to a no-op on failure."""
        # If we don't have a directory, then the cache should be a no-op.
        if self.directory is None:
            return

        try:
            return super(SafeFileCache, self).delete(*args, **kwargs)
        except (LockError, OSError, IOError):
            # We intentionally silence this error, if we can't access the cache
            # then we can just skip caching and process the request as if
            # caching wasn't enabled.
            pass


class InsecureHTTPAdapter(HTTPAdapter):
    """An HTTPAdapter that disables TLS certificate verification."""

    def cert_verify(self, conn, url, verify, cert):
        conn.cert_reqs = 'CERT_NONE'
        conn.ca_certs = None


class PipSession(requests.Session):
    """A requests.Session preconfigured for pip: pip's User-Agent,
    multi-domain basic auth, retries, response caching on secure origins,
    file:// support, and per-host TLS-verification opt-outs.
    """

    # Default timeout applied to every request unless the caller overrides it.
    timeout = None

    def __init__(self, *args, **kwargs):
        retries = kwargs.pop("retries", 0)
        cache = kwargs.pop("cache", None)
        insecure_hosts = kwargs.pop("insecure_hosts", [])

        super(PipSession, self).__init__(*args, **kwargs)

        # Attach our User Agent to the request
        self.headers["User-Agent"] = user_agent()

        # Attach our Authentication handler to the session
        self.auth = MultiDomainBasicAuth()

        # Create our urllib3.Retry instance which will allow us to customize
        # how we handle retries.
        retries = urllib3.Retry(
            # Set the total number of retries that a particular request can
            # have.
            total=retries,

            # A 503 error from PyPI typically means that the Fastly -> Origin
            # connection got interrupted in some way. A 503 error in general
            # is typically considered a transient error so we'll go ahead and
            # retry it.
            status_forcelist=[503],

            # Add a small amount of back off between failed requests in
            # order to prevent hammering the service.
            backoff_factor=0.25,
        )

        # We want to _only_ cache responses on securely fetched origins. We do
        # this because we can't validate the response of an insecurely fetched
        # origin, and we don't want someone to be able to poison the cache and
        # require manual eviction from the cache to fix it.
        if cache:
            secure_adapter = CacheControlAdapter(
                cache=SafeFileCache(cache, use_dir_lock=True),
                max_retries=retries,
            )
        else:
            secure_adapter = HTTPAdapter(max_retries=retries)

        # Our Insecure HTTPAdapter disables HTTPS validation. It does not
        # support caching (see above) so we'll use it for all http:// URLs as
        # well as any https:// host that we've marked as ignoring TLS errors
        # for.
        insecure_adapter = InsecureHTTPAdapter(max_retries=retries)

        self.mount("https://", secure_adapter)
        self.mount("http://", insecure_adapter)

        # Enable file:// urls
        self.mount("file://", LocalFSAdapter())

        # We want to use a non-validating adapter for any requests which are
        # deemed insecure.
        for host in insecure_hosts:
            self.mount("https://{0}/".format(host), insecure_adapter)

    def request(self, method, url, *args, **kwargs):
        # Allow setting a default timeout on a session
        kwargs.setdefault("timeout", self.timeout)

        # Dispatch the actual request
        return super(PipSession, self).request(method, url, *args, **kwargs)


def get_file_content(url, comes_from=None, session=None):
    """Gets the content of a file; it may be a filename, file: URL, or
    http: URL. Returns (location, content). Content is unicode.

    :param url: file path, file: URL, or http(s) URL to read.
    :param comes_from: origin of the reference (used to reject a local
        file: URL referenced from a remote requirements file).
    :param session: required PipSession used for http(s) fetches.
    :raises TypeError: if no session is supplied.
    :raises InstallationError: for a local reference from a remote file,
        or when the file cannot be opened.
    """
    if session is None:
        raise TypeError(
            "get_file_content() missing 1 required keyword argument: 'session'"
        )

    match = _scheme_re.search(url)
    if match:
        scheme = match.group(1).lower()
        if (scheme == 'file' and comes_from and
                comes_from.startswith('http')):
            raise InstallationError(
                'Requirements file %s references URL %s, which is local'
                % (comes_from, url))
        if scheme == 'file':
            path = url.split(':', 1)[1]
            path = path.replace('\\', '/')
            match = _url_slash_drive_re.match(path)
            if match:
                # Windows drive-letter form like file:///c|/path
                path = match.group(1) + ':' + path.split('|', 1)[1]
            path = urllib_parse.unquote(path)
            if path.startswith('/'):
                # collapse any run of leading slashes to a single one
                path = '/' + path.lstrip('/')
            url = path
        else:
            # FIXME: catch some errors
            resp = session.get(url)
            resp.raise_for_status()
            return resp.url, resp.text

    try:
        with open(url, 'rb') as f:
            content = auto_decode(f.read())
    except IOError as exc:
        raise InstallationError(
            'Could not open requirements file: %s' % str(exc)
        )
    return url, content


_scheme_re = re.compile(r'^(http|https|file):', re.I)
_url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I)


def is_url(name):
    """Returns true if the name looks like a URL"""
    if ':' not in name:
        return False
    scheme = name.split(':', 1)[0].lower()
    return scheme in ['http', 'https', 'file', 'ftp'] + vcs.all_schemes


def url_to_path(url):
    """
    Convert a file: URL to a path.
    """
    assert url.startswith('file:'), (
        "You can only turn file: urls into filenames (not %r)" % url)

    _, netloc, path, _, _ = urllib_parse.urlsplit(url)

    # if we have a UNC path, prepend UNC share notation
    if netloc:
        netloc = '\\\\' + netloc

    path = urllib_request.url2pathname(netloc + path)
    return path


def path_to_url(path):
    """
    Convert a path to a file: URL.  The path will be made absolute and have
    quoted path parts.
    """
    path = os.path.normpath(os.path.abspath(path))
    url = urllib_parse.urljoin('file:', urllib_request.pathname2url(path))
    return url


def is_archive_file(name):
    """Return True if `name` is a considered as an archive file."""
    ext = splitext(name)[1].lower()
    if ext in ARCHIVE_EXTENSIONS:
        return True
    return False


def unpack_vcs_link(link, location):
    # NOTE(review): _get_used_vcs_backend returns None when no backend
    # matches; callers are expected to have checked is_vcs_url() first.
    vcs_backend = _get_used_vcs_backend(link)
    vcs_backend.unpack(location)


def _get_used_vcs_backend(link):
    """Return an instantiated VCS backend for ``link``, or None (implicitly)
    when no registered backend claims the link's scheme."""
    for backend in vcs.backends:
        if link.scheme in backend.schemes:
            vcs_backend = backend(link.url)
            return vcs_backend


def is_vcs_url(link):
    return bool(_get_used_vcs_backend(link))


def is_file_url(link):
    return link.url.lower().startswith('file:')


def is_dir_url(link):
    """Return whether a file:// Link points to a directory.

    ``link`` must not have any other scheme but file://. Call is_file_url()
    first.
    """
    link_path = url_to_path(link.url_without_fragment)
    return os.path.isdir(link_path)


def _progress_indicator(iterable, *args, **kwargs):
    # Default no-op progress indicator: passes the iterable through.
    return iterable


def _download_url(resp, link, content_file, hashes):
    """Stream ``resp`` into ``content_file``, optionally showing progress
    and verifying ``hashes`` over the downloaded chunks."""
    try:
        total_length = int(resp.headers['content-length'])
    except (ValueError, KeyError, TypeError):
        total_length = 0

    cached_resp = getattr(resp, "from_cache", False)

    # Decide whether to display a progress bar/spinner: only at INFO
    # verbosity or below, never for cached responses, and only for
    # large (>40kB) or unknown-length downloads.
    if logger.getEffectiveLevel() > logging.INFO:
        show_progress = False
    elif cached_resp:
        show_progress = False
    elif total_length > (40 * 1000):
        show_progress = True
    elif not total_length:
        show_progress = True
    else:
        show_progress = False

    show_url = link.show_url

    def resp_read(chunk_size):
        try:
            # Special case for urllib3.
            for chunk in resp.raw.stream(
                    chunk_size,
                    # We use decode_content=False here because we don't
                    # want urllib3 to mess with the raw bytes we get
                    # from the server. If we decompress inside of
                    # urllib3 then we cannot verify the checksum
                    # because the checksum will be of the compressed
                    # file. This breakage will only occur if the
                    # server adds a Content-Encoding header, which
                    # depends on how the server was configured:
                    # - Some servers will notice that the file isn't a
                    #   compressible file and will leave the file alone
                    #   and with an empty Content-Encoding
                    # - Some servers will notice that the file is
                    #   already compressed and will leave the file
                    #   alone and will add a Content-Encoding: gzip
                    #   header
                    # - Some servers won't notice anything at all and
                    #   will take a file that's already been compressed
                    #   and compress it again and set the
                    #   Content-Encoding: gzip header
                    #
                    # By setting this not to decode automatically we
                    # hope to eliminate problems with the second case.
                    decode_content=False):
                yield chunk
        except AttributeError:
            # Standard file-like object.
            while True:
                chunk = resp.raw.read(chunk_size)
                if not chunk:
                    break
                yield chunk

    def written_chunks(chunks):
        # Tee each chunk to the output file while yielding it onward so
        # hash checking can consume the same stream.
        for chunk in chunks:
            content_file.write(chunk)
            yield chunk

    progress_indicator = _progress_indicator

    if link.netloc == PyPI.netloc:
        url = show_url
    else:
        url = link.url_without_fragment

    if show_progress:  # We don't show progress on cached responses
        if total_length:
            logger.info("Downloading %s (%s)", url, format_size(total_length))
            progress_indicator = DownloadProgressBar(max=total_length).iter
        else:
            logger.info("Downloading %s", url)
            progress_indicator = DownloadProgressSpinner().iter
    elif cached_resp:
        logger.info("Using cached %s", url)
    else:
        logger.info("Downloading %s", url)

    logger.debug('Downloading from URL %s', link)

    downloaded_chunks = written_chunks(
        progress_indicator(
            resp_read(CONTENT_CHUNK_SIZE),
            CONTENT_CHUNK_SIZE
        )
    )
    if hashes:
        # check_against_chunks iterates (and thereby writes) the stream.
        hashes.check_against_chunks(downloaded_chunks)
    else:
        consume(downloaded_chunks)


def _copy_file(filename, location, link):
    """Copy ``filename`` into ``location`` as ``link.filename``, prompting
    the user when the destination already exists."""
    copy = True
    download_location = os.path.join(location, link.filename)
    if os.path.exists(download_location):
        # NOTE(review): "(a)abort" below looks like a typo for "(a)bort";
        # the prompt text is left unchanged here since it is user-facing
        # runtime output.
        response = ask_path_exists(
            'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)abort' %
            display_path(download_location), ('i', 'w', 'b', 'a'))
        if response == 'i':
            copy = False
        elif response == 'w':
            logger.warning('Deleting %s', display_path(download_location))
            os.remove(download_location)
        elif response == 'b':
            dest_file = backup_dir(download_location)
            logger.warning(
                'Backing up %s to %s',
                display_path(download_location),
                display_path(dest_file),
            )
            shutil.move(download_location, dest_file)
        elif response == 'a':
            sys.exit(-1)
    if copy:
        shutil.copy(filename, download_location)
        logger.info('Saved %s', display_path(download_location))


def unpack_http_url(link, location, download_dir=None,
                    session=None, hashes=None):
    """Fetch an http(s) ``link`` (or reuse a previously downloaded copy)
    and unpack it into ``location``; optionally retain the archive in
    ``download_dir``."""
    if session is None:
        raise TypeError(
            "unpack_http_url() missing 1 required keyword argument: 'session'"
        )

    temp_dir = tempfile.mkdtemp('-unpack', 'pip-')

    # If a download dir is specified, is the file already downloaded there?
    already_downloaded_path = None
    if download_dir:
        already_downloaded_path = _check_download_dir(link,
                                                      download_dir,
                                                      hashes)

    if already_downloaded_path:
        from_path = already_downloaded_path
        content_type = mimetypes.guess_type(from_path)[0]
    else:
        # let's download to a tmp dir
        from_path, content_type = _download_http_url(link,
                                                     session,
                                                     temp_dir,
                                                     hashes)

    # unpack the archive to the build dir location. even when only downloading
    # archives, they have to be unpacked to parse dependencies
    unpack_file(from_path, location, content_type, link)

    # a download dir is specified; let's copy the archive there
    if download_dir and not already_downloaded_path:
        _copy_file(from_path, download_dir, link)

    if not already_downloaded_path:
        os.unlink(from_path)
    rmtree(temp_dir)


def unpack_file_url(link, location, download_dir=None, hashes=None):
    """Unpack link into location.

    If download_dir is provided and link points to a file, make a copy
    of the link file inside download_dir.
    """
    link_path = url_to_path(link.url_without_fragment)

    # If it's a url to a local directory
    if is_dir_url(link):
        if os.path.isdir(location):
            rmtree(location)
        shutil.copytree(link_path, location, symlinks=True)
        if download_dir:
            logger.info('Link is a directory, ignoring download_dir')
        return

    # If --require-hashes is off, `hashes` is either empty, the
    # link's embedded hash, or MissingHashes; it is required to
    # match. If --require-hashes is on, we are satisfied by any
    # hash in `hashes` matching: a URL-based or an option-based
    # one; no internet-sourced hash will be in `hashes`.
    if hashes:
        hashes.check_against_path(link_path)

    # If a download dir is specified, is the file already there and valid?
    already_downloaded_path = None
    if download_dir:
        already_downloaded_path = _check_download_dir(link,
                                                      download_dir,
                                                      hashes)

    if already_downloaded_path:
        from_path = already_downloaded_path
    else:
        from_path = link_path

    content_type = mimetypes.guess_type(from_path)[0]

    # unpack the archive to the build dir location. even when only downloading
    # archives, they have to be unpacked to parse dependencies
    unpack_file(from_path, location, content_type, link)

    # a download dir is specified and not already downloaded
    if download_dir and not already_downloaded_path:
        _copy_file(from_path, download_dir, link)


def _copy_dist_from_dir(link_path, location):
    """Copy distribution files in `link_path` to `location`.

    Invoked when user requests to install a local directory. E.g.:

        pip install .
        pip install ~/dev/git-repos/python-prompt-toolkit
    """

    # Note: This is currently VERY SLOW if you have a lot of data in the
    # directory, because it copies everything with `shutil.copytree`.
    # What it should really do is build an sdist and install that.
    # See https://github.com/pypa/pip/issues/2195

    if os.path.isdir(location):
        rmtree(location)

    # build an sdist
    setup_py = 'setup.py'
    sdist_args = [sys.executable]
    sdist_args.append('-c')
    sdist_args.append(SETUPTOOLS_SHIM % setup_py)
    sdist_args.append('sdist')
    sdist_args += ['--dist-dir', location]
    logger.info('Running setup.py sdist for %s', link_path)

    with indent_log():
        call_subprocess(sdist_args, cwd=link_path, show_stdout=False)

    # unpack sdist into `location`
    sdist = os.path.join(location, os.listdir(location)[0])
    logger.info('Unpacking sdist %s into %s', sdist, location)
    unpack_file(sdist, location, content_type=None, link=None)


class PipXmlrpcTransport(xmlrpc_client.Transport):
    """Provide a `xmlrpclib.Transport` implementation via a `PipSession`
    object.
    """

    def __init__(self, index_url, session, use_datetime=False):
        xmlrpc_client.Transport.__init__(self, use_datetime)
        index_parts = urllib_parse.urlparse(index_url)
        # Reuse the index URL's scheme for every XML-RPC call.
        self._scheme = index_parts.scheme
        self._session = session

    def request(self, host, handler, request_body, verbose=False):
        """POST an XML-RPC request through the pip session and parse the
        response; logs and re-raises HTTP errors."""
        parts = (self._scheme, host, handler, None, None, None)
        url = urllib_parse.urlunparse(parts)
        try:
            headers = {'Content-Type': 'text/xml'}
            response = self._session.post(url, data=request_body,
                                          headers=headers, stream=True)
            response.raise_for_status()
            self.verbose = verbose
            return self.parse_response(response.raw)
        except requests.HTTPError as exc:
            logger.critical(
                "HTTP error %s while getting %s",
                exc.response.status_code, url,
            )
            raise


def unpack_url(link, location, download_dir=None,
               only_download=False, session=None, hashes=None):
    """Unpack link.

    If link is a VCS link:
      if only_download, export into download_dir and ignore location
      else unpack into location
    for other types of link:
      - unpack into location
      - if download_dir, copy the file into download_dir
      - if only_download, mark location for deletion

    :param hashes: A Hashes object, one of whose embedded hashes must match,
        or HashMismatch will be raised. If the Hashes is empty, no matches are
        required, and unhashable types of requirements (like VCS ones, which
        would ordinarily raise HashUnsupported) are allowed.
    """
    # non-editable vcs urls
    if is_vcs_url(link):
        unpack_vcs_link(link, location)

    # file urls
    elif is_file_url(link):
        unpack_file_url(link, location, download_dir, hashes=hashes)

    # http urls
    else:
        if session is None:
            session = PipSession()

        unpack_http_url(
            link,
            location,
            download_dir,
            session,
            hashes=hashes
        )
    if only_download:
        write_delete_marker_file(location)


def _download_http_url(link, session, temp_dir, hashes):
    """Download link url into temp_dir using provided session"""
    target_url = link.url.split('#', 1)[0]
    try:
        resp = session.get(
            target_url,
            # We use Accept-Encoding: identity here because requests
            # defaults to accepting compressed responses. This breaks in
            # a variety of ways depending on how the server is configured.
            # - Some servers will notice that the file isn't a compressible
            #   file and will leave the file alone and with an empty
            #   Content-Encoding
            # - Some servers will notice that the file is already
            #   compressed and will leave the file alone and will add a
            #   Content-Encoding: gzip header
            # - Some servers won't notice anything at all and will take
            #   a file that's already been compressed and compress it again
            #   and set the Content-Encoding: gzip header
            # By setting this to request only the identity encoding We're
            # hoping to eliminate the third case. Hopefully there does not
            # exist a server which when given a file will notice it is
            # already compressed and that you're not asking for a
            # compressed file and will then decompress it before sending
            # because if that's the case I don't think it'll ever be
            # possible to make this work.
            headers={"Accept-Encoding": "identity"},
            stream=True,
        )
        resp.raise_for_status()
    except requests.HTTPError as exc:
        logger.critical(
            "HTTP error %s while getting %s", exc.response.status_code, link,
        )
        raise

    content_type = resp.headers.get('content-type', '')
    filename = link.filename  # fallback
    # Have a look at the Content-Disposition header for a better guess
    content_disposition = resp.headers.get('content-disposition')
    if content_disposition:
        # NOTE(review): ``type`` shadows the builtin here; kept as-is.
        type, params = cgi.parse_header(content_disposition)
        # We use ``or`` here because we don't want to use an "empty" value
        # from the filename param.
        filename = params.get('filename') or filename
    ext = splitext(filename)[1]
    if not ext:
        # No extension on the filename: fall back to the Content-Type,
        # then to the final (post-redirect) URL.
        ext = mimetypes.guess_extension(content_type)
        if ext:
            filename += ext
    if not ext and link.url != resp.url:
        ext = os.path.splitext(resp.url)[1]
        if ext:
            filename += ext
    file_path = os.path.join(temp_dir, filename)
    with open(file_path, 'wb') as content_file:
        _download_url(resp, link, content_file, hashes)
    return file_path, content_type


def _check_download_dir(link, download_dir, hashes):
    """ Check download_dir for previously downloaded file with correct hash
        If a correct file is found return its path else None
    """
    download_path = os.path.join(download_dir, link.filename)
    if os.path.exists(download_path):
        # If already downloaded, does its hash match?
        logger.info('File was already downloaded %s', download_path)
        if hashes:
            try:
                hashes.check_against_path(download_path)
            except HashMismatch:
                # Stale/corrupt file: remove it and force a re-download.
                logger.warning(
                    'Previously-downloaded file %s has bad hash. '
                    'Re-downloading.',
                    download_path
                )
                os.unlink(download_path)
                return None
        return download_path
    return None
# This is a copy of the Python logging.config.dictconfig module,
# reproduced with permission. It is provided here for backwards
# compatibility for Python versions prior to 2.7.
#
# Copyright 2009-2010 by Vinay Sajip. All Rights Reserved.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose and without fee is hereby granted,
# provided that the above copyright notice appear in all copies and that
# both that copyright notice and this permission notice appear in
# supporting documentation, and that the name of Vinay Sajip
# not be used in advertising or publicity pertaining to distribution
# of the software without specific, written prior permission.
# VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
# ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
# VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR
# ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
# IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

from __future__ import absolute_import

import logging.handlers
import re
import sys
import types

from pip._vendor import six

# flake8: noqa

# Pattern a dict key must match to be passed as a keyword argument.
IDENTIFIER = re.compile('^[a-z_][a-z0-9_]*$', re.I)


def valid_ident(s):
    """Return True if ``s`` is a valid Python identifier; raise otherwise."""
    m = IDENTIFIER.match(s)
    if not m:
        raise ValueError('Not a valid Python identifier: %r' % s)
    return True


#
# This function is defined in logging only in recent versions of Python
#
try:
    from logging import _checkLevel
except ImportError:
    def _checkLevel(level):
        # Accept either a numeric level or a known level name; return the
        # numeric value. (logging._levelNames is the Python 2 name table.)
        if isinstance(level, int):
            rv = level
        elif str(level) == level:
            if level not in logging._levelNames:
                raise ValueError('Unknown level: %r' % level)
            rv = logging._levelNames[level]
        else:
            raise TypeError('Level not an integer or a '
                            'valid string: %r' % level)
        return rv

# The ConvertingXXX classes are wrappers around standard Python containers,
# and they serve to convert any suitable values in the container. The
# conversion converts base dicts, lists and tuples to their wrapped
# equivalents, whereas strings which match a conversion format are converted
# appropriately.
#
# Each wrapper should have a configurator attribute holding the actual
# configurator to use for conversion.


class ConvertingDict(dict):
    """A converting dictionary wrapper."""

    def __getitem__(self, key):
        value = dict.__getitem__(self, key)
        result = self.configurator.convert(value)
        # If the converted value is different, save for next time
        if value is not result:
            self[key] = result
            if type(result) in (ConvertingDict, ConvertingList,
                                ConvertingTuple):
                result.parent = self
                result.key = key
        return result

    def get(self, key, default=None):
        value = dict.get(self, key, default)
        result = self.configurator.convert(value)
        # If the converted value is different, save for next time
        if value is not result:
            self[key] = result
            if type(result) in (ConvertingDict, ConvertingList,
                                ConvertingTuple):
                result.parent = self
                result.key = key
        return result

    def pop(self, key, default=None):
        value = dict.pop(self, key, default)
        result = self.configurator.convert(value)
        if value is not result:
            if type(result) in (ConvertingDict, ConvertingList,
                                ConvertingTuple):
                result.parent = self
                result.key = key
        return result


class ConvertingList(list):
    """A converting list wrapper."""

    def __getitem__(self, key):
        value = list.__getitem__(self, key)
        result = self.configurator.convert(value)
        # If the converted value is different, save for next time
        if value is not result:
            self[key] = result
            if type(result) in (ConvertingDict, ConvertingList,
                                ConvertingTuple):
                result.parent = self
                result.key = key
        return result

    def pop(self, idx=-1):
        value = list.pop(self, idx)
        result = self.configurator.convert(value)
        if value is not result:
            if type(result) in (ConvertingDict, ConvertingList,
                                ConvertingTuple):
                result.parent = self
        return result


class ConvertingTuple(tuple):
    """A converting tuple wrapper."""

    def __getitem__(self, key):
        value = tuple.__getitem__(self, key)
        result = self.configurator.convert(value)
        # Tuples are immutable, so the converted value cannot be saved back.
        if value is not result:
            if type(result) in (ConvertingDict, ConvertingList,
                                ConvertingTuple):
                result.parent = self
                result.key = key
        return result


class BaseConfigurator(object):
    """
    The configurator base class which defines some useful defaults.
    """

    CONVERT_PATTERN = re.compile(r'^(?P<prefix>[a-z]+)://(?P<suffix>.*)$')

    WORD_PATTERN = re.compile(r'^\s*(\w+)\s*')
    DOT_PATTERN = re.compile(r'^\.\s*(\w+)\s*')
    INDEX_PATTERN = re.compile(r'^\[\s*(\w+)\s*\]\s*')
    DIGIT_PATTERN = re.compile(r'^\d+$')

    # Maps conversion-prefix -> name of the converter method.
    value_converters = {
        'ext' : 'ext_convert',
        'cfg' : 'cfg_convert',
    }

    # We might want to use a different one, e.g. importlib
    importer = __import__

    def __init__(self, config):
        self.config = ConvertingDict(config)
        self.config.configurator = self

    def resolve(self, s):
        """
        Resolve strings to objects using standard import and attribute
        syntax.
        """
        name = s.split('.')
        used = name.pop(0)
        try:
            found = self.importer(used)
            for frag in name:
                used += '.' + frag
                try:
                    found = getattr(found, frag)
                except AttributeError:
                    # Attribute missing: try importing the longer dotted
                    # path, then retry the attribute lookup.
                    self.importer(used)
                    found = getattr(found, frag)
            return found
        except ImportError:
            e, tb = sys.exc_info()[1:]
            v = ValueError('Cannot resolve %r: %s' % (s, e))
            v.__cause__, v.__traceback__ = e, tb
            raise v

    def ext_convert(self, value):
        """Default converter for the ext:// protocol."""
        return self.resolve(value)

    def cfg_convert(self, value):
        """Default converter for the cfg:// protocol."""
        rest = value
        m = self.WORD_PATTERN.match(rest)
        if m is None:
            raise ValueError("Unable to convert %r" % value)
        else:
            rest = rest[m.end():]
            d = self.config[m.groups()[0]]
            # print d, rest
            while rest:
                m = self.DOT_PATTERN.match(rest)
                if m:
                    d = d[m.groups()[0]]
                else:
                    m = self.INDEX_PATTERN.match(rest)
                    if m:
                        idx = m.groups()[0]
                        if not self.DIGIT_PATTERN.match(idx):
                            d = d[idx]
                        else:
                            try:
                                n = int(idx)  # try as number first (most likely)
                                d = d[n]
                            except TypeError:
                                d = d[idx]
                if m:
                    rest = rest[m.end():]
                else:
                    raise ValueError('Unable to convert '
                                     '%r at %r' % (value, rest))
        # rest should be empty
        return d

    def convert(self, value):
        """
        Convert values to an appropriate type. dicts, lists and tuples are
        replaced by their converting alternatives. Strings are checked to
        see if they have a conversion format and are converted if they do.
        """
        if not isinstance(value, ConvertingDict) and isinstance(value, dict):
            value = ConvertingDict(value)
            value.configurator = self
        elif not isinstance(value, ConvertingList) and isinstance(value, list):
            value = ConvertingList(value)
            value.configurator = self
        elif not isinstance(value, ConvertingTuple) and\
                isinstance(value, tuple):
            value = ConvertingTuple(value)
            value.configurator = self
        elif isinstance(value, six.string_types):  # str for py3k
            m = self.CONVERT_PATTERN.match(value)
            if m:
                d = m.groupdict()
                prefix = d['prefix']
                converter = self.value_converters.get(prefix, None)
                if converter:
                    suffix = d['suffix']
                    converter = getattr(self, converter)
                    value = converter(suffix)
        return value

    def configure_custom(self, config):
        """Configure an object with a user-supplied factory."""
        c = config.pop('()')
        # Resolve a dotted-path string (or a Python 2 old-style class) to
        # the actual callable.
        if not hasattr(c, '__call__') and hasattr(types, 'ClassType') and type(c) != types.ClassType:
            c = self.resolve(c)
        props = config.pop('.', None)
        # Check for valid identifiers
        kwargs = dict((k, config[k]) for k in config if valid_ident(k))
        result = c(**kwargs)
        if props:
            for name, value in props.items():
                setattr(result, name, value)
        return result

    def as_tuple(self, value):
        """Utility function which converts lists to tuples."""
        if isinstance(value, list):
            value = tuple(value)
        return value


class DictConfigurator(BaseConfigurator):
    """
    Configure logging using a dictionary-like object to describe the
    configuration.
    """

    def configure(self):
        """Do the configuration."""

        config = self.config
        if 'version' not in config:
            raise ValueError("dictionary doesn't specify a version")
        if config['version'] != 1:
            raise ValueError("Unsupported version: %s" % config['version'])
        incremental = config.pop('incremental', False)
        EMPTY_DICT = {}
        # Hold the logging module lock for the whole (re)configuration.
        logging._acquireLock()
        try:
            if incremental:
                handlers = config.get('handlers', EMPTY_DICT)
                # incremental handler config only if handler name
                # ties in to logging._handlers (Python 2.7)
                # NOTE(review): StandardError below is Python-2-only.
                if sys.version_info[:2] == (2, 7):
                    for name in handlers:
                        if name not in logging._handlers:
                            raise ValueError('No handler found with '
                                             'name %r' % name)
                        else:
                            try:
                                handler = logging._handlers[name]
                                handler_config = handlers[name]
                                level = handler_config.get('level', None)
                                if level:
                                    handler.setLevel(_checkLevel(level))
                            except StandardError as e:
                                raise ValueError('Unable to configure handler '
                                                 '%r: %s' % (name, e))
                loggers = config.get('loggers', EMPTY_DICT)
                for name in loggers:
                    try:
                        self.configure_logger(name, loggers[name], True)
                    except StandardError as e:
                        raise ValueError('Unable to configure logger '
                                         '%r: %s' % (name, e))
                root = config.get('root', None)
                if root:
                    try:
                        self.configure_root(root, True)
                    except StandardError as e:
                        raise ValueError('Unable to configure root '
                                         'logger: %s' % e)
            else:
                disable_existing = config.pop('disable_existing_loggers', True)

                logging._handlers.clear()
                del logging._handlerList[:]

                # Do formatters first - they don't refer to anything else
                formatters = config.get('formatters', EMPTY_DICT)
                for name in formatters:
                    try:
                        formatters[name] = self.configure_formatter(
                            formatters[name])
                    except StandardError as e:
                        raise ValueError('Unable to configure '
                                         'formatter %r: %s' % (name, e))
                # Next, do filters - they don't refer to anything else, either
                filters = config.get('filters', EMPTY_DICT)
                for name in filters:
                    try:
                        filters[name] = self.configure_filter(filters[name])
                    except StandardError as e:
                        raise ValueError('Unable to configure '
                                         'filter %r: %s' % (name, e))

                # Next, do handlers - they refer to formatters and filters
                # As handlers can refer to other handlers, sort the keys
                # to allow a deterministic order of configuration
                handlers = config.get('handlers', EMPTY_DICT)
                for name in sorted(handlers):
                    try:
                        handler = self.configure_handler(handlers[name])
                        handler.name = name
                        handlers[name] = handler
                    except StandardError as e:
                        raise ValueError('Unable to configure handler '
                                         '%r: %s' % (name, e))
                # Next, do loggers - they refer to handlers and filters

                # we don't want to lose the existing loggers,
                # since other threads may have pointers to them.
                # existing is set to contain all existing loggers,
                # and as we go through the new configuration we
                # remove any which are configured. At the end,
                # what's left in existing is the set of loggers
                # which were in the previous configuration but
                # which are not in the new configuration.
                root = logging.root
                existing = list(root.manager.loggerDict)
                # The list needs to be sorted so that we can
                # avoid disabling child loggers of explicitly
                # named loggers. With a sorted list it is easier
                # to find the child loggers.
                existing.sort()
                # We'll keep the list of existing loggers
                # which are children of named loggers here...
                child_loggers = []
                # now set up the new ones...
                loggers = config.get('loggers', EMPTY_DICT)
                for name in loggers:
                    if name in existing:
                        i = existing.index(name)
                        prefixed = name + "."
                        pflen = len(prefixed)
                        num_existing = len(existing)
                        i = i + 1  # look at the entry after name
                        while (i < num_existing) and\
                                (existing[i][:pflen] == prefixed):
                            child_loggers.append(existing[i])
                            i = i + 1
                        existing.remove(name)
                    try:
                        self.configure_logger(name, loggers[name])
                    except StandardError as e:
                        raise ValueError('Unable to configure logger '
                                         '%r: %s' % (name, e))

                # Disable any old loggers. There's no point deleting
                # them as other threads may continue to hold references
                # and by disabling them, you stop them doing any logging.
                # However, don't disable children of named loggers, as that's
                # probably not what was intended by the user.
                for log in existing:
                    logger = root.manager.loggerDict[log]
                    if log in child_loggers:
                        logger.level = logging.NOTSET
                        logger.handlers = []
                        logger.propagate = True
                    elif disable_existing:
                        logger.disabled = True

                # And finally, do the root logger
                root = config.get('root', None)
                if root:
                    try:
                        self.configure_root(root)
                    except StandardError as e:
                        raise ValueError('Unable to configure root '
                                         'logger: %s' % e)
        finally:
            logging._releaseLock()

    def configure_formatter(self, config):
        """Configure a formatter from a dictionary."""
        if '()' in config:
            factory = config['()']  # for use in exception handler
            try:
                result = self.configure_custom(config)
            except TypeError as te:
                if "'format'" not in str(te):
                    raise
                # Name of parameter changed from fmt to format.
                # Retry with old name.
                # This is so that code can be used with older Python versions
                #(e.g. by Django)
                config['fmt'] = config.pop('format')
                config['()'] = factory
                result = self.configure_custom(config)
        else:
            fmt = config.get('format', None)
            dfmt = config.get('datefmt', None)
            result = logging.Formatter(fmt, dfmt)
        return result

    def configure_filter(self, config):
        """Configure a filter from a dictionary."""
        if '()' in config:
            result = self.configure_custom(config)
        else:
            name = config.get('name', '')
            result = logging.Filter(name)
        return result

    def add_filters(self, filterer, filters):
        """Add filters to a filterer from a list of names."""
        for f in filters:
            try:
                filterer.addFilter(self.config['filters'][f])
            except StandardError as e:
                raise ValueError('Unable to add filter %r: %s' % (f, e))

    def configure_handler(self, config):
        """Configure a handler from a dictionary."""
        formatter = config.pop('formatter', None)
        if formatter:
            try:
                formatter = self.config['formatters'][formatter]
            except StandardError as e:
                raise ValueError('Unable to set formatter '
                                 '%r: %s' % (formatter, e))
        level = config.pop('level', None)
        filters = config.pop('filters', None)
        if '()' in config:
            c = config.pop('()')
            # Resolve dotted-path strings (or Python 2 old-style classes).
            if not hasattr(c, '__call__') and hasattr(types, 'ClassType') and type(c) != types.ClassType:
                c = self.resolve(c)
            factory = c
        else:
            klass = self.resolve(config.pop('class'))
            # Special case for handler which refers to another handler
            if issubclass(klass, logging.handlers.MemoryHandler) and\
                    'target' in config:
                try:
                    config['target'] = self.config['handlers'][config['target']]
                except StandardError as e:
                    raise ValueError('Unable to set target handler '
                                     '%r: %s' % (config['target'], e))
            elif issubclass(klass, logging.handlers.SMTPHandler) and\
                    'mailhost' in config:
                config['mailhost'] = self.as_tuple(config['mailhost'])
            elif issubclass(klass, logging.handlers.SysLogHandler) and\
                    'address' in config:
                config['address'] = self.as_tuple(config['address'])
            factory = klass
        kwargs = dict((k, config[k]) for k in config if valid_ident(k))
        try:
            result = factory(**kwargs)
        except TypeError as te:
            if "'stream'" not in str(te):
                raise
            # The argument name changed from strm to stream
            # Retry with old name.
            # This is so that code can be used with older Python versions
            #(e.g. by Django)
            kwargs['strm'] = kwargs.pop('stream')
            result = factory(**kwargs)
        if formatter:
            result.setFormatter(formatter)
        if level is not None:
            result.setLevel(_checkLevel(level))
        if filters:
            self.add_filters(result, filters)
        return result

    def add_handlers(self, logger, handlers):
        """Add handlers to a logger from a list of names."""
        for h in handlers:
            try:
                logger.addHandler(self.config['handlers'][h])
            except StandardError as e:
                raise ValueError('Unable to add handler %r: %s' % (h, e))

    def common_logger_config(self, logger, config, incremental=False):
        """
        Perform configuration which is common to root and non-root loggers.
        """
        level = config.get('level', None)
        if level is not None:
            logger.setLevel(_checkLevel(level))
        if not incremental:
            # Remove any existing handlers
            for h in logger.handlers[:]:
                logger.removeHandler(h)
            handlers = config.get('handlers', None)
            if handlers:
                self.add_handlers(logger, handlers)
            filters = config.get('filters', None)
            if filters:
                self.add_filters(logger, filters)

    def configure_logger(self, name, config, incremental=False):
        """Configure a non-root logger from a dictionary."""
        logger = logging.getLogger(name)
        self.common_logger_config(logger, config, incremental)
        propagate = config.get('propagate', None)
        if propagate is not None:
            logger.propagate = propagate

    def configure_root(self, config, incremental=False):
        """Configure a root logger from a dictionary."""
        root = logging.getLogger()
        self.common_logger_config(root, config, incremental)


dictConfigClass = DictConfigurator


def dictConfig(config):
    """Configure logging using a dictionary."""
    dictConfigClass(config).configure()
"""Stuff that differs in different Python versions and platformdistributions."""from __future__ import absolute_import, divisionimport osimport sysfrom pip._vendor.six import text_typetry:from logging.config import dictConfig as logging_dictConfigexcept ImportError:from pip.compat.dictconfig import dictConfig as logging_dictConfigtry:from collections import OrderedDictexcept ImportError:from pip._vendor.ordereddict import OrderedDicttry:import ipaddressexcept ImportError:try:from pip._vendor import ipaddressexcept ImportError:import ipaddr as ipaddressipaddress.ip_address = ipaddress.IPAddressipaddress.ip_network = ipaddress.IPNetworktry:import sysconfigdef get_stdlib():paths = [sysconfig.get_path("stdlib"),sysconfig.get_path("platstdlib"),]return set(filter(bool, paths))except ImportError:from distutils import sysconfigdef get_stdlib():paths = [sysconfig.get_python_lib(standard_lib=True),sysconfig.get_python_lib(standard_lib=True, plat_specific=True),]return set(filter(bool, paths))__all__ = ["logging_dictConfig", "ipaddress", "uses_pycache", "console_to_str","native_str", "get_path_uid", "stdlib_pkgs", "WINDOWS", "samefile","OrderedDict",]if sys.version_info >= (3, 4):uses_pycache = Truefrom importlib.util import cache_from_sourceelse:import impuses_pycache = hasattr(imp, 'cache_from_source')if uses_pycache:cache_from_source = imp.cache_from_sourceelse:cache_from_source = Noneif sys.version_info >= (3,):def console_to_str(s):try:return s.decode(sys.__stdout__.encoding)except UnicodeDecodeError:return s.decode('utf_8')def native_str(s, replace=False):if isinstance(s, bytes):return s.decode('utf-8', 'replace' if replace else 'strict')return selse:def console_to_str(s):return sdef native_str(s, replace=False):# Replace is ignored -- unicode to UTF-8 can't failif isinstance(s, text_type):return s.encode('utf-8')return sdef total_seconds(td):if hasattr(td, "total_seconds"):return td.total_seconds()else:val = td.microseconds + (td.seconds + td.days * 24 * 3600) * 10 ** 
6return val / 10 ** 6def get_path_uid(path):"""Return path's uid.Does not follow symlinks:https://github.com/pypa/pip/pull/935#discussion_r5307003Placed this function in compat due to differences on AIX andJython, that should eventually go away.:raises OSError: When path is a symlink or can't be read."""if hasattr(os, 'O_NOFOLLOW'):fd = os.open(path, os.O_RDONLY | os.O_NOFOLLOW)file_uid = os.fstat(fd).st_uidos.close(fd)else: # AIX and Jython# WARNING: time of check vulnerability, but best we can do w/o NOFOLLOWif not os.path.islink(path):# older versions of Jython don't have `os.fstat`file_uid = os.stat(path).st_uidelse:# raise OSError for parity with os.O_NOFOLLOW aboveraise OSError("%s is a symlink; Will not return uid for symlinks" % path)return file_uiddef expanduser(path):"""Expand ~ and ~user constructions.Includes a workaround for http://bugs.python.org/issue14768"""expanded = os.path.expanduser(path)if path.startswith('~/') and expanded.startswith('//'):expanded = expanded[1:]return expanded# packages in the stdlib that may have installation metadata, but should not be# considered 'installed'. this theoretically could be determined based on# dist.location (py27:`sysconfig.get_paths()['stdlib']`,# py26:sysconfig.get_config_vars('LIBDEST')), but fear platform variation may# make this ineffective, so hard-codingstdlib_pkgs = ('python', 'wsgiref')if sys.version_info >= (2, 7):stdlib_pkgs += ('argparse',)# windows detection, covers cpython and ironpythonWINDOWS = (sys.platform.startswith("win") or(sys.platform == 'cli' and os.name == 'nt'))def samefile(file1, file2):"""Provide an alternative for os.path.samefile on Windows/Python2"""if hasattr(os.path, 'samefile'):return os.path.samefile(file1, file2)else:path1 = os.path.normcase(os.path.abspath(file1))path2 = os.path.normcase(os.path.abspath(file2))return path1 == path2
# -*- coding: utf-8 -*-
from __future__ import absolute_import

import logging
import os
import warnings

from pip.basecommand import RequirementCommand
from pip.exceptions import CommandError, PreviousBuildDirError
from pip.req import RequirementSet
from pip.utils import import_or_raise
from pip.utils.build import BuildDirectory
from pip.utils.deprecation import RemovedInPip10Warning
from pip.wheel import WheelCache, WheelBuilder
from pip import cmdoptions


logger = logging.getLogger(__name__)


class WheelCommand(RequirementCommand):
    """
    Build Wheel archives for your requirements and dependencies.

    Wheel is a built-package format, and offers the advantage of not
    recompiling your software during every install. For more details, see the
    wheel docs: https://wheel.readthedocs.io/en/latest/

    Requirements: setuptools>=0.8, and wheel.

    'pip wheel' uses the bdist_wheel setuptools extension from the wheel
    package to build individual wheels.
    """

    name = 'wheel'
    usage = """
      %prog [options] <requirement specifier> ...
      %prog [options] -r <requirements file> ...
      %prog [options] [-e] <vcs project url> ...
      %prog [options] [-e] <local project path> ...
      %prog [options] <archive url/path> ..."""
    summary = 'Build wheels from your requirements.'

    def __init__(self, *args, **kw):
        """Register the 'pip wheel' command-line options."""
        super(WheelCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        cmd_opts.add_option(
            '-w', '--wheel-dir',
            dest='wheel_dir',
            metavar='dir',
            default=os.curdir,
            help=("Build wheels into <dir>, where the default is the "
                  "current working directory."),
        )
        cmd_opts.add_option(cmdoptions.use_wheel())
        cmd_opts.add_option(cmdoptions.no_use_wheel())
        cmd_opts.add_option(cmdoptions.no_binary())
        cmd_opts.add_option(cmdoptions.only_binary())
        cmd_opts.add_option(
            '--build-option',
            dest='build_options',
            metavar='options',
            action='append',
            help="Extra arguments to be supplied to 'setup.py bdist_wheel'.")
        cmd_opts.add_option(cmdoptions.constraints())
        cmd_opts.add_option(cmdoptions.editable())
        cmd_opts.add_option(cmdoptions.requirements())
        cmd_opts.add_option(cmdoptions.src())
        cmd_opts.add_option(cmdoptions.ignore_requires_python())
        cmd_opts.add_option(cmdoptions.no_deps())
        cmd_opts.add_option(cmdoptions.build_dir())
        cmd_opts.add_option(
            '--global-option',
            dest='global_options',
            action='append',
            metavar='options',
            help="Extra global options to be supplied to the setup.py "
                 "call before the 'bdist_wheel' command.")
        cmd_opts.add_option(
            '--pre',
            action='store_true',
            default=False,
            help=("Include pre-release and development versions. By default, "
                  "pip only finds stable versions."),
        )
        cmd_opts.add_option(cmdoptions.no_clean())
        cmd_opts.add_option(cmdoptions.require_hashes())

        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group,
            self.parser,
        )

        # Index options go first so they render above the command options.
        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)

    def check_required_packages(self):
        """Raise CommandError unless 'wheel' and setuptools>=0.8 are present."""
        import_or_raise(
            'wheel.bdist_wheel', CommandError,
            "'pip wheel' requires the 'wheel' package. To fix this, run: "
            "pip install wheel")
        pkg_resources = import_or_raise(
            'pkg_resources', CommandError,
            "'pip wheel' requires setuptools >= 0.8 for dist-info support."
            " To fix this, run: pip install --upgrade setuptools")
        # DistInfoDistribution only exists in setuptools >= 0.8; use it as a
        # capability probe rather than comparing version strings.
        if not hasattr(pkg_resources, 'DistInfoDistribution'):
            raise CommandError(
                "'pip wheel' requires setuptools >= 0.8 for dist-info "
                "support. To fix this, run: pip install --upgrade "
                "setuptools")

    def run(self, options, args):
        """Resolve the requested requirements and build wheels for them."""
        self.check_required_packages()
        cmdoptions.resolve_wheel_no_use_binary(options)
        cmdoptions.check_install_build_global(options)

        # The three --allow-* options are accepted but no longer do anything.
        if options.allow_external:
            warnings.warn(
                "--allow-external has been deprecated and will be removed in "
                "the future. Due to changes in the repository protocol, it no "
                "longer has any effect.",
                RemovedInPip10Warning,
            )

        if options.allow_all_external:
            warnings.warn(
                "--allow-all-external has been deprecated and will be removed "
                "in the future. Due to changes in the repository protocol, it "
                "no longer has any effect.",
                RemovedInPip10Warning,
            )

        if options.allow_unverified:
            warnings.warn(
                "--allow-unverified has been deprecated and will be removed "
                "in the future. Due to changes in the repository protocol, it "
                "no longer has any effect.",
                RemovedInPip10Warning,
            )

        index_urls = [options.index_url] + options.extra_index_urls
        if options.no_index:
            logger.debug('Ignoring indexes: %s', ','.join(index_urls))
            index_urls = []

        if options.build_dir:
            options.build_dir = os.path.abspath(options.build_dir)

        options.src_dir = os.path.abspath(options.src_dir)

        with self._build_session(options) as session:
            finder = self._build_package_finder(options, session)
            # Only auto-delete the build dir when the user supplied neither
            # --no-clean nor an explicit --build directory.
            build_delete = (not (options.no_clean or options.build_dir))
            wheel_cache = WheelCache(options.cache_dir, options.format_control)
            with BuildDirectory(options.build_dir,
                                delete=build_delete) as build_dir:
                requirement_set = RequirementSet(
                    build_dir=build_dir,
                    src_dir=options.src_dir,
                    download_dir=None,
                    ignore_dependencies=options.ignore_dependencies,
                    ignore_installed=True,
                    ignore_requires_python=options.ignore_requires_python,
                    isolated=options.isolated_mode,
                    session=session,
                    wheel_cache=wheel_cache,
                    wheel_download_dir=options.wheel_dir,
                    require_hashes=options.require_hashes
                )

                self.populate_requirement_set(
                    requirement_set, args, options, finder, session, self.name,
                    wheel_cache
                )

                if not requirement_set.has_requirements:
                    return

                try:
                    # build wheels
                    wb = WheelBuilder(
                        requirement_set,
                        finder,
                        build_options=options.build_options or [],
                        global_options=options.global_options or [],
                    )
                    if not wb.build():
                        raise CommandError(
                            "Failed to build one or more wheels")
                except PreviousBuildDirError:
                    # Keep the pre-existing build dir for inspection.
                    options.no_clean = True
                    raise
                finally:
                    if not options.no_clean:
                        requirement_set.cleanup_files()
from __future__ import absolute_import

import pip
from pip.wheel import WheelCache
from pip.req import InstallRequirement, RequirementSet, parse_requirements
from pip.basecommand import Command
from pip.exceptions import InstallationError


class UninstallCommand(Command):
    """
    Uninstall packages.

    pip is able to uninstall most installed packages. Known exceptions are:

    - Pure distutils packages installed with ``python setup.py install``, which
      leave behind no metadata to determine what files were installed.
    - Script wrappers installed by ``python setup.py develop``.
    """
    name = 'uninstall'
    usage = """
      %prog [options] <package> ...
      %prog [options] -r <requirements file> ..."""
    summary = 'Uninstall packages.'

    def __init__(self, *args, **kw):
        super(UninstallCommand, self).__init__(*args, **kw)
        # -r/--requirement: uninstall everything listed in requirement files.
        self.cmd_opts.add_option(
            '-r', '--requirement',
            dest='requirements',
            action='append',
            default=[],
            metavar='file',
            help='Uninstall all the packages listed in the given requirements '
            'file. This option can be used multiple times.',
        )
        # -y/--yes: suppress the interactive confirmation prompt.
        self.cmd_opts.add_option(
            '-y', '--yes',
            dest='yes',
            action='store_true',
            help="Don't ask for confirmation of uninstall deletions.")

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        """Collect the requested requirements, then uninstall them."""
        with self._build_session(options) as session:
            # Uninstall never builds anything, so an empty format control
            # is sufficient for the wheel cache.
            fmt_control = pip.index.FormatControl(set(), set())
            cache = WheelCache(options.cache_dir, fmt_control)
            req_set = RequirementSet(
                build_dir=None,
                src_dir=None,
                download_dir=None,
                isolated=options.isolated_mode,
                session=session,
                wheel_cache=cache,
            )

            # Requirements given directly on the command line.
            for spec in args:
                req_set.add_requirement(
                    InstallRequirement.from_line(
                        spec, isolated=options.isolated_mode,
                        wheel_cache=cache
                    )
                )

            # Requirements pulled in via -r files.
            for req_file in options.requirements:
                parsed = parse_requirements(
                    req_file,
                    options=options,
                    session=session,
                    wheel_cache=cache)
                for parsed_req in parsed:
                    req_set.add_requirement(parsed_req)

            if not req_set.has_requirements:
                raise InstallationError(
                    'You must give at least one requirement to %(name)s (see '
                    '"pip help %(name)s")' % {'name': self.name}
                )

            req_set.uninstall(auto_confirm=options.yes)
from __future__ import absolute_importfrom email.parser import FeedParserimport loggingimport osfrom pip.basecommand import Commandfrom pip.status_codes import SUCCESS, ERRORfrom pip._vendor import pkg_resourcesfrom pip._vendor.packaging.utils import canonicalize_namelogger = logging.getLogger(__name__)class ShowCommand(Command):"""Show information about one or more installed packages."""name = 'show'usage = """%prog [options] <package> ..."""summary = 'Show information about installed packages.'def __init__(self, *args, **kw):super(ShowCommand, self).__init__(*args, **kw)self.cmd_opts.add_option('-f', '--files',dest='files',action='store_true',default=False,help='Show the full list of installed files for each package.')self.parser.insert_option_group(0, self.cmd_opts)def run(self, options, args):if not args:logger.warning('ERROR: Please provide a package name or names.')return ERRORquery = argsresults = search_packages_info(query)if not print_results(results, list_files=options.files, verbose=options.verbose):return ERRORreturn SUCCESSdef search_packages_info(query):"""Gather details from installed distributions. Print distribution name,version, location, and installed files. 
Installed files requires apip generated 'installed-files.txt' in the distributions '.egg-info'directory."""installed = {}for p in pkg_resources.working_set:installed[canonicalize_name(p.project_name)] = pquery_names = [canonicalize_name(name) for name in query]for dist in [installed[pkg] for pkg in query_names if pkg in installed]:package = {'name': dist.project_name,'version': dist.version,'location': dist.location,'requires': [dep.project_name for dep in dist.requires()],}file_list = Nonemetadata = Noneif isinstance(dist, pkg_resources.DistInfoDistribution):# RECORDs should be part of .dist-info metadatasif dist.has_metadata('RECORD'):lines = dist.get_metadata_lines('RECORD')paths = [l.split(',')[0] for l in lines]paths = [os.path.join(dist.location, p) for p in paths]file_list = [os.path.relpath(p, dist.location) for p in paths]if dist.has_metadata('METADATA'):metadata = dist.get_metadata('METADATA')else:# Otherwise use pip's log for .egg-info'sif dist.has_metadata('installed-files.txt'):paths = dist.get_metadata_lines('installed-files.txt')paths = [os.path.join(dist.egg_info, p) for p in paths]file_list = [os.path.relpath(p, dist.location) for p in paths]if dist.has_metadata('PKG-INFO'):metadata = dist.get_metadata('PKG-INFO')if dist.has_metadata('entry_points.txt'):entry_points = dist.get_metadata_lines('entry_points.txt')package['entry_points'] = entry_pointsif dist.has_metadata('INSTALLER'):for line in dist.get_metadata_lines('INSTALLER'):if line.strip():package['installer'] = line.strip()break# @todo: Should pkg_resources.Distribution have a# `get_pkg_info` method?feed_parser = FeedParser()feed_parser.feed(metadata)pkg_info_dict = feed_parser.close()for key in ('metadata-version', 'summary','home-page', 'author', 'author-email', 'license'):package[key] = pkg_info_dict.get(key)# It looks like FeedParser cannot deal with repeated headersclassifiers = []for line in metadata.splitlines():if line.startswith('Classifier: 
'):classifiers.append(line[len('Classifier: '):])package['classifiers'] = classifiersif file_list:package['files'] = sorted(file_list)yield packagedef print_results(distributions, list_files=False, verbose=False):"""Print the informations from installed distributions found."""results_printed = Falsefor i, dist in enumerate(distributions):results_printed = Trueif i > 0:logger.info("---")logger.info("Name: %s", dist.get('name', ''))logger.info("Version: %s", dist.get('version', ''))logger.info("Summary: %s", dist.get('summary', ''))logger.info("Home-page: %s", dist.get('home-page', ''))logger.info("Author: %s", dist.get('author', ''))logger.info("Author-email: %s", dist.get('author-email', ''))logger.info("License: %s", dist.get('license', ''))logger.info("Location: %s", dist.get('location', ''))logger.info("Requires: %s", ', '.join(dist.get('requires', [])))if verbose:logger.info("Metadata-Version: %s",dist.get('metadata-version', ''))logger.info("Installer: %s", dist.get('installer', ''))logger.info("Classifiers:")for classifier in dist.get('classifiers', []):logger.info(" %s", classifier)logger.info("Entry-points:")for entry in dist.get('entry_points', []):logger.info(" %s", entry.strip())if list_files:logger.info("Files:")for line in dist.get('files', []):logger.info(" %s", line.strip())if "files" not in dist:logger.info("Cannot locate installed-files.txt")return results_printed
from __future__ import absolute_import

import logging
import sys
import textwrap

from pip.basecommand import Command, SUCCESS
from pip.compat import OrderedDict
from pip.download import PipXmlrpcTransport
from pip.models import PyPI
from pip.utils import get_terminal_size
from pip.utils.logging import indent_log
from pip.exceptions import CommandError
from pip.status_codes import NO_MATCHES_FOUND
from pip._vendor.packaging.version import parse as parse_version
from pip._vendor import pkg_resources
from pip._vendor.six.moves import xmlrpc_client

logger = logging.getLogger(__name__)


class SearchCommand(Command):
    """Search for PyPI packages whose name or summary contains <query>."""
    name = 'search'
    usage = """
      %prog [options] <query>"""
    summary = 'Search PyPI for packages.'

    def __init__(self, *args, **kw):
        super(SearchCommand, self).__init__(*args, **kw)
        self.cmd_opts.add_option(
            '-i', '--index',
            dest='index',
            metavar='URL',
            default=PyPI.pypi_url,
            help='Base URL of Python Package Index (default %default)')

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        """Query the index, print matches; NO_MATCHES_FOUND when empty."""
        if not args:
            raise CommandError('Missing required argument (search query).')
        query = args
        pypi_hits = self.search(query, options)
        hits = transform_hits(pypi_hits)

        # Only wrap output to the terminal width when attached to a tty.
        terminal_width = None
        if sys.stdout.isatty():
            terminal_width = get_terminal_size()[0]

        print_results(hits, terminal_width=terminal_width)
        if pypi_hits:
            return SUCCESS
        return NO_MATCHES_FOUND

    def search(self, query, options):
        """Run the XML-RPC search against the configured index."""
        index_url = options.index
        with self._build_session(options) as session:
            transport = PipXmlrpcTransport(index_url, session)
            pypi = xmlrpc_client.ServerProxy(index_url, transport)
            hits = pypi.search({'name': query, 'summary': query}, 'or')
            return hits


def transform_hits(hits):
    """
    The list from pypi is really a list of versions. We want a list of
    packages with the list of versions stored inline. This converts the
    list from pypi into one we can use.
    """
    packages = OrderedDict()
    for hit in hits:
        name = hit['name']
        summary = hit['summary']
        version = hit['version']

        if name not in packages.keys():
            packages[name] = {
                'name': name,
                'summary': summary,
                'versions': [version],
            }
        else:
            packages[name]['versions'].append(version)

            # if this is the highest version, replace summary and score
            if version == highest_version(packages[name]['versions']):
                packages[name]['summary'] = summary

    return list(packages.values())


def print_results(hits, name_column_width=None, terminal_width=None):
    """Log one formatted line per hit; flags already-installed packages."""
    if not hits:
        return
    if name_column_width is None:
        # Widest "name (version)" plus padding determines the column width.
        name_column_width = max([
            len(hit['name']) + len(hit.get('versions', ['-'])[-1])
            for hit in hits
        ]) + 4

    installed_packages = [p.project_name for p in pkg_resources.working_set]
    for hit in hits:
        name = hit['name']
        summary = hit['summary'] or ''
        # NOTE(review): this takes the last-appended version, not necessarily
        # the highest one — confirm against transform_hits ordering.
        version = hit.get('versions', ['-'])[-1]
        if terminal_width is not None:
            target_width = terminal_width - name_column_width - 5
            if target_width > 10:
                # wrap and indent summary to fit terminal
                summary = textwrap.wrap(summary, target_width)
                summary = ('\n' + ' ' * (name_column_width + 3)).join(summary)

        line = '%-*s - %s' % (name_column_width,
                              '%s (%s)' % (name, version), summary)
        try:
            logger.info(line)
            if name in installed_packages:
                dist = pkg_resources.get_distribution(name)
                with indent_log():
                    latest = highest_version(hit['versions'])
                    if dist.version == latest:
                        logger.info('INSTALLED: %s (latest)', dist.version)
                    else:
                        logger.info('INSTALLED: %s', dist.version)
                        logger.info('LATEST: %s', latest)
        except UnicodeEncodeError:
            # Console may not be able to encode the package text; skip it.
            pass


def highest_version(versions):
    """Return the max of ``versions`` under PEP 440 version ordering."""
    return max(versions, key=parse_version)
from __future__ import absolute_import

import json
import logging
import warnings
try:
    from itertools import zip_longest
except ImportError:
    # Python 2 spelling.
    from itertools import izip_longest as zip_longest

from pip._vendor import six

from pip.basecommand import Command
from pip.exceptions import CommandError
from pip.index import PackageFinder
from pip.utils import (
    get_installed_distributions, dist_is_editable)
from pip.utils.deprecation import RemovedInPip10Warning
from pip.cmdoptions import make_option_group, index_group

logger = logging.getLogger(__name__)


class ListCommand(Command):
    """
    List installed packages, including editables.

    Packages are listed in a case-insensitive sorted order.
    """
    name = 'list'
    usage = """
      %prog [options]"""
    summary = 'List installed packages.'

    def __init__(self, *args, **kw):
        """Register the 'pip list' command-line options."""
        super(ListCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        cmd_opts.add_option(
            '-o', '--outdated',
            action='store_true',
            default=False,
            help='List outdated packages')
        cmd_opts.add_option(
            '-u', '--uptodate',
            action='store_true',
            default=False,
            help='List uptodate packages')
        cmd_opts.add_option(
            '-e', '--editable',
            action='store_true',
            default=False,
            help='List editable projects.')
        cmd_opts.add_option(
            '-l', '--local',
            action='store_true',
            default=False,
            help=('If in a virtualenv that has global access, do not list '
                  'globally-installed packages.'),
        )
        self.cmd_opts.add_option(
            '--user',
            dest='user',
            action='store_true',
            default=False,
            help='Only output packages installed in user-site.')
        cmd_opts.add_option(
            '--pre',
            action='store_true',
            default=False,
            help=("Include pre-release and development versions. By default, "
                  "pip only finds stable versions."),
        )
        cmd_opts.add_option(
            '--format',
            action='store',
            dest='list_format',
            choices=('legacy', 'columns', 'freeze', 'json'),
            help="Select the output format among: legacy (default), columns, "
                 "freeze or json.",
        )
        cmd_opts.add_option(
            '--not-required',
            action='store_true',
            dest='not_required',
            help="List packages that are not dependencies of "
                 "installed packages.",
        )

        index_opts = make_option_group(index_group, self.parser)

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)

    def _build_package_finder(self, options, index_urls, session):
        """
        Create a package finder appropriate to this list command.
        """
        return PackageFinder(
            find_links=options.find_links,
            index_urls=index_urls,
            allow_all_prereleases=options.pre,
            trusted_hosts=options.trusted_hosts,
            process_dependency_links=options.process_dependency_links,
            session=session,
        )

    def run(self, options, args):
        """Gather, filter and print the installed distributions."""
        # The --allow-* options are accepted but no longer do anything.
        if options.allow_external:
            warnings.warn(
                "--allow-external has been deprecated and will be removed in "
                "the future. Due to changes in the repository protocol, it no "
                "longer has any effect.",
                RemovedInPip10Warning,
            )

        if options.allow_all_external:
            warnings.warn(
                "--allow-all-external has been deprecated and will be removed "
                "in the future. Due to changes in the repository protocol, it "
                "no longer has any effect.",
                RemovedInPip10Warning,
            )

        if options.allow_unverified:
            warnings.warn(
                "--allow-unverified has been deprecated and will be removed "
                "in the future. Due to changes in the repository protocol, it "
                "no longer has any effect.",
                RemovedInPip10Warning,
            )

        if options.list_format is None:
            warnings.warn(
                "The default format will switch to columns in the future. "
                "You can use --format=(legacy|columns) (or define a "
                "format=(legacy|columns) in your pip.conf under the [list] "
                "section) to disable this warning.",
                RemovedInPip10Warning,
            )

        if options.outdated and options.uptodate:
            raise CommandError(
                "Options --outdated and --uptodate cannot be combined.")

        packages = get_installed_distributions(
            local_only=options.local,
            user_only=options.user,
            editables_only=options.editable,
        )

        if options.outdated:
            packages = self.get_outdated(packages, options)
        elif options.uptodate:
            packages = self.get_uptodate(packages, options)

        if options.not_required:
            packages = self.get_not_required(packages, options)

        self.output_package_listing(packages, options)

    def get_outdated(self, packages, options):
        """Return packages whose latest index version exceeds the installed."""
        return [
            dist for dist in self.iter_packages_latest_infos(packages, options)
            if dist.latest_version > dist.parsed_version
        ]

    def get_uptodate(self, packages, options):
        """Return packages whose installed version matches the index latest."""
        return [
            dist for dist in self.iter_packages_latest_infos(packages, options)
            if dist.latest_version == dist.parsed_version
        ]

    def get_not_required(self, packages, options):
        """Return packages that no other installed package depends on."""
        dep_keys = set()
        for dist in packages:
            dep_keys.update(requirement.key for requirement in dist.requires())
        return set(pkg for pkg in packages if pkg.key not in dep_keys)

    def iter_packages_latest_infos(self, packages, options):
        """Yield each package annotated with its latest index version/type."""
        index_urls = [options.index_url] + options.extra_index_urls
        if options.no_index:
            logger.debug('Ignoring indexes: %s', ','.join(index_urls))
            index_urls = []

        dependency_links = []
        for dist in packages:
            if dist.has_metadata('dependency_links.txt'):
                dependency_links.extend(
                    dist.get_metadata_lines('dependency_links.txt'),
                )

        with self._build_session(options) as session:
            finder = self._build_package_finder(options, index_urls, session)
            finder.add_dependency_links(dependency_links)

            for dist in packages:
                typ = 'unknown'
                all_candidates = finder.find_all_candidates(dist.key)
                if not options.pre:
                    # Remove prereleases
                    all_candidates = [candidate for candidate in all_candidates
                                      if not candidate.version.is_prerelease]

                if not all_candidates:
                    # Nothing on the index for this package; skip it.
                    continue
                best_candidate = max(all_candidates,
                                     key=finder._candidate_sort_key)
                remote_version = best_candidate.version
                if best_candidate.location.is_wheel:
                    typ = 'wheel'
                else:
                    typ = 'sdist'
                # This is dirty but makes the rest of the code much cleaner
                dist.latest_version = remote_version
                dist.latest_filetype = typ
                yield dist

    def output_legacy(self, dist):
        """Format one package in the legacy 'name (version[, location])' form."""
        if dist_is_editable(dist):
            return '%s (%s, %s)' % (
                dist.project_name,
                dist.version,
                dist.location,
            )
        else:
            return '%s (%s)' % (dist.project_name, dist.version)

    def output_legacy_latest(self, dist):
        """Legacy form plus the latest available version and file type."""
        return '%s - Latest: %s [%s]' % (
            self.output_legacy(dist),
            dist.latest_version,
            dist.latest_filetype,
        )

    def output_package_listing(self, packages, options):
        """Dispatch to the requested --format renderer."""
        packages = sorted(
            packages,
            key=lambda dist: dist.project_name.lower(),
        )
        if options.list_format == 'columns' and packages:
            data, header = format_for_columns(packages, options)
            self.output_package_listing_columns(data, header)
        elif options.list_format == 'freeze':
            for dist in packages:
                logger.info("%s==%s", dist.project_name, dist.version)
        elif options.list_format == 'json':
            logger.info(format_for_json(packages, options))
        else:  # legacy
            for dist in packages:
                if options.outdated:
                    logger.info(self.output_legacy_latest(dist))
                else:
                    logger.info(self.output_legacy(dist))

    def output_package_listing_columns(self, data, header):
        """Render rows as aligned columns with a dashed separator line."""
        # insert the header first: we need to know the size of column names
        if len(data) > 0:
            data.insert(0, header)

        pkg_strings, sizes = tabulate(data)

        # Create and add a separator.
        if len(data) > 0:
            pkg_strings.insert(1, " ".join(map(lambda x: '-' * x, sizes)))

        for val in pkg_strings:
            logger.info(val)


def tabulate(vals):
    """Left-justify rows into columns; return (lines, column widths)."""
    # From pfmoore on GitHub:
    # https://github.com/pypa/pip/issues/3651#issuecomment-216932564
    assert len(vals) > 0

    sizes = [0] * max(len(x) for x in vals)
    for row in vals:
        sizes = [max(s, len(str(c))) for s, c in zip_longest(sizes, row)]

    result = []
    for row in vals:
        display = " ".join([str(c).ljust(s) if c is not None else ''
                            for s, c in zip_longest(sizes, row)])
        result.append(display)

    return result, sizes


def format_for_columns(pkgs, options):
    """
    Convert the package data into something usable
    by output_package_listing_columns.
    """
    running_outdated = options.outdated
    # Adjust the header for the `pip list --outdated` case.
    if running_outdated:
        header = ["Package", "Version", "Latest", "Type"]
    else:
        header = ["Package", "Version"]

    data = []
    if any(dist_is_editable(x) for x in pkgs):
        header.append("Location")

    for proj in pkgs:
        # if we're working on the 'outdated' list, separate out the
        # latest_version and type
        row = [proj.project_name, proj.version]

        if running_outdated:
            row.append(proj.latest_version)
            row.append(proj.latest_filetype)

        if dist_is_editable(proj):
            row.append(proj.location)

        data.append(row)

    return data, header


def format_for_json(packages, options):
    """Serialize the package listing (plus latest info if --outdated)."""
    data = []
    for dist in packages:
        info = {
            'name': dist.project_name,
            'version': six.text_type(dist.version),
        }
        if options.outdated:
            info['latest_version'] = six.text_type(dist.latest_version)
            info['latest_filetype'] = dist.latest_filetype
        data.append(info)
    return json.dumps(data)
"""Implementation of the ``pip install`` command."""
from __future__ import absolute_import

import logging
import operator
import os
import tempfile
import shutil
import warnings

# ``wheel`` is optional: when absent, pip skips auto-building wheels below.
try:
    import wheel
except ImportError:
    wheel = None

from pip.req import RequirementSet
from pip.basecommand import RequirementCommand
from pip.locations import virtualenv_no_global, distutils_scheme
from pip.exceptions import (
    InstallationError, CommandError, PreviousBuildDirError,
)
from pip import cmdoptions
from pip.utils import ensure_dir, get_installed_version
from pip.utils.build import BuildDirectory
from pip.utils.deprecation import RemovedInPip10Warning
from pip.utils.filesystem import check_path_owner
from pip.wheel import WheelCache, WheelBuilder


logger = logging.getLogger(__name__)


class InstallCommand(RequirementCommand):
    """
    Install packages from:

    - PyPI (and other indexes) using requirement specifiers.
    - VCS project urls.
    - Local project directories.
    - Local or remote source archives.

    pip also supports installing from "requirements files", which provide
    an easy way to specify a whole environment to be installed.
    """
    name = 'install'

    usage = """
      %prog [options] <requirement specifier> [package-index-options] ...
      %prog [options] -r <requirements file> [package-index-options] ...
      %prog [options] [-e] <vcs project url> ...
      %prog [options] [-e] <local project path> ...
      %prog [options] <archive url/path> ..."""

    summary = 'Install packages.'

    def __init__(self, *args, **kw):
        # Register all command-line options for ``pip install``.
        super(InstallCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        cmd_opts.add_option(cmdoptions.constraints())
        cmd_opts.add_option(cmdoptions.editable())
        cmd_opts.add_option(cmdoptions.requirements())
        cmd_opts.add_option(cmdoptions.build_dir())

        cmd_opts.add_option(
            '-t', '--target',
            dest='target_dir',
            metavar='dir',
            default=None,
            help='Install packages into <dir>. '
                 'By default this will not replace existing files/folders in '
                 '<dir>. Use --upgrade to replace existing packages in <dir> '
                 'with new versions.')

        cmd_opts.add_option(
            '-d', '--download', '--download-dir', '--download-directory',
            dest='download_dir',
            metavar='dir',
            default=None,
            help=("Download packages into <dir> instead of installing them, "
                  "regardless of what's already installed."),
        )

        cmd_opts.add_option(cmdoptions.src())

        cmd_opts.add_option(
            '-U', '--upgrade',
            dest='upgrade',
            action='store_true',
            help='Upgrade all specified packages to the newest available '
                 'version. The handling of dependencies depends on the '
                 'upgrade-strategy used.')

        cmd_opts.add_option(
            '--upgrade-strategy',
            dest='upgrade_strategy',
            default='eager',
            choices=['only-if-needed', 'eager'],
            help='Determines how dependency upgrading should be handled. '
                 '"eager" - dependencies are upgraded regardless of '
                 'whether the currently installed version satisfies the '
                 'requirements of the upgraded package(s). '
                 '"only-if-needed" - are upgraded only when they do not '
                 'satisfy the requirements of the upgraded package(s).')

        cmd_opts.add_option(
            '--force-reinstall',
            dest='force_reinstall',
            action='store_true',
            help='When upgrading, reinstall all packages even if they are '
                 'already up-to-date.')

        cmd_opts.add_option(
            '-I', '--ignore-installed',
            dest='ignore_installed',
            action='store_true',
            help='Ignore the installed packages (reinstalling instead).')

        cmd_opts.add_option(cmdoptions.ignore_requires_python())
        cmd_opts.add_option(cmdoptions.no_deps())

        cmd_opts.add_option(cmdoptions.install_options())
        cmd_opts.add_option(cmdoptions.global_options())

        # NOTE(review): the "\P" below relies on Python keeping unknown
        # escapes literal; "\\P" would be the explicit spelling.
        cmd_opts.add_option(
            '--user',
            dest='use_user_site',
            action='store_true',
            help="Install to the Python user install directory for your "
                 "platform. Typically ~/.local/, or %APPDATA%\Python on "
                 "Windows. (See the Python documentation for site.USER_BASE "
                 "for full details.)")

        cmd_opts.add_option(
            '--egg',
            dest='as_egg',
            action='store_true',
            help="Install packages as eggs, not 'flat', like pip normally "
                 "does. This option is not about installing *from* eggs. "
                 "(WARNING: Because this option overrides pip's normal install"
                 " logic, requirements files may not behave as expected.)")

        cmd_opts.add_option(
            '--root',
            dest='root_path',
            metavar='dir',
            default=None,
            help="Install everything relative to this alternate root "
                 "directory.")

        cmd_opts.add_option(
            '--prefix',
            dest='prefix_path',
            metavar='dir',
            default=None,
            help="Installation prefix where lib, bin and other top-level "
                 "folders are placed")

        cmd_opts.add_option(
            "--compile",
            action="store_true",
            dest="compile",
            default=True,
            help="Compile py files to pyc",
        )

        cmd_opts.add_option(
            "--no-compile",
            action="store_false",
            dest="compile",
            help="Do not compile py files to pyc",
        )

        cmd_opts.add_option(cmdoptions.use_wheel())
        cmd_opts.add_option(cmdoptions.no_use_wheel())
        cmd_opts.add_option(cmdoptions.no_binary())
        cmd_opts.add_option(cmdoptions.only_binary())
        cmd_opts.add_option(cmdoptions.pre())
        cmd_opts.add_option(cmdoptions.no_clean())
        cmd_opts.add_option(cmdoptions.require_hashes())

        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)

    def run(self, options, args):
        """Resolve, (optionally) wheel-build, and install the requirements.

        Emits deprecation warnings for retired flags, validates mutually
        exclusive options, then builds and installs a RequirementSet.
        Returns the RequirementSet so callers/tests can inspect it.
        """
        cmdoptions.resolve_wheel_no_use_binary(options)
        cmdoptions.check_install_build_global(options)

        if options.as_egg:
            warnings.warn(
                "--egg has been deprecated and will be removed in the future. "
                "This flag is mutually exclusive with large parts of pip, and "
                "actually using it invalidates pip's ability to manage the "
                "installation process.",
                RemovedInPip10Warning,
            )

        if options.allow_external:
            warnings.warn(
                "--allow-external has been deprecated and will be removed in "
                "the future. Due to changes in the repository protocol, it no "
                "longer has any effect.",
                RemovedInPip10Warning,
            )

        if options.allow_all_external:
            warnings.warn(
                "--allow-all-external has been deprecated and will be removed "
                "in the future. Due to changes in the repository protocol, it "
                "no longer has any effect.",
                RemovedInPip10Warning,
            )

        if options.allow_unverified:
            warnings.warn(
                "--allow-unverified has been deprecated and will be removed "
                "in the future. Due to changes in the repository protocol, it "
                "no longer has any effect.",
                RemovedInPip10Warning,
            )

        if options.download_dir:
            warnings.warn(
                "pip install --download has been deprecated and will be "
                "removed in the future. Pip now has a download command that "
                "should be used instead.",
                RemovedInPip10Warning,
            )
            # Downloading implies we never consult what's already installed.
            options.ignore_installed = True

        if options.build_dir:
            options.build_dir = os.path.abspath(options.build_dir)

        options.src_dir = os.path.abspath(options.src_dir)
        install_options = options.install_options or []
        if options.use_user_site:
            if options.prefix_path:
                raise CommandError(
                    "Can not combine '--user' and '--prefix' as they imply "
                    "different installation locations"
                )
            if virtualenv_no_global():
                raise InstallationError(
                    "Can not perform a '--user' install. User site-packages "
                    "are not visible in this virtualenv."
                )
            install_options.append('--user')
            install_options.append('--prefix=')

        # --target installs into a temp dir first, then moves the result into
        # the target directory at the end of run().
        temp_target_dir = None
        if options.target_dir:
            options.ignore_installed = True
            temp_target_dir = tempfile.mkdtemp()
            options.target_dir = os.path.abspath(options.target_dir)
            if (os.path.exists(options.target_dir) and not
                    os.path.isdir(options.target_dir)):
                raise CommandError(
                    "Target path exists but is not a directory, will not "
                    "continue."
                )
            install_options.append('--home=' + temp_target_dir)

        global_options = options.global_options or []

        with self._build_session(options) as session:
            finder = self._build_package_finder(options, session)
            # Only auto-delete the build dir when the user supplied neither
            # --no-clean nor an explicit --build-dir.
            build_delete = (not (options.no_clean or options.build_dir))
            wheel_cache = WheelCache(options.cache_dir, options.format_control)

            if options.cache_dir and not check_path_owner(options.cache_dir):
                logger.warning(
                    "The directory '%s' or its parent directory is not owned "
                    "by the current user and caching wheels has been "
                    "disabled. check the permissions and owner of that "
                    "directory. If executing pip with sudo, you may want "
                    "sudo's -H flag.",
                    options.cache_dir,
                )
                options.cache_dir = None

            with BuildDirectory(options.build_dir,
                                delete=build_delete) as build_dir:
                requirement_set = RequirementSet(
                    build_dir=build_dir,
                    src_dir=options.src_dir,
                    download_dir=options.download_dir,
                    upgrade=options.upgrade,
                    upgrade_strategy=options.upgrade_strategy,
                    as_egg=options.as_egg,
                    ignore_installed=options.ignore_installed,
                    ignore_dependencies=options.ignore_dependencies,
                    ignore_requires_python=options.ignore_requires_python,
                    force_reinstall=options.force_reinstall,
                    use_user_site=options.use_user_site,
                    target_dir=temp_target_dir,
                    session=session,
                    pycompile=options.compile,
                    isolated=options.isolated_mode,
                    wheel_cache=wheel_cache,
                    require_hashes=options.require_hashes,
                )

                self.populate_requirement_set(
                    requirement_set, args, options, finder, session, self.name,
                    wheel_cache
                )

                if not requirement_set.has_requirements:
                    return

                try:
                    if (options.download_dir or not wheel or not
                            options.cache_dir):
                        # on -d don't do complex things like building
                        # wheels, and don't try to build wheels when wheel is
                        # not installed.
                        requirement_set.prepare_files(finder)
                    else:
                        # build wheels before install.
                        wb = WheelBuilder(
                            requirement_set,
                            finder,
                            build_options=[],
                            global_options=[],
                        )
                        # Ignore the result: a failed wheel will be
                        # installed from the sdist/vcs whatever.
                        wb.build(autobuilding=True)

                    if not options.download_dir:
                        requirement_set.install(
                            install_options,
                            global_options,
                            root=options.root_path,
                            prefix=options.prefix_path,
                        )

                        possible_lib_locations = get_lib_location_guesses(
                            user=options.use_user_site,
                            home=temp_target_dir,
                            root=options.root_path,
                            prefix=options.prefix_path,
                            isolated=options.isolated_mode,
                        )
                        reqs = sorted(
                            requirement_set.successfully_installed,
                            key=operator.attrgetter('name'))
                        items = []
                        for req in reqs:
                            item = req.name
                            try:
                                # Best-effort: append the installed version to
                                # the success message; never fail the install
                                # over it.
                                installed_version = get_installed_version(
                                    req.name, possible_lib_locations
                                )
                                if installed_version:
                                    item += '-' + installed_version
                            except Exception:
                                pass
                            items.append(item)
                        installed = ' '.join(items)
                        if installed:
                            logger.info('Successfully installed %s', installed)
                    else:
                        downloaded = ' '.join([
                            req.name
                            for req in requirement_set.successfully_downloaded
                        ])
                        if downloaded:
                            logger.info('Successfully downloaded %s',
                                        downloaded)
                except PreviousBuildDirError:
                    # Keep the pre-existing build dir for inspection.
                    options.no_clean = True
                    raise
                finally:
                    # Clean up
                    if not options.no_clean:
                        requirement_set.cleanup_files()

        if options.target_dir:
            ensure_dir(options.target_dir)

            # Checking both purelib and platlib directories for installed
            # packages to be moved to target directory
            lib_dir_list = []

            purelib_dir = distutils_scheme('', home=temp_target_dir)['purelib']
            platlib_dir = distutils_scheme('', home=temp_target_dir)['platlib']

            if os.path.exists(purelib_dir):
                lib_dir_list.append(purelib_dir)
            if os.path.exists(platlib_dir) and platlib_dir != purelib_dir:
                lib_dir_list.append(platlib_dir)

            for lib_dir in lib_dir_list:
                for item in os.listdir(lib_dir):
                    target_item_dir = os.path.join(options.target_dir, item)
                    if os.path.exists(target_item_dir):
                        if not options.upgrade:
                            logger.warning(
                                'Target directory %s already exists. Specify '
                                '--upgrade to force replacement.',
                                target_item_dir
                            )
                            continue
                        if os.path.islink(target_item_dir):
                            logger.warning(
                                'Target directory %s already exists and is '
                                'a link. Pip will not automatically replace '
                                'links, please remove if replacement is '
                                'desired.',
                                target_item_dir
                            )
                            continue
                        if os.path.isdir(target_item_dir):
                            shutil.rmtree(target_item_dir)
                        else:
                            os.remove(target_item_dir)

                    shutil.move(
                        os.path.join(lib_dir, item),
                        target_item_dir
                    )
            shutil.rmtree(temp_target_dir)
        return requirement_set


def get_lib_location_guesses(*args, **kwargs):
    # Candidate site-packages locations (purelib/platlib) for the scheme
    # described by the given distutils_scheme() arguments.
    scheme = distutils_scheme('', *args, **kwargs)
    return [scheme['purelib'], scheme['platlib']]
from __future__ import absolute_import

from pip.basecommand import Command, SUCCESS
from pip.exceptions import CommandError


class HelpCommand(Command):
    """Show help for commands"""
    name = 'help'
    usage = """
      %prog <command>"""
    summary = 'Show help for commands.'

    def run(self, options, args):
        """Print the help of the named subcommand, suggesting near-misses."""
        # Imported lazily to avoid a circular import with pip.commands.
        from pip.commands import commands_dict, get_similar_commands

        # 'pip help' with no args is handled by pip.__init__.parseopt()
        if not args:
            return SUCCESS
        cmd_name = args[0]  # the command we need help for

        if cmd_name not in commands_dict:
            guess = get_similar_commands(cmd_name)

            msg = ['unknown command "%s"' % cmd_name]
            if guess:
                msg.append('maybe you meant "%s"' % guess)

            raise CommandError(' - '.join(msg))

        commands_dict[cmd_name]().parser.print_help()

        return SUCCESS
from __future__ import absolute_importimport hashlibimport loggingimport sysfrom pip.basecommand import Commandfrom pip.status_codes import ERRORfrom pip.utils import read_chunksfrom pip.utils.hashes import FAVORITE_HASH, STRONG_HASHESlogger = logging.getLogger(__name__)class HashCommand(Command):"""Compute a hash of a local package archive.These can be used with --hash in a requirements file to do repeatableinstalls."""name = 'hash'usage = '%prog [options] <file> ...'summary = 'Compute hashes of package archives.'def __init__(self, *args, **kw):super(HashCommand, self).__init__(*args, **kw)self.cmd_opts.add_option('-a', '--algorithm',dest='algorithm',choices=STRONG_HASHES,action='store',default=FAVORITE_HASH,help='The hash algorithm to use: one of %s' %', '.join(STRONG_HASHES))self.parser.insert_option_group(0, self.cmd_opts)def run(self, options, args):if not args:self.parser.print_usage(sys.stderr)return ERRORalgorithm = options.algorithmfor path in args:logger.info('%s:\n--hash=%s:%s',path, algorithm, _hash_of_file(path, algorithm))def _hash_of_file(path, algorithm):"""Return the hash digest of a file."""with open(path, 'rb') as archive:hash = hashlib.new(algorithm)for chunk in read_chunks(archive):hash.update(chunk)return hash.hexdigest()
from __future__ import absolute_importimport sysimport pipfrom pip.compat import stdlib_pkgsfrom pip.basecommand import Commandfrom pip.operations.freeze import freezefrom pip.wheel import WheelCacheDEV_PKGS = ('pip', 'setuptools', 'distribute', 'wheel')class FreezeCommand(Command):"""Output installed packages in requirements format.packages are listed in a case-insensitive sorted order."""name = 'freeze'usage = """%prog [options]"""summary = 'Output installed packages in requirements format.'log_streams = ("ext://sys.stderr", "ext://sys.stderr")def __init__(self, *args, **kw):super(FreezeCommand, self).__init__(*args, **kw)self.cmd_opts.add_option('-r', '--requirement',dest='requirements',action='append',default=[],metavar='file',help="Use the order in the given requirements file and its ""comments when generating output. This option can be ""used multiple times.")self.cmd_opts.add_option('-f', '--find-links',dest='find_links',action='append',default=[],metavar='URL',help='URL for finding packages, which will be added to the ''output.')self.cmd_opts.add_option('-l', '--local',dest='local',action='store_true',default=False,help='If in a virtualenv that has global access, do not output ''globally-installed packages.')self.cmd_opts.add_option('--user',dest='user',action='store_true',default=False,help='Only output packages installed in user-site.')self.cmd_opts.add_option('--all',dest='freeze_all',action='store_true',help='Do not skip these packages in the output:'' %s' % ', '.join(DEV_PKGS))self.parser.insert_option_group(0, self.cmd_opts)def run(self, options, args):format_control = pip.index.FormatControl(set(), set())wheel_cache = WheelCache(options.cache_dir, format_control)skip = set(stdlib_pkgs)if not options.freeze_all:skip.update(DEV_PKGS)freeze_kwargs = 
dict(requirement=options.requirements,find_links=options.find_links,local_only=options.local,user_only=options.user,skip_regex=options.skip_requirements_regex,isolated=options.isolated_mode,wheel_cache=wheel_cache,skip=skip)for line in freeze(**freeze_kwargs):sys.stdout.write(line + '\n')
"""Implementation of the ``pip download`` command."""
from __future__ import absolute_import

import logging
import os

from pip.exceptions import CommandError
from pip.index import FormatControl
from pip.req import RequirementSet
from pip.basecommand import RequirementCommand
from pip import cmdoptions
from pip.utils import ensure_dir, normalize_path
from pip.utils.build import BuildDirectory
from pip.utils.filesystem import check_path_owner


logger = logging.getLogger(__name__)


class DownloadCommand(RequirementCommand):
    """
    Download packages from:

    - PyPI (and other indexes) using requirement specifiers.
    - VCS project urls.
    - Local project directories.
    - Local or remote source archives.

    pip also supports downloading from "requirements files", which provide
    an easy way to specify a whole environment to be downloaded.
    """
    name = 'download'

    usage = """
      %prog [options] <requirement specifier> [package-index-options] ...
      %prog [options] -r <requirements file> [package-index-options] ...
      %prog [options] [-e] <vcs project url> ...
      %prog [options] [-e] <local project path> ...
      %prog [options] <archive url/path> ..."""

    summary = 'Download packages.'

    def __init__(self, *args, **kw):
        # Register all command-line options for ``pip download``.
        super(DownloadCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        cmd_opts.add_option(cmdoptions.constraints())
        cmd_opts.add_option(cmdoptions.editable())
        cmd_opts.add_option(cmdoptions.requirements())
        cmd_opts.add_option(cmdoptions.build_dir())
        cmd_opts.add_option(cmdoptions.no_deps())
        cmd_opts.add_option(cmdoptions.global_options())
        cmd_opts.add_option(cmdoptions.no_binary())
        cmd_opts.add_option(cmdoptions.only_binary())
        cmd_opts.add_option(cmdoptions.src())
        cmd_opts.add_option(cmdoptions.pre())
        cmd_opts.add_option(cmdoptions.no_clean())
        cmd_opts.add_option(cmdoptions.require_hashes())

        cmd_opts.add_option(
            '-d', '--dest', '--destination-dir', '--destination-directory',
            dest='download_dir',
            metavar='dir',
            default=os.curdir,
            help=("Download packages into <dir>."),
        )

        # The four options below restrict which wheels are acceptable; they
        # require --only-binary=:all: (validated in run()).
        cmd_opts.add_option(
            '--platform',
            dest='platform',
            metavar='platform',
            default=None,
            help=("Only download wheels compatible with <platform>. "
                  "Defaults to the platform of the running system."),
        )

        cmd_opts.add_option(
            '--python-version',
            dest='python_version',
            metavar='python_version',
            default=None,
            help=("Only download wheels compatible with Python "
                  "interpreter version <version>. If not specified, then the "
                  "current system interpreter minor version is used. A major "
                  "version (e.g. '2') can be specified to match all "
                  "minor revs of that major version.  A minor version "
                  "(e.g. '34') can also be specified."),
        )

        cmd_opts.add_option(
            '--implementation',
            dest='implementation',
            metavar='implementation',
            default=None,
            help=("Only download wheels compatible with Python "
                  "implementation <implementation>, e.g. 'pp', 'jy', 'cp', "
                  " or 'ip'. If not specified, then the current "
                  "interpreter implementation is used.  Use 'py' to force "
                  "implementation-agnostic wheels."),
        )

        cmd_opts.add_option(
            '--abi',
            dest='abi',
            metavar='abi',
            default=None,
            help=("Only download wheels compatible with Python "
                  "abi <abi>, e.g. 'pypy_41'.  If not specified, then the "
                  "current interpreter abi tag is used.  Generally "
                  "you will need to specify --implementation, "
                  "--platform, and --python-version when using "
                  "this option."),
        )

        index_opts = cmdoptions.make_option_group(
            cmdoptions.non_deprecated_index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)

    def run(self, options, args):
        """Resolve the requirements and download their archives.

        Returns the populated RequirementSet.
        """
        options.ignore_installed = True

        if options.python_version:
            python_versions = [options.python_version]
        else:
            python_versions = None

        # Cross-environment restrictions only make sense for wheels, so
        # enforce --only-binary=:all: when any of them is supplied.
        dist_restriction_set = any([
            options.python_version,
            options.platform,
            options.abi,
            options.implementation,
        ])
        binary_only = FormatControl(set(), set([':all:']))
        if dist_restriction_set and options.format_control != binary_only:
            raise CommandError(
                "--only-binary=:all: must be set and --no-binary must not "
                "be set (or must be set to :none:) when restricting platform "
                "and interpreter constraints using --python-version, "
                "--platform, --abi, or --implementation."
            )

        options.src_dir = os.path.abspath(options.src_dir)
        options.download_dir = normalize_path(options.download_dir)

        ensure_dir(options.download_dir)

        with self._build_session(options) as session:
            finder = self._build_package_finder(
                options=options,
                session=session,
                platform=options.platform,
                python_versions=python_versions,
                abi=options.abi,
                implementation=options.implementation,
            )
            # Only auto-delete the build dir when the user supplied neither
            # --no-clean nor an explicit --build-dir.
            build_delete = (not (options.no_clean or options.build_dir))
            if options.cache_dir and not check_path_owner(options.cache_dir):
                logger.warning(
                    "The directory '%s' or its parent directory is not owned "
                    "by the current user and caching wheels has been "
                    "disabled. check the permissions and owner of that "
                    "directory. If executing pip with sudo, you may want "
                    "sudo's -H flag.",
                    options.cache_dir,
                )
                options.cache_dir = None

            with BuildDirectory(options.build_dir,
                                delete=build_delete) as build_dir:

                requirement_set = RequirementSet(
                    build_dir=build_dir,
                    src_dir=options.src_dir,
                    download_dir=options.download_dir,
                    ignore_installed=True,
                    ignore_dependencies=options.ignore_dependencies,
                    session=session,
                    isolated=options.isolated_mode,
                    require_hashes=options.require_hashes
                )
                self.populate_requirement_set(
                    requirement_set,
                    args,
                    options,
                    finder,
                    session,
                    self.name,
                    None
                )

                if not requirement_set.has_requirements:
                    return

                requirement_set.prepare_files(finder)

                downloaded = ' '.join([
                    req.name
                    for req in requirement_set.successfully_downloaded
                ])
                if downloaded:
                    logger.info('Successfully downloaded %s', downloaded)

                # Clean up
                if not options.no_clean:
                    requirement_set.cleanup_files()

        return requirement_set
from __future__ import absolute_importimport sysfrom pip.basecommand import CommandBASE_COMPLETION = """# pip %(shell)s completion start%(script)s# pip %(shell)s completion end"""COMPLETION_SCRIPTS = {'bash': """_pip_completion(){COMPREPLY=( $( COMP_WORDS="${COMP_WORDS[*]}" \\COMP_CWORD=$COMP_CWORD \\PIP_AUTO_COMPLETE=1 $1 ) )}complete -o default -F _pip_completion pip""", 'zsh': """function _pip_completion {local words cwordread -Ac wordsread -cn cwordreply=( $( COMP_WORDS="$words[*]" \\COMP_CWORD=$(( cword-1 )) \\PIP_AUTO_COMPLETE=1 $words[1] ) )}compctl -K _pip_completion pip""", 'fish': """function __fish_complete_pipset -lx COMP_WORDS (commandline -o) ""set -lx COMP_CWORD (math (contains -i -- (commandline -t) $COMP_WORDS)-1)set -lx PIP_AUTO_COMPLETE 1string split \ -- (eval $COMP_WORDS[1])endcomplete -fa "(__fish_complete_pip)" -c pip"""}class CompletionCommand(Command):"""A helper command to be used for command completion."""name = 'completion'summary = 'A helper command used for command completion.'def __init__(self, *args, **kw):super(CompletionCommand, self).__init__(*args, **kw)cmd_opts = self.cmd_optscmd_opts.add_option('--bash', '-b',action='store_const',const='bash',dest='shell',help='Emit completion code for bash')cmd_opts.add_option('--zsh', '-z',action='store_const',const='zsh',dest='shell',help='Emit completion code for zsh')cmd_opts.add_option('--fish', '-f',action='store_const',const='fish',dest='shell',help='Emit completion code for fish')self.parser.insert_option_group(0, cmd_opts)def run(self, options, args):"""Prints the completion code of the given shell"""shells = COMPLETION_SCRIPTS.keys()shell_options = ['--' + shell for shell in sorted(shells)]if options.shell in shells:script = COMPLETION_SCRIPTS.get(options.shell, '')print(BASE_COMPLETION % {'script': script, 'shell': options.shell})else:sys.stderr.write('ERROR: You must pass %s\n' % ' or '.join(shell_options))
import logging

from pip.basecommand import Command
from pip.operations.check import check_requirements
from pip.utils import get_installed_distributions

logger = logging.getLogger(__name__)


class CheckCommand(Command):
    """Verify installed packages have compatible dependencies."""
    name = 'check'
    usage = """
      %prog [options]"""
    summary = 'Verify installed packages have compatible dependencies.'

    def run(self, options, args):
        """Report missing/incompatible requirements; return 1 if any found."""
        dists = get_installed_distributions(local_only=False, skip=())
        missing, incompatible = check_requirements(dists)

        for dist in dists:
            key = '%s==%s' % (dist.project_name, dist.version)

            for req in missing.get(key, []):
                logger.info(
                    "%s %s requires %s, which is not installed.",
                    dist.project_name, dist.version, req.project_name)

            for req, actual in incompatible.get(key, []):
                logger.info(
                    "%s %s has requirement %s, but you have %s %s.",
                    dist.project_name, dist.version, req,
                    actual.project_name, actual.version)

        if not (missing or incompatible):
            logger.info("No broken requirements found.")
            return None
        return 1
"""Package containing all pip commands"""from __future__ import absolute_importfrom pip.commands.completion import CompletionCommandfrom pip.commands.download import DownloadCommandfrom pip.commands.freeze import FreezeCommandfrom pip.commands.hash import HashCommandfrom pip.commands.help import HelpCommandfrom pip.commands.list import ListCommandfrom pip.commands.check import CheckCommandfrom pip.commands.search import SearchCommandfrom pip.commands.show import ShowCommandfrom pip.commands.install import InstallCommandfrom pip.commands.uninstall import UninstallCommandfrom pip.commands.wheel import WheelCommandcommands_dict = {CompletionCommand.name: CompletionCommand,FreezeCommand.name: FreezeCommand,HashCommand.name: HashCommand,HelpCommand.name: HelpCommand,SearchCommand.name: SearchCommand,ShowCommand.name: ShowCommand,InstallCommand.name: InstallCommand,UninstallCommand.name: UninstallCommand,DownloadCommand.name: DownloadCommand,ListCommand.name: ListCommand,CheckCommand.name: CheckCommand,WheelCommand.name: WheelCommand,}commands_order = [InstallCommand,DownloadCommand,UninstallCommand,FreezeCommand,ListCommand,ShowCommand,CheckCommand,SearchCommand,WheelCommand,HashCommand,CompletionCommand,HelpCommand,]def get_summaries(ordered=True):"""Yields sorted (command name, command summary) tuples."""if ordered:cmditems = _sort_commands(commands_dict, commands_order)else:cmditems = commands_dict.items()for name, command_class in cmditems:yield (name, command_class.summary)def get_similar_commands(name):"""Command name auto-correct."""from difflib import get_close_matchesname = name.lower()close_commands = get_close_matches(name, commands_dict.keys())if close_commands:return close_commands[0]else:return Falsedef _sort_commands(cmddict, order):def keyfn(key):try:return order.index(key[1])except ValueError:# unordered items should come lastreturn 0xffreturn sorted(cmddict.items(), key=keyfn)
"""shared options and groupsThe principle here is to define options once, but *not* instantiate themglobally. One reason being that options with action='append' can carry statebetween parses. pip parses general options twice internally, and shouldn'tpass on state. To be consistent, all options will follow this design."""from __future__ import absolute_importfrom functools import partialfrom optparse import OptionGroup, SUPPRESS_HELP, Optionimport warningsfrom pip.index import (FormatControl, fmt_ctl_handle_mutual_exclude, fmt_ctl_no_binary,fmt_ctl_no_use_wheel)from pip.models import PyPIfrom pip.locations import USER_CACHE_DIR, src_prefixfrom pip.utils.hashes import STRONG_HASHESdef make_option_group(group, parser):"""Return an OptionGroup objectgroup -- assumed to be dict with 'name' and 'options' keysparser -- an optparse Parser"""option_group = OptionGroup(parser, group['name'])for option in group['options']:option_group.add_option(option())return option_groupdef resolve_wheel_no_use_binary(options):if not options.use_wheel:control = options.format_controlfmt_ctl_no_use_wheel(control)def check_install_build_global(options, check_options=None):"""Disable wheels if per-setup.py call options are set.:param options: The OptionParser options to update.:param check_options: The options to check, if not supplied defaults tooptions."""if check_options is None:check_options = optionsdef getname(n):return getattr(check_options, n, None)names = ["build_options", "global_options", "install_options"]if any(map(getname, names)):control = options.format_controlfmt_ctl_no_binary(control)warnings.warn('Disabling all use of wheels due to the use of --build-options ''/ --global-options / --install-options.', stacklevel=2)############ options ############help_ = partial(Option,'-h', '--help',dest='help',action='help',help='Show help.')isolated_mode = partial(Option,"--isolated",dest="isolated_mode",action="store_true",default=False,help=("Run pip in an isolated mode, ignoring 
environment variables and user ""configuration."),)require_virtualenv = partial(Option,# Run only if inside a virtualenv, bail if not.'--require-virtualenv', '--require-venv',dest='require_venv',action='store_true',default=False,help=SUPPRESS_HELP)verbose = partial(Option,'-v', '--verbose',dest='verbose',action='count',default=0,help='Give more output. Option is additive, and can be used up to 3 times.')version = partial(Option,'-V', '--version',dest='version',action='store_true',help='Show version and exit.')quiet = partial(Option,'-q', '--quiet',dest='quiet',action='count',default=0,help=('Give less output. Option is additive, and can be used up to 3'' times (corresponding to WARNING, ERROR, and CRITICAL logging'' levels).'))log = partial(Option,"--log", "--log-file", "--local-log",dest="log",metavar="path",help="Path to a verbose appending log.")no_input = partial(Option,# Don't ask for input'--no-input',dest='no_input',action='store_true',default=False,help=SUPPRESS_HELP)proxy = partial(Option,'--proxy',dest='proxy',type='str',default='',help="Specify a proxy in the form [user:passwd@]proxy.server:port.")retries = partial(Option,'--retries',dest='retries',type='int',default=5,help="Maximum number of retries each connection should attempt ""(default %default times).")timeout = partial(Option,'--timeout', '--default-timeout',metavar='sec',dest='timeout',type='float',default=15,help='Set the socket timeout (default %default seconds).')default_vcs = partial(Option,# The default version control system for editables, e.g. 
'svn''--default-vcs',dest='default_vcs',type='str',default='',help=SUPPRESS_HELP)skip_requirements_regex = partial(Option,# A regex to be used to skip requirements'--skip-requirements-regex',dest='skip_requirements_regex',type='str',default='',help=SUPPRESS_HELP)def exists_action():return Option(# Option when path already exist'--exists-action',dest='exists_action',type='choice',choices=['s', 'i', 'w', 'b', 'a'],default=[],action='append',metavar='action',help="Default action when a path already exists: ""(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort.")cert = partial(Option,'--cert',dest='cert',type='str',metavar='path',help="Path to alternate CA bundle.")client_cert = partial(Option,'--client-cert',dest='client_cert',type='str',default=None,metavar='path',help="Path to SSL client certificate, a single file containing the ""private key and the certificate in PEM format.")index_url = partial(Option,'-i', '--index-url', '--pypi-url',dest='index_url',metavar='URL',default=PyPI.simple_url,help="Base URL of Python Package Index (default %default). ""This should point to a repository compliant with PEP 503 ""(the simple repository API) or a local directory laid out ""in the same format.")def extra_index_url():return Option('--extra-index-url',dest='extra_index_urls',metavar='URL',action='append',default=[],help="Extra URLs of package indexes to use in addition to ""--index-url. Should follow the same rules as ""--index-url.")no_index = partial(Option,'--no-index',dest='no_index',action='store_true',default=False,help='Ignore package index (only looking at --find-links URLs instead).')def find_links():return Option('-f', '--find-links',dest='find_links',action='append',default=[],metavar='url',help="If a url or path to an html file, then parse for links to ""archives. 
If a local path or file:// url that's a directory, ""then look for archives in the directory listing.")def allow_external():return Option("--allow-external",dest="allow_external",action="append",default=[],metavar="PACKAGE",help=SUPPRESS_HELP,)allow_all_external = partial(Option,"--allow-all-external",dest="allow_all_external",action="store_true",default=False,help=SUPPRESS_HELP,)def trusted_host():return Option("--trusted-host",dest="trusted_hosts",action="append",metavar="HOSTNAME",default=[],help="Mark this host as trusted, even though it does not have valid ""or any HTTPS.",)# Remove after 7.0no_allow_external = partial(Option,"--no-allow-external",dest="allow_all_external",action="store_false",default=False,help=SUPPRESS_HELP,)# Remove --allow-insecure after 7.0def allow_unsafe():return Option("--allow-unverified", "--allow-insecure",dest="allow_unverified",action="append",default=[],metavar="PACKAGE",help=SUPPRESS_HELP,)# Remove after 7.0no_allow_unsafe = partial(Option,"--no-allow-insecure",dest="allow_all_insecure",action="store_false",default=False,help=SUPPRESS_HELP)# Remove after 1.5process_dependency_links = partial(Option,"--process-dependency-links",dest="process_dependency_links",action="store_true",default=False,help="Enable the processing of dependency links.",)def constraints():return Option('-c', '--constraint',dest='constraints',action='append',default=[],metavar='file',help='Constrain versions using the given constraints file. ''This option can be used multiple times.')def requirements():return Option('-r', '--requirement',dest='requirements',action='append',default=[],metavar='file',help='Install from the given requirements file. ''This option can be used multiple times.')def editable():return Option('-e', '--editable',dest='editables',action='append',default=[],metavar='path/url',help=('Install a project in editable mode (i.e. 
setuptools ''"develop mode") from a local project path or a VCS url.'),)src = partial(Option,'--src', '--source', '--source-dir', '--source-directory',dest='src_dir',metavar='dir',default=src_prefix,help='Directory to check out editable projects into. ''The default in a virtualenv is "<venv path>/src". ''The default for global installs is "<current dir>/src".')# XXX: deprecated, remove in 9.0use_wheel = partial(Option,'--use-wheel',dest='use_wheel',action='store_true',default=True,help=SUPPRESS_HELP,)# XXX: deprecated, remove in 9.0no_use_wheel = partial(Option,'--no-use-wheel',dest='use_wheel',action='store_false',default=True,help=('Do not Find and prefer wheel archives when searching indexes and ''find-links locations. DEPRECATED in favour of --no-binary.'),)def _get_format_control(values, option):"""Get a format_control object."""return getattr(values, option.dest)def _handle_no_binary(option, opt_str, value, parser):existing = getattr(parser.values, option.dest)fmt_ctl_handle_mutual_exclude(value, existing.no_binary, existing.only_binary)def _handle_only_binary(option, opt_str, value, parser):existing = getattr(parser.values, option.dest)fmt_ctl_handle_mutual_exclude(value, existing.only_binary, existing.no_binary)def no_binary():return Option("--no-binary", dest="format_control", action="callback",callback=_handle_no_binary, type="str",default=FormatControl(set(), set()),help="Do not use binary packages. Can be supplied multiple times, and ""each time adds to the existing value. Accepts either :all: to ""disable all binary packages, :none: to empty the set, or one or ""more package names with commas between them. Note that some ""packages are tricky to compile and may fail to install when ""this option is used on them.")def only_binary():return Option("--only-binary", dest="format_control", action="callback",callback=_handle_only_binary, type="str",default=FormatControl(set(), set()),help="Do not use source packages. 
Can be supplied multiple times, and ""each time adds to the existing value. Accepts either :all: to ""disable all source packages, :none: to empty the set, or one or ""more package names with commas between them. Packages without ""binary distributions will fail to install when this option is ""used on them.")cache_dir = partial(Option,"--cache-dir",dest="cache_dir",default=USER_CACHE_DIR,metavar="dir",help="Store the cache data in <dir>.")no_cache = partial(Option,"--no-cache-dir",dest="cache_dir",action="store_false",help="Disable the cache.",)no_deps = partial(Option,'--no-deps', '--no-dependencies',dest='ignore_dependencies',action='store_true',default=False,help="Don't install package dependencies.")build_dir = partial(Option,'-b', '--build', '--build-dir', '--build-directory',dest='build_dir',metavar='dir',help='Directory to unpack packages into and build in.')ignore_requires_python = partial(Option,'--ignore-requires-python',dest='ignore_requires_python',action='store_true',help='Ignore the Requires-Python information.')install_options = partial(Option,'--install-option',dest='install_options',action='append',metavar='options',help="Extra arguments to be supplied to the setup.py install ""command (use like --install-option=\"--install-scripts=/usr/local/""bin\"). Use multiple --install-option options to pass multiple ""options to setup.py install. If you are using an option with a ""directory path, be sure to use absolute path.")global_options = partial(Option,'--global-option',dest='global_options',action='append',metavar='options',help="Extra global options to be supplied to the setup.py ""call before the install command.")no_clean = partial(Option,'--no-clean',action='store_true',default=False,help="Don't clean up build directories.")pre = partial(Option,'--pre',action='store_true',default=False,help="Include pre-release and development versions. 
By default, ""pip only finds stable versions.")disable_pip_version_check = partial(Option,"--disable-pip-version-check",dest="disable_pip_version_check",action="store_true",default=False,help="Don't periodically check PyPI to determine whether a new version ""of pip is available for download. Implied with --no-index.")# Deprecated, Remove lateralways_unzip = partial(Option,'-Z', '--always-unzip',dest='always_unzip',action='store_true',help=SUPPRESS_HELP,)def _merge_hash(option, opt_str, value, parser):"""Given a value spelled "algo:digest", append the digest to a listpointed to in a dict by the algo name."""if not parser.values.hashes:parser.values.hashes = {}try:algo, digest = value.split(':', 1)except ValueError:parser.error('Arguments to %s must be a hash name ''followed by a value, like --hash=sha256:abcde...' %opt_str)if algo not in STRONG_HASHES:parser.error('Allowed hash algorithms for %s are %s.' %(opt_str, ', '.join(STRONG_HASHES)))parser.values.hashes.setdefault(algo, []).append(digest)hash = partial(Option,'--hash',# Hash values eventually end up in InstallRequirement.hashes due to# __dict__ copying in process_line().dest='hashes',action='callback',callback=_merge_hash,type='string',help="Verify that the package's archive matches this "'hash before installing. Example: --hash=sha256:abcdef...')require_hashes = partial(Option,'--require-hashes',dest='require_hashes',action='store_true',default=False,help='Require a hash to check each requirement against, for ''repeatable installs. 
This option is implied when any package in a ''requirements file has a --hash option.')########### groups ###########general_group = {'name': 'General Options','options': [help_,isolated_mode,require_virtualenv,verbose,version,quiet,log,no_input,proxy,retries,timeout,default_vcs,skip_requirements_regex,exists_action,trusted_host,cert,client_cert,cache_dir,no_cache,disable_pip_version_check,]}non_deprecated_index_group = {'name': 'Package Index Options','options': [index_url,extra_index_url,no_index,find_links,process_dependency_links,]}index_group = {'name': 'Package Index Options (including deprecated options)','options': non_deprecated_index_group['options'] + [allow_external,allow_all_external,no_allow_external,allow_unsafe,no_allow_unsafe,]}
"""Base option parser setup"""from __future__ import absolute_importimport sysimport optparseimport osimport reimport textwrapfrom distutils.util import strtoboolfrom pip._vendor.six import string_typesfrom pip._vendor.six.moves import configparserfrom pip.locations import (legacy_config_file, config_basename, running_under_virtualenv,site_config_files)from pip.utils import appdirs, get_terminal_size_environ_prefix_re = re.compile(r"^PIP_", re.I)class PrettyHelpFormatter(optparse.IndentedHelpFormatter):"""A prettier/less verbose help formatter for optparse."""def __init__(self, *args, **kwargs):# help position must be aligned with __init__.parseopts.descriptionkwargs['max_help_position'] = 30kwargs['indent_increment'] = 1kwargs['width'] = get_terminal_size()[0] - 2optparse.IndentedHelpFormatter.__init__(self, *args, **kwargs)def format_option_strings(self, option):return self._format_option_strings(option, ' <%s>', ', ')def _format_option_strings(self, option, mvarfmt=' <%s>', optsep=', '):"""Return a comma-separated list of option strings and metavars.:param option: tuple of (short opt, long opt), e.g: ('-f', '--format'):param mvarfmt: metavar format string - evaluated as mvarfmt % metavar:param optsep: separator"""opts = []if option._short_opts:opts.append(option._short_opts[0])if option._long_opts:opts.append(option._long_opts[0])if len(opts) > 1:opts.insert(1, optsep)if option.takes_value():metavar = option.metavar or option.dest.lower()opts.append(mvarfmt % metavar.lower())return ''.join(opts)def format_heading(self, heading):if heading == 'Options':return ''return heading + ':\n'def format_usage(self, usage):"""Ensure there is only one newline between usage and the first headingif there is no description."""msg = '\nUsage: %s\n' % self.indent_lines(textwrap.dedent(usage), " ")return msgdef format_description(self, description):# leave full control over description to usif description:if hasattr(self.parser, 'main'):label = 'Commands'else:label = 
'Description'# some doc strings have initial newlines, some don'tdescription = description.lstrip('\n')# some doc strings have final newlines and spaces, some don'tdescription = description.rstrip()# dedent, then reindentdescription = self.indent_lines(textwrap.dedent(description), " ")description = '%s:\n%s\n' % (label, description)return descriptionelse:return ''def format_epilog(self, epilog):# leave full control over epilog to usif epilog:return epilogelse:return ''def indent_lines(self, text, indent):new_lines = [indent + line for line in text.split('\n')]return "\n".join(new_lines)class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter):"""Custom help formatter for use in ConfigOptionParser.This is updates the defaults before expanding them, allowingthem to show up correctly in the help listing."""def expand_default(self, option):if self.parser is not None:self.parser._update_defaults(self.parser.defaults)return optparse.IndentedHelpFormatter.expand_default(self, option)class CustomOptionParser(optparse.OptionParser):def insert_option_group(self, idx, *args, **kwargs):"""Insert an OptionGroup at a given position."""group = self.add_option_group(*args, **kwargs)self.option_groups.pop()self.option_groups.insert(idx, group)return group@propertydef option_list_all(self):"""Get a list of all options, including those in option groups."""res = self.option_list[:]for i in self.option_groups:res.extend(i.option_list)return resclass ConfigOptionParser(CustomOptionParser):"""Custom option parser which updates its defaults by checking theconfiguration files and environmental variables"""isolated = Falsedef __init__(self, *args, **kwargs):self.config = configparser.RawConfigParser()self.name = kwargs.pop('name')self.isolated = kwargs.pop("isolated", False)self.files = self.get_config_files()if self.files:self.config.read(self.files)assert self.nameoptparse.OptionParser.__init__(self, *args, **kwargs)def get_config_files(self):# the files returned by this method will be 
parsed in order with the# first files listed being overridden by later files in standard# ConfigParser fashionconfig_file = os.environ.get('PIP_CONFIG_FILE', False)if config_file == os.devnull:return []# at the base we have any site-wide configurationfiles = list(site_config_files)# per-user configuration nextif not self.isolated:if config_file and os.path.exists(config_file):files.append(config_file)else:# This is the legacy config file, we consider it to be a lower# priority than the new file location.files.append(legacy_config_file)# This is the new config file, we consider it to be a higher# priority than the legacy file.files.append(os.path.join(appdirs.user_config_dir("pip"),config_basename,))# finally virtualenv configuration first trumping othersif running_under_virtualenv():venv_config_file = os.path.join(sys.prefix,config_basename,)if os.path.exists(venv_config_file):files.append(venv_config_file)return filesdef check_default(self, option, key, val):try:return option.check_value(key, val)except optparse.OptionValueError as exc:print("An error occurred during configuration: %s" % exc)sys.exit(3)def _update_defaults(self, defaults):"""Updates the given defaults with values from the config files andthe environ. Does a little special handling for certain types ofoptions (lists)."""# Then go and look for the other sources of configuration:config = {}# 1. config filesfor section in ('global', self.name):config.update(self.normalize_keys(self.get_config_section(section)))# 2. environmental variablesif not self.isolated:config.update(self.normalize_keys(self.get_environ_vars()))# Accumulate complex default state.self.values = optparse.Values(self.defaults)late_eval = set()# Then set the options with those valuesfor key, val in config.items():# ignore empty valuesif not val:continueoption = self.get_option(key)# Ignore options not present in this parser. E.g. 
non-globals put# in [global] by users that want them to apply to all applicable# commands.if option is None:continueif option.action in ('store_true', 'store_false', 'count'):val = strtobool(val)elif option.action == 'append':val = val.split()val = [self.check_default(option, key, v) for v in val]elif option.action == 'callback':late_eval.add(option.dest)opt_str = option.get_opt_string()val = option.convert_value(opt_str, val)# From take_actionargs = option.callback_args or ()kwargs = option.callback_kwargs or {}option.callback(option, opt_str, val, self, *args, **kwargs)else:val = self.check_default(option, key, val)defaults[option.dest] = valfor key in late_eval:defaults[key] = getattr(self.values, key)self.values = Nonereturn defaultsdef normalize_keys(self, items):"""Return a config dictionary with normalized keys regardless ofwhether the keys were specified in environment variables or in configfiles"""normalized = {}for key, val in items:key = key.replace('_', '-')if not key.startswith('--'):key = '--%s' % key # only prefer long optsnormalized[key] = valreturn normalizeddef get_config_section(self, name):"""Get a section of a configuration"""if self.config.has_section(name):return self.config.items(name)return []def get_environ_vars(self):"""Returns a generator with all environmental vars with prefix PIP_"""for key, val in os.environ.items():if _environ_prefix_re.search(key):yield (_environ_prefix_re.sub("", key).lower(), val)def get_default_values(self):"""Overriding to make updating the defaults after instantiation ofthe option parser possible, _update_defaults() does the dirty work."""if not self.process_default_values:# Old, pre-Optik 1.5 behaviour.return optparse.Values(self.defaults)defaults = self._update_defaults(self.defaults.copy()) # oursfor option in self._get_all_options():default = defaults.get(option.dest)if isinstance(default, string_types):opt_str = option.get_opt_string()defaults[option.dest] = option.check_value(opt_str, default)return 
optparse.Values(defaults)def error(self, msg):self.print_usage(sys.stderr)self.exit(2, "%s\n" % msg)
"""Base Command class, and related routines"""from __future__ import absolute_importimport loggingimport osimport sysimport optparseimport warningsfrom pip import cmdoptionsfrom pip.index import PackageFinderfrom pip.locations import running_under_virtualenvfrom pip.download import PipSessionfrom pip.exceptions import (BadCommand, InstallationError, UninstallationError,CommandError, PreviousBuildDirError)from pip.compat import logging_dictConfigfrom pip.baseparser import ConfigOptionParser, UpdatingDefaultsHelpFormatterfrom pip.req import InstallRequirement, parse_requirementsfrom pip.status_codes import (SUCCESS, ERROR, UNKNOWN_ERROR, VIRTUALENV_NOT_FOUND,PREVIOUS_BUILD_DIR_ERROR,)from pip.utils import deprecation, get_prog, normalize_pathfrom pip.utils.logging import IndentingFormatterfrom pip.utils.outdated import pip_version_check__all__ = ['Command']logger = logging.getLogger(__name__)class Command(object):name = Noneusage = Nonehidden = Falselog_streams = ("ext://sys.stdout", "ext://sys.stderr")def __init__(self, isolated=False):parser_kw = {'usage': self.usage,'prog': '%s %s' % (get_prog(), self.name),'formatter': UpdatingDefaultsHelpFormatter(),'add_help_option': False,'name': self.name,'description': self.__doc__,'isolated': isolated,}self.parser = ConfigOptionParser(**parser_kw)# Commands should add options to this option groupoptgroup_name = '%s Options' % self.name.capitalize()self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name)# Add the general optionsgen_opts = cmdoptions.make_option_group(cmdoptions.general_group,self.parser,)self.parser.add_option_group(gen_opts)def _build_session(self, options, retries=None, timeout=None):session = PipSession(cache=(normalize_path(os.path.join(options.cache_dir, "http"))if options.cache_dir else None),retries=retries if retries is not None else options.retries,insecure_hosts=options.trusted_hosts,)# Handle custom ca-bundles from the userif options.cert:session.verify = options.cert# Handle SSL client 
certificateif options.client_cert:session.cert = options.client_cert# Handle timeoutsif options.timeout or timeout:session.timeout = (timeout if timeout is not None else options.timeout)# Handle configured proxiesif options.proxy:session.proxies = {"http": options.proxy,"https": options.proxy,}# Determine if we can prompt the user for authentication or notsession.auth.prompting = not options.no_inputreturn sessiondef parse_args(self, args):# factored out for testabilityreturn self.parser.parse_args(args)def main(self, args):options, args = self.parse_args(args)if options.quiet:if options.quiet == 1:level = "WARNING"if options.quiet == 2:level = "ERROR"else:level = "CRITICAL"elif options.verbose:level = "DEBUG"else:level = "INFO"# The root logger should match the "console" level *unless* we# specified "--log" to send debug logs to a file.root_level = levelif options.log:root_level = "DEBUG"logging_dictConfig({"version": 1,"disable_existing_loggers": False,"filters": {"exclude_warnings": {"()": "pip.utils.logging.MaxLevelFilter","level": logging.WARNING,},},"formatters": {"indent": {"()": IndentingFormatter,"format": "%(message)s",},},"handlers": {"console": {"level": level,"class": "pip.utils.logging.ColorizedStreamHandler","stream": self.log_streams[0],"filters": ["exclude_warnings"],"formatter": "indent",},"console_errors": {"level": "WARNING","class": "pip.utils.logging.ColorizedStreamHandler","stream": self.log_streams[1],"formatter": "indent",},"user_log": {"level": "DEBUG","class": "pip.utils.logging.BetterRotatingFileHandler","filename": options.log or "/dev/null","delay": True,"formatter": "indent",},},"root": {"level": root_level,"handlers": list(filter(None, ["console","console_errors","user_log" if options.log else None,])),},# Disable any logging besides WARNING unless we have DEBUG level# logging enabled. 
These use both pip._vendor and the bare names# for the case where someone unbundles our libraries."loggers": dict((name,{"level": ("WARNING"if level in ["INFO", "ERROR"]else "DEBUG"),},)for name in ["pip._vendor", "distlib", "requests", "urllib3"]),})if sys.version_info[:2] == (2, 6):warnings.warn("Python 2.6 is no longer supported by the Python core team, ""please upgrade your Python. A future version of pip will ""drop support for Python 2.6",deprecation.Python26DeprecationWarning)# TODO: try to get these passing down from the command?# without resorting to os.environ to hold these.if options.no_input:os.environ['PIP_NO_INPUT'] = '1'if options.exists_action:os.environ['PIP_EXISTS_ACTION'] = ' '.join(options.exists_action)if options.require_venv:# If a venv is required check if it can really be foundif not running_under_virtualenv():logger.critical('Could not find an activated virtualenv (required).')sys.exit(VIRTUALENV_NOT_FOUND)try:status = self.run(options, args)# FIXME: all commands should return an exit status# and when it is done, isinstance is not needed anymoreif isinstance(status, int):return statusexcept PreviousBuildDirError as exc:logger.critical(str(exc))logger.debug('Exception information:', exc_info=True)return PREVIOUS_BUILD_DIR_ERRORexcept (InstallationError, UninstallationError, BadCommand) as exc:logger.critical(str(exc))logger.debug('Exception information:', exc_info=True)return ERRORexcept CommandError as exc:logger.critical('ERROR: %s', exc)logger.debug('Exception information:', exc_info=True)return ERRORexcept KeyboardInterrupt:logger.critical('Operation cancelled by user')logger.debug('Exception information:', exc_info=True)return ERRORexcept:logger.critical('Exception:', exc_info=True)return UNKNOWN_ERRORfinally:# Check if we're using the latest version of pip availableif (not options.disable_pip_version_check and notgetattr(options, "no_index", False)):with self._build_session(options,retries=0,timeout=min(5, options.timeout)) as 
session:pip_version_check(session)return SUCCESSclass RequirementCommand(Command):@staticmethoddef populate_requirement_set(requirement_set, args, options, finder,session, name, wheel_cache):"""Marshal cmd line args into a requirement set."""for filename in options.constraints:for req in parse_requirements(filename,constraint=True, finder=finder, options=options,session=session, wheel_cache=wheel_cache):requirement_set.add_requirement(req)for req in args:requirement_set.add_requirement(InstallRequirement.from_line(req, None, isolated=options.isolated_mode,wheel_cache=wheel_cache))for req in options.editables:requirement_set.add_requirement(InstallRequirement.from_editable(req,default_vcs=options.default_vcs,isolated=options.isolated_mode,wheel_cache=wheel_cache))found_req_in_file = Falsefor filename in options.requirements:for req in parse_requirements(filename,finder=finder, options=options, session=session,wheel_cache=wheel_cache):found_req_in_file = Truerequirement_set.add_requirement(req)# If --require-hashes was a line in a requirements file, tell# RequirementSet about it:requirement_set.require_hashes = options.require_hashesif not (args or options.editables or found_req_in_file):opts = {'name': name}if options.find_links:msg = ('You must give at least one requirement to ''%(name)s (maybe you meant "pip %(name)s ''%(links)s"?)' %dict(opts, links=' '.join(options.find_links)))else:msg = ('You must give at least one requirement ''to %(name)s (see "pip help %(name)s")' % opts)logger.warning(msg)def _build_package_finder(self, options, session,platform=None, python_versions=None,abi=None, implementation=None):"""Create a package finder appropriate to this requirement command."""index_urls = [options.index_url] + options.extra_index_urlsif options.no_index:logger.debug('Ignoring indexes: %s', ','.join(index_urls))index_urls = []return 
PackageFinder(find_links=options.find_links,format_control=options.format_control,index_urls=index_urls,trusted_hosts=options.trusted_hosts,allow_all_prereleases=options.pre,process_dependency_links=options.process_dependency_links,session=session,platform=platform,versions=python_versions,abi=abi,implementation=implementation,)
# coding: utf8"""webencodings.x_user_defined~~~~~~~~~~~~~~~~~~~~~~~~~~~An implementation of the x-user-defined encoding.:copyright: Copyright 2012 by Simon Sapin:license: BSD, see LICENSE for details."""from __future__ import unicode_literalsimport codecs### Codec APIsclass Codec(codecs.Codec):def encode(self, input, errors='strict'):return codecs.charmap_encode(input, errors, encoding_table)def decode(self, input, errors='strict'):return codecs.charmap_decode(input, errors, decoding_table)class IncrementalEncoder(codecs.IncrementalEncoder):def encode(self, input, final=False):return codecs.charmap_encode(input, self.errors, encoding_table)[0]class IncrementalDecoder(codecs.IncrementalDecoder):def decode(self, input, final=False):return codecs.charmap_decode(input, self.errors, decoding_table)[0]class StreamWriter(Codec, codecs.StreamWriter):passclass StreamReader(Codec, codecs.StreamReader):pass### encodings module APIcodec_info = codecs.CodecInfo(name='x-user-defined',encode=Codec().encode,decode=Codec().decode,incrementalencoder=IncrementalEncoder,incrementaldecoder=IncrementalDecoder,streamreader=StreamReader,streamwriter=StreamWriter,)### Decoding Table# Python 3:# for c in range(256): print(' %r' % chr(c if c < 128 else c + 0xF700))decoding_table = ('\x00''\x01''\x02''\x03''\x04''\x05''\x06''\x07''\x08''\t''\n''\x0b''\x0c''\r''\x0e''\x0f''\x10''\x11''\x12''\x13''\x14''\x15''\x16''\x17''\x18''\x19''\x1a''\x1b''\x1c''\x1d''\x1e''\x1f'' 
''!''"''#''$''%''&'"'"'('')''*''+'',''-''.''/''0''1''2''3''4''5''6''7''8''9'':'';''<''=''>''?''@''A''B''C''D''E''F''G''H''I''J''K''L''M''N''O''P''Q''R''S''T''U''V''W''X''Y''Z''[''\\'']''^''_''`''a''b''c''d''e''f''g''h''i''j''k''l''m''n''o''p''q''r''s''t''u''v''w''x''y''z''{''|''}''~''\x7f''\uf780''\uf781''\uf782''\uf783''\uf784''\uf785''\uf786''\uf787''\uf788''\uf789''\uf78a''\uf78b''\uf78c''\uf78d''\uf78e''\uf78f''\uf790''\uf791''\uf792''\uf793''\uf794''\uf795''\uf796''\uf797''\uf798''\uf799''\uf79a''\uf79b''\uf79c''\uf79d''\uf79e''\uf79f''\uf7a0''\uf7a1''\uf7a2''\uf7a3''\uf7a4''\uf7a5''\uf7a6''\uf7a7''\uf7a8''\uf7a9''\uf7aa''\uf7ab''\uf7ac''\uf7ad''\uf7ae''\uf7af''\uf7b0''\uf7b1''\uf7b2''\uf7b3''\uf7b4''\uf7b5''\uf7b6''\uf7b7''\uf7b8''\uf7b9''\uf7ba''\uf7bb''\uf7bc''\uf7bd''\uf7be''\uf7bf''\uf7c0''\uf7c1''\uf7c2''\uf7c3''\uf7c4''\uf7c5''\uf7c6''\uf7c7''\uf7c8''\uf7c9''\uf7ca''\uf7cb''\uf7cc''\uf7cd''\uf7ce''\uf7cf''\uf7d0''\uf7d1''\uf7d2''\uf7d3''\uf7d4''\uf7d5''\uf7d6''\uf7d7''\uf7d8''\uf7d9''\uf7da''\uf7db''\uf7dc''\uf7dd''\uf7de''\uf7df''\uf7e0''\uf7e1''\uf7e2''\uf7e3''\uf7e4''\uf7e5''\uf7e6''\uf7e7''\uf7e8''\uf7e9''\uf7ea''\uf7eb''\uf7ec''\uf7ed''\uf7ee''\uf7ef''\uf7f0''\uf7f1''\uf7f2''\uf7f3''\uf7f4''\uf7f5''\uf7f6''\uf7f7''\uf7f8''\uf7f9''\uf7fa''\uf7fb''\uf7fc''\uf7fd''\uf7fe''\uf7ff')### Encoding tableencoding_table = codecs.charmap_build(decoding_table)
# coding: utf8
"""
    webencodings.tests
    ~~~~~~~~~~~~~~~~~~

    A basic test suite for Encoding.

    :copyright: Copyright 2012 by Simon Sapin
    :license: BSD, see LICENSE for details.

"""

from __future__ import unicode_literals

from . import (lookup, LABELS, decode, encode, iter_decode, iter_encode,
               IncrementalDecoder, IncrementalEncoder, UTF8)


def assert_raises(exception, function, *args, **kwargs):
    """Assert that ``function(*args, **kwargs)`` raises ``exception``."""
    try:
        function(*args, **kwargs)
    except exception:
        return
    else:  # pragma: no cover
        raise AssertionError('Did not raise %s.' % exception)


def test_labels():
    assert lookup('utf-8').name == 'utf-8'
    assert lookup('Utf-8').name == 'utf-8'
    assert lookup('UTF-8').name == 'utf-8'
    assert lookup('utf8').name == 'utf-8'
    assert lookup('utf8').name == 'utf-8'
    assert lookup('utf8 ').name == 'utf-8'
    assert lookup(' \r\nutf8\t').name == 'utf-8'
    assert lookup('u8') is None  # Python label.
    # BUGFIX(mojibake): this label must contain U+00A0 (no-break space) --
    # an ASCII space would be stripped and the lookup would succeed.
    assert lookup('utf-8\u00a0') is None  # Non-ASCII white space.
    assert lookup('US-ASCII').name == 'windows-1252'
    assert lookup('iso-8859-1').name == 'windows-1252'
    assert lookup('latin1').name == 'windows-1252'
    assert lookup('LATIN1').name == 'windows-1252'
    assert lookup('latin-1') is None
    assert lookup('LATİN1') is None  # ASCII-only case insensitivity.


def test_all_labels():
    for label in LABELS:
        assert decode(b'', label) == ('', lookup(label))
        assert encode('', label) == b''
        for repeat in [0, 1, 12]:
            output, _ = iter_decode([b''] * repeat, label)
            assert list(output) == []
            assert list(iter_encode([''] * repeat, label)) == b''.join([])
        decoder = IncrementalDecoder(label)
        assert decoder.decode(b'') == ''
        assert decoder.decode(b'', final=True) == ''
        encoder = IncrementalEncoder(label)
        assert encoder.encode('') == b''
        assert encoder.encode('', final=True) == b''
    # All encoding names are valid labels too:
    for name in set(LABELS.values()):
        assert lookup(name).name == name


def test_invalid_label():
    assert_raises(LookupError, decode, b'\xEF\xBB\xBF\xc3\xa9', 'invalid')
    assert_raises(LookupError, encode, 'é', 'invalid')
    assert_raises(LookupError, iter_decode, [], 'invalid')
    assert_raises(LookupError, iter_encode, [], 'invalid')
    assert_raises(LookupError, IncrementalDecoder, 'invalid')
    assert_raises(LookupError, IncrementalEncoder, 'invalid')


def test_decode():
    assert decode(b'\x80', 'latin1') == ('€', lookup('latin1'))
    assert decode(b'\x80', lookup('latin1')) == ('€', lookup('latin1'))
    assert decode(b'\xc3\xa9', 'utf8') == ('é', lookup('utf8'))
    assert decode(b'\xc3\xa9', UTF8) == ('é', lookup('utf8'))
    # BUGFIX(mojibake): 'ascii' is an alias of windows-1252, so C3 A9 decodes
    # to 'Ã©' (the source's 'é' was a double-decoding artifact).
    assert decode(b'\xc3\xa9', 'ascii') == ('Ã©', lookup('ascii'))
    assert decode(b'\xEF\xBB\xBF\xc3\xa9', 'ascii') == ('é', lookup('utf8'))  # UTF-8 with BOM

    assert decode(b'\xFE\xFF\x00\xe9', 'ascii') == ('é', lookup('utf-16be'))  # UTF-16-BE with BOM
    assert decode(b'\xFF\xFE\xe9\x00', 'ascii') == ('é', lookup('utf-16le'))  # UTF-16-LE with BOM
    assert decode(b'\xFE\xFF\xe9\x00', 'ascii') == ('\ue900', lookup('utf-16be'))
    assert decode(b'\xFF\xFE\x00\xe9', 'ascii') == ('\ue900', lookup('utf-16le'))

    assert decode(b'\x00\xe9', 'UTF-16BE') == ('é', lookup('utf-16be'))
    assert decode(b'\xe9\x00', 'UTF-16LE') == ('é', lookup('utf-16le'))
    assert decode(b'\xe9\x00', 'UTF-16') == ('é', lookup('utf-16le'))

    assert decode(b'\xe9\x00', 'UTF-16BE') == ('\ue900', lookup('utf-16be'))
    assert decode(b'\x00\xe9', 'UTF-16LE') == ('\ue900', lookup('utf-16le'))
    assert decode(b'\x00\xe9', 'UTF-16') == ('\ue900', lookup('utf-16le'))


def test_encode():
    assert encode('é', 'latin1') == b'\xe9'
    assert encode('é', 'utf8') == b'\xc3\xa9'
    assert encode('é', 'utf8') == b'\xc3\xa9'
    assert encode('é', 'utf-16') == b'\xe9\x00'
    assert encode('é', 'utf-16le') == b'\xe9\x00'
    assert encode('é', 'utf-16be') == b'\x00\xe9'


def test_iter_decode():
    def iter_decode_to_string(input, fallback_encoding):
        output, _encoding = iter_decode(input, fallback_encoding)
        return ''.join(output)
    assert iter_decode_to_string([], 'latin1') == ''
    assert iter_decode_to_string([b''], 'latin1') == ''
    assert iter_decode_to_string([b'\xe9'], 'latin1') == 'é'
    assert iter_decode_to_string([b'hello'], 'latin1') == 'hello'
    assert iter_decode_to_string([b'he', b'llo'], 'latin1') == 'hello'
    assert iter_decode_to_string([b'hell', b'o'], 'latin1') == 'hello'
    # BUGFIX(mojibake): windows-1252 decoding of C3 A9 is 'Ã©', not 'é'.
    assert iter_decode_to_string([b'\xc3\xa9'], 'latin1') == 'Ã©'
    assert iter_decode_to_string([b'\xEF\xBB\xBF\xc3\xa9'], 'latin1') == 'é'
    assert iter_decode_to_string([
        b'\xEF\xBB\xBF', b'\xc3', b'\xa9'], 'latin1') == 'é'
    assert iter_decode_to_string([
        b'\xEF\xBB\xBF', b'a', b'\xc3'], 'latin1') == 'a\uFFFD'
    assert iter_decode_to_string([
        b'', b'\xEF', b'', b'', b'\xBB\xBF\xc3', b'\xa9'], 'latin1') == 'é'
    assert iter_decode_to_string([b'\xEF\xBB\xBF'], 'latin1') == ''
    # BUGFIX(mojibake): a truncated BOM decodes to 'ï»' in windows-1252.
    assert iter_decode_to_string([b'\xEF\xBB'], 'latin1') == 'ï»'
    assert iter_decode_to_string([b'\xFE\xFF\x00\xe9'], 'latin1') == 'é'
    assert iter_decode_to_string([b'\xFF\xFE\xe9\x00'], 'latin1') == 'é'
    assert iter_decode_to_string([
        b'', b'\xFF', b'', b'', b'\xFE\xe9', b'\x00'], 'latin1') == 'é'
    assert iter_decode_to_string([
        b'', b'h\xe9', b'llo'], 'x-user-defined') == 'h\uF7E9llo'


def test_iter_encode():
    assert b''.join(iter_encode([], 'latin1')) == b''
    assert b''.join(iter_encode([''], 'latin1')) == b''
    assert b''.join(iter_encode(['é'], 'latin1')) == b'\xe9'
    assert b''.join(iter_encode(['', 'é', '', ''], 'latin1')) == b'\xe9'
    assert b''.join(iter_encode(['', 'é', '', ''], 'utf-16')) == b'\xe9\x00'
    assert b''.join(iter_encode(['', 'é', '', ''], 'utf-16le')) == b'\xe9\x00'
    assert b''.join(iter_encode(['', 'é', '', ''], 'utf-16be')) == b'\x00\xe9'
    assert b''.join(iter_encode([
        '', 'h\uF7E9', '', 'llo'], 'x-user-defined')) == b'h\xe9llo'


def test_x_user_defined():
    # BUGFIX: the original rebound encoded/decoded to b'aa'/'aa' *before*
    # asserting, so the non-trivial fixture below was never exercised.
    # Both datasets are now asserted.
    encoded = b'2,\x0c\x0b\x1aO\xd9#\xcb\x0f\xc9\xbbt\xcf\xa8\xca'
    decoded = '2,\x0c\x0b\x1aO\uf7d9#\uf7cb\x0f\uf7c9\uf7bbt\uf7cf\uf7a8\uf7ca'
    assert decode(encoded, 'x-user-defined') == (decoded, lookup('x-user-defined'))
    assert encode(decoded, 'x-user-defined') == encoded
    encoded = b'aa'
    decoded = 'aa'
    assert decode(encoded, 'x-user-defined') == (decoded, lookup('x-user-defined'))
    assert encode(decoded, 'x-user-defined') == encoded
"""webencodings.mklabels~~~~~~~~~~~~~~~~~~~~~Regenarate the webencodings.labels module.:copyright: Copyright 2012 by Simon Sapin:license: BSD, see LICENSE for details."""import jsontry:from urllib import urlopenexcept ImportError:from urllib.request import urlopendef assert_lower(string):assert string == string.lower()return stringdef generate(url):parts = ['''\"""webencodings.labels~~~~~~~~~~~~~~~~~~~Map encoding labels to their name.:copyright: Copyright 2012 by Simon Sapin:license: BSD, see LICENSE for details."""# XXX Do not edit!# This file is automatically generated by mklabels.pyLABELS = {''']labels = [(repr(assert_lower(label)).lstrip('u'),repr(encoding['name']).lstrip('u'))for category in json.loads(urlopen(url).read().decode('ascii'))for encoding in category['encodings']for label in encoding['labels']]max_len = max(len(label) for label, name in labels)parts.extend(' %s:%s %s,\n' % (label, ' ' * (max_len - len(label)), name)for label, name in labels)parts.append('}')return ''.join(parts)if __name__ == '__main__':print(generate('http://encoding.spec.whatwg.org/encodings.json'))
"""webencodings.labels~~~~~~~~~~~~~~~~~~~Map encoding labels to their name.:copyright: Copyright 2012 by Simon Sapin:license: BSD, see LICENSE for details."""# XXX Do not edit!# This file is automatically generated by mklabels.pyLABELS = {'unicode-1-1-utf-8': 'utf-8','utf-8': 'utf-8','utf8': 'utf-8','866': 'ibm866','cp866': 'ibm866','csibm866': 'ibm866','ibm866': 'ibm866','csisolatin2': 'iso-8859-2','iso-8859-2': 'iso-8859-2','iso-ir-101': 'iso-8859-2','iso8859-2': 'iso-8859-2','iso88592': 'iso-8859-2','iso_8859-2': 'iso-8859-2','iso_8859-2:1987': 'iso-8859-2','l2': 'iso-8859-2','latin2': 'iso-8859-2','csisolatin3': 'iso-8859-3','iso-8859-3': 'iso-8859-3','iso-ir-109': 'iso-8859-3','iso8859-3': 'iso-8859-3','iso88593': 'iso-8859-3','iso_8859-3': 'iso-8859-3','iso_8859-3:1988': 'iso-8859-3','l3': 'iso-8859-3','latin3': 'iso-8859-3','csisolatin4': 'iso-8859-4','iso-8859-4': 'iso-8859-4','iso-ir-110': 'iso-8859-4','iso8859-4': 'iso-8859-4','iso88594': 'iso-8859-4','iso_8859-4': 'iso-8859-4','iso_8859-4:1988': 'iso-8859-4','l4': 'iso-8859-4','latin4': 'iso-8859-4','csisolatincyrillic': 'iso-8859-5','cyrillic': 'iso-8859-5','iso-8859-5': 'iso-8859-5','iso-ir-144': 'iso-8859-5','iso8859-5': 'iso-8859-5','iso88595': 'iso-8859-5','iso_8859-5': 'iso-8859-5','iso_8859-5:1988': 'iso-8859-5','arabic': 'iso-8859-6','asmo-708': 'iso-8859-6','csiso88596e': 'iso-8859-6','csiso88596i': 'iso-8859-6','csisolatinarabic': 'iso-8859-6','ecma-114': 'iso-8859-6','iso-8859-6': 'iso-8859-6','iso-8859-6-e': 'iso-8859-6','iso-8859-6-i': 'iso-8859-6','iso-ir-127': 'iso-8859-6','iso8859-6': 'iso-8859-6','iso88596': 'iso-8859-6','iso_8859-6': 'iso-8859-6','iso_8859-6:1987': 'iso-8859-6','csisolatingreek': 'iso-8859-7','ecma-118': 'iso-8859-7','elot_928': 'iso-8859-7','greek': 'iso-8859-7','greek8': 'iso-8859-7','iso-8859-7': 'iso-8859-7','iso-ir-126': 'iso-8859-7','iso8859-7': 'iso-8859-7','iso88597': 'iso-8859-7','iso_8859-7': 'iso-8859-7','iso_8859-7:1987': 'iso-8859-7','sun_eu_greek': 
'iso-8859-7','csiso88598e': 'iso-8859-8','csisolatinhebrew': 'iso-8859-8','hebrew': 'iso-8859-8','iso-8859-8': 'iso-8859-8','iso-8859-8-e': 'iso-8859-8','iso-ir-138': 'iso-8859-8','iso8859-8': 'iso-8859-8','iso88598': 'iso-8859-8','iso_8859-8': 'iso-8859-8','iso_8859-8:1988': 'iso-8859-8','visual': 'iso-8859-8','csiso88598i': 'iso-8859-8-i','iso-8859-8-i': 'iso-8859-8-i','logical': 'iso-8859-8-i','csisolatin6': 'iso-8859-10','iso-8859-10': 'iso-8859-10','iso-ir-157': 'iso-8859-10','iso8859-10': 'iso-8859-10','iso885910': 'iso-8859-10','l6': 'iso-8859-10','latin6': 'iso-8859-10','iso-8859-13': 'iso-8859-13','iso8859-13': 'iso-8859-13','iso885913': 'iso-8859-13','iso-8859-14': 'iso-8859-14','iso8859-14': 'iso-8859-14','iso885914': 'iso-8859-14','csisolatin9': 'iso-8859-15','iso-8859-15': 'iso-8859-15','iso8859-15': 'iso-8859-15','iso885915': 'iso-8859-15','iso_8859-15': 'iso-8859-15','l9': 'iso-8859-15','iso-8859-16': 'iso-8859-16','cskoi8r': 'koi8-r','koi': 'koi8-r','koi8': 'koi8-r','koi8-r': 'koi8-r','koi8_r': 'koi8-r','koi8-u': 'koi8-u','csmacintosh': 'macintosh','mac': 'macintosh','macintosh': 'macintosh','x-mac-roman': 'macintosh','dos-874': 'windows-874','iso-8859-11': 'windows-874','iso8859-11': 'windows-874','iso885911': 'windows-874','tis-620': 'windows-874','windows-874': 'windows-874','cp1250': 'windows-1250','windows-1250': 'windows-1250','x-cp1250': 'windows-1250','cp1251': 'windows-1251','windows-1251': 'windows-1251','x-cp1251': 'windows-1251','ansi_x3.4-1968': 'windows-1252','ascii': 'windows-1252','cp1252': 'windows-1252','cp819': 'windows-1252','csisolatin1': 'windows-1252','ibm819': 'windows-1252','iso-8859-1': 'windows-1252','iso-ir-100': 'windows-1252','iso8859-1': 'windows-1252','iso88591': 'windows-1252','iso_8859-1': 'windows-1252','iso_8859-1:1987': 'windows-1252','l1': 'windows-1252','latin1': 'windows-1252','us-ascii': 'windows-1252','windows-1252': 'windows-1252','x-cp1252': 'windows-1252','cp1253': 'windows-1253','windows-1253': 
'windows-1253','x-cp1253': 'windows-1253','cp1254': 'windows-1254','csisolatin5': 'windows-1254','iso-8859-9': 'windows-1254','iso-ir-148': 'windows-1254','iso8859-9': 'windows-1254','iso88599': 'windows-1254','iso_8859-9': 'windows-1254','iso_8859-9:1989': 'windows-1254','l5': 'windows-1254','latin5': 'windows-1254','windows-1254': 'windows-1254','x-cp1254': 'windows-1254','cp1255': 'windows-1255','windows-1255': 'windows-1255','x-cp1255': 'windows-1255','cp1256': 'windows-1256','windows-1256': 'windows-1256','x-cp1256': 'windows-1256','cp1257': 'windows-1257','windows-1257': 'windows-1257','x-cp1257': 'windows-1257','cp1258': 'windows-1258','windows-1258': 'windows-1258','x-cp1258': 'windows-1258','x-mac-cyrillic': 'x-mac-cyrillic','x-mac-ukrainian': 'x-mac-cyrillic','chinese': 'gbk','csgb2312': 'gbk','csiso58gb231280': 'gbk','gb2312': 'gbk','gb_2312': 'gbk','gb_2312-80': 'gbk','gbk': 'gbk','iso-ir-58': 'gbk','x-gbk': 'gbk','gb18030': 'gb18030','hz-gb-2312': 'hz-gb-2312','big5': 'big5','big5-hkscs': 'big5','cn-big5': 'big5','csbig5': 'big5','x-x-big5': 'big5','cseucpkdfmtjapanese': 'euc-jp','euc-jp': 'euc-jp','x-euc-jp': 'euc-jp','csiso2022jp': 'iso-2022-jp','iso-2022-jp': 'iso-2022-jp','csshiftjis': 'shift_jis','ms_kanji': 'shift_jis','shift-jis': 'shift_jis','shift_jis': 'shift_jis','sjis': 'shift_jis','windows-31j': 'shift_jis','x-sjis': 'shift_jis','cseuckr': 'euc-kr','csksc56011987': 'euc-kr','euc-kr': 'euc-kr','iso-ir-149': 'euc-kr','korean': 'euc-kr','ks_c_5601-1987': 'euc-kr','ks_c_5601-1989': 'euc-kr','ksc5601': 'euc-kr','ksc_5601': 'euc-kr','windows-949': 'euc-kr','csiso2022kr': 'iso-2022-kr','iso-2022-kr': 'iso-2022-kr','utf-16be': 'utf-16be','utf-16': 'utf-16le','utf-16le': 'utf-16le','x-user-defined': 'x-user-defined',}
# coding: utf8"""webencodings~~~~~~~~~~~~This is a Python implementation of the `WHATWG Encoding standard<http://encoding.spec.whatwg.org/>`. See README for details.:copyright: Copyright 2012 by Simon Sapin:license: BSD, see LICENSE for details."""from __future__ import unicode_literalsimport codecsfrom .labels import LABELSVERSION = '0.5'# Some names in Encoding are not valid Python aliases. Remap these.PYTHON_NAMES = {'iso-8859-8-i': 'iso-8859-8','x-mac-cyrillic': 'mac-cyrillic','macintosh': 'mac-roman','windows-874': 'cp874'}CACHE = {}def ascii_lower(string):r"""Transform (only) ASCII letters to lower case: A-Z is mapped to a-z.:param string: An Unicode string.:returns: A new Unicode string.This is used for `ASCII case-insensitive<http://encoding.spec.whatwg.org/#ascii-case-insensitive>`_matching of encoding labels.The same matching is also used, among other things,for `CSS keywords <http://dev.w3.org/csswg/css-values/#keywords>`_.This is different from the :meth:`~py:str.lower` method of Unicode stringswhich also affect non-ASCII characters,sometimes mapping them into the ASCII range:>>> keyword = u'Bac\N{KELVIN SIGN}ground'>>> assert keyword.lower() == u'background'>>> assert ascii_lower(keyword) != keyword.lower()>>> assert ascii_lower(keyword) == u'bac\N{KELVIN SIGN}ground'"""# This turns out to be faster than unicode.translate()return string.encode('utf8').lower().decode('utf8')def lookup(label):"""Look for an encoding by its label.This is the spec’s `get an encoding<http://encoding.spec.whatwg.org/#concept-encoding-get>`_ algorithm.Supported labels are listed there.:param label: A string.:returns:An :class:`Encoding` object, or :obj:`None` for an unknown label."""# Only strip ASCII whitespace: U+0009, U+000A, U+000C, U+000D, and U+0020.label = ascii_lower(label.strip('\t\n\f\r '))name = LABELS.get(label)if name is None:return Noneencoding = CACHE.get(name)if encoding is None:if name == 'x-user-defined':from .x_user_defined import codec_infoelse:python_name 
= PYTHON_NAMES.get(name, name)# Any python_name value that gets to here should be valid.codec_info = codecs.lookup(python_name)encoding = Encoding(name, codec_info)CACHE[name] = encodingreturn encodingdef _get_encoding(encoding_or_label):"""Accept either an encoding object or label.:param encoding: An :class:`Encoding` object or a label string.:returns: An :class:`Encoding` object.:raises: :exc:`~exceptions.LookupError` for an unknown label."""if hasattr(encoding_or_label, 'codec_info'):return encoding_or_labelencoding = lookup(encoding_or_label)if encoding is None:raise LookupError('Unknown encoding label: %r' % encoding_or_label)return encodingclass Encoding(object):"""Reresents a character encoding such as UTF-8,that can be used for decoding or encoding... attribute:: nameCanonical name of the encoding.. attribute:: codec_infoThe actual implementation of the encoding,a stdlib :class:`~codecs.CodecInfo` object.See :func:`codecs.register`."""def __init__(self, name, codec_info):self.name = nameself.codec_info = codec_infodef __repr__(self):return '<Encoding %s>' % self.name#: The UTF-8 encoding. Should be used for new content and formats.UTF8 = lookup('utf-8')_UTF16LE = lookup('utf-16le')_UTF16BE = lookup('utf-16be')def decode(input, fallback_encoding, errors='replace'):"""Decode a single string.:param input: A byte string:param fallback_encoding:An :class:`Encoding` object or a label string.The encoding to use if :obj:`input` does note have a BOM.:param errors: Type of error handling. 
See :func:`codecs.register`.:raises: :exc:`~exceptions.LookupError` for an unknown encoding label.:return:A ``(output, encoding)`` tuple of an Unicode stringand an :obj:`Encoding`."""# Fail early if `encoding` is an invalid label.fallback_encoding = _get_encoding(fallback_encoding)bom_encoding, input = _detect_bom(input)encoding = bom_encoding or fallback_encodingreturn encoding.codec_info.decode(input, errors)[0], encodingdef _detect_bom(input):"""Return (bom_encoding, input), with any BOM removed from the input."""if input.startswith(b'\xFF\xFE'):return _UTF16LE, input[2:]if input.startswith(b'\xFE\xFF'):return _UTF16BE, input[2:]if input.startswith(b'\xEF\xBB\xBF'):return UTF8, input[3:]return None, inputdef encode(input, encoding=UTF8, errors='strict'):"""Encode a single string.:param input: An Unicode string.:param encoding: An :class:`Encoding` object or a label string.:param errors: Type of error handling. See :func:`codecs.register`.:raises: :exc:`~exceptions.LookupError` for an unknown encoding label.:return: A byte string."""return _get_encoding(encoding).codec_info.encode(input, errors)[0]def iter_decode(input, fallback_encoding, errors='replace'):""""Pull"-based decoder.:param input:An iterable of byte strings.The input is first consumed just enough to determine the encodingbased on the precense of a BOM,then consumed on demand when the return value is.:param fallback_encoding:An :class:`Encoding` object or a label string.The encoding to use if :obj:`input` does note have a BOM.:param errors: Type of error handling. 
See :func:`codecs.register`.:raises: :exc:`~exceptions.LookupError` for an unknown encoding label.:returns:An ``(output, encoding)`` tuple.:obj:`output` is an iterable of Unicode strings,:obj:`encoding` is the :obj:`Encoding` that is being used."""decoder = IncrementalDecoder(fallback_encoding, errors)generator = _iter_decode_generator(input, decoder)encoding = next(generator)return generator, encodingdef _iter_decode_generator(input, decoder):"""Return a generator that first yields the :obj:`Encoding`,then yields output chukns as Unicode strings."""decode = decoder.decodeinput = iter(input)for chunck in input:output = decode(chunck)if output:assert decoder.encoding is not Noneyield decoder.encodingyield outputbreakelse:# Input exhausted without determining the encodingoutput = decode(b'', final=True)assert decoder.encoding is not Noneyield decoder.encodingif output:yield outputreturnfor chunck in input:output = decode(chunck)if output:yield outputoutput = decode(b'', final=True)if output:yield outputdef iter_encode(input, encoding=UTF8, errors='strict'):"""“Pull”-based encoder.:param input: An iterable of Unicode strings.:param encoding: An :class:`Encoding` object or a label string.:param errors: Type of error handling. See :func:`codecs.register`.:raises: :exc:`~exceptions.LookupError` for an unknown encoding label.:returns: An iterable of byte strings."""# Fail early if `encoding` is an invalid label.encode = IncrementalEncoder(encoding, errors).encodereturn _iter_encode_generator(input, encode)def _iter_encode_generator(input, encode):for chunck in input:output = encode(chunck)if output:yield outputoutput = encode('', final=True)if output:yield outputclass IncrementalDecoder(object):"""“Push”-based decoder.:param fallback_encoding:An :class:`Encoding` object or a label string.The encoding to use if :obj:`input` does note have a BOM.:param errors: Type of error handling. 
See :func:`codecs.register`.:raises: :exc:`~exceptions.LookupError` for an unknown encoding label."""def __init__(self, fallback_encoding, errors='replace'):# Fail early if `encoding` is an invalid label.self._fallback_encoding = _get_encoding(fallback_encoding)self._errors = errorsself._buffer = b''self._decoder = None#: The actual :class:`Encoding` that is being used,#: or :obj:`None` if that is not determined yet.#: (Ie. if there is not enough input yet to determine#: if there is a BOM.)self.encoding = None # Not known yet.def decode(self, input, final=False):"""Decode one chunk of the input.:param input: A byte string.:param final:Indicate that no more input is available.Must be :obj:`True` if this is the last call.:returns: An Unicode string."""decoder = self._decoderif decoder is not None:return decoder(input, final)input = self._buffer + inputencoding, input = _detect_bom(input)if encoding is None:if len(input) < 3 and not final: # Not enough data yet.self._buffer = inputreturn ''else: # No BOMencoding = self._fallback_encodingdecoder = encoding.codec_info.incrementaldecoder(self._errors).decodeself._decoder = decoderself.encoding = encodingreturn decoder(input, final)class IncrementalEncoder(object):"""“Push”-based encoder.:param encoding: An :class:`Encoding` object or a label string.:param errors: Type of error handling. See :func:`codecs.register`.:raises: :exc:`~exceptions.LookupError` for an unknown encoding label... method:: encode(input, final=False):param input: An Unicode string.:param final:Indicate that no more input is available.Must be :obj:`True` if this is the last call.:returns: A byte string."""def __init__(self, encoding=UTF8, errors='strict'):encoding = _get_encoding(encoding)self.encode = encoding.codec_info.incrementalencoder(errors).encode
"""Utilities for writing code that runs on Python 2 and 3"""# Copyright (c) 2010-2015 Benjamin Peterson## Permission is hereby granted, free of charge, to any person obtaining a copy# of this software and associated documentation files (the "Software"), to deal# in the Software without restriction, including without limitation the rights# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell# copies of the Software, and to permit persons to whom the Software is# furnished to do so, subject to the following conditions:## The above copyright notice and this permission notice shall be included in all# copies or substantial portions of the Software.## THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE# SOFTWARE.from __future__ import absolute_importimport functoolsimport itertoolsimport operatorimport sysimport types__author__ = "Benjamin Peterson <benjamin@python.org>"__version__ = "1.10.0"# Useful for very coarse version differentiation.PY2 = sys.version_info[0] == 2PY3 = sys.version_info[0] == 3PY34 = sys.version_info[0:2] >= (3, 4)if PY3:string_types = str,integer_types = int,class_types = type,text_type = strbinary_type = bytesMAXSIZE = sys.maxsizeelse:string_types = basestring,integer_types = (int, long)class_types = (type, types.ClassType)text_type = unicodebinary_type = strif sys.platform.startswith("java"):# Jython always uses 32 bits.MAXSIZE = int((1 << 31) - 1)else:# It's possible to have sizeof(long) != sizeof(Py_ssize_t).class X(object):def __len__(self):return 1 << 31try:len(X())except OverflowError:# 32-bitMAXSIZE = int((1 << 31) - 
1)else:# 64-bitMAXSIZE = int((1 << 63) - 1)del Xdef _add_doc(func, doc):"""Add documentation to a function."""func.__doc__ = docdef _import_module(name):"""Import module, returning the module after the last dot."""__import__(name)return sys.modules[name]class _LazyDescr(object):def __init__(self, name):self.name = namedef __get__(self, obj, tp):result = self._resolve()setattr(obj, self.name, result) # Invokes __set__.try:# This is a bit ugly, but it avoids running this again by# removing this descriptor.delattr(obj.__class__, self.name)except AttributeError:passreturn resultclass MovedModule(_LazyDescr):def __init__(self, name, old, new=None):super(MovedModule, self).__init__(name)if PY3:if new is None:new = nameself.mod = newelse:self.mod = olddef _resolve(self):return _import_module(self.mod)def __getattr__(self, attr):_module = self._resolve()value = getattr(_module, attr)setattr(self, attr, value)return valueclass _LazyModule(types.ModuleType):def __init__(self, name):super(_LazyModule, self).__init__(name)self.__doc__ = self.__class__.__doc__def __dir__(self):attrs = ["__doc__", "__name__"]attrs += [attr.name for attr in self._moved_attributes]return attrs# Subclasses should override this_moved_attributes = []class MovedAttribute(_LazyDescr):def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):super(MovedAttribute, self).__init__(name)if PY3:if new_mod is None:new_mod = nameself.mod = new_modif new_attr is None:if old_attr is None:new_attr = nameelse:new_attr = old_attrself.attr = new_attrelse:self.mod = old_modif old_attr is None:old_attr = nameself.attr = old_attrdef _resolve(self):module = _import_module(self.mod)return getattr(module, self.attr)class _SixMetaPathImporter(object):"""A meta path importer to import six.moves and its submodules.This class implements a PEP302 finder and loader. 
It should be compatiblewith Python 2.5 and all existing versions of Python3"""def __init__(self, six_module_name):self.name = six_module_nameself.known_modules = {}def _add_module(self, mod, *fullnames):for fullname in fullnames:self.known_modules[self.name + "." + fullname] = moddef _get_module(self, fullname):return self.known_modules[self.name + "." + fullname]def find_module(self, fullname, path=None):if fullname in self.known_modules:return selfreturn Nonedef __get_module(self, fullname):try:return self.known_modules[fullname]except KeyError:raise ImportError("This loader does not know module " + fullname)def load_module(self, fullname):try:# in case of a reloadreturn sys.modules[fullname]except KeyError:passmod = self.__get_module(fullname)if isinstance(mod, MovedModule):mod = mod._resolve()else:mod.__loader__ = selfsys.modules[fullname] = modreturn moddef is_package(self, fullname):"""Return true, if the named module is a package.We need this method to get correct spec objects withPython 3.4 (see PEP451)"""return hasattr(self.__get_module(fullname), "__path__")def get_code(self, fullname):"""Return NoneRequired, if is_package is implemented"""self.__get_module(fullname) # eventually raises ImportErrorreturn Noneget_source = get_code # same as get_code_importer = _SixMetaPathImporter(__name__)class _MovedItems(_LazyModule):"""Lazy loading of moved objects"""__path__ = [] # mark as package_moved_attributes = [MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),MovedAttribute("intern", "__builtin__", "sys"),MovedAttribute("map", "itertools", "builtins", "imap", "map"),MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),MovedAttribute("range", "__builtin__", 
"builtins", "xrange", "range"),MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),MovedAttribute("reduce", "__builtin__", "functools"),MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),MovedAttribute("StringIO", "StringIO", "io"),MovedAttribute("UserDict", "UserDict", "collections"),MovedAttribute("UserList", "UserList", "collections"),MovedAttribute("UserString", "UserString", "collections"),MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),MovedModule("builtins", "__builtin__"),MovedModule("configparser", "ConfigParser"),MovedModule("copyreg", "copy_reg"),MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),MovedModule("http_cookies", "Cookie", "http.cookies"),MovedModule("html_entities", "htmlentitydefs", "html.entities"),MovedModule("html_parser", "HTMLParser", "html.parser"),MovedModule("http_client", "httplib", "http.client"),MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),MovedModule("cPickle", "cPickle", "pickle"),MovedModule("queue", "Queue"),MovedModule("reprlib", "repr"),MovedModule("socketserver", "SocketServer"),MovedModule("_thread", "thread", "_thread"),MovedModule("tkinter", "Tkinter"),MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),MovedModule("tkinter_filedialog", 
"FileDialog", "tkinter.filedialog"),MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),MovedModule("tkinter_tix", "Tix", "tkinter.tix"),MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),MovedModule("tkinter_colorchooser", "tkColorChooser","tkinter.colorchooser"),MovedModule("tkinter_commondialog", "tkCommonDialog","tkinter.commondialog"),MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),MovedModule("tkinter_font", "tkFont", "tkinter.font"),MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),MovedModule("tkinter_tksimpledialog", "tkSimpleDialog","tkinter.simpledialog"),MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),]# Add windows specific modules.if sys.platform == "win32":_moved_attributes += [MovedModule("winreg", "_winreg"),]for attr in _moved_attributes:setattr(_MovedItems, attr.name, attr)if isinstance(attr, MovedModule):_importer._add_module(attr, "moves." 
+ attr.name)del attr_MovedItems._moved_attributes = _moved_attributesmoves = _MovedItems(__name__ + ".moves")_importer._add_module(moves, "moves")class Module_six_moves_urllib_parse(_LazyModule):"""Lazy loading of moved objects in six.moves.urllib_parse"""_urllib_parse_moved_attributes = [MovedAttribute("ParseResult", "urlparse", "urllib.parse"),MovedAttribute("SplitResult", "urlparse", "urllib.parse"),MovedAttribute("parse_qs", "urlparse", "urllib.parse"),MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),MovedAttribute("urldefrag", "urlparse", "urllib.parse"),MovedAttribute("urljoin", "urlparse", "urllib.parse"),MovedAttribute("urlparse", "urlparse", "urllib.parse"),MovedAttribute("urlsplit", "urlparse", "urllib.parse"),MovedAttribute("urlunparse", "urlparse", "urllib.parse"),MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),MovedAttribute("quote", "urllib", "urllib.parse"),MovedAttribute("quote_plus", "urllib", "urllib.parse"),MovedAttribute("unquote", "urllib", "urllib.parse"),MovedAttribute("unquote_plus", "urllib", "urllib.parse"),MovedAttribute("urlencode", "urllib", "urllib.parse"),MovedAttribute("splitquery", "urllib", "urllib.parse"),MovedAttribute("splittag", "urllib", "urllib.parse"),MovedAttribute("splituser", "urllib", "urllib.parse"),MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),MovedAttribute("uses_params", "urlparse", "urllib.parse"),MovedAttribute("uses_query", "urlparse", "urllib.parse"),MovedAttribute("uses_relative", "urlparse", "urllib.parse"),]for attr in _urllib_parse_moved_attributes:setattr(Module_six_moves_urllib_parse, attr.name, attr)del attrModule_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),"moves.urllib_parse", "moves.urllib.parse")class Module_six_moves_urllib_error(_LazyModule):"""Lazy loading of moved objects in 
six.moves.urllib_error"""_urllib_error_moved_attributes = [MovedAttribute("URLError", "urllib2", "urllib.error"),MovedAttribute("HTTPError", "urllib2", "urllib.error"),MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),]for attr in _urllib_error_moved_attributes:setattr(Module_six_moves_urllib_error, attr.name, attr)del attrModule_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),"moves.urllib_error", "moves.urllib.error")class Module_six_moves_urllib_request(_LazyModule):"""Lazy loading of moved objects in six.moves.urllib_request"""_urllib_request_moved_attributes = [MovedAttribute("urlopen", "urllib2", "urllib.request"),MovedAttribute("install_opener", "urllib2", "urllib.request"),MovedAttribute("build_opener", "urllib2", "urllib.request"),MovedAttribute("pathname2url", "urllib", "urllib.request"),MovedAttribute("url2pathname", "urllib", "urllib.request"),MovedAttribute("getproxies", "urllib", "urllib.request"),MovedAttribute("Request", "urllib2", "urllib.request"),MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),MovedAttribute("BaseHandler", "urllib2", "urllib.request"),MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),MovedAttribute("HTTPDigestAuthHandler", "urllib2", 
"urllib.request"),MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),MovedAttribute("FileHandler", "urllib2", "urllib.request"),MovedAttribute("FTPHandler", "urllib2", "urllib.request"),MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),MovedAttribute("urlretrieve", "urllib", "urllib.request"),MovedAttribute("urlcleanup", "urllib", "urllib.request"),MovedAttribute("URLopener", "urllib", "urllib.request"),MovedAttribute("FancyURLopener", "urllib", "urllib.request"),MovedAttribute("proxy_bypass", "urllib", "urllib.request"),]for attr in _urllib_request_moved_attributes:setattr(Module_six_moves_urllib_request, attr.name, attr)del attrModule_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),"moves.urllib_request", "moves.urllib.request")class Module_six_moves_urllib_response(_LazyModule):"""Lazy loading of moved objects in six.moves.urllib_response"""_urllib_response_moved_attributes = [MovedAttribute("addbase", "urllib", "urllib.response"),MovedAttribute("addclosehook", "urllib", "urllib.response"),MovedAttribute("addinfo", "urllib", "urllib.response"),MovedAttribute("addinfourl", "urllib", "urllib.response"),]for attr in _urllib_response_moved_attributes:setattr(Module_six_moves_urllib_response, attr.name, attr)del attrModule_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),"moves.urllib_response", "moves.urllib.response")class Module_six_moves_urllib_robotparser(_LazyModule):"""Lazy loading of moved objects in 
six.moves.urllib_robotparser"""_urllib_robotparser_moved_attributes = [MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),]for attr in _urllib_robotparser_moved_attributes:setattr(Module_six_moves_urllib_robotparser, attr.name, attr)del attrModule_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),"moves.urllib_robotparser", "moves.urllib.robotparser")class Module_six_moves_urllib(types.ModuleType):"""Create a six.moves.urllib namespace that resembles the Python 3 namespace"""__path__ = [] # mark as packageparse = _importer._get_module("moves.urllib_parse")error = _importer._get_module("moves.urllib_error")request = _importer._get_module("moves.urllib_request")response = _importer._get_module("moves.urllib_response")robotparser = _importer._get_module("moves.urllib_robotparser")def __dir__(self):return ['parse', 'error', 'request', 'response', 'robotparser']_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),"moves.urllib")def add_move(move):"""Add an item to six.moves."""setattr(_MovedItems, move.name, move)def remove_move(name):"""Remove item from six.moves."""try:delattr(_MovedItems, name)except AttributeError:try:del moves.__dict__[name]except KeyError:raise AttributeError("no such move, %r" % (name,))if PY3:_meth_func = "__func__"_meth_self = "__self__"_func_closure = "__closure__"_func_code = "__code__"_func_defaults = "__defaults__"_func_globals = "__globals__"else:_meth_func = "im_func"_meth_self = "im_self"_func_closure = "func_closure"_func_code = "func_code"_func_defaults = "func_defaults"_func_globals = "func_globals"try:advance_iterator = nextexcept NameError:def advance_iterator(it):return it.next()next = advance_iteratortry:callable = callableexcept NameError:def callable(obj):return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)if PY3:def 
get_unbound_function(unbound):return unboundcreate_bound_method = types.MethodTypedef create_unbound_method(func, cls):return funcIterator = objectelse:def get_unbound_function(unbound):return unbound.im_funcdef create_bound_method(func, obj):return types.MethodType(func, obj, obj.__class__)def create_unbound_method(func, cls):return types.MethodType(func, None, cls)class Iterator(object):def next(self):return type(self).__next__(self)callable = callable_add_doc(get_unbound_function,"""Get the function out of a possibly unbound function""")get_method_function = operator.attrgetter(_meth_func)get_method_self = operator.attrgetter(_meth_self)get_function_closure = operator.attrgetter(_func_closure)get_function_code = operator.attrgetter(_func_code)get_function_defaults = operator.attrgetter(_func_defaults)get_function_globals = operator.attrgetter(_func_globals)if PY3:def iterkeys(d, **kw):return iter(d.keys(**kw))def itervalues(d, **kw):return iter(d.values(**kw))def iteritems(d, **kw):return iter(d.items(**kw))def iterlists(d, **kw):return iter(d.lists(**kw))viewkeys = operator.methodcaller("keys")viewvalues = operator.methodcaller("values")viewitems = operator.methodcaller("items")else:def iterkeys(d, **kw):return d.iterkeys(**kw)def itervalues(d, **kw):return d.itervalues(**kw)def iteritems(d, **kw):return d.iteritems(**kw)def iterlists(d, **kw):return d.iterlists(**kw)viewkeys = operator.methodcaller("viewkeys")viewvalues = operator.methodcaller("viewvalues")viewitems = operator.methodcaller("viewitems")_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")_add_doc(itervalues, "Return an iterator over the values of a dictionary.")_add_doc(iteritems,"Return an iterator over the (key, value) pairs of a dictionary.")_add_doc(iterlists,"Return an iterator over the (key, [values]) pairs of a dictionary.")if PY3:def b(s):return s.encode("latin-1")def u(s):return sunichr = chrimport structint2byte = struct.Struct(">B").packdel structbyte2int = 
operator.itemgetter(0)indexbytes = operator.getitemiterbytes = iterimport ioStringIO = io.StringIOBytesIO = io.BytesIO_assertCountEqual = "assertCountEqual"if sys.version_info[1] <= 1:_assertRaisesRegex = "assertRaisesRegexp"_assertRegex = "assertRegexpMatches"else:_assertRaisesRegex = "assertRaisesRegex"_assertRegex = "assertRegex"else:def b(s):return s# Workaround for standalone backslashdef u(s):return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")unichr = unichrint2byte = chrdef byte2int(bs):return ord(bs[0])def indexbytes(buf, i):return ord(buf[i])iterbytes = functools.partial(itertools.imap, ord)import StringIOStringIO = BytesIO = StringIO.StringIO_assertCountEqual = "assertItemsEqual"_assertRaisesRegex = "assertRaisesRegexp"_assertRegex = "assertRegexpMatches"_add_doc(b, """Byte literal""")_add_doc(u, """Text literal""")def assertCountEqual(self, *args, **kwargs):return getattr(self, _assertCountEqual)(*args, **kwargs)def assertRaisesRegex(self, *args, **kwargs):return getattr(self, _assertRaisesRegex)(*args, **kwargs)def assertRegex(self, *args, **kwargs):return getattr(self, _assertRegex)(*args, **kwargs)if PY3:exec_ = getattr(moves.builtins, "exec")def reraise(tp, value, tb=None):if value is None:value = tp()if value.__traceback__ is not tb:raise value.with_traceback(tb)raise valueelse:def exec_(_code_, _globs_=None, _locs_=None):"""Execute code in a namespace."""if _globs_ is None:frame = sys._getframe(1)_globs_ = frame.f_globalsif _locs_ is None:_locs_ = frame.f_localsdel frameelif _locs_ is None:_locs_ = _globs_exec("""exec _code_ in _globs_, _locs_""")exec_("""def reraise(tp, value, tb=None):raise tp, value, tb""")if sys.version_info[:2] == (3, 2):exec_("""def raise_from(value, from_value):if from_value is None:raise valueraise value from from_value""")elif sys.version_info[:2] > (3, 2):exec_("""def raise_from(value, from_value):raise value from from_value""")else:def raise_from(value, from_value):raise valueprint_ = getattr(moves.builtins, 
"print", None)if print_ is None:def print_(*args, **kwargs):"""The new-style print function for Python 2.4 and 2.5."""fp = kwargs.pop("file", sys.stdout)if fp is None:returndef write(data):if not isinstance(data, basestring):data = str(data)# If the file has an encoding, encode unicode with it.if (isinstance(fp, file) andisinstance(data, unicode) andfp.encoding is not None):errors = getattr(fp, "errors", None)if errors is None:errors = "strict"data = data.encode(fp.encoding, errors)fp.write(data)want_unicode = Falsesep = kwargs.pop("sep", None)if sep is not None:if isinstance(sep, unicode):want_unicode = Trueelif not isinstance(sep, str):raise TypeError("sep must be None or a string")end = kwargs.pop("end", None)if end is not None:if isinstance(end, unicode):want_unicode = Trueelif not isinstance(end, str):raise TypeError("end must be None or a string")if kwargs:raise TypeError("invalid keyword arguments to print()")if not want_unicode:for arg in args:if isinstance(arg, unicode):want_unicode = Truebreakif want_unicode:newline = unicode("\n")space = unicode(" ")else:newline = "\n"space = " "if sep is None:sep = spaceif end is None:end = newlinefor i, arg in enumerate(args):if i:write(sep)write(arg)write(end)if sys.version_info[:2] < (3, 3):_print = print_def print_(*args, **kwargs):fp = kwargs.get("file", sys.stdout)flush = kwargs.pop("flush", False)_print(*args, **kwargs)if flush and fp is not None:fp.flush()_add_doc(reraise, """Reraise an exception.""")if sys.version_info[0:2] < (3, 4):def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,updated=functools.WRAPPER_UPDATES):def wrapper(f):f = functools.wraps(wrapped, assigned, updated)(f)f.__wrapped__ = wrappedreturn freturn wrapperelse:wraps = functools.wrapsdef with_metaclass(meta, *bases):"""Create a base class with a metaclass."""# This requires a bit of explanation: the basic idea is to make a dummy# metaclass for one level of class instantiation that replaces itself with# the actual metaclass.class 
metaclass(meta):def __new__(cls, name, this_bases, d):return meta(name, bases, d)return type.__new__(metaclass, 'temporary_class', (), {})def add_metaclass(metaclass):"""Class decorator for creating a class with a metaclass."""def wrapper(cls):orig_vars = cls.__dict__.copy()slots = orig_vars.get('__slots__')if slots is not None:if isinstance(slots, str):slots = [slots]for slots_var in slots:orig_vars.pop(slots_var)orig_vars.pop('__dict__', None)orig_vars.pop('__weakref__', None)return metaclass(cls.__name__, cls.__bases__, orig_vars)return wrapperdef python_2_unicode_compatible(klass):"""A decorator that defines __unicode__ and __str__ methods under Python 2.Under Python 3 it does nothing.To support Python 2 and 3 with a single code base, define a __str__ methodreturning text and apply this decorator to the class."""if PY2:if '__str__' not in klass.__dict__:raise ValueError("@python_2_unicode_compatible cannot be applied ""to %s because it doesn't define __str__()." %klass.__name__)klass.__unicode__ = klass.__str__klass.__str__ = lambda self: self.__unicode__().encode('utf-8')return klass# Complete the moves implementation.# This code is at the end of this module to speed up module loading.# Turn this module into a package.__path__ = [] # required for PEP 302 and PEP 451__package__ = __name__ # see PEP 366 @ReservedAssignmentif globals().get("__spec__") is not None:__spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable# Remove other six meta path importers, since they cause problems. This can# happen if six is removed from sys.modules and then reloaded. (Setuptools does# this for some reason.)if sys.meta_path:for i, importer in enumerate(sys.meta_path):# Here's some real nastiness: Another "instance" of the six module might# be floating around. 
Therefore, we can't use isinstance() to check for# the six meta path importer, since the other six instance will have# inserted an importer with different class.if (type(importer).__name__ == "_SixMetaPathImporter" andimporter.name == __name__):del sys.meta_path[i]breakdel i, importer# Finally, add the importer to the meta path import hook.sys.meta_path.append(_importer)
## Copyright 2013-2014 Ray Holder
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.

import random
from pip._vendor import six
import sys
import time
import traceback


# sys.maxint / 2, since Python 3.2 doesn't have a sys.maxint...
MAX_WAIT = 1073741823


def retry(*dargs, **dkw):
    """Decorator function that instantiates the Retrying object.

    @param *dargs: positional arguments passed to Retrying object
    @param **dkw: keyword arguments passed to the Retrying object
    """
    # Support both @retry (bare) and @retry(...) (called) as valid syntax.
    if len(dargs) == 1 and callable(dargs[0]):
        def wrap_simple(func):

            @six.wraps(func)
            def wrapped_f(*args, **kw):
                return Retrying().call(func, *args, **kw)

            return wrapped_f

        return wrap_simple(dargs[0])

    def wrap(func):

        @six.wraps(func)
        def wrapped_f(*args, **kw):
            return Retrying(*dargs, **dkw).call(func, *args, **kw)

        return wrapped_f

    return wrap


class Retrying(object):
    """Configurable retry loop: decides when to stop, how long to wait,
    and which exceptions/results should trigger another attempt."""

    def __init__(self,
                 stop=None, wait=None,
                 stop_max_attempt_number=None,
                 stop_max_delay=None,
                 wait_fixed=None,
                 wait_random_min=None, wait_random_max=None,
                 wait_incrementing_start=None, wait_incrementing_increment=None,
                 wait_exponential_multiplier=None, wait_exponential_max=None,
                 retry_on_exception=None,
                 retry_on_result=None,
                 wrap_exception=False,
                 stop_func=None,
                 wait_func=None,
                 wait_jitter_max=None):
        # Fill in defaults for any tuning knob the caller left as None.
        self._stop_max_attempt_number = (
            stop_max_attempt_number if stop_max_attempt_number is not None else 5)
        self._stop_max_delay = (
            stop_max_delay if stop_max_delay is not None else 100)
        self._wait_fixed = wait_fixed if wait_fixed is not None else 1000
        self._wait_random_min = (
            wait_random_min if wait_random_min is not None else 0)
        self._wait_random_max = (
            wait_random_max if wait_random_max is not None else 1000)
        self._wait_incrementing_start = (
            wait_incrementing_start if wait_incrementing_start is not None else 0)
        self._wait_incrementing_increment = (
            wait_incrementing_increment
            if wait_incrementing_increment is not None else 100)
        self._wait_exponential_multiplier = (
            wait_exponential_multiplier
            if wait_exponential_multiplier is not None else 1)
        self._wait_exponential_max = (
            wait_exponential_max if wait_exponential_max is not None else MAX_WAIT)
        self._wait_jitter_max = (
            wait_jitter_max if wait_jitter_max is not None else 0)

        # TODO add chaining of stop behaviors
        # Stop behavior: explicit stop_func wins, then a named method via the
        # `stop` string, otherwise "any configured stop condition fires".
        stop_funcs = []
        if stop_max_attempt_number is not None:
            stop_funcs.append(self.stop_after_attempt)
        if stop_max_delay is not None:
            stop_funcs.append(self.stop_after_delay)

        if stop_func is not None:
            self.stop = stop_func
        elif stop is None:
            self.stop = lambda attempts, delay: any(
                f(attempts, delay) for f in stop_funcs)
        else:
            self.stop = getattr(self, stop)

        # TODO add chaining of wait behaviors
        # Wait behavior: explicit wait_func wins, then a named method via the
        # `wait` string, otherwise the maximum of all configured sleeps.
        wait_funcs = [lambda *args, **kwargs: 0]
        if wait_fixed is not None:
            wait_funcs.append(self.fixed_sleep)
        if wait_random_min is not None or wait_random_max is not None:
            wait_funcs.append(self.random_sleep)
        if wait_incrementing_start is not None or wait_incrementing_increment is not None:
            wait_funcs.append(self.incrementing_sleep)
        if wait_exponential_multiplier is not None or wait_exponential_max is not None:
            wait_funcs.append(self.exponential_sleep)

        if wait_func is not None:
            self.wait = wait_func
        elif wait is None:
            self.wait = lambda attempts, delay: max(
                f(attempts, delay) for f in wait_funcs)
        else:
            self.wait = getattr(self, wait)

        # Retry-on-exception filter: by default every exception is retried.
        if retry_on_exception is None:
            self._retry_on_exception = self.always_reject
        else:
            self._retry_on_exception = retry_on_exception

        # TODO simplify retrying by Exception types
        # Retry-on-result filter: by default no return value is retried.
        if retry_on_result is None:
            self._retry_on_result = self.never_reject
        else:
            self._retry_on_result = retry_on_result

        self._wrap_exception = wrap_exception

    def stop_after_attempt(self, previous_attempt_number, delay_since_first_attempt_ms):
        """Stop after the previous attempt >= stop_max_attempt_number."""
        return previous_attempt_number >= self._stop_max_attempt_number

    def stop_after_delay(self, previous_attempt_number, delay_since_first_attempt_ms):
        """Stop after the time from the first attempt >= stop_max_delay."""
        return delay_since_first_attempt_ms >= self._stop_max_delay

    def no_sleep(self, previous_attempt_number, delay_since_first_attempt_ms):
        """Don't sleep at all before retrying."""
        return 0

    def fixed_sleep(self, previous_attempt_number, delay_since_first_attempt_ms):
        """Sleep a fixed amount of time between each retry."""
        return self._wait_fixed

    def random_sleep(self, previous_attempt_number, delay_since_first_attempt_ms):
        """Sleep a random amount of time between wait_random_min and wait_random_max"""
        return random.randint(self._wait_random_min, self._wait_random_max)

    def incrementing_sleep(self, previous_attempt_number, delay_since_first_attempt_ms):
        """
        Sleep an incremental amount of time after each attempt, starting at
        wait_incrementing_start and incrementing by wait_incrementing_increment
        """
        result = self._wait_incrementing_start + (
            self._wait_incrementing_increment * (previous_attempt_number - 1))
        if result < 0:
            result = 0
        return result

    def exponential_sleep(self, previous_attempt_number, delay_since_first_attempt_ms):
        """Sleep multiplier * 2**attempt ms, clamped to [0, wait_exponential_max]."""
        exp = 2 ** previous_attempt_number
        result = self._wait_exponential_multiplier * exp
        if result > self._wait_exponential_max:
            result = self._wait_exponential_max
        if result < 0:
            result = 0
        return result

    def never_reject(self, result):
        """Filter that never requests a retry."""
        return False

    def always_reject(self, result):
        """Filter that always requests a retry."""
        return True

    def should_reject(self, attempt):
        """Return True when the given Attempt should be retried."""
        reject = False
        if attempt.has_exception:
            reject |= self._retry_on_exception(attempt.value[1])
        else:
            reject |= self._retry_on_result(attempt.value)
        return reject

    def call(self, fn, *args, **kwargs):
        """Run fn, retrying per the configured stop/wait/filter behaviors."""
        start_time = int(round(time.time() * 1000))
        attempt_no = 1
        while True:
            try:
                attempt = Attempt(fn(*args, **kwargs), attempt_no, False)
            except:
                # NOTE(review): deliberately bare — captures every exception
                # (as an exc_info triple) so the filter can decide; preserved.
                exc_info = sys.exc_info()
                attempt = Attempt(exc_info, attempt_no, True)

            if not self.should_reject(attempt):
                return attempt.get(self._wrap_exception)

            delay_since_first_attempt_ms = int(round(time.time() * 1000)) - start_time
            if self.stop(attempt_no, delay_since_first_attempt_ms):
                if not self._wrap_exception and attempt.has_exception:
                    # get() on an attempt with an exception should cause it to be raised, but raise just in case
                    raise attempt.get()
                else:
                    raise RetryError(attempt)
            else:
                sleep = self.wait(attempt_no, delay_since_first_attempt_ms)
                if self._wait_jitter_max:
                    jitter = random.random() * self._wait_jitter_max
                    sleep = sleep + max(0, jitter)
                # Wait values are in milliseconds; time.sleep takes seconds.
                time.sleep(sleep / 1000.0)

            attempt_no += 1


class Attempt(object):
    """
    An Attempt encapsulates a call to a target function that may end as a
    normal return value from the function or an Exception depending on what
    occurred during the execution.
    """

    def __init__(self, value, attempt_number, has_exception):
        # value is either the return value or a sys.exc_info() triple.
        self.value = value
        self.attempt_number = attempt_number
        self.has_exception = has_exception

    def get(self, wrap_exception=False):
        """
        Return the return value of this Attempt instance or raise an Exception.
        If wrap_exception is true, this Attempt is wrapped inside of a
        RetryError before being raised.
        """
        if self.has_exception:
            if wrap_exception:
                raise RetryError(self)
            else:
                six.reraise(self.value[0], self.value[1], self.value[2])
        else:
            return self.value

    def __repr__(self):
        if self.has_exception:
            return "Attempts: {0}, Error:\n{1}".format(
                self.attempt_number,
                "".join(traceback.format_tb(self.value[2])))
        else:
            return "Attempts: {0}, Value: {1}".format(
                self.attempt_number, self.value)


class RetryError(Exception):
    """
    A RetryError encapsulates the last Attempt instance right before giving up.
    """

    def __init__(self, last_attempt):
        self.last_attempt = last_attempt

    def __str__(self):
        return "RetryError[{0}]".format(self.last_attempt)
# -*- coding: utf-8 -*-

"""
requests.utils
~~~~~~~~~~~~~~

This module provides utility functions that are used within Requests
that are also useful for external consumption.
"""

import cgi
import codecs
import collections
import io
import os
import re
import socket
import struct
import warnings

from . import __version__
from . import certs
from .compat import parse_http_list as _parse_list_header
from .compat import (
    quote, urlparse, bytes, str, OrderedDict, unquote, is_py2,
    builtin_str, getproxies, proxy_bypass, urlunparse, basestring)
from .cookies import RequestsCookieJar, cookiejar_from_dict
from .structures import CaseInsensitiveDict
from .exceptions import InvalidURL, InvalidHeader, FileModeWarning

_hush_pyflakes = (RequestsCookieJar,)

NETRC_FILES = ('.netrc', '_netrc')

DEFAULT_CA_BUNDLE_PATH = certs.where()


def dict_to_sequence(d):
    """Returns an internal sequence dictionary update."""
    if hasattr(d, 'items'):
        d = d.items()
    return d


def super_len(o):
    """Best-effort remaining length (in bytes) of a body-like object,
    accounting for the current read position when the object has one."""
    total_length = 0
    current_position = 0

    if hasattr(o, '__len__'):
        total_length = len(o)

    elif hasattr(o, 'len'):
        total_length = o.len

    elif hasattr(o, 'getvalue'):
        # e.g. BytesIO, cStringIO.StringIO
        total_length = len(o.getvalue())

    elif hasattr(o, 'fileno'):
        try:
            fileno = o.fileno()
        except io.UnsupportedOperation:
            pass
        else:
            total_length = os.fstat(fileno).st_size

            # Having used fstat to determine the file length, we need to
            # confirm that this file was opened up in binary mode.
            if 'b' not in o.mode:
                warnings.warn((
                    "Requests has determined the content-length for this "
                    "request using the binary size of the file: however, the "
                    "file has been opened in text mode (i.e. without the 'b' "
                    "flag in the mode). This may lead to an incorrect "
                    "content-length. In Requests 3.0, support will be removed "
                    "for files in text mode."),
                    FileModeWarning)

    if hasattr(o, 'tell'):
        try:
            current_position = o.tell()
        except (OSError, IOError):
            # This can happen in some weird situations, such as when the file
            # is actually a special file descriptor like stdin. In this
            # instance, we don't know what the length is, so set it to zero and
            # let requests chunk it instead.
            current_position = total_length

    return max(0, total_length - current_position)


def get_netrc_auth(url, raise_errors=False):
    """Returns the Requests tuple auth for a given url from netrc."""
    try:
        from netrc import netrc, NetrcParseError

        netrc_path = None

        for f in NETRC_FILES:
            try:
                loc = os.path.expanduser('~/{0}'.format(f))
            except KeyError:
                # os.path.expanduser can fail when $HOME is undefined and
                # getpwuid fails. See http://bugs.python.org/issue20164 &
                # https://github.com/kennethreitz/requests/issues/1846
                return

            if os.path.exists(loc):
                netrc_path = loc
                break

        # Abort early if there isn't one.
        if netrc_path is None:
            return

        ri = urlparse(url)

        # Strip port numbers from netloc. This weird `if...encode`` dance is
        # used for Python 3.2, which doesn't support unicode literals.
        splitstr = b':'
        if isinstance(url, str):
            splitstr = splitstr.decode('ascii')
        host = ri.netloc.split(splitstr)[0]

        try:
            _netrc = netrc(netrc_path).authenticators(host)
            if _netrc:
                # Return with login / password
                login_i = (0 if _netrc[0] else 1)
                return (_netrc[login_i], _netrc[2])
        except (NetrcParseError, IOError):
            # If there was a parsing error or a permissions issue reading the file,
            # we'll just skip netrc auth unless explicitly asked to raise errors.
            if raise_errors:
                raise

    # AppEngine hackiness.
    except (ImportError, AttributeError):
        pass


def guess_filename(obj):
    """Tries to guess the filename of the given object."""
    name = getattr(obj, 'name', None)
    if (name and isinstance(name, basestring) and name[0] != '<' and
            name[-1] != '>'):
        return os.path.basename(name)


def from_key_val_list(value):
    """Take an object and test to see if it can be represented as a
    dictionary. Unless it can not be represented as such, return an
    OrderedDict, e.g.,

    ::

        >>> from_key_val_list([('key', 'val')])
        OrderedDict([('key', 'val')])
        >>> from_key_val_list('string')
        ValueError: need more than 1 value to unpack
        >>> from_key_val_list({'key': 'val'})
        OrderedDict([('key', 'val')])

    :rtype: OrderedDict
    """
    if value is None:
        return None

    if isinstance(value, (str, bytes, bool, int)):
        raise ValueError('cannot encode objects that are not 2-tuples')

    return OrderedDict(value)


def to_key_val_list(value):
    """Take an object and test to see if it can be represented as a
    dictionary. If it can be, return a list of tuples, e.g.,

    ::

        >>> to_key_val_list([('key', 'val')])
        [('key', 'val')]
        >>> to_key_val_list({'key': 'val'})
        [('key', 'val')]
        >>> to_key_val_list('string')
        ValueError: cannot encode objects that are not 2-tuples.

    :rtype: list
    """
    if value is None:
        return None

    if isinstance(value, (str, bytes, bool, int)):
        raise ValueError('cannot encode objects that are not 2-tuples')

    if isinstance(value, collections.Mapping):
        value = value.items()

    return list(value)


# From mitsuhiko/werkzeug (used with permission).
def parse_list_header(value):
    """Parse lists as described by RFC 2068 Section 2.

    In particular, parse comma-separated lists where the elements of
    the list may include quoted-strings. A quoted-string could
    contain a comma. A non-quoted string could have quotes in the
    middle. Quotes are removed automatically after parsing.

    It basically works like :func:`parse_set_header` just that items
    may appear multiple times and case sensitivity is preserved.

    The return value is a standard :class:`list`:

    >>> parse_list_header('token, "quoted value"')
    ['token', 'quoted value']

    To create a header from the :class:`list` again, use the
    :func:`dump_header` function.

    :param value: a string with a list header.
    :return: :class:`list`
    :rtype: list
    """
    result = []
    for item in _parse_list_header(value):
        if item[:1] == item[-1:] == '"':
            item = unquote_header_value(item[1:-1])
        result.append(item)
    return result


# From mitsuhiko/werkzeug (used with permission).
def parse_dict_header(value):
    """Parse lists of key, value pairs as described by RFC 2068 Section 2 and
    convert them into a python dict:

    >>> d = parse_dict_header('foo="is a fish", bar="as well"')
    >>> type(d) is dict
    True
    >>> sorted(d.items())
    [('bar', 'as well'), ('foo', 'is a fish')]

    If there is no value for a key it will be `None`:

    >>> parse_dict_header('key_without_value')
    {'key_without_value': None}

    To create a header from the :class:`dict` again, use the
    :func:`dump_header` function.

    :param value: a string with a dict header.
    :return: :class:`dict`
    :rtype: dict
    """
    result = {}
    for item in _parse_list_header(value):
        if '=' not in item:
            result[item] = None
            continue
        name, value = item.split('=', 1)
        if value[:1] == value[-1:] == '"':
            value = unquote_header_value(value[1:-1])
        result[name] = value
    return result


# From mitsuhiko/werkzeug (used with permission).
def unquote_header_value(value, is_filename=False):
    r"""Unquotes a header value.  (Reversal of :func:`quote_header_value`).
    This does not use the real unquoting but what browsers are actually
    using for quoting.

    :param value: the header value to unquote.
    :rtype: str
    """
    if value and value[0] == value[-1] == '"':
        # this is not the real unquoting, but fixing this so that the
        # RFC is met will result in bugs with internet explorer and
        # probably some other browsers as well.  IE for example is
        # uploading files with "C:\foo\bar.txt" as filename
        value = value[1:-1]

        # if this is a filename and the starting characters look like
        # a UNC path, then just return the value without quotes.  Using the
        # replace sequence below on a UNC path has the effect of turning
        # the leading double slash into a single slash and then
        # _fix_ie_filename() doesn't work correctly.  See #458.
        if not is_filename or value[:2] != '\\\\':
            return value.replace('\\\\', '\\').replace('\\"', '"')
    return value


def dict_from_cookiejar(cj):
    """Returns a key/value dictionary from a CookieJar.

    :param cj: CookieJar object to extract cookies from.
    :rtype: dict
    """
    cookie_dict = {}

    for cookie in cj:
        cookie_dict[cookie.name] = cookie.value

    return cookie_dict


def add_dict_to_cookiejar(cj, cookie_dict):
    """Returns a CookieJar from a key/value dictionary.

    :param cj: CookieJar to insert cookies into.
    :param cookie_dict: Dict of key/values to insert into CookieJar.
    :rtype: CookieJar
    """
    cj2 = cookiejar_from_dict(cookie_dict)
    cj.update(cj2)
    return cj


def get_encodings_from_content(content):
    """Returns encodings from given content string.

    :param content: bytestring to extract encodings from.
    """
    warnings.warn((
        'In requests 3.0, get_encodings_from_content will be removed. For '
        'more information, please see the discussion on issue #2266. (This'
        ' warning should only appear once.)'),
        DeprecationWarning)

    charset_re = re.compile(r'<meta.*?charset=["\']*(.+?)["\'>]', flags=re.I)
    pragma_re = re.compile(r'<meta.*?content=["\']*;?charset=(.+?)["\'>]', flags=re.I)
    xml_re = re.compile(r'^<\?xml.*?encoding=["\']*(.+?)["\'>]')

    return (charset_re.findall(content) +
            pragma_re.findall(content) +
            xml_re.findall(content))


def get_encoding_from_headers(headers):
    """Returns encodings from given HTTP Header Dict.

    :param headers: dictionary to extract encoding from.
    :rtype: str
    """
    content_type = headers.get('content-type')

    if not content_type:
        return None

    content_type, params = cgi.parse_header(content_type)

    if 'charset' in params:
        return params['charset'].strip("'\"")

    if 'text' in content_type:
        # RFC 2616 default charset for text/* media types.
        return 'ISO-8859-1'


def stream_decode_response_unicode(iterator, r):
    """Stream decodes a iterator."""
    if r.encoding is None:
        for item in iterator:
            yield item
        return

    decoder = codecs.getincrementaldecoder(r.encoding)(errors='replace')
    for chunk in iterator:
        rv = decoder.decode(chunk)
        if rv:
            yield rv
    # Flush any partial multi-byte sequence held by the decoder.
    rv = decoder.decode(b'', final=True)
    if rv:
        yield rv


def iter_slices(string, slice_length):
    """Iterate over slices of a string."""
    pos = 0
    if slice_length is None or slice_length <= 0:
        slice_length = len(string)
    while pos < len(string):
        yield string[pos:pos + slice_length]
        pos += slice_length


def get_unicode_from_response(r):
    """Returns the requested content back in unicode.

    :param r: Response object to get unicode content from.

    Tried:

    1. charset from content-type
    2. fall back and replace all unicode characters

    :rtype: str
    """
    warnings.warn((
        'In requests 3.0, get_unicode_from_response will be removed. For '
        'more information, please see the discussion on issue #2266. (This'
        ' warning should only appear once.)'),
        DeprecationWarning)

    tried_encodings = []

    # Try charset from content-type
    encoding = get_encoding_from_headers(r.headers)

    if encoding:
        try:
            return str(r.content, encoding)
        except UnicodeError:
            tried_encodings.append(encoding)

    # Fall back:
    try:
        return str(r.content, encoding, errors='replace')
    except TypeError:
        return r.content


# The unreserved URI characters (RFC 3986)
UNRESERVED_SET = frozenset(
    "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" + "0123456789-._~")


def unquote_unreserved(uri):
    """Un-escape any percent-escape sequences in a URI that are unreserved
    characters. This leaves all reserved, illegal and non-ASCII bytes encoded.

    :rtype: str
    """
    parts = uri.split('%')
    for i in range(1, len(parts)):
        h = parts[i][0:2]
        if len(h) == 2 and h.isalnum():
            try:
                c = chr(int(h, 16))
            except ValueError:
                raise InvalidURL("Invalid percent-escape sequence: '%s'" % h)

            if c in UNRESERVED_SET:
                parts[i] = c + parts[i][2:]
            else:
                parts[i] = '%' + parts[i]
        else:
            parts[i] = '%' + parts[i]
    return ''.join(parts)


def requote_uri(uri):
    """Re-quote the given URI.

    This function passes the given URI through an unquote/quote cycle to
    ensure that it is fully and consistently quoted.

    :rtype: str
    """
    safe_with_percent = "!#$%&'()*+,/:;=?@[]~"
    safe_without_percent = "!#$&'()*+,/:;=?@[]~"
    try:
        # Unquote only the unreserved characters
        # Then quote only illegal characters (do not quote reserved,
        # unreserved, or '%')
        return quote(unquote_unreserved(uri), safe=safe_with_percent)
    except InvalidURL:
        # We couldn't unquote the given URI, so let's try quoting it, but
        # there may be unquoted '%'s in the URI. We need to make sure they're
        # properly quoted so they do not cause issues elsewhere.
        return quote(uri, safe=safe_without_percent)


def address_in_network(ip, net):
    """This function allows you to check if on IP belongs to a network subnet

    Example: returns True if ip = 192.168.1.1 and net = 192.168.1.0/24
             returns False if ip = 192.168.1.1 and net = 192.168.100.0/24

    :rtype: bool
    """
    ipaddr = struct.unpack('=L', socket.inet_aton(ip))[0]
    netaddr, bits = net.split('/')
    netmask = struct.unpack('=L', socket.inet_aton(dotted_netmask(int(bits))))[0]
    network = struct.unpack('=L', socket.inet_aton(netaddr))[0] & netmask
    return (ipaddr & netmask) == (network & netmask)


def dotted_netmask(mask):
    """Converts mask from /xx format to xxx.xxx.xxx.xxx

    Example: if mask is 24 function returns 255.255.255.0

    :rtype: str
    """
    bits = 0xffffffff ^ (1 << 32 - mask) - 1
    return socket.inet_ntoa(struct.pack('>I', bits))


def is_ipv4_address(string_ip):
    """
    :rtype: bool
    """
    try:
        socket.inet_aton(string_ip)
    except socket.error:
        return False
    return True


def is_valid_cidr(string_network):
    """
    Very simple check of the cidr format in no_proxy variable.

    :rtype: bool
    """
    if string_network.count('/') == 1:
        try:
            mask = int(string_network.split('/')[1])
        except ValueError:
            return False

        if mask < 1 or mask > 32:
            return False

        try:
            socket.inet_aton(string_network.split('/')[0])
        except socket.error:
            return False
    else:
        return False
    return True


def should_bypass_proxies(url):
    """
    Returns whether we should bypass proxies or not.

    :rtype: bool
    """
    get_proxy = lambda k: os.environ.get(k) or os.environ.get(k.upper())

    # First check whether no_proxy is defined. If it is, check that the URL
    # we're getting isn't in the no_proxy list.
    no_proxy = get_proxy('no_proxy')
    netloc = urlparse(url).netloc

    if no_proxy:
        # We need to check whether we match here. We need to see if we match
        # the end of the netloc, both with and without the port.
        no_proxy = (host for host in no_proxy.replace(' ', '').split(',') if host)

        ip = netloc.split(':')[0]
        if is_ipv4_address(ip):
            for proxy_ip in no_proxy:
                if is_valid_cidr(proxy_ip):
                    if address_in_network(ip, proxy_ip):
                        return True
                elif ip == proxy_ip:
                    # If no_proxy ip was defined in plain IP notation instead of cidr notation &
                    # matches the IP of the index
                    return True
        else:
            for host in no_proxy:
                if netloc.endswith(host) or netloc.split(':')[0].endswith(host):
                    # The URL does match something in no_proxy, so we don't want
                    # to apply the proxies on this URL.
                    return True

    # If the system proxy settings indicate that this URL should be bypassed,
    # don't proxy.
    # The proxy_bypass function is incredibly buggy on macOS in early versions
    # of Python 2.6, so allow this call to fail. Only catch the specific
    # exceptions we've seen, though: this call failing in other ways can reveal
    # legitimate problems.
    try:
        bypass = proxy_bypass(netloc)
    except (TypeError, socket.gaierror):
        bypass = False

    if bypass:
        return True

    return False


def get_environ_proxies(url):
    """
    Return a dict of environment proxies.

    :rtype: dict
    """
    if should_bypass_proxies(url):
        return {}
    else:
        return getproxies()


def select_proxy(url, proxies):
    """Select a proxy for the url, if applicable.

    :param url: The url being for the request
    :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs
    """
    proxies = proxies or {}
    urlparts = urlparse(url)
    if urlparts.hostname is None:
        return proxies.get('all', proxies.get(urlparts.scheme))

    # Most specific match wins: scheme://host beats scheme beats 'all'.
    proxy_keys = [
        'all://' + urlparts.hostname,
        'all',
        urlparts.scheme + '://' + urlparts.hostname,
        urlparts.scheme,
    ]
    proxy = None
    for proxy_key in proxy_keys:
        if proxy_key in proxies:
            proxy = proxies[proxy_key]
            break

    return proxy


def default_user_agent(name="python-requests"):
    """
    Return a string representing the default user agent.

    :rtype: str
    """
    return '%s/%s' % (name, __version__)


def default_headers():
    """
    :rtype: requests.structures.CaseInsensitiveDict
    """
    return CaseInsensitiveDict({
        'User-Agent': default_user_agent(),
        'Accept-Encoding': ', '.join(('gzip', 'deflate')),
        'Accept': '*/*',
        'Connection': 'keep-alive',
    })


def parse_header_links(value):
    """Return a dict of parsed link headers proxies.

    i.e. Link: <http:/.../front.jpeg>; rel=front; type="image/jpeg",<http://.../back.jpeg>; rel=back;type="image/jpeg"

    :rtype: list
    """
    links = []

    replace_chars = ' \'"'

    for val in re.split(', *<', value):
        try:
            url, params = val.split(';', 1)
        except ValueError:
            url, params = val, ''

        link = {'url': url.strip('<> \'"')}

        for param in params.split(';'):
            try:
                key, value = param.split('=')
            except ValueError:
                break

            link[key.strip(replace_chars)] = value.strip(replace_chars)

        links.append(link)

    return links


# Null bytes; no need to recreate these on each call to guess_json_utf
_null = '\x00'.encode('ascii')  # encoding to ASCII for Python 3
_null2 = _null * 2
_null3 = _null * 3


def guess_json_utf(data):
    """
    :rtype: str
    """
    # JSON always starts with two ASCII characters, so detection is as
    # easy as counting the nulls and from their location and count
    # determine the encoding. Also detect a BOM, if present.
    sample = data[:4]
    # BUGFIX: was `codecs.BOM32_BE`, which in CPython's codecs module is a
    # legacy alias for the UTF-16 big-endian BOM, so UTF-32-BE input was
    # never matched here. The correct constant is `codecs.BOM_UTF32_BE`.
    if sample in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE):
        return 'utf-32'     # BOM included
    if sample[:3] == codecs.BOM_UTF8:
        return 'utf-8-sig'  # BOM included, MS style (discouraged)
    if sample[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE):
        return 'utf-16'     # BOM included
    nullcount = sample.count(_null)
    if nullcount == 0:
        return 'utf-8'
    if nullcount == 2:
        if sample[::2] == _null2:   # 1st and 3rd are null
            return 'utf-16-be'
        if sample[1::2] == _null2:  # 2nd and 4th are null
            return 'utf-16-le'
        # Did not detect 2 valid UTF-16 ascii-range characters
    if nullcount == 3:
        if sample[:3] == _null3:
            return 'utf-32-be'
        if sample[1:] == _null3:
            return 'utf-32-le'
        # Did not detect a valid UTF-32 ascii-range character
    return None


def prepend_scheme_if_needed(url, new_scheme):
    """Given a URL that may or may not have a scheme, prepend the given scheme.
    Does not replace a present scheme with the one provided as an argument.

    :rtype: str
    """
    scheme, netloc, path, params, query, fragment = urlparse(url, new_scheme)

    # urlparse is a finicky beast, and sometimes decides that there isn't a
    # netloc present. Assume that it's being over-cautious, and switch netloc
    # and path if urlparse decided there was no netloc.
    if not netloc:
        netloc, path = path, netloc

    return urlunparse((scheme, netloc, path, params, query, fragment))


def get_auth_from_url(url):
    """Given a url with authentication components, extract them into a tuple of
    username,password.

    :rtype: (str,str)
    """
    parsed = urlparse(url)

    try:
        auth = (unquote(parsed.username), unquote(parsed.password))
    except (AttributeError, TypeError):
        auth = ('', '')

    return auth


def to_native_string(string, encoding='ascii'):
    """Given a string object, regardless of type, returns a representation of
    that string in the native string type, encoding and decoding where
    necessary. This assumes ASCII unless told otherwise.
    """
    if isinstance(string, builtin_str):
        out = string
    else:
        if is_py2:
            out = string.encode(encoding)
        else:
            out = string.decode(encoding)

    return out


# Moved outside of function to avoid recompile every call
_CLEAN_HEADER_REGEX_BYTE = re.compile(b'^\\S[^\\r\\n]*$|^$')
_CLEAN_HEADER_REGEX_STR = re.compile(r'^\S[^\r\n]*$|^$')


def check_header_validity(header):
    """Verifies that header value is a string which doesn't contain
    leading whitespace or return characters. This prevents unintended
    header injection.

    :param header: tuple, in the format (name, value).
    """
    name, value = header

    if isinstance(value, bytes):
        pat = _CLEAN_HEADER_REGEX_BYTE
    else:
        pat = _CLEAN_HEADER_REGEX_STR
    try:
        if not pat.match(value):
            raise InvalidHeader("Invalid return character or leading space in header: %s" % name)
    except TypeError:
        raise InvalidHeader("Header value %s must be of type str or bytes, "
                            "not %s" % (value, type(value)))


def urldefragauth(url):
    """
    Given a url remove the fragment and the authentication part.

    :rtype: str
    """
    scheme, netloc, path, params, query, fragment = urlparse(url)

    # see func:`prepend_scheme_if_needed`
    if not netloc:
        netloc, path = path, netloc

    netloc = netloc.rsplit('@', 1)[-1]

    return urlunparse((scheme, netloc, path, params, query, ''))
# -*- coding: utf-8 -*-

"""
requests.structures
~~~~~~~~~~~~~~~~~~~

Data structures that power Requests.
"""

import collections

from .compat import OrderedDict


class CaseInsensitiveDict(collections.MutableMapping):
    """A case-insensitive ``dict``-like object.

    Implements all methods and operations of
    ``collections.MutableMapping`` as well as dict's ``copy``. Also
    provides ``lower_items``.

    All keys are expected to be strings. The structure remembers the
    case of the last key to be set, and ``iter(instance)``,
    ``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()``
    will contain case-sensitive keys. However, querying and contains
    testing is case insensitive::

        cid = CaseInsensitiveDict()
        cid['Accept'] = 'application/json'
        cid['aCCEPT'] == 'application/json'  # True
        list(cid) == ['Accept']  # True

    For example, ``headers['content-encoding']`` will return the
    value of a ``'Content-Encoding'`` response header, regardless
    of how the header name was originally stored.

    If the constructor, ``.update``, or equality comparison
    operations are given keys that have equal ``.lower()``s, the
    behavior is undefined.
    """

    def __init__(self, data=None, **kwargs):
        self._store = OrderedDict()
        if data is None:
            data = {}
        self.update(data, **kwargs)

    def __setitem__(self, key, value):
        # The lowercased key is the lookup key; the original-cased key is
        # preserved alongside the value so iteration can reproduce it.
        self._store[key.lower()] = (key, value)

    def __getitem__(self, key):
        return self._store[key.lower()][1]

    def __delitem__(self, key):
        del self._store[key.lower()]

    def __iter__(self):
        return (cased_key for cased_key, _ in self._store.values())

    def __len__(self):
        return len(self._store)

    def lower_items(self):
        """Like iteritems(), but with all lowercase keys."""
        return (
            (lower_key, pair[1])
            for lower_key, pair in self._store.items()
        )

    def __eq__(self, other):
        if not isinstance(other, collections.Mapping):
            return NotImplemented
        other = CaseInsensitiveDict(other)
        # Compare insensitively.
        return dict(self.lower_items()) == dict(other.lower_items())

    # Copy is required
    def copy(self):
        return CaseInsensitiveDict(self._store.values())

    def __repr__(self):
        return str(dict(self.items()))


class LookupDict(dict):
    """Dictionary lookup object."""

    def __init__(self, name=None):
        self.name = name
        super(LookupDict, self).__init__()

    def __repr__(self):
        return '<lookup \'%s\'>' % (self.name)

    def __getitem__(self, key):
        # We allow fall-through here, so values default to None
        return self.__dict__.get(key, None)

    def get(self, key, default=None):
        return self.__dict__.get(key, default)
# -*- coding: utf-8 -*-
"""Map of HTTP status codes to symbolic names, exposed as ``codes``.

Each numeric status code maps to one or more snake_case aliases; every
alias (and, where valid, its uppercase form) becomes an attribute on the
``codes`` lookup object, e.g. ``codes.ok == 200``.
"""

from .structures import LookupDict

# code -> tuple of alias names. The first alias is the canonical one.
_codes = {

    # Informational.
    100: ('continue',),
    101: ('switching_protocols',),
    102: ('processing',),
    103: ('checkpoint',),
    122: ('uri_too_long', 'request_uri_too_long'),
    200: ('ok', 'okay', 'all_ok', 'all_okay', 'all_good', '\\o/', '✓'),
    201: ('created',),
    202: ('accepted',),
    203: ('non_authoritative_info', 'non_authoritative_information'),
    204: ('no_content',),
    205: ('reset_content', 'reset'),
    206: ('partial_content', 'partial'),
    207: ('multi_status', 'multiple_status', 'multi_stati', 'multiple_stati'),
    208: ('already_reported',),
    226: ('im_used',),

    # Redirection.
    300: ('multiple_choices',),
    301: ('moved_permanently', 'moved', '\\o-'),
    302: ('found',),
    303: ('see_other', 'other'),
    304: ('not_modified',),
    305: ('use_proxy',),
    306: ('switch_proxy',),
    307: ('temporary_redirect', 'temporary_moved', 'temporary'),
    308: ('permanent_redirect',
          'resume_incomplete', 'resume',),  # These 2 to be removed in 3.0

    # Client Error.
    400: ('bad_request', 'bad'),
    401: ('unauthorized',),
    402: ('payment_required', 'payment'),
    403: ('forbidden',),
    404: ('not_found', '-o-'),
    405: ('method_not_allowed', 'not_allowed'),
    406: ('not_acceptable',),
    407: ('proxy_authentication_required', 'proxy_auth', 'proxy_authentication'),
    408: ('request_timeout', 'timeout'),
    409: ('conflict',),
    410: ('gone',),
    411: ('length_required',),
    412: ('precondition_failed', 'precondition'),
    413: ('request_entity_too_large',),
    414: ('request_uri_too_large',),
    415: ('unsupported_media_type', 'unsupported_media', 'media_type'),
    416: ('requested_range_not_satisfiable', 'requested_range', 'range_not_satisfiable'),
    417: ('expectation_failed',),
    418: ('im_a_teapot', 'teapot', 'i_am_a_teapot'),
    421: ('misdirected_request',),
    422: ('unprocessable_entity', 'unprocessable'),
    423: ('locked',),
    424: ('failed_dependency', 'dependency'),
    425: ('unordered_collection', 'unordered'),
    426: ('upgrade_required', 'upgrade'),
    428: ('precondition_required', 'precondition'),
    429: ('too_many_requests', 'too_many'),
    431: ('header_fields_too_large', 'fields_too_large'),
    444: ('no_response', 'none'),
    449: ('retry_with', 'retry'),
    450: ('blocked_by_windows_parental_controls', 'parental_controls'),
    451: ('unavailable_for_legal_reasons', 'legal_reasons'),
    499: ('client_closed_request',),

    # Server Error.
    500: ('internal_server_error', 'server_error', '/o\\', '✗'),
    501: ('not_implemented',),
    502: ('bad_gateway',),
    503: ('service_unavailable', 'unavailable'),
    504: ('gateway_timeout',),
    505: ('http_version_not_supported', 'http_version'),
    506: ('variant_also_negotiates',),
    507: ('insufficient_storage',),
    509: ('bandwidth_limit_exceeded', 'bandwidth'),
    510: ('not_extended',),
    511: ('network_authentication_required', 'network_auth', 'network_authentication'),
}

codes = LookupDict(name='status_codes')

# Attach every alias as an attribute on `codes`; also attach the uppercase
# variant, except for the ASCII-art aliases (which start with a backslash
# and have no meaningful uppercase form).
for code, titles in _codes.items():
    for title in titles:
        setattr(codes, title, code)
        if not title.startswith('\\'):
            setattr(codes, title.upper(), code)
# -*- coding: utf-8 -*-
"""
requests.session
~~~~~~~~~~~~~~~~

This module provides a Session object to manage and persist settings across
requests (cookies, auth, proxies).
"""
import os
from collections import Mapping
from datetime import datetime

from .auth import _basic_auth_str
from .compat import cookielib, OrderedDict, urljoin, urlparse
from .cookies import (
    cookiejar_from_dict, extract_cookies_to_jar, RequestsCookieJar, merge_cookies)
from .models import Request, PreparedRequest, DEFAULT_REDIRECT_LIMIT
from .hooks import default_hooks, dispatch_hook
from .utils import to_key_val_list, default_headers, to_native_string
from .exceptions import (
    TooManyRedirects, InvalidSchema, ChunkedEncodingError, ContentDecodingError)
from .packages.urllib3._collections import RecentlyUsedContainer
from .structures import CaseInsensitiveDict
from .adapters import HTTPAdapter
from .utils import (
    requote_uri, get_environ_proxies, get_netrc_auth, should_bypass_proxies,
    get_auth_from_url)
from .status_codes import codes

# formerly defined here, reexposed here for backward compatibility
from .models import REDIRECT_STATI

# Upper bound on entries kept in the permanent-redirect cache (LRU).
REDIRECT_CACHE_SIZE = 1000


def merge_setting(request_setting, session_setting, dict_class=OrderedDict):
    """Determines appropriate setting for a given request, taking into account
    the explicit setting on that request, and the setting in the session. If a
    setting is a dictionary, they will be merged together using `dict_class`
    """

    if session_setting is None:
        return request_setting

    if request_setting is None:
        return session_setting

    # Bypass if not a dictionary (e.g. verify)
    if not (
            isinstance(session_setting, Mapping) and
            isinstance(request_setting, Mapping)
    ):
        return request_setting

    # Request-level values override session-level ones.
    merged_setting = dict_class(to_key_val_list(session_setting))
    merged_setting.update(to_key_val_list(request_setting))

    # Remove keys that are set to None. Extract keys first to avoid altering
    # the dictionary during iteration.
    none_keys = [k for (k, v) in merged_setting.items() if v is None]
    for key in none_keys:
        del merged_setting[key]

    return merged_setting


def merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict):
    """Properly merges both requests and session hooks.

    This is necessary because when request_hooks == {'response': []}, the
    merge breaks Session hooks entirely.
    """
    if session_hooks is None or session_hooks.get('response') == []:
        return request_hooks

    if request_hooks is None or request_hooks.get('response') == []:
        return session_hooks

    return merge_setting(request_hooks, session_hooks, dict_class)


class SessionRedirectMixin(object):

    def resolve_redirects(self, resp, req, stream=False, timeout=None,
                          verify=True, cert=None, proxies=None, **adapter_kwargs):
        """Receives a Response. Returns a generator of Responses."""

        i = 0
        hist = []  # keep track of history

        while resp.is_redirect:
            prepared_request = req.copy()

            if i > 0:
                # Update history and keep track of redirects.
                hist.append(resp)
                new_hist = list(hist)
                resp.history = new_hist

            try:
                resp.content  # Consume socket so it can be released
            except (ChunkedEncodingError, ContentDecodingError, RuntimeError):
                resp.raw.read(decode_content=False)

            if i >= self.max_redirects:
                raise TooManyRedirects('Exceeded %s redirects.' % self.max_redirects, response=resp)

            # Release the connection back into the pool.
            resp.close()

            url = resp.headers['location']

            # Handle redirection without scheme (see: RFC 1808 Section 4)
            if url.startswith('//'):
                parsed_rurl = urlparse(resp.url)
                url = '%s:%s' % (parsed_rurl.scheme, url)

            # The scheme should be lower case...
            parsed = urlparse(url)
            url = parsed.geturl()

            # Facilitate relative 'location' headers, as allowed by RFC 7231.
            # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource')
            # Compliant with RFC3986, we percent encode the url.
            if not parsed.netloc:
                url = urljoin(resp.url, requote_uri(url))
            else:
                url = requote_uri(url)

            prepared_request.url = to_native_string(url)
            # Cache the url, unless it redirects to itself.
            if resp.is_permanent_redirect and req.url != prepared_request.url:
                self.redirect_cache[req.url] = prepared_request.url

            self.rebuild_method(prepared_request, resp)

            # https://github.com/kennethreitz/requests/issues/1084
            if resp.status_code not in (codes.temporary_redirect, codes.permanent_redirect):
                # https://github.com/kennethreitz/requests/issues/3490
                purged_headers = ('Content-Length', 'Content-Type', 'Transfer-Encoding')
                for header in purged_headers:
                    prepared_request.headers.pop(header, None)
                prepared_request.body = None

            headers = prepared_request.headers
            try:
                del headers['Cookie']
            except KeyError:
                pass

            # Extract any cookies sent on the response to the cookiejar
            # in the new request. Because we've mutated our copied prepared
            # request, use the old one that we haven't yet touched.
            extract_cookies_to_jar(prepared_request._cookies, req, resp.raw)
            prepared_request._cookies.update(self.cookies)
            prepared_request.prepare_cookies(prepared_request._cookies)

            # Rebuild auth and proxy information.
            proxies = self.rebuild_proxies(prepared_request, proxies)
            self.rebuild_auth(prepared_request, resp)

            # Override the original request.
            req = prepared_request

            resp = self.send(
                req,
                stream=stream,
                timeout=timeout,
                verify=verify,
                cert=cert,
                proxies=proxies,
                allow_redirects=False,
                **adapter_kwargs
            )

            extract_cookies_to_jar(self.cookies, prepared_request, resp.raw)

            i += 1
            yield resp

    def rebuild_auth(self, prepared_request, response):
        """When being redirected we may want to strip authentication from the
        request to avoid leaking credentials. This method intelligently removes
        and reapplies authentication where possible to avoid credential loss.
        """
        headers = prepared_request.headers
        url = prepared_request.url

        if 'Authorization' in headers:
            # If we get redirected to a new host, we should strip out any
            # authentication headers.
            original_parsed = urlparse(response.request.url)
            redirect_parsed = urlparse(url)

            if (original_parsed.hostname != redirect_parsed.hostname):
                del headers['Authorization']

        # .netrc might have more auth for us on our new host.
        new_auth = get_netrc_auth(url) if self.trust_env else None
        if new_auth is not None:
            prepared_request.prepare_auth(new_auth)

        return

    def rebuild_proxies(self, prepared_request, proxies):
        """This method re-evaluates the proxy configuration by considering the
        environment variables. If we are redirected to a URL covered by
        NO_PROXY, we strip the proxy configuration. Otherwise, we set missing
        proxy keys for this URL (in case they were stripped by a previous
        redirect).

        This method also replaces the Proxy-Authorization header where
        necessary.

        :rtype: dict
        """
        headers = prepared_request.headers
        url = prepared_request.url
        scheme = urlparse(url).scheme
        new_proxies = proxies.copy() if proxies is not None else {}

        if self.trust_env and not should_bypass_proxies(url):
            environ_proxies = get_environ_proxies(url)

            # 'all' acts as a wildcard entry covering every scheme.
            proxy = environ_proxies.get('all', environ_proxies.get(scheme))

            if proxy:
                new_proxies.setdefault(scheme, proxy)

        if 'Proxy-Authorization' in headers:
            del headers['Proxy-Authorization']

        try:
            username, password = get_auth_from_url(new_proxies[scheme])
        except KeyError:
            username, password = None, None

        if username and password:
            headers['Proxy-Authorization'] = _basic_auth_str(username, password)

        return new_proxies

    def rebuild_method(self, prepared_request, response):
        """When being redirected we may want to change the method of the request
        based on certain specs or browser behavior.
        """
        method = prepared_request.method

        # http://tools.ietf.org/html/rfc7231#section-6.4.4
        if response.status_code == codes.see_other and method != 'HEAD':
            method = 'GET'

        # Do what the browsers do, despite standards...
        # First, turn 302s into GETs.
        if response.status_code == codes.found and method != 'HEAD':
            method = 'GET'

        # Second, if a POST is responded to with a 301, turn it into a GET.
        # This bizarre behaviour is explained in Issue 1704.
        if response.status_code == codes.moved and method == 'POST':
            method = 'GET'

        prepared_request.method = method


class Session(SessionRedirectMixin):
    """A Requests session.

    Provides cookie persistence, connection-pooling, and configuration.

    Basic Usage::

      >>> import requests
      >>> s = requests.Session()
      >>> s.get('http://httpbin.org/get')
      <Response [200]>

    Or as a context manager::

      >>> with requests.Session() as s:
      >>>     s.get('http://httpbin.org/get')
      <Response [200]>
    """

    # Attributes captured by __getstate__ for pickling.
    __attrs__ = [
        'headers', 'cookies', 'auth', 'proxies', 'hooks', 'params', 'verify',
        'cert', 'prefetch', 'adapters', 'stream', 'trust_env',
        'max_redirects',
    ]

    def __init__(self):

        #: A case-insensitive dictionary of headers to be sent on each
        #: :class:`Request <Request>` sent from this
        #: :class:`Session <Session>`.
        self.headers = default_headers()

        #: Default Authentication tuple or object to attach to
        #: :class:`Request <Request>`.
        self.auth = None

        #: Dictionary mapping protocol or protocol and host to the URL of the proxy
        #: (e.g. {'http': 'foo.bar:3128', 'http://host.name': 'foo.bar:4012'}) to
        #: be used on each :class:`Request <Request>`.
        self.proxies = {}

        #: Event-handling hooks.
        self.hooks = default_hooks()

        #: Dictionary of querystring data to attach to each
        #: :class:`Request <Request>`. The dictionary values may be lists for
        #: representing multivalued query parameters.
        self.params = {}

        #: Stream response content default.
        self.stream = False

        #: SSL Verification default.
        self.verify = True

        #: SSL certificate default.
        self.cert = None

        #: Maximum number of redirects allowed. If the request exceeds this
        #: limit, a :class:`TooManyRedirects` exception is raised.
        #: This defaults to requests.models.DEFAULT_REDIRECT_LIMIT, which is
        #: 30.
        self.max_redirects = DEFAULT_REDIRECT_LIMIT

        #: Trust environment settings for proxy configuration, default
        #: authentication and similar.
        self.trust_env = True

        #: A CookieJar containing all currently outstanding cookies set on this
        #: session. By default it is a
        #: :class:`RequestsCookieJar <requests.cookies.RequestsCookieJar>`, but
        #: may be any other ``cookielib.CookieJar`` compatible object.
        self.cookies = cookiejar_from_dict({})

        # Default connection adapters.
        self.adapters = OrderedDict()
        self.mount('https://', HTTPAdapter())
        self.mount('http://', HTTPAdapter())

        # Only store 1000 redirects to prevent using infinite memory
        self.redirect_cache = RecentlyUsedContainer(REDIRECT_CACHE_SIZE)

    def __enter__(self):
        return self

    def __exit__(self, *args):
        self.close()

    def prepare_request(self, request):
        """Constructs a :class:`PreparedRequest <PreparedRequest>` for
        transmission and returns it. The :class:`PreparedRequest` has settings
        merged from the :class:`Request <Request>` instance and those of the
        :class:`Session`.

        :param request: :class:`Request` instance to prepare with this
            session's settings.
        :rtype: requests.PreparedRequest
        """
        cookies = request.cookies or {}

        # Bootstrap CookieJar.
        if not isinstance(cookies, cookielib.CookieJar):
            cookies = cookiejar_from_dict(cookies)

        # Merge with session cookies
        merged_cookies = merge_cookies(
            merge_cookies(RequestsCookieJar(), self.cookies), cookies)

        # Set environment's basic authentication if not explicitly set.
        auth = request.auth
        if self.trust_env and not auth and not self.auth:
            auth = get_netrc_auth(request.url)

        p = PreparedRequest()
        p.prepare(
            method=request.method.upper(),
            url=request.url,
            files=request.files,
            data=request.data,
            json=request.json,
            headers=merge_setting(request.headers, self.headers, dict_class=CaseInsensitiveDict),
            params=merge_setting(request.params, self.params),
            auth=merge_setting(auth, self.auth),
            cookies=merged_cookies,
            hooks=merge_hooks(request.hooks, self.hooks),
        )
        return p

    def request(self, method, url,
                params=None,
                data=None,
                headers=None,
                cookies=None,
                files=None,
                auth=None,
                timeout=None,
                allow_redirects=True,
                proxies=None,
                hooks=None,
                stream=None,
                verify=None,
                cert=None,
                json=None):
        """Constructs a :class:`Request <Request>`, prepares it and sends it.
        Returns :class:`Response <Response>` object.

        :param method: method for the new :class:`Request` object.
        :param url: URL for the new :class:`Request` object.
        :param params: (optional) Dictionary or bytes to be sent in the query
            string for the :class:`Request`.
        :param data: (optional) Dictionary, bytes, or file-like object to send
            in the body of the :class:`Request`.
        :param json: (optional) json to send in the body of the
            :class:`Request`.
        :param headers: (optional) Dictionary of HTTP Headers to send with the
            :class:`Request`.
        :param cookies: (optional) Dict or CookieJar object to send with the
            :class:`Request`.
        :param files: (optional) Dictionary of ``'filename': file-like-objects``
            for multipart encoding upload.
        :param auth: (optional) Auth tuple or callable to enable
            Basic/Digest/Custom HTTP Auth.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a :ref:`(connect timeout,
            read timeout) <timeouts>` tuple.
        :type timeout: float or tuple
        :param allow_redirects: (optional) Set to True by default.
        :type allow_redirects: bool
        :param proxies: (optional) Dictionary mapping protocol or protocol and
            hostname to the URL of the proxy.
        :param stream: (optional) whether to immediately download the response
            content. Defaults to ``False``.
        :param verify: (optional) whether the SSL cert will be verified.
            A CA_BUNDLE path can also be provided. Defaults to ``True``.
        :param cert: (optional) if String, path to ssl client cert file (.pem).
            If Tuple, ('cert', 'key') pair.
        :rtype: requests.Response
        """
        # Create the Request.
        req = Request(
            method=method.upper(),
            url=url,
            headers=headers,
            files=files,
            data=data or {},
            json=json,
            params=params or {},
            auth=auth,
            cookies=cookies,
            hooks=hooks,
        )
        prep = self.prepare_request(req)

        proxies = proxies or {}

        settings = self.merge_environment_settings(
            prep.url, proxies, stream, verify, cert)

        # Send the request.
        send_kwargs = {
            'timeout': timeout,
            'allow_redirects': allow_redirects,
        }
        send_kwargs.update(settings)
        resp = self.send(prep, **send_kwargs)

        return resp

    def get(self, url, **kwargs):
        """Sends a GET request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param \*\*kwargs: Optional arguments that ``request`` takes.
        :rtype: requests.Response
        """
        kwargs.setdefault('allow_redirects', True)
        return self.request('GET', url, **kwargs)

    def options(self, url, **kwargs):
        """Sends a OPTIONS request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param \*\*kwargs: Optional arguments that ``request`` takes.
        :rtype: requests.Response
        """
        kwargs.setdefault('allow_redirects', True)
        return self.request('OPTIONS', url, **kwargs)

    def head(self, url, **kwargs):
        """Sends a HEAD request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param \*\*kwargs: Optional arguments that ``request`` takes.
        :rtype: requests.Response
        """
        kwargs.setdefault('allow_redirects', False)
        return self.request('HEAD', url, **kwargs)

    def post(self, url, data=None, json=None, **kwargs):
        """Sends a POST request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
        :param json: (optional) json to send in the body of the :class:`Request`.
        :param \*\*kwargs: Optional arguments that ``request`` takes.
        :rtype: requests.Response
        """
        return self.request('POST', url, data=data, json=json, **kwargs)

    def put(self, url, data=None, **kwargs):
        """Sends a PUT request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
        :param \*\*kwargs: Optional arguments that ``request`` takes.
        :rtype: requests.Response
        """
        return self.request('PUT', url, data=data, **kwargs)

    def patch(self, url, data=None, **kwargs):
        """Sends a PATCH request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
        :param \*\*kwargs: Optional arguments that ``request`` takes.
        :rtype: requests.Response
        """
        return self.request('PATCH', url, data=data, **kwargs)

    def delete(self, url, **kwargs):
        """Sends a DELETE request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param \*\*kwargs: Optional arguments that ``request`` takes.
        :rtype: requests.Response
        """
        return self.request('DELETE', url, **kwargs)

    def send(self, request, **kwargs):
        """Send a given PreparedRequest.

        :rtype: requests.Response
        """
        # Set defaults that the hooks can utilize to ensure they always have
        # the correct parameters to reproduce the previous request.
        kwargs.setdefault('stream', self.stream)
        kwargs.setdefault('verify', self.verify)
        kwargs.setdefault('cert', self.cert)
        kwargs.setdefault('proxies', self.proxies)

        # It's possible that users might accidentally send a Request object.
        # Guard against that specific failure case.
        if isinstance(request, Request):
            raise ValueError('You can only send PreparedRequests.')

        # Set up variables needed for resolve_redirects and dispatching of hooks
        allow_redirects = kwargs.pop('allow_redirects', True)
        stream = kwargs.get('stream')
        hooks = request.hooks

        # Resolve URL in redirect cache, if available.
        if allow_redirects:
            # Follow chained cache entries; `checked_urls` breaks cycles.
            checked_urls = set()
            while request.url in self.redirect_cache:
                checked_urls.add(request.url)
                new_url = self.redirect_cache.get(request.url)
                if new_url in checked_urls:
                    break
                request.url = new_url

        # Get the appropriate adapter to use
        adapter = self.get_adapter(url=request.url)

        # Start time (approximately) of the request
        start = datetime.utcnow()

        # Send the request
        r = adapter.send(request, **kwargs)

        # Total elapsed time of the request (approximately)
        r.elapsed = datetime.utcnow() - start

        # Response manipulation hooks
        r = dispatch_hook('response', hooks, r, **kwargs)

        # Persist cookies
        if r.history:

            # If the hooks create history then we want those cookies too
            for resp in r.history:
                extract_cookies_to_jar(self.cookies, resp.request, resp.raw)

        extract_cookies_to_jar(self.cookies, request, r.raw)

        # Redirect resolving generator.
        gen = self.resolve_redirects(r, request, **kwargs)

        # Resolve redirects if allowed.
        history = [resp for resp in gen] if allow_redirects else []

        # Shuffle things around if there's history.
        if history:
            # Insert the first (original) request at the start
            history.insert(0, r)
            # Get the last request made
            r = history.pop()
            r.history = history

        if not stream:
            r.content

        return r

    def merge_environment_settings(self, url, proxies, stream, verify, cert):
        """Check the environment and merge it with some settings.

        :rtype: dict
        """
        # Gather clues from the surrounding environment.
        if self.trust_env:
            # Set environment's proxies.
            env_proxies = get_environ_proxies(url) or {}
            for (k, v) in env_proxies.items():
                proxies.setdefault(k, v)

            # Look for requests environment configuration and be compatible
            # with cURL.
            if verify is True or verify is None:
                verify = (os.environ.get('REQUESTS_CA_BUNDLE') or
                          os.environ.get('CURL_CA_BUNDLE'))

        # Merge all the kwargs.
        proxies = merge_setting(proxies, self.proxies)
        stream = merge_setting(stream, self.stream)
        verify = merge_setting(verify, self.verify)
        cert = merge_setting(cert, self.cert)

        return {'verify': verify, 'proxies': proxies, 'stream': stream,
                'cert': cert}

    def get_adapter(self, url):
        """Returns the appropriate connection adapter for the given URL.

        :rtype: requests.adapters.BaseAdapter
        """
        for (prefix, adapter) in self.adapters.items():

            if url.lower().startswith(prefix):
                return adapter

        # Nothing matches :-/
        raise InvalidSchema("No connection adapters were found for '%s'" % url)

    def close(self):
        """Closes all adapters and as such the session"""
        for v in self.adapters.values():
            v.close()

    def mount(self, prefix, adapter):
        """Registers a connection adapter to a prefix.

        Adapters are sorted in descending order by key length.
        """
        self.adapters[prefix] = adapter
        # Re-append shorter prefixes so iteration visits longer (more
        # specific) prefixes first.
        keys_to_move = [k for k in self.adapters if len(k) < len(prefix)]

        for key in keys_to_move:
            self.adapters[key] = self.adapters.pop(key)

    def __getstate__(self):
        state = dict((attr, getattr(self, attr, None)) for attr in self.__attrs__)
        # RecentlyUsedContainer isn't picklable; store a plain dict copy.
        state['redirect_cache'] = dict(self.redirect_cache)
        return state

    def __setstate__(self, state):
        redirect_cache = state.pop('redirect_cache', {})
        for attr, value in state.items():
            setattr(self, attr, value)
        # Rebuild the LRU container from the pickled plain dict.
        self.redirect_cache = RecentlyUsedContainer(REDIRECT_CACHE_SIZE)
        for redirect, to in redirect_cache.items():
            self.redirect_cache[redirect] = to


def session():
    """Returns a :class:`Session` for context-management.

    :rtype: Session
    """
    return Session()
from __future__ import absolute_importfrom collections import namedtuplefrom ..exceptions import LocationParseErrorurl_attrs = ['scheme', 'auth', 'host', 'port', 'path', 'query', 'fragment']class Url(namedtuple('Url', url_attrs)):"""Datastructure for representing an HTTP URL. Used as a return value for:func:`parse_url`."""slots = ()def __new__(cls, scheme=None, auth=None, host=None, port=None, path=None,query=None, fragment=None):if path and not path.startswith('/'):path = '/' + pathreturn super(Url, cls).__new__(cls, scheme, auth, host, port, path,query, fragment)@propertydef hostname(self):"""For backwards-compatibility with urlparse. We're nice like that."""return self.host@propertydef request_uri(self):"""Absolute path including the query string."""uri = self.path or '/'if self.query is not None:uri += '?' + self.queryreturn uri@propertydef netloc(self):"""Network location including host and port"""if self.port:return '%s:%d' % (self.host, self.port)return self.host@propertydef url(self):"""Convert self into a urlThis function should more or less round-trip with :func:`.parse_url`. Thereturned url may not be exactly the same as the url inputted to:func:`.parse_url`, but it should be equivalent by the RFC (e.g., urlswith a blank port will have : removed).Example: ::>>> U = parse_url('http://google.com/mail/')>>> U.url'http://google.com/mail/'>>> Url('http', 'username:password', 'host.com', 80,... '/path', 'query', 'fragment').url'http://username:password@host.com:80/path?query#fragment'"""scheme, auth, host, port, path, query, fragment = selfurl = ''# We use "is not None" we want things to happen with empty strings (or 0 port)if scheme is not None:url += scheme + '://'if auth is not None:url += auth + '@'if host is not None:url += hostif port is not None:url += ':' + str(port)if path is not None:url += pathif query is not None:url += '?' 
+ queryif fragment is not None:url += '#' + fragmentreturn urldef __str__(self):return self.urldef split_first(s, delims):"""Given a string and an iterable of delimiters, split on the first founddelimiter. Return two split parts and the matched delimiter.If not found, then the first part is the full input string.Example::>>> split_first('foo/bar?baz', '?/=')('foo', 'bar?baz', '/')>>> split_first('foo/bar?baz', '123')('foo/bar?baz', '', None)Scales linearly with number of delims. Not ideal for large number of delims."""min_idx = Nonemin_delim = Nonefor d in delims:idx = s.find(d)if idx < 0:continueif min_idx is None or idx < min_idx:min_idx = idxmin_delim = dif min_idx is None or min_idx < 0:return s, '', Nonereturn s[:min_idx], s[min_idx + 1:], min_delimdef parse_url(url):"""Given a url, return a parsed :class:`.Url` namedtuple. Best-effort isperformed to parse incomplete urls. Fields not provided will be None.Partly backwards-compatible with :mod:`urlparse`.Example::>>> parse_url('http://google.com/mail/')Url(scheme='http', host='google.com', port=None, path='/mail/', ...)>>> parse_url('google.com:80')Url(scheme=None, host='google.com', port=80, path=None, ...)>>> parse_url('/foo?bar')Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...)"""# While this code has overlap with stdlib's urlparse, it is much# simplified for our needs and less annoying.# Additionally, this implementations does silly things to be optimal# on CPython.if not url:# Emptyreturn Url()scheme = Noneauth = Nonehost = Noneport = Nonepath = Nonefragment = Nonequery = None# Schemeif '://' in url:scheme, url = url.split('://', 1)# Find the earliest Authority Terminator# (http://tools.ietf.org/html/rfc3986#section-3.2)url, path_, delim = split_first(url, ['/', '?', '#'])if delim:# Reassemble the pathpath = delim + path_# Authif '@' in url:# Last '@' denotes end of auth partauth, url = url.rsplit('@', 1)# IPv6if url and url[0] == '[':host, url = url.split(']', 1)host += ']'# Portif ':' 
in url:_host, port = url.split(':', 1)if not host:host = _hostif port:# If given, ports must be integers.if not port.isdigit():raise LocationParseError(url)port = int(port)else:# Blank ports are cool, too. (rfc3986#section-3.2.3)port = Noneelif not host and url:host = urlif not path:return Url(scheme, auth, host, port, path, query, fragment)# Fragmentif '#' in path:path, fragment = path.split('#', 1)# Queryif '?' in path:path, query = path.split('?', 1)return Url(scheme, auth, host, port, path, query, fragment)def get_host(url):"""Deprecated. Use :func:`.parse_url` instead."""p = parse_url(url)return p.scheme or 'http', p.hostname, p.port
from __future__ import absolute_import
# The default socket timeout, used by httplib to indicate that no timeout was
# specified by the user
from socket import _GLOBAL_DEFAULT_TIMEOUT
import time

from ..exceptions import TimeoutStateError

# A sentinel value to indicate that no timeout was specified by the user in
# urllib3
_Default = object()


def current_time():
    """
    Retrieve the current time. This function is mocked out in unit testing.
    """
    return time.time()


class Timeout(object):
    """ Timeout configuration.

    Timeouts can be defined as a default for a pool::

        timeout = Timeout(connect=2.0, read=7.0)
        http = PoolManager(timeout=timeout)
        response = http.request('GET', 'http://example.com/')

    Or per-request (which overrides the default for the pool)::

        response = http.request('GET', 'http://example.com/', timeout=Timeout(10))

    Timeouts can be disabled by setting all the parameters to ``None``::

        no_timeout = Timeout(connect=None, read=None)
        response = http.request('GET', 'http://example.com/, timeout=no_timeout)

    :param total:
        This combines the connect and read timeouts into one; the read timeout
        will be set to the time leftover from the connect attempt. In the
        event that both a connect timeout and a total are specified, or a read
        timeout and a total are specified, the shorter timeout will be applied.

        Defaults to None.

    :type total: integer, float, or None

    :param connect:
        The maximum amount of time to wait for a connection attempt to a server
        to succeed. Omitting the parameter will default the connect timeout to
        the system default, probably `the global default timeout in socket.py
        <http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_.
        None will set an infinite timeout for connection attempts.

    :type connect: integer, float, or None

    :param read:
        The maximum amount of time to wait between consecutive
        read operations for a response from the server. Omitting
        the parameter will default the read timeout to the system
        default, probably `the global default timeout in socket.py
        <http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_.
        None will set an infinite timeout.

    :type read: integer, float, or None

    .. note::

        Many factors can affect the total amount of time for urllib3 to return
        an HTTP response.

        For example, Python's DNS resolver does not obey the timeout specified
        on the socket. Other factors that can affect total request time include
        high CPU load, high swap, the program running at a low priority level,
        or other behaviors.

        In addition, the read and total timeouts only measure the time between
        read operations on the socket connecting the client and the server,
        not the total amount of time for the request to return a complete
        response. For most requests, the timeout is raised because the server
        has not sent the first byte in the specified time. This is not always
        the case; if a server streams one byte every fifteen seconds, a timeout
        of 20 seconds will not trigger, even though the request will take
        several minutes to complete.

        If your goal is to cut off any request after a set amount of wall clock
        time, consider having a second "watcher" thread to cut off a slow
        request.
    """

    #: A sentinel object representing the default timeout value
    DEFAULT_TIMEOUT = _GLOBAL_DEFAULT_TIMEOUT

    def __init__(self, total=None, connect=_Default, read=_Default):
        self._connect = self._validate_timeout(connect, 'connect')
        self._read = self._validate_timeout(read, 'read')
        self.total = self._validate_timeout(total, 'total')
        # Set by start_connect(); used to compute the connect duration.
        self._start_connect = None

    def __str__(self):
        return '%s(connect=%r, read=%r, total=%r)' % (
            type(self).__name__, self._connect, self._read, self.total)

    @classmethod
    def _validate_timeout(cls, value, name):
        """ Check that a timeout attribute is valid.

        :param value: The timeout value to validate
        :param name: The name of the timeout attribute to validate. This is
            used to specify in error messages.
        :return: The validated and casted version of the given value.
        :raises ValueError: If the type is not an integer or a float, or if it
            is a numeric value less than zero.
        """
        # The two sentinels pass through unchanged; identity checks matter
        # here (DEFAULT_TIMEOUT is socket's module-level sentinel object).
        if value is _Default:
            return cls.DEFAULT_TIMEOUT

        if value is None or value is cls.DEFAULT_TIMEOUT:
            return value

        try:
            float(value)
        except (TypeError, ValueError):
            raise ValueError("Timeout value %s was %s, but it must be an "
                             "int or float." % (name, value))

        try:
            if value < 0:
                raise ValueError("Attempted to set %s timeout to %s, but the "
                                 "timeout cannot be set to a value less "
                                 "than 0." % (name, value))
        except TypeError:  # Python 3
            raise ValueError("Timeout value %s was %s, but it must be an "
                             "int or float." % (name, value))

        return value

    @classmethod
    def from_float(cls, timeout):
        """ Create a new Timeout from a legacy timeout value.

        The timeout value used by httplib.py sets the same timeout on the
        connect(), and recv() socket requests. This creates a :class:`Timeout`
        object that sets the individual timeouts to the ``timeout`` value
        passed to this function.

        :param timeout: The legacy timeout value.
        :type timeout: integer, float, sentinel default object, or None
        :return: Timeout object
        :rtype: :class:`Timeout`
        """
        return Timeout(read=timeout, connect=timeout)

    def clone(self):
        """ Create a copy of the timeout object

        Timeout properties are stored per-pool but each request needs a fresh
        Timeout object to ensure each one has its own start/stop configured.

        :return: a copy of the timeout object
        :rtype: :class:`Timeout`
        """
        # We can't use copy.deepcopy because that will also create a new object
        # for _GLOBAL_DEFAULT_TIMEOUT, which socket.py uses as a sentinel to
        # detect the user default.
        return Timeout(connect=self._connect, read=self._read,
                       total=self.total)

    def start_connect(self):
        """ Start the timeout clock, used during a connect() attempt

        :raises urllib3.exceptions.TimeoutStateError: if you attempt
            to start a timer that has been started already.
        """
        if self._start_connect is not None:
            raise TimeoutStateError("Timeout timer has already been started.")
        self._start_connect = current_time()
        return self._start_connect

    def get_connect_duration(self):
        """ Gets the time elapsed since the call to :meth:`start_connect`.

        :return: Elapsed time.
        :rtype: float
        :raises urllib3.exceptions.TimeoutStateError: if you attempt
            to get duration for a timer that hasn't been started.
        """
        if self._start_connect is None:
            raise TimeoutStateError("Can't get connect duration for "
                                    "timer that has not started.")
        return current_time() - self._start_connect

    @property
    def connect_timeout(self):
        """ Get the value to use when setting a connection timeout.

        This will be a positive float or integer, the value None
        (never timeout), or the default system timeout.

        :return: Connect timeout.
        :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None
        """
        if self.total is None:
            return self._connect

        if self._connect is None or self._connect is self.DEFAULT_TIMEOUT:
            return self.total

        # Both total and connect specified: the shorter one wins.
        return min(self._connect, self.total)

    @property
    def read_timeout(self):
        """ Get the value for the read timeout.

        This assumes some time has elapsed in the connection timeout and
        computes the read timeout appropriately.

        If self.total is set, the read timeout is dependent on the amount of
        time taken by the connect timeout. If the connection time has not been
        established, a :exc:`~urllib3.exceptions.TimeoutStateError` will be
        raised.

        :return: Value to use for the read timeout.
        :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None
        :raises urllib3.exceptions.TimeoutStateError: If :meth:`start_connect`
            has not yet been called on this object.
        """
        if (self.total is not None and
                self.total is not self.DEFAULT_TIMEOUT and
                self._read is not None and
                self._read is not self.DEFAULT_TIMEOUT):
            # In case the connect timeout has not yet been established.
            if self._start_connect is None:
                return self._read
            # Remaining budget after connect, capped by the read timeout
            # and floored at 0.
            return max(0, min(self.total - self.get_connect_duration(),
                              self._read))
        elif self.total is not None and self.total is not self.DEFAULT_TIMEOUT:
            return max(0, self.total - self.get_connect_duration())
        else:
            return self._read
from __future__ import absolute_import
import errno
import warnings
import hmac

from binascii import hexlify, unhexlify
from hashlib import md5, sha1, sha256

from ..exceptions import SSLError, InsecurePlatformWarning, SNIMissingWarning


# These are overwritten below if the running Python's ssl module provides
# the real implementations; callers can test them for feature availability.
SSLContext = None
HAS_SNI = False
create_default_context = None
IS_PYOPENSSL = False

# Maps the length of a digest to a possible hash function producing this digest
HASHFUNC_MAP = {
    32: md5,
    40: sha1,
    64: sha256,
}


def _const_compare_digest_backport(a, b):
    """
    Compare two digests of equal length in constant time.

    The digests must be of type str/bytes.
    Returns True if the digests match, and False otherwise.
    """
    # Fold every byte difference into `result`; only all-zero XORs (and equal
    # lengths) leave it at 0. Avoids short-circuiting on the first mismatch,
    # which would leak timing information.
    result = abs(len(a) - len(b))
    for l, r in zip(bytearray(a), bytearray(b)):
        result |= l ^ r
    return result == 0


# Prefer the stdlib constant-time comparison when available (Python 2.7.7+/3.3+).
_const_compare_digest = getattr(hmac, 'compare_digest',
                                _const_compare_digest_backport)


try:  # Test for SSL features
    import ssl
    from ssl import wrap_socket, CERT_NONE, PROTOCOL_SSLv23
    from ssl import HAS_SNI  # Has SNI?
except ImportError:
    pass


try:
    from ssl import OP_NO_SSLv2, OP_NO_SSLv3, OP_NO_COMPRESSION
except ImportError:
    # Fallback: the raw OpenSSL option values for Pythons whose ssl module
    # does not expose them.
    OP_NO_SSLv2, OP_NO_SSLv3 = 0x1000000, 0x2000000
    OP_NO_COMPRESSION = 0x20000

# A secure default.
# Sources for more information on TLS ciphers:
#
# - https://wiki.mozilla.org/Security/Server_Side_TLS
# - https://www.ssllabs.com/projects/best-practices/index.html
# - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/
#
# The general intent is:
# - Prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE),
# - prefer ECDHE over DHE for better performance,
# - prefer any AES-GCM over any AES-CBC for better performance and security,
# - use 3DES as fallback which is secure but slow,
# - disable NULL authentication, MD5 MACs and DSS for security reasons.
DEFAULT_CIPHERS = (
    'ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+HIGH:'
    'DH+HIGH:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+HIGH:RSA+3DES:!aNULL:'
    '!eNULL:!MD5'
)

try:
    from ssl import SSLContext  # Modern SSL?
except ImportError:
    import sys

    class SSLContext(object):  # Platform-specific: Python 2 & 3.1
        """Minimal stand-in for :class:`ssl.SSLContext` on old Pythons.

        Records the configuration and forwards it to the module-level
        :func:`ssl.wrap_socket` when the socket is wrapped.
        """

        # ``ciphers`` kwarg of wrap_socket exists on 2.7+ and 3.2+ only.
        supports_set_ciphers = ((2, 7) <= sys.version_info < (3,) or
                                (3, 2) <= sys.version_info)

        def __init__(self, protocol_version):
            self.protocol = protocol_version
            # Use default values from a real SSLContext
            self.check_hostname = False
            self.verify_mode = ssl.CERT_NONE
            self.ca_certs = None
            self.options = 0
            self.certfile = None
            self.keyfile = None
            self.ciphers = None

        def load_cert_chain(self, certfile, keyfile):
            self.certfile = certfile
            self.keyfile = keyfile

        def load_verify_locations(self, cafile=None, capath=None):
            self.ca_certs = cafile

            if capath is not None:
                raise SSLError("CA directories not supported in older Pythons")

        def set_ciphers(self, cipher_suite):
            if not self.supports_set_ciphers:
                raise TypeError(
                    'Your version of Python does not support setting '
                    'a custom cipher suite. Please upgrade to Python '
                    '2.7, 3.2, or later if you need this functionality.'
                )
            self.ciphers = cipher_suite

        def wrap_socket(self, socket, server_hostname=None, server_side=False):
            # `server_hostname` is accepted but ignored: no real SSLContext
            # means no SNI support, hence the warning below.
            warnings.warn(
                'A true SSLContext object is not available. This prevents '
                'urllib3 from configuring SSL appropriately and may cause '
                'certain SSL connections to fail. You can upgrade to a newer '
                'version of Python to solve this. For more information, see '
                'https://urllib3.readthedocs.io/en/latest/security.html'
                '#insecureplatformwarning.',
                InsecurePlatformWarning
            )
            kwargs = {
                'keyfile': self.keyfile,
                'certfile': self.certfile,
                'ca_certs': self.ca_certs,
                'cert_reqs': self.verify_mode,
                'ssl_version': self.protocol,
                'server_side': server_side,
            }
            if self.supports_set_ciphers:  # Platform-specific: Python 2.7+
                return wrap_socket(socket, ciphers=self.ciphers, **kwargs)
            else:  # Platform-specific: Python 2.6
                return wrap_socket(socket, **kwargs)


def assert_fingerprint(cert, fingerprint):
    """
    Checks if given fingerprint matches the supplied certificate.

    :param cert:
        Certificate as bytes object.
    :param fingerprint:
        Fingerprint as string of hexdigits, can be interspersed by colons.
    :raises urllib3.exceptions.SSLError:
        If the fingerprint has an unrecognized length or does not match.
    """
    fingerprint = fingerprint.replace(':', '').lower()
    # The hex digest length selects the hash algorithm (see HASHFUNC_MAP).
    digest_length = len(fingerprint)
    hashfunc = HASHFUNC_MAP.get(digest_length)
    if not hashfunc:
        raise SSLError(
            'Fingerprint of invalid length: {0}'.format(fingerprint))

    # We need encode() here for py32; works on py2 and py33.
    fingerprint_bytes = unhexlify(fingerprint.encode())

    cert_digest = hashfunc(cert).digest()

    # Constant-time comparison to avoid leaking match-prefix timing.
    if not _const_compare_digest(cert_digest, fingerprint_bytes):
        raise SSLError('Fingerprints did not match. Expected "{0}", got "{1}".'
                       .format(fingerprint, hexlify(cert_digest)))


def resolve_cert_reqs(candidate):
    """
    Resolves the argument to a numeric constant, which can be passed to
    the wrap_socket function/method from the ssl module.
    Defaults to :data:`ssl.CERT_NONE`.
    If given a string it is assumed to be the name of the constant in the
    :mod:`ssl` module or its abbreviation.
    (So you can specify `REQUIRED` instead of `CERT_REQUIRED`.)
    If it's neither `None` nor a string we assume it is already the numeric
    constant which can directly be passed to wrap_socket.
    """
    if candidate is None:
        return CERT_NONE

    if isinstance(candidate, str):
        res = getattr(ssl, candidate, None)
        if res is None:
            # Abbreviated form: e.g. 'REQUIRED' -> ssl.CERT_REQUIRED.
            res = getattr(ssl, 'CERT_' + candidate)
        return res

    return candidate


def resolve_ssl_version(candidate):
    """
    Like :func:`resolve_cert_reqs`, but for ``ssl.PROTOCOL_*`` constants;
    defaults to :data:`ssl.PROTOCOL_SSLv23`.
    """
    if candidate is None:
        return PROTOCOL_SSLv23

    if isinstance(candidate, str):
        res = getattr(ssl, candidate, None)
        if res is None:
            # Abbreviated form: e.g. 'TLSv1' -> ssl.PROTOCOL_TLSv1.
            res = getattr(ssl, 'PROTOCOL_' + candidate)
        return res

    return candidate


def create_urllib3_context(ssl_version=None, cert_reqs=None,
                           options=None, ciphers=None):
    """All arguments have the same meaning as ``ssl_wrap_socket``.

    By default, this function does a lot of the same work that
    ``ssl.create_default_context`` does on Python 3.4+. It:

    - Disables SSLv2, SSLv3, and compression
    - Sets a restricted set of server ciphers

    If you wish to enable SSLv3, you can do::

        from urllib3.util import ssl_
        context = ssl_.create_urllib3_context()
        context.options &= ~ssl_.OP_NO_SSLv3

    You can do the same to enable compression (substituting ``COMPRESSION``
    for ``SSLv3`` in the last line above).

    :param ssl_version:
        The desired protocol version to use. This will default to
        PROTOCOL_SSLv23 which will negotiate the highest protocol that both
        the server and your installation of OpenSSL support.
    :param cert_reqs:
        Whether to require the certificate verification. This defaults to
        ``ssl.CERT_REQUIRED``.
    :param options:
        Specific OpenSSL options. These default to ``ssl.OP_NO_SSLv2``,
        ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``.
    :param ciphers:
        Which cipher suites to allow the server to select.
    :returns:
        Constructed SSLContext object with specified options
    :rtype: SSLContext
    """
    context = SSLContext(ssl_version or ssl.PROTOCOL_SSLv23)

    # Setting the default here, as we may have no ssl module on import
    cert_reqs = ssl.CERT_REQUIRED if cert_reqs is None else cert_reqs

    if options is None:
        options = 0
        # SSLv2 is easily broken and is considered harmful and dangerous
        options |= OP_NO_SSLv2
        # SSLv3 has several problems and is now dangerous
        options |= OP_NO_SSLv3
        # Disable compression to prevent CRIME attacks for OpenSSL 1.0+
        # (issue #309)
        options |= OP_NO_COMPRESSION

    context.options |= options

    if getattr(context, 'supports_set_ciphers', True):  # Platform-specific: Python 2.6
        context.set_ciphers(ciphers or DEFAULT_CIPHERS)

    context.verify_mode = cert_reqs
    if getattr(context, 'check_hostname', None) is not None:  # Platform-specific: Python 3.2
        # We do our own verification, including fingerprints and alternative
        # hostnames. So disable it here
        context.check_hostname = False
    return context


def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
                    ca_certs=None, server_hostname=None,
                    ssl_version=None, ciphers=None, ssl_context=None,
                    ca_cert_dir=None):
    """
    All arguments except for server_hostname, ssl_context, and ca_cert_dir have
    the same meaning as they do when using :func:`ssl.wrap_socket`.

    :param server_hostname:
        When SNI is supported, the expected hostname of the certificate
    :param ssl_context:
        A pre-made :class:`SSLContext` object. If none is provided, one will
        be created using :func:`create_urllib3_context`.
    :param ciphers:
        A string of ciphers we wish the client to support. This is not
        supported on Python 2.6 as the ssl module does not support it.
    :param ca_cert_dir:
        A directory containing CA certificates in multiple separate files, as
        supported by OpenSSL's -CApath flag or the capath argument to
        SSLContext.load_verify_locations().
    """
    context = ssl_context
    if context is None:
        context = create_urllib3_context(ssl_version, cert_reqs,
                                         ciphers=ciphers)

    if ca_certs or ca_cert_dir:
        try:
            context.load_verify_locations(ca_certs, ca_cert_dir)
        except IOError as e:  # Platform-specific: Python 2.6, 2.7, 3.2
            raise SSLError(e)
        # Py33 raises FileNotFoundError which subclasses OSError
        # These are not equivalent unless we check the errno attribute
        except OSError as e:  # Platform-specific: Python 3.3 and beyond
            if e.errno == errno.ENOENT:
                raise SSLError(e)
            raise

    if certfile:
        context.load_cert_chain(certfile, keyfile)
    if HAS_SNI:  # Platform-specific: OpenSSL with enabled SNI
        return context.wrap_socket(sock, server_hostname=server_hostname)

    # Without SNI the server cannot pick a certificate by hostname; warn so
    # users understand potential validation failures.
    warnings.warn(
        'An HTTPS request has been made, but the SNI (Subject Name '
        'Indication) extension to TLS is not available on this platform. '
        'This may cause the server to present an incorrect TLS '
        'certificate, which can cause validation failures. You can upgrade to '
        'a newer version of Python to solve this. For more information, see '
        'https://urllib3.readthedocs.io/en/latest/security.html'
        '#snimissingwarning.',
        SNIMissingWarning
    )
    return context.wrap_socket(sock)
from __future__ import absolute_import
import time
import logging

from ..exceptions import (
    ConnectTimeoutError,
    MaxRetryError,
    ProtocolError,
    ReadTimeoutError,
    ResponseError,
)
from ..packages import six


log = logging.getLogger(__name__)


class Retry(object):
    """ Retry configuration.

    Each retry attempt will create a new Retry object with updated values, so
    they can be safely reused.

    Retries can be defined as a default for a pool::

        retries = Retry(connect=5, read=2, redirect=5)
        http = PoolManager(retries=retries)
        response = http.request('GET', 'http://example.com/')

    Or per-request (which overrides the default for the pool)::

        response = http.request('GET', 'http://example.com/', retries=Retry(10))

    Retries can be disabled by passing ``False``::

        response = http.request('GET', 'http://example.com/', retries=False)

    Errors will be wrapped in :class:`~urllib3.exceptions.MaxRetryError` unless
    retries are disabled, in which case the causing exception will be raised.

    :param int total:
        Total number of retries to allow. Takes precedence over other counts.

        Set to ``None`` to remove this constraint and fall back on other
        counts. It's a good idea to set this to some sensibly-high value to
        account for unexpected edge cases and avoid infinite retry loops.

        Set to ``0`` to fail on the first retry.

        Set to ``False`` to disable and imply ``raise_on_redirect=False``.

    :param int connect:
        How many connection-related errors to retry on.

        These are errors raised before the request is sent to the remote server,
        which we assume has not triggered the server to process the request.

        Set to ``0`` to fail on the first retry of this type.

    :param int read:
        How many times to retry on read errors.

        These errors are raised after the request was sent to the server, so the
        request may have side-effects.

        Set to ``0`` to fail on the first retry of this type.

    :param int redirect:
        How many redirects to perform. Limit this to avoid infinite redirect
        loops.

        A redirect is a HTTP response with a status code 301, 302, 303, 307 or
        308.

        Set to ``0`` to fail on the first retry of this type.

        Set to ``False`` to disable and imply ``raise_on_redirect=False``.

    :param iterable method_whitelist:
        Set of uppercased HTTP method verbs that we should retry on.

        By default, we only retry on methods which are considered to be
        idempotent (multiple requests with the same parameters end with the
        same state). See :attr:`Retry.DEFAULT_METHOD_WHITELIST`.

        Set to a ``False`` value to retry on any verb.

    :param iterable status_forcelist:
        A set of integer HTTP status codes that we should force a retry on.
        A retry is initiated if the request method is in ``method_whitelist``
        and the response status code is in ``status_forcelist``.

        By default, this is disabled with ``None``.

    :param float backoff_factor:
        A backoff factor to apply between attempts after the second try
        (most errors are resolved immediately by a second try without a
        delay). urllib3 will sleep for::

            {backoff factor} * (2 ^ ({number of total retries} - 1))

        seconds. If the backoff_factor is 0.1, then :func:`.sleep` will sleep
        for [0.0s, 0.2s, 0.4s, ...] between retries. It will never be longer
        than :attr:`Retry.BACKOFF_MAX`.

        By default, backoff is disabled (set to 0).

    :param bool raise_on_redirect: Whether, if the number of redirects is
        exhausted, to raise a MaxRetryError, or to return a response with a
        response code in the 3xx range.

    :param bool raise_on_status: Similar meaning to ``raise_on_redirect``:
        whether we should raise an exception, or return a response,
        if status falls in ``status_forcelist`` range and retries have
        been exhausted.
    """

    DEFAULT_METHOD_WHITELIST = frozenset([
        'HEAD', 'GET', 'PUT', 'DELETE', 'OPTIONS', 'TRACE'])

    #: Maximum backoff time.
    BACKOFF_MAX = 120

    def __init__(self, total=10, connect=None, read=None, redirect=None,
                 method_whitelist=DEFAULT_METHOD_WHITELIST, status_forcelist=None,
                 backoff_factor=0, raise_on_redirect=True, raise_on_status=True,
                 _observed_errors=0):

        self.total = total
        self.connect = connect
        self.read = read

        # Disabling retries entirely (or just redirects) also disables the
        # MaxRetryError that would otherwise be raised on redirect exhaustion.
        if redirect is False or total is False:
            redirect = 0
            raise_on_redirect = False

        self.redirect = redirect
        self.status_forcelist = status_forcelist or set()
        self.method_whitelist = method_whitelist
        self.backoff_factor = backoff_factor
        self.raise_on_redirect = raise_on_redirect
        self.raise_on_status = raise_on_status
        self._observed_errors = _observed_errors  # TODO: use .history instead?

    def new(self, **kw):
        # Copy this Retry's configuration into a fresh instance, with any
        # overrides from `kw` (used by increment() to produce updated counters).
        params = dict(
            total=self.total,
            connect=self.connect, read=self.read, redirect=self.redirect,
            method_whitelist=self.method_whitelist,
            status_forcelist=self.status_forcelist,
            backoff_factor=self.backoff_factor,
            raise_on_redirect=self.raise_on_redirect,
            raise_on_status=self.raise_on_status,
            _observed_errors=self._observed_errors,
        )
        params.update(kw)
        return type(self)(**params)

    @classmethod
    def from_int(cls, retries, redirect=True, default=None):
        """ Backwards-compatibility for the old retries format."""
        if retries is None:
            retries = default if default is not None else cls.DEFAULT

        if isinstance(retries, Retry):
            return retries

        # `redirect=True` means "no explicit limit" (None) here; a falsy
        # value disables redirects entirely.
        redirect = bool(redirect) and None
        new_retries = cls(retries, redirect=redirect)
        log.debug("Converted retries value: %r -> %r", retries, new_retries)
        return new_retries

    def get_backoff_time(self):
        """ Formula for computing the current backoff

        :rtype: float
        """
        # No backoff before the second retry.
        if self._observed_errors <= 1:
            return 0

        backoff_value = self.backoff_factor * (2 ** (self._observed_errors - 1))
        return min(self.BACKOFF_MAX, backoff_value)

    def sleep(self):
        """ Sleep between retry attempts using an exponential backoff.

        By default, the backoff factor is 0 and this method will return
        immediately.
        """
        backoff = self.get_backoff_time()
        if backoff <= 0:
            return
        time.sleep(backoff)

    def _is_connection_error(self, err):
        """ Errors when we're fairly sure that the server did not receive the
        request, so it should be safe to retry.
        """
        return isinstance(err, ConnectTimeoutError)

    def _is_read_error(self, err):
        """ Errors that occur after the request has been started, so we should
        assume that the server began processing it.
        """
        return isinstance(err, (ReadTimeoutError, ProtocolError))

    def is_forced_retry(self, method, status_code):
        """ Is this method/status code retryable? (Based on method/codes whitelists)
        """
        if self.method_whitelist and method.upper() not in self.method_whitelist:
            return False

        return self.status_forcelist and status_code in self.status_forcelist

    def is_exhausted(self):
        """ Are we out of retries? """
        retry_counts = (self.total, self.connect, self.read, self.redirect)
        # filter(None, ...) drops both disabled (None/False) and zero counters;
        # a counter only goes negative when its last retry has been consumed.
        retry_counts = list(filter(None, retry_counts))
        if not retry_counts:
            return False

        return min(retry_counts) < 0

    def increment(self, method=None, url=None, response=None, error=None,
                  _pool=None, _stacktrace=None):
        """ Return a new Retry object with incremented retry counters.

        :param response: A response object, or None, if the server did not
            return a response.
        :type response: :class:`~urllib3.response.HTTPResponse`
        :param Exception error: An error encountered during the request, or
            None if the response was received successfully.

        :return: A new ``Retry`` object.
        :raises urllib3.exceptions.MaxRetryError: If the new counters are
            exhausted (unless retries are disabled, in which case the original
            error is re-raised).
        """
        if self.total is False and error:
            # Disabled, indicate to re-raise the error.
            raise six.reraise(type(error), error, _stacktrace)

        total = self.total
        if total is not None:
            total -= 1

        _observed_errors = self._observed_errors
        connect = self.connect
        read = self.read
        redirect = self.redirect
        cause = 'unknown'

        if error and self._is_connection_error(error):
            # Connect retry?
            if connect is False:
                raise six.reraise(type(error), error, _stacktrace)
            elif connect is not None:
                connect -= 1
            _observed_errors += 1

        elif error and self._is_read_error(error):
            # Read retry?
            if read is False:
                raise six.reraise(type(error), error, _stacktrace)
            elif read is not None:
                read -= 1
            _observed_errors += 1

        elif response and response.get_redirect_location():
            # Redirect retry?
            if redirect is not None:
                redirect -= 1
            cause = 'too many redirects'

        else:
            # Incrementing because of a server error like a 500 in
            # status_forcelist and the given method is in the whitelist
            _observed_errors += 1
            cause = ResponseError.GENERIC_ERROR
            if response and response.status:
                cause = ResponseError.SPECIFIC_ERROR.format(
                    status_code=response.status)

        new_retry = self.new(
            total=total,
            connect=connect, read=read, redirect=redirect,
            _observed_errors=_observed_errors)

        if new_retry.is_exhausted():
            raise MaxRetryError(_pool, url, error or ResponseError(cause))

        log.debug("Incremented Retry for (url='%s'): %r", url, new_retry)

        return new_retry

    def __repr__(self):
        return ('{cls.__name__}(total={self.total}, connect={self.connect}, '
                'read={self.read}, redirect={self.redirect})').format(
                    cls=type(self), self=self)


# For backwards compatibility (equivalent to pre-v1.9):
Retry.DEFAULT = Retry(3)
from __future__ import absolute_import
from ..packages.six.moves import http_client as httplib

from ..exceptions import HeaderParsingError


def is_fp_closed(obj):
    """
    Checks whether a given file-like object is closed.

    :param obj:
        The file-like object to check.
    :raises ValueError:
        If the object exposes neither ``closed`` nor ``fp``.
    """
    missing = object()

    # The official file-like-object way.
    closed = getattr(obj, 'closed', missing)
    if closed is not missing:
        return closed

    # Containers for another file-like object that gets released on
    # exhaustion (e.g. HTTPResponse) set their ``fp`` to None.
    fp = getattr(obj, 'fp', missing)
    if fp is not missing:
        return fp is None

    raise ValueError("Unable to determine whether fp is closed.")


def assert_header_parsing(headers):
    """
    Asserts whether all headers have been successfully parsed.
    Extracts encountered errors from the result of parsing headers.

    Only works on Python 3.

    :param headers: Headers to verify.
    :type headers: `httplib.HTTPMessage`.

    :raises urllib3.exceptions.HeaderParsingError:
        If parsing errors are found.
    """
    # This will fail silently if we pass in the wrong kind of parameter.
    # To make debugging easier add an explicit check.
    if not isinstance(headers, httplib.HTTPMessage):
        raise TypeError('expected httplib.Message, got {0}.'.format(
            type(headers)))

    defects = getattr(headers, 'defects', None)
    payload_getter = getattr(headers, 'get_payload', None)

    # Anything left over after parsing is data the parser couldn't interpret.
    unparsed_data = payload_getter() if payload_getter else None  # Platform-specific: Python 3.

    if defects or unparsed_data:
        raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data)


def is_response_to_head(response):
    """
    Checks whether the request of a response has been a HEAD-request.
    Handles the quirks of AppEngine.

    :param conn:
    :type conn: :class:`httplib.HTTPResponse`
    """
    # FIXME: Can we do this somehow without accessing private httplib _method?
    method = response._method
    if isinstance(method, int):  # Platform-specific: Appengine
        # AppEngine stores the verb as an integer; 3 corresponds to HEAD.
        return method == 3
    return method.upper() == 'HEAD'
from __future__ import absolute_importfrom base64 import b64encodefrom ..packages.six import bACCEPT_ENCODING = 'gzip,deflate'def make_headers(keep_alive=None, accept_encoding=None, user_agent=None,basic_auth=None, proxy_basic_auth=None, disable_cache=None):"""Shortcuts for generating request headers.:param keep_alive:If ``True``, adds 'connection: keep-alive' header.:param accept_encoding:Can be a boolean, list, or string.``True`` translates to 'gzip,deflate'.List will get joined by comma.String will be used as provided.:param user_agent:String representing the user-agent you want, such as"python-urllib3/0.6":param basic_auth:Colon-separated username:password string for 'authorization: basic ...'auth header.:param proxy_basic_auth:Colon-separated username:password string for 'proxy-authorization: basic ...'auth header.:param disable_cache:If ``True``, adds 'cache-control: no-cache' header.Example::>>> make_headers(keep_alive=True, user_agent="Batman/1.0"){'connection': 'keep-alive', 'user-agent': 'Batman/1.0'}>>> make_headers(accept_encoding=True){'accept-encoding': 'gzip,deflate'}"""headers = {}if accept_encoding:if isinstance(accept_encoding, str):passelif isinstance(accept_encoding, list):accept_encoding = ','.join(accept_encoding)else:accept_encoding = ACCEPT_ENCODINGheaders['accept-encoding'] = accept_encodingif user_agent:headers['user-agent'] = user_agentif keep_alive:headers['connection'] = 'keep-alive'if basic_auth:headers['authorization'] = 'Basic ' + \b64encode(b(basic_auth)).decode('utf-8')if proxy_basic_auth:headers['proxy-authorization'] = 'Basic ' + \b64encode(b(proxy_basic_auth)).decode('utf-8')if disable_cache:headers['cache-control'] = 'no-cache'return headers
from __future__ import absolute_import
import socket

try:
    from select import poll, POLLIN
except ImportError:  # `poll` doesn't exist on OSX and other platforms
    poll = False
    try:
        from select import select
    except ImportError:  # `select` doesn't exist on AppEngine.
        select = False


def is_connection_dropped(conn):  # Platform-specific
    """
    Returns True if the connection is dropped and should be closed.

    :param conn:
        :class:`httplib.HTTPConnection` object.

    Note: For platforms like AppEngine, this will always return ``False`` to
    let the platform handle connection recycling transparently for us.
    """
    sock = getattr(conn, 'sock', False)
    if sock is False:  # Platform-specific: AppEngine
        return False
    if sock is None:  # Connection already closed (such as by httplib).
        return True

    if not poll:
        if not select:  # Platform-specific: AppEngine
            return False

        try:
            # Zero timeout: non-blocking readability probe. A readable idle
            # socket means buffered data or EOF, i.e. a dropped connection.
            return select([sock], [], [], 0.0)[0]
        except socket.error:
            return True

    # This version is better on platforms that support it.
    p = poll()
    p.register(sock, POLLIN)
    for (fno, ev) in p.poll(0.0):
        if fno == sock.fileno():
            # Either data is buffered (bad), or the connection is dropped.
            return True


# This function is copied from socket.py in the Python 2.7 standard
# library test suite. Added to its signature is only `socket_options`.
# One additional modification is that we avoid binding to IPv6 servers
# discovered in DNS if the system doesn't have IPv6 functionality.
def create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
                      source_address=None, socket_options=None):
    """Connect to *address* and return the socket object.

    Convenience function.  Connect to *address* (a 2-tuple ``(host,
    port)``) and return the socket object.  Passing the optional
    *timeout* parameter will set the timeout on the socket instance
    before attempting to connect.  If no *timeout* is supplied, the
    global default timeout setting returned by :func:`getdefaulttimeout`
    is used.  If *source_address* is set it must be a tuple of (host, port)
    for the socket to bind as a source address before making the connection.
    A host of '' or port 0 tells the OS to use the default.
    """

    host, port = address
    if host.startswith('['):
        # Strip brackets from IPv6 literals, e.g. '[::1]' -> '::1'.
        host = host.strip('[]')
    err = None

    # Using the value from allowed_gai_family() in the context of getaddrinfo lets
    # us select whether to work with IPv4 DNS records, IPv6 records, or both.
    # The original create_connection function always returns all records.
    family = allowed_gai_family()

    # Try each resolved address in order until one connects.
    for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM):
        af, socktype, proto, canonname, sa = res
        sock = None
        try:
            sock = socket.socket(af, socktype, proto)

            # If provided, set socket level options before connecting.
            _set_socket_options(sock, socket_options)

            if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
                sock.settimeout(timeout)
            if source_address:
                sock.bind(source_address)
            sock.connect(sa)
            return sock

        except socket.error as e:
            # Remember the last error so we can re-raise it if every
            # candidate address fails.
            err = e
            if sock is not None:
                sock.close()
                sock = None

    if err is not None:
        raise err

    raise socket.error("getaddrinfo returns an empty list")


def _set_socket_options(sock, options):
    # `options` is an iterable of setsockopt() argument tuples, or None.
    if options is None:
        return

    for opt in options:
        sock.setsockopt(*opt)


def allowed_gai_family():
    """This function is designed to work in the context of
    getaddrinfo, where family=socket.AF_UNSPEC is the default and
    will perform a DNS search for both IPv6 and IPv4 records."""

    family = socket.AF_INET
    if HAS_IPV6:
        family = socket.AF_UNSPEC
    return family


def _has_ipv6(host):
    """ Returns True if the system can bind an IPv6 address. """
    sock = None
    has_ipv6 = False

    if socket.has_ipv6:
        # has_ipv6 returns true if cPython was compiled with IPv6 support.
        # It does not tell us if the system has IPv6 support enabled. To
        # determine that we must bind to an IPv6 address.
        # https://github.com/shazow/urllib3/pull/611
        # https://bugs.python.org/issue658327
        try:
            sock = socket.socket(socket.AF_INET6)
            sock.bind((host, 0))
            has_ipv6 = True
        except Exception:
            pass

    if sock:
        sock.close()
    return has_ipv6


# Probed once at import time; consulted by allowed_gai_family().
HAS_IPV6 = _has_ipv6('::1')
from __future__ import absolute_import# For backwards compatibility, provide imports that used to be here.from .connection import is_connection_droppedfrom .request import make_headersfrom .response import is_fp_closedfrom .ssl_ import (SSLContext,HAS_SNI,IS_PYOPENSSL,assert_fingerprint,resolve_cert_reqs,resolve_ssl_version,ssl_wrap_socket,)from .timeout import (current_time,Timeout,)from .retry import Retryfrom .url import (get_host,parse_url,split_first,Url,)__all__ = ('HAS_SNI','IS_PYOPENSSL','SSLContext','Retry','Timeout','Url','assert_fingerprint','current_time','is_connection_dropped','is_fp_closed','get_host','parse_url','make_headers','resolve_cert_reqs','resolve_ssl_version','split_first','ssl_wrap_socket',)
from __future__ import absolute_importfrom contextlib import contextmanagerimport zlibimport iofrom socket import timeout as SocketTimeoutfrom socket import error as SocketErrorfrom ._collections import HTTPHeaderDictfrom .exceptions import (ProtocolError, DecodeError, ReadTimeoutError, ResponseNotChunked)from .packages.six import string_types as basestring, binary_type, PY3from .packages.six.moves import http_client as httplibfrom .connection import HTTPException, BaseSSLErrorfrom .util.response import is_fp_closed, is_response_to_headclass DeflateDecoder(object):def __init__(self):self._first_try = Trueself._data = binary_type()self._obj = zlib.decompressobj()def __getattr__(self, name):return getattr(self._obj, name)def decompress(self, data):if not data:return dataif not self._first_try:return self._obj.decompress(data)self._data += datatry:return self._obj.decompress(data)except zlib.error:self._first_try = Falseself._obj = zlib.decompressobj(-zlib.MAX_WBITS)try:return self.decompress(self._data)finally:self._data = Noneclass GzipDecoder(object):def __init__(self):self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)def __getattr__(self, name):return getattr(self._obj, name)def decompress(self, data):if not data:return datareturn self._obj.decompress(data)def _get_decoder(mode):if mode == 'gzip':return GzipDecoder()return DeflateDecoder()class HTTPResponse(io.IOBase):"""HTTP Response container.Backwards-compatible to httplib's HTTPResponse but the response ``body`` isloaded and decoded on-demand when the ``data`` property is accessed. 
Thisclass is also compatible with the Python standard library's :mod:`io`module, and can hence be treated as a readable object in the context of thatframework.Extra parameters for behaviour not present in httplib.HTTPResponse::param preload_content:If True, the response's body will be preloaded during construction.:param decode_content:If True, attempts to decode specific content-encoding's based on headers(like 'gzip' and 'deflate') will be skipped and raw data will be usedinstead.:param original_response:When this HTTPResponse wrapper is generated from an httplib.HTTPResponseobject, it's convenient to include the original for debug purposes. It'sotherwise unused."""CONTENT_DECODERS = ['gzip', 'deflate']REDIRECT_STATUSES = [301, 302, 303, 307, 308]def __init__(self, body='', headers=None, status=0, version=0, reason=None,strict=0, preload_content=True, decode_content=True,original_response=None, pool=None, connection=None):if isinstance(headers, HTTPHeaderDict):self.headers = headerselse:self.headers = HTTPHeaderDict(headers)self.status = statusself.version = versionself.reason = reasonself.strict = strictself.decode_content = decode_contentself._decoder = Noneself._body = Noneself._fp = Noneself._original_response = original_responseself._fp_bytes_read = 0if body and isinstance(body, (basestring, binary_type)):self._body = bodyself._pool = poolself._connection = connectionif hasattr(body, 'read'):self._fp = body# Are we using the chunked-style of transfer encoding?self.chunked = Falseself.chunk_left = Nonetr_enc = self.headers.get('transfer-encoding', '').lower()# Don't incur the penalty of creating a list and then discarding itencodings = (enc.strip() for enc in tr_enc.split(","))if "chunked" in encodings:self.chunked = True# If requested, preload the body.if preload_content and not self._body:self._body = self.read(decode_content=decode_content)def get_redirect_location(self):"""Should we redirect and where to?:returns: Truthy redirect location string if we got 
a redirect statuscode and valid location. ``None`` if redirect status and nolocation. ``False`` if not a redirect status code."""if self.status in self.REDIRECT_STATUSES:return self.headers.get('location')return Falsedef release_conn(self):if not self._pool or not self._connection:returnself._pool._put_conn(self._connection)self._connection = None@propertydef data(self):# For backwords-compat with earlier urllib3 0.4 and earlier.if self._body:return self._bodyif self._fp:return self.read(cache_content=True)@propertydef connection(self):return self._connectiondef tell(self):"""Obtain the number of bytes pulled over the wire so far. May differ fromthe amount of content returned by :meth:``HTTPResponse.read`` if bytesare encoded on the wire (e.g, compressed)."""return self._fp_bytes_readdef _init_decoder(self):"""Set-up the _decoder attribute if necessar."""# Note: content-encoding value should be case-insensitive, per RFC 7230# Section 3.2content_encoding = self.headers.get('content-encoding', '').lower()if self._decoder is None and content_encoding in self.CONTENT_DECODERS:self._decoder = _get_decoder(content_encoding)def _decode(self, data, decode_content, flush_decoder):"""Decode the data passed in and potentially flush the decoder."""try:if decode_content and self._decoder:data = self._decoder.decompress(data)except (IOError, zlib.error) as e:content_encoding = self.headers.get('content-encoding', '').lower()raise DecodeError("Received response with content-encoding: %s, but ""failed to decode it." % content_encoding, e)if flush_decoder and decode_content:data += self._flush_decoder()return datadef _flush_decoder(self):"""Flushes the decoder. 
        Should only be called if the decoder is actually
        being used.
        """
        if self._decoder:
            buf = self._decoder.decompress(b'')
            return buf + self._decoder.flush()

        return b''

    @contextmanager
    def _error_catcher(self):
        """
        Catch low-level python exceptions, instead re-raising urllib3
        variants, so that low-level exceptions are not leaked in the
        high-level api.

        On exit, release the connection back to the pool.
        """
        clean_exit = False

        try:
            try:
                yield

            except SocketTimeout:
                # FIXME: Ideally we'd like to include the url in the ReadTimeoutError but
                # there is yet no clean way to get at it from this context.
                raise ReadTimeoutError(self._pool, None, 'Read timed out.')

            except BaseSSLError as e:
                # FIXME: Is there a better way to differentiate between SSLErrors?
                if 'read operation timed out' not in str(e):  # Defensive:
                    # This shouldn't happen but just in case we're missing an edge
                    # case, let's avoid swallowing SSL errors.
                    raise

                raise ReadTimeoutError(self._pool, None, 'Read timed out.')

            except (HTTPException, SocketError) as e:
                # This includes IncompleteRead.
                raise ProtocolError('Connection broken: %r' % e, e)

            # If no exception is thrown, we should avoid cleaning up
            # unnecessarily.
            clean_exit = True
        finally:
            # If we didn't terminate cleanly, we need to throw away our
            # connection.
            if not clean_exit:
                # The response may not be closed but we're not going to use it
                # anymore so close it now to ensure that the connection is
                # released back to the pool.
                if self._original_response:
                    self._original_response.close()

                # Closing the response may not actually be sufficient to close
                # everything, so if we have a hold of the connection close that
                # too.
                if self._connection:
                    self._connection.close()

            # If we hold the original response but it's closed now, we should
            # return the connection back to the pool.
            if self._original_response and self._original_response.isclosed():
                self.release_conn()

    def read(self, amt=None, decode_content=None, cache_content=False):
        """
        Similar to :meth:`httplib.HTTPResponse.read`, but with two additional
        parameters: ``decode_content`` and ``cache_content``.

        :param amt:
            How much of the content to read. If specified, caching is skipped
            because it doesn't make sense to cache partial content as the full
            response.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.

        :param cache_content:
            If True, will save the returned data such that the same result is
            returned despite of the state of the underlying file object. This
            is useful if you want the ``.data`` property to continue working
            after having ``.read()`` the file object. (Overridden if ``amt`` is
            set.)
        """
        self._init_decoder()
        if decode_content is None:
            decode_content = self.decode_content

        # No underlying file object means there is nothing to read from.
        if self._fp is None:
            return

        flush_decoder = False
        data = None

        with self._error_catcher():
            if amt is None:
                # cStringIO doesn't like amt=None
                data = self._fp.read()
                flush_decoder = True
            else:
                cache_content = False
                data = self._fp.read(amt)
                if amt != 0 and not data:  # Platform-specific: Buggy versions of Python.
                    # Close the connection when no data is returned
                    #
                    # This is redundant to what httplib/http.client _should_
                    # already do. However, versions of python released before
                    # December 15, 2012 (http://bugs.python.org/issue16298) do
                    # not properly close the connection in all cases. There is
                    # no harm in redundantly calling close.
                    self._fp.close()
                    flush_decoder = True

        if data:
            self._fp_bytes_read += len(data)

            data = self._decode(data, decode_content, flush_decoder)

            if cache_content:
                self._body = data

        return data

    def stream(self, amt=2**16, decode_content=None):
        """
        A generator wrapper for the read() method. A call will block until
        ``amt`` bytes have been read from the connection or until the
        connection is closed.

        :param amt:
            How much of the content to read. The generator will return up to
            much data per iteration, but may return less. This is particularly
            likely when using compressed data. However, the empty string will
            never be returned.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.
        """
        if self.chunked:
            # Chunked responses get their own dedicated generator.
            for line in self.read_chunked(amt, decode_content=decode_content):
                yield line
        else:
            while not is_fp_closed(self._fp):
                data = self.read(amt=amt, decode_content=decode_content)

                if data:
                    yield data

    @classmethod
    def from_httplib(ResponseCls, r, **response_kw):
        """
        Given an :class:`httplib.HTTPResponse` instance ``r``, return a
        corresponding :class:`urllib3.response.HTTPResponse` object.

        Remaining parameters are passed to the HTTPResponse constructor, along
        with ``original_response=r``.
        """
        headers = r.msg

        if not isinstance(headers, HTTPHeaderDict):
            if PY3:  # Python 3
                headers = HTTPHeaderDict(headers.items())
            else:  # Python 2
                headers = HTTPHeaderDict.from_httplib(headers)

        # HTTPResponse objects in Python 3 don't have a .strict attribute
        strict = getattr(r, 'strict', 0)
        resp = ResponseCls(body=r,
                           headers=headers,
                           status=r.status,
                           version=r.version,
                           reason=r.reason,
                           strict=strict,
                           original_response=r,
                           **response_kw)
        return resp

    # Backwards-compatibility methods for httplib.HTTPResponse
    def getheaders(self):
        # Note: returns the full header mapping, not a list of pairs.
        return self.headers

    def getheader(self, name, default=None):
        return self.headers.get(name, default)

    # Overrides from io.IOBase
    def close(self):
        if not self.closed:
            self._fp.close()

        if self._connection:
            self._connection.close()

    @property
    def closed(self):
        # Mirror the closed-ness of whatever file-like object backs us.
        if self._fp is None:
            return True
        elif hasattr(self._fp, 'closed'):
            return self._fp.closed
        elif hasattr(self._fp, 'isclosed'):  # Python 2
            return self._fp.isclosed()
        else:
            return True

    def fileno(self):
        if self._fp is None:
            raise IOError("HTTPResponse has no file to get a fileno from")
        elif hasattr(self._fp, "fileno"):
            return self._fp.fileno()
        else:
            raise IOError("The file-like object this HTTPResponse is wrapped "
                          "around has no file descriptor")

    def flush(self):
        if self._fp is not None and hasattr(self._fp, 'flush'):
            return self._fp.flush()

    def readable(self):
        # This method is required for `io` module compatibility.
        return True

    def readinto(self, b):
        # This method is required for `io` module compatibility.
        temp = self.read(len(b))
        if len(temp) == 0:
            return 0
        else:
            b[:len(temp)] = temp
            return len(temp)

    def _update_chunk_length(self):
        # First, we'll figure out length of a chunk and then
        # we'll try to read it from socket.
        if self.chunk_left is not None:
            return
        line = self._fp.fp.readline()
        line = line.split(b';', 1)[0]
        try:
            # Chunk sizes are transmitted as hexadecimal.
            self.chunk_left = int(line, 16)
        except ValueError:
            # Invalid chunked protocol response, abort.
            self.close()
            raise httplib.IncompleteRead(line)

    def _handle_chunk(self, amt):
        # Read up to ``amt`` bytes of the current chunk (all of it when
        # ``amt`` is None), consuming the trailing CRLF whenever the chunk
        # is fully drained.
        returned_chunk = None
        if amt is None:
            chunk = self._fp._safe_read(self.chunk_left)
            returned_chunk = chunk
            self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
            self.chunk_left = None
        elif amt < self.chunk_left:
            value = self._fp._safe_read(amt)
            self.chunk_left = self.chunk_left - amt
            returned_chunk = value
        elif amt == self.chunk_left:
            value = self._fp._safe_read(amt)
            self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
            self.chunk_left = None
            returned_chunk = value
        else:  # amt > self.chunk_left
            returned_chunk = self._fp._safe_read(self.chunk_left)
            self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
            self.chunk_left = None
        return returned_chunk

    def read_chunked(self, amt=None, decode_content=None):
        """
        Similar to :meth:`HTTPResponse.read`, but with an additional
        parameter: ``decode_content``.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.
        """
        self._init_decoder()
        # FIXME: Rewrite this method and make it a class with a better structured logic.
        if not self.chunked:
            raise ResponseNotChunked("Response is not chunked. "
                                     "Header 'transfer-encoding: chunked' is missing.")

        # Don't bother reading the body of a HEAD request.
        if self._original_response and is_response_to_head(self._original_response):
            self._original_response.close()
            return

        with self._error_catcher():
            while True:
                self._update_chunk_length()
                if self.chunk_left == 0:
                    break
                chunk = self._handle_chunk(amt)
                decoded = self._decode(chunk, decode_content=decode_content,
                                       flush_decoder=False)
                if decoded:
                    yield decoded

            if decode_content:
                # On CPython and PyPy, we should never need to flush the
                # decoder. However, on Jython we *might* need to, so
                # lets defensively do it anyway.
                decoded = self._flush_decoder()
                if decoded:  # Platform-specific: Jython.
                    yield decoded

            # Chunk content ends with \r\n: discard it.
            while True:
                line = self._fp.fp.readline()
                if not line:
                    # Some sites may not end with '\r\n'.
                    break
                if line == b'\r\n':
                    break

            # We read everything; close the "file".
            if self._original_response:
                self._original_response.close()
from __future__ import absolute_import

try:
    from urllib.parse import urlencode
except ImportError:
    from urllib import urlencode

from .filepost import encode_multipart_formdata

__all__ = ['RequestMethods']


class RequestMethods(object):
    """
    Convenience mixin for classes who implement a :meth:`urlopen` method, such
    as :class:`~urllib3.connectionpool.HTTPConnectionPool` and
    :class:`~urllib3.poolmanager.PoolManager`.

    Provides behavior for making common types of HTTP request methods and
    decides which type of request field encoding to use.

    Specifically,

    :meth:`.request_encode_url` is for sending requests whose fields are
    encoded in the URL (such as GET, HEAD, DELETE).

    :meth:`.request_encode_body` is for sending requests whose fields are
    encoded in the *body* of the request using multipart or www-form-urlencoded
    (such as for POST, PUT, PATCH).

    :meth:`.request` is for making any kind of request, it will look up the
    appropriate encoding format and use one of the above two methods to make
    the request.

    Initializer parameters:

    :param headers:
        Headers to include with all requests, unless other headers are given
        explicitly.
    """

    # Methods whose fields are encoded into the URL's query string.
    _encode_url_methods = set(['DELETE', 'GET', 'HEAD', 'OPTIONS'])

    def __init__(self, headers=None):
        self.headers = headers or {}

    def urlopen(self, method, url, body=None, headers=None,
                encode_multipart=True, multipart_boundary=None,
                **kw):  # Abstract
        # BUG FIX: this previously did ``raise NotImplemented(...)``.
        # ``NotImplemented`` is the binary-operator sentinel, not an
        # exception class; calling it raised a confusing ``TypeError``
        # instead of signalling that the method is abstract.
        raise NotImplementedError("Classes extending RequestMethods must implement "
                                  "their own ``urlopen`` method.")

    def request(self, method, url, fields=None, headers=None, **urlopen_kw):
        """
        Make a request using :meth:`urlopen` with the appropriate encoding of
        ``fields`` based on the ``method`` used.

        This is a convenience method that requires the least amount of manual
        effort. It can be used in most situations, while still having the
        option to drop down to more specific methods when necessary, such as
        :meth:`request_encode_url`, :meth:`request_encode_body`,
        or even the lowest level :meth:`urlopen`.
        """
        method = method.upper()

        if method in self._encode_url_methods:
            return self.request_encode_url(method, url, fields=fields,
                                           headers=headers,
                                           **urlopen_kw)
        else:
            return self.request_encode_body(method, url, fields=fields,
                                            headers=headers,
                                            **urlopen_kw)

    def request_encode_url(self, method, url, fields=None, headers=None,
                           **urlopen_kw):
        """
        Make a request using :meth:`urlopen` with the ``fields`` encoded in
        the url. This is useful for request methods like GET, HEAD, DELETE, etc.
        """
        if headers is None:
            headers = self.headers

        extra_kw = {'headers': headers}
        extra_kw.update(urlopen_kw)

        if fields:
            url += '?' + urlencode(fields)

        return self.urlopen(method, url, **extra_kw)

    def request_encode_body(self, method, url, fields=None, headers=None,
                            encode_multipart=True, multipart_boundary=None,
                            **urlopen_kw):
        """
        Make a request using :meth:`urlopen` with the ``fields`` encoded in
        the body. This is useful for request methods like POST, PUT, PATCH, etc.

        When ``encode_multipart=True`` (default), then
        :meth:`urllib3.filepost.encode_multipart_formdata` is used to encode
        the payload with the appropriate content type. Otherwise
        :meth:`urllib.urlencode` is used with the
        'application/x-www-form-urlencoded' content type.

        Multipart encoding must be used when posting files, and it's reasonably
        safe to use it in other times too. However, it may break request
        signing, such as with OAuth.

        Supports an optional ``fields`` parameter of key/value strings AND
        key/filetuple. A filetuple is a (filename, data, MIME type) tuple where
        the MIME type is optional. For example::

            fields = {
                'foo': 'bar',
                'fakefile': ('foofile.txt', 'contents of foofile'),
                'realfile': ('barfile.txt', open('realfile').read()),
                'typedfile': ('bazfile.bin', open('bazfile').read(),
                              'image/jpeg'),
                'nonamefile': 'contents of nonamefile field',
            }

        When uploading a file, providing a filename (the first parameter of the
        tuple) is optional but recommended to best mimic behavior of browsers.

        Note that if ``headers`` are supplied, the 'Content-Type' header will
        be overwritten because it depends on the dynamic random boundary string
        which is used to compose the body of the request. The random boundary
        string can be explicitly set with the ``multipart_boundary`` parameter.
        """
        if headers is None:
            headers = self.headers

        extra_kw = {'headers': {}}

        if fields:
            if 'body' in urlopen_kw:
                raise TypeError(
                    "request got values for both 'fields' and 'body', can only specify one.")

            if encode_multipart:
                body, content_type = encode_multipart_formdata(fields, boundary=multipart_boundary)
            else:
                body, content_type = urlencode(fields), 'application/x-www-form-urlencoded'

            extra_kw['body'] = body
            extra_kw['headers'] = {'Content-Type': content_type}

        # User-supplied headers take precedence (may replace Content-Type).
        extra_kw['headers'].update(headers)
        extra_kw.update(urlopen_kw)

        return self.urlopen(method, url, **extra_kw)
from __future__ import absolute_import
import collections
import functools
import logging

try:  # Python 3
    from urllib.parse import urljoin
except ImportError:
    from urlparse import urljoin

from ._collections import RecentlyUsedContainer
from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool
from .connectionpool import port_by_scheme
from .exceptions import LocationValueError, MaxRetryError, ProxySchemeUnknown
from .request import RequestMethods
from .util.url import parse_url
from .util.retry import Retry


__all__ = ['PoolManager', 'ProxyManager', 'proxy_from_url']


log = logging.getLogger(__name__)

SSL_KEYWORDS = ('key_file', 'cert_file', 'cert_reqs', 'ca_certs',
                'ssl_version', 'ca_cert_dir')

# The base fields to use when determining what pool to get a connection from;
# these do not rely on the ``connection_pool_kw`` and can be determined by the
# URL and potentially the ``urllib3.connection.port_by_scheme`` dictionary.
#
# All custom key schemes should include the fields in this key at a minimum.
BasePoolKey = collections.namedtuple('BasePoolKey', ('scheme', 'host', 'port'))

# The fields to use when determining what pool to get a HTTP and HTTPS
# connection from. All additional fields must be present in the PoolManager's
# ``connection_pool_kw`` instance variable.
HTTPPoolKey = collections.namedtuple(
    'HTTPPoolKey', BasePoolKey._fields + ('timeout', 'retries', 'strict',
                                          'block', 'source_address'))
HTTPSPoolKey = collections.namedtuple(
    'HTTPSPoolKey', HTTPPoolKey._fields + SSL_KEYWORDS)


def _default_key_normalizer(key_class, request_context):
    """
    Create a pool key of type ``key_class`` for a request.

    According to RFC 3986, both the scheme and host are case-insensitive.
    Therefore, this function normalizes both before constructing the pool
    key for an HTTPS request. If you wish to change this behaviour, provide
    alternate callables to ``key_fn_by_scheme``.

    :param key_class:
        The class to use when constructing the key. This should be a namedtuple
        with the ``scheme`` and ``host`` keys at a minimum.

    :param request_context:
        A dictionary-like object that contain the context for a request.
        It should contain a key for each field in the :class:`HTTPPoolKey`
    """
    # Missing fields become None so every key_class field is populated.
    context = {}
    for key in key_class._fields:
        context[key] = request_context.get(key)
    context['scheme'] = context['scheme'].lower()
    context['host'] = context['host'].lower()
    return key_class(**context)


# A dictionary that maps a scheme to a callable that creates a pool key.
# This can be used to alter the way pool keys are constructed, if desired.
# Each PoolManager makes a copy of this dictionary so they can be configured
# globally here, or individually on the instance.
key_fn_by_scheme = {
    'http': functools.partial(_default_key_normalizer, HTTPPoolKey),
    'https': functools.partial(_default_key_normalizer, HTTPSPoolKey),
}

pool_classes_by_scheme = {
    'http': HTTPConnectionPool,
    'https': HTTPSConnectionPool,
}


class PoolManager(RequestMethods):
    """
    Allows for arbitrary requests while transparently keeping track of
    necessary connection pools for you.

    :param num_pools:
        Number of connection pools to cache before discarding the least
        recently used pool.

    :param headers:
        Headers to include with all requests, unless other headers are given
        explicitly.

    :param \**connection_pool_kw:
        Additional parameters are used to create fresh
        :class:`urllib3.connectionpool.ConnectionPool` instances.

    Example::

        >>> manager = PoolManager(num_pools=2)
        >>> r = manager.request('GET', 'http://google.com/')
        >>> r = manager.request('GET', 'http://google.com/mail')
        >>> r = manager.request('GET', 'http://yahoo.com/')
        >>> len(manager.pools)
        2

    """

    proxy = None

    def __init__(self, num_pools=10, headers=None, **connection_pool_kw):
        RequestMethods.__init__(self, headers)
        self.connection_pool_kw = connection_pool_kw
        # LRU container; evicted pools are closed via dispose_func.
        self.pools = RecentlyUsedContainer(num_pools,
                                           dispose_func=lambda p: p.close())

        # Locally set the pool classes and keys so other PoolManagers can
        # override them.
        self.pool_classes_by_scheme = pool_classes_by_scheme
        self.key_fn_by_scheme = key_fn_by_scheme.copy()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.clear()
        # Return False to re-raise any potential exceptions
        return False

    def _new_pool(self, scheme, host, port):
        """
        Create a new :class:`ConnectionPool` based on host, port and scheme.

        This method is used to actually create the connection pools handed out
        by :meth:`connection_from_url` and companion methods. It is intended
        to be overridden for customization.
        """
        pool_cls = self.pool_classes_by_scheme[scheme]
        kwargs = self.connection_pool_kw
        if scheme == 'http':
            # SSL-only keyword arguments are meaningless for plain HTTP pools.
            kwargs = self.connection_pool_kw.copy()
            for kw in SSL_KEYWORDS:
                kwargs.pop(kw, None)

        return pool_cls(host, port, **kwargs)

    def clear(self):
        """
        Empty our store of pools and direct them all to close.

        This will not affect in-flight connections, but they will not be
        re-used after completion.
        """
        self.pools.clear()

    def connection_from_host(self, host, port=None, scheme='http'):
        """
        Get a :class:`ConnectionPool` based on the host, port, and scheme.

        If ``port`` isn't given, it will be derived from the ``scheme`` using
        ``urllib3.connectionpool.port_by_scheme``.
        """
        if not host:
            raise LocationValueError("No host specified.")

        request_context = self.connection_pool_kw.copy()
        request_context['scheme'] = scheme or 'http'
        if not port:
            port = port_by_scheme.get(request_context['scheme'].lower(), 80)
        request_context['port'] = port
        request_context['host'] = host

        return self.connection_from_context(request_context)

    def connection_from_context(self, request_context):
        """
        Get a :class:`ConnectionPool` based on the request context.

        ``request_context`` must at least contain the ``scheme`` key and its
        value must be a key in ``key_fn_by_scheme`` instance variable.
        """
        scheme = request_context['scheme'].lower()
        pool_key_constructor = self.key_fn_by_scheme[scheme]
        pool_key = pool_key_constructor(request_context)

        return self.connection_from_pool_key(pool_key)

    def connection_from_pool_key(self, pool_key):
        """
        Get a :class:`ConnectionPool` based on the provided pool key.

        ``pool_key`` should be a namedtuple that only contains immutable
        objects. At a minimum it must have the ``scheme``, ``host``, and
        ``port`` fields.
        """
        with self.pools.lock:
            # If the scheme, host, or port doesn't match existing open
            # connections, open a new ConnectionPool.
            pool = self.pools.get(pool_key)
            if pool:
                return pool

            # Make a fresh ConnectionPool of the desired type
            pool = self._new_pool(pool_key.scheme, pool_key.host, pool_key.port)
            self.pools[pool_key] = pool

        return pool

    def connection_from_url(self, url):
        """
        Similar to :func:`urllib3.connectionpool.connection_from_url` but
        doesn't pass any additional parameters to the
        :class:`urllib3.connectionpool.ConnectionPool` constructor.

        Additional parameters are taken from the :class:`.PoolManager`
        constructor.
        """
        u = parse_url(url)
        return self.connection_from_host(u.host, port=u.port, scheme=u.scheme)

    def urlopen(self, method, url, redirect=True, **kw):
        """
        Same as :meth:`urllib3.connectionpool.HTTPConnectionPool.urlopen`
        with custom cross-host redirect logic and only sends the request-uri
        portion of the ``url``.

        The given ``url`` parameter must be absolute, such that an appropriate
        :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it.
        """
        u = parse_url(url)
        conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme)

        # Redirects are followed here (cross-host capable), not by the pool.
        kw['assert_same_host'] = False
        kw['redirect'] = False
        if 'headers' not in kw:
            kw['headers'] = self.headers

        if self.proxy is not None and u.scheme == "http":
            response = conn.urlopen(method, url, **kw)
        else:
            response = conn.urlopen(method, u.request_uri, **kw)

        redirect_location = redirect and response.get_redirect_location()
        if not redirect_location:
            return response

        # Support relative URLs for redirecting.
        redirect_location = urljoin(url, redirect_location)

        # RFC 7231, Section 6.4.4
        if response.status == 303:
            method = 'GET'

        retries = kw.get('retries')
        if not isinstance(retries, Retry):
            retries = Retry.from_int(retries, redirect=redirect)

        try:
            retries = retries.increment(method, url, response=response, _pool=conn)
        except MaxRetryError:
            if retries.raise_on_redirect:
                raise
            return response

        kw['retries'] = retries
        kw['redirect'] = redirect

        log.info("Redirecting %s -> %s", url, redirect_location)
        return self.urlopen(method, redirect_location, **kw)


class ProxyManager(PoolManager):
    """
    Behaves just like :class:`PoolManager`, but sends all requests through
    the defined proxy, using the CONNECT method for HTTPS URLs.

    :param proxy_url:
        The URL of the proxy to be used.

    :param proxy_headers:
        A dictionary containing headers that will be sent to the proxy. In case
        of HTTP they are being sent with each request, while in the
        HTTPS/CONNECT case they are sent only once. Could be used for proxy
        authentication.

    Example:
        >>> proxy = urllib3.ProxyManager('http://localhost:3128/')
        >>> r1 = proxy.request('GET', 'http://google.com/')
        >>> r2 = proxy.request('GET', 'http://httpbin.org/')
        >>> len(proxy.pools)
        1
        >>> r3 = proxy.request('GET', 'https://httpbin.org/')
        >>> r4 = proxy.request('GET', 'https://twitter.com/')
        >>> len(proxy.pools)
        3

    """

    def __init__(self, proxy_url, num_pools=10, headers=None,
                 proxy_headers=None, **connection_pool_kw):

        if isinstance(proxy_url, HTTPConnectionPool):
            proxy_url = '%s://%s:%i' % (proxy_url.scheme, proxy_url.host,
                                        proxy_url.port)
        proxy = parse_url(proxy_url)
        if not proxy.port:
            port = port_by_scheme.get(proxy.scheme, 80)
            proxy = proxy._replace(port=port)

        if proxy.scheme not in ("http", "https"):
            raise ProxySchemeUnknown(proxy.scheme)

        self.proxy = proxy
        self.proxy_headers = proxy_headers or {}

        # Private keys consumed by the ConnectionPool constructors.
        connection_pool_kw['_proxy'] = self.proxy
        connection_pool_kw['_proxy_headers'] = self.proxy_headers

        super(ProxyManager, self).__init__(num_pools, headers, **connection_pool_kw)

    def connection_from_host(self, host, port=None, scheme='http'):
        if scheme == "https":
            # HTTPS goes through CONNECT, so pool on the final destination.
            return super(ProxyManager, self).connection_from_host(host, port, scheme)

        # Plain HTTP is sent directly to the proxy, so pool on the proxy.
        return super(ProxyManager, self).connection_from_host(self.proxy.host, self.proxy.port, self.proxy.scheme)

    def _set_proxy_headers(self, url, headers=None):
        """
        Sets headers needed by proxies: specifically, the Accept and Host
        headers. Only sets headers not provided by the user.
        """
        headers_ = {'Accept': '*/*'}

        netloc = parse_url(url).netloc
        if netloc:
            headers_['Host'] = netloc

        if headers:
            headers_.update(headers)
        return headers_

    def urlopen(self, method, url, redirect=True, **kw):
        "Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute."
        u = parse_url(url)

        if u.scheme == "http":
            # For proxied HTTPS requests, httplib sets the necessary headers
            # on the CONNECT to the proxy. For HTTP, we'll definitely
            # need to set 'Host' at the very least.
            headers = kw.get('headers', self.headers)
            kw['headers'] = self._set_proxy_headers(url, headers)

        return super(ProxyManager, self).urlopen(method, url, redirect=redirect, **kw)


def proxy_from_url(url, **kw):
    # Convenience constructor mirroring connection_from_url.
    return ProxyManager(proxy_url=url, **kw)
"""The match_hostname() function from Python 3.3.3, essential when using SSL."""# Note: This file is under the PSF license as the code comes from the python# stdlib. http://docs.python.org/3/license.htmlimport re__version__ = '3.4.0.2'class CertificateError(ValueError):passdef _dnsname_match(dn, hostname, max_wildcards=1):"""Matching according to RFC 6125, section 6.4.3http://tools.ietf.org/html/rfc6125#section-6.4.3"""pats = []if not dn:return False# Ported from python3-syntax:# leftmost, *remainder = dn.split(r'.')parts = dn.split(r'.')leftmost = parts[0]remainder = parts[1:]wildcards = leftmost.count('*')if wildcards > max_wildcards:# Issue #17980: avoid denials of service by refusing more# than one wildcard per fragment. A survey of established# policy among SSL implementations showed it to be a# reasonable choice.raise CertificateError("too many wildcards in certificate DNS name: " + repr(dn))# speed up common case w/o wildcardsif not wildcards:return dn.lower() == hostname.lower()# RFC 6125, section 6.4.3, subitem 1.# The client SHOULD NOT attempt to match a presented identifier in which# the wildcard character comprises a label other than the left-most label.if leftmost == '*':# When '*' is a fragment by itself, it matches a non-empty dotless# fragment.pats.append('[^.]+')elif leftmost.startswith('xn--') or hostname.startswith('xn--'):# RFC 6125, section 6.4.3, subitem 3.# The client SHOULD NOT attempt to match a presented identifier# where the wildcard character is embedded within an A-label or# U-label of an internationalized domain name.pats.append(re.escape(leftmost))else:# Otherwise, '*' matches any dotless string, e.g. 
www*pats.append(re.escape(leftmost).replace(r'\*', '[^.]*'))# add the remaining fragments, ignore any wildcardsfor frag in remainder:pats.append(re.escape(frag))pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)return pat.match(hostname)def match_hostname(cert, hostname):"""Verify that *cert* (in decoded format as returned bySSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125rules are followed, but IP addresses are not accepted for *hostname*.CertificateError is raised on failure. On success, the functionreturns nothing."""if not cert:raise ValueError("empty or no certificate")dnsnames = []san = cert.get('subjectAltName', ())for key, value in san:if key == 'DNS':if _dnsname_match(value, hostname):returndnsnames.append(value)if not dnsnames:# The subject is only checked when there is no dNSName entry# in subjectAltNamefor sub in cert.get('subject', ()):for key, value in sub:# XXX according to RFC 2818, the most specific Common Name# must be used.if key == 'commonName':if _dnsname_match(value, hostname):returndnsnames.append(value)if len(dnsnames) > 1:raise CertificateError("hostname %r ""doesn't match either of %s"% (hostname, ', '.join(map(repr, dnsnames))))elif len(dnsnames) == 1:raise CertificateError("hostname %r ""doesn't match %r"% (hostname, dnsnames[0]))else:raise CertificateError("no appropriate commonName or ""subjectAltName fields were found")
# Expose ``CertificateError`` and ``match_hostname`` from the best available
# source, in order of preference: the standard library (Python 3.2+), the
# ``backports.ssl_match_hostname`` PyPI package, or our vendored copy.
try:
    # Python 3.2+
    from ssl import CertificateError, match_hostname
except ImportError:
    try:
        # Backport of the function from a pypi module
        from backports.ssl_match_hostname import CertificateError, match_hostname
    except ImportError:
        # Our vendored copy
        from ._implementation import CertificateError, match_hostname

# Not needed, but documenting what we provide.
__all__ = ('CertificateError', 'match_hostname')
"""Utilities for writing code that runs on Python 2 and 3"""# Copyright (c) 2010-2015 Benjamin Peterson## Permission is hereby granted, free of charge, to any person obtaining a copy# of this software and associated documentation files (the "Software"), to deal# in the Software without restriction, including without limitation the rights# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell# copies of the Software, and to permit persons to whom the Software is# furnished to do so, subject to the following conditions:## The above copyright notice and this permission notice shall be included in all# copies or substantial portions of the Software.## THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE# SOFTWARE.from __future__ import absolute_importimport functoolsimport itertoolsimport operatorimport sysimport types__author__ = "Benjamin Peterson <benjamin@python.org>"__version__ = "1.10.0"# Useful for very coarse version differentiation.PY2 = sys.version_info[0] == 2PY3 = sys.version_info[0] == 3PY34 = sys.version_info[0:2] >= (3, 4)if PY3:string_types = str,integer_types = int,class_types = type,text_type = strbinary_type = bytesMAXSIZE = sys.maxsizeelse:string_types = basestring,integer_types = (int, long)class_types = (type, types.ClassType)text_type = unicodebinary_type = strif sys.platform.startswith("java"):# Jython always uses 32 bits.MAXSIZE = int((1 << 31) - 1)else:# It's possible to have sizeof(long) != sizeof(Py_ssize_t).class X(object):def __len__(self):return 1 << 31try:len(X())except OverflowError:# 32-bitMAXSIZE = int((1 << 31) - 
1)else:# 64-bitMAXSIZE = int((1 << 63) - 1)del Xdef _add_doc(func, doc):"""Add documentation to a function."""func.__doc__ = docdef _import_module(name):"""Import module, returning the module after the last dot."""__import__(name)return sys.modules[name]class _LazyDescr(object):def __init__(self, name):self.name = namedef __get__(self, obj, tp):result = self._resolve()setattr(obj, self.name, result) # Invokes __set__.try:# This is a bit ugly, but it avoids running this again by# removing this descriptor.delattr(obj.__class__, self.name)except AttributeError:passreturn resultclass MovedModule(_LazyDescr):def __init__(self, name, old, new=None):super(MovedModule, self).__init__(name)if PY3:if new is None:new = nameself.mod = newelse:self.mod = olddef _resolve(self):return _import_module(self.mod)def __getattr__(self, attr):_module = self._resolve()value = getattr(_module, attr)setattr(self, attr, value)return valueclass _LazyModule(types.ModuleType):def __init__(self, name):super(_LazyModule, self).__init__(name)self.__doc__ = self.__class__.__doc__def __dir__(self):attrs = ["__doc__", "__name__"]attrs += [attr.name for attr in self._moved_attributes]return attrs# Subclasses should override this_moved_attributes = []class MovedAttribute(_LazyDescr):def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):super(MovedAttribute, self).__init__(name)if PY3:if new_mod is None:new_mod = nameself.mod = new_modif new_attr is None:if old_attr is None:new_attr = nameelse:new_attr = old_attrself.attr = new_attrelse:self.mod = old_modif old_attr is None:old_attr = nameself.attr = old_attrdef _resolve(self):module = _import_module(self.mod)return getattr(module, self.attr)class _SixMetaPathImporter(object):"""A meta path importer to import six.moves and its submodules.This class implements a PEP302 finder and loader. 
It should be compatiblewith Python 2.5 and all existing versions of Python3"""def __init__(self, six_module_name):self.name = six_module_nameself.known_modules = {}def _add_module(self, mod, *fullnames):for fullname in fullnames:self.known_modules[self.name + "." + fullname] = moddef _get_module(self, fullname):return self.known_modules[self.name + "." + fullname]def find_module(self, fullname, path=None):if fullname in self.known_modules:return selfreturn Nonedef __get_module(self, fullname):try:return self.known_modules[fullname]except KeyError:raise ImportError("This loader does not know module " + fullname)def load_module(self, fullname):try:# in case of a reloadreturn sys.modules[fullname]except KeyError:passmod = self.__get_module(fullname)if isinstance(mod, MovedModule):mod = mod._resolve()else:mod.__loader__ = selfsys.modules[fullname] = modreturn moddef is_package(self, fullname):"""Return true, if the named module is a package.We need this method to get correct spec objects withPython 3.4 (see PEP451)"""return hasattr(self.__get_module(fullname), "__path__")def get_code(self, fullname):"""Return NoneRequired, if is_package is implemented"""self.__get_module(fullname) # eventually raises ImportErrorreturn Noneget_source = get_code # same as get_code_importer = _SixMetaPathImporter(__name__)class _MovedItems(_LazyModule):"""Lazy loading of moved objects"""__path__ = [] # mark as package_moved_attributes = [MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),MovedAttribute("intern", "__builtin__", "sys"),MovedAttribute("map", "itertools", "builtins", "imap", "map"),MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),MovedAttribute("range", "__builtin__", 
"builtins", "xrange", "range"),MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),MovedAttribute("reduce", "__builtin__", "functools"),MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),MovedAttribute("StringIO", "StringIO", "io"),MovedAttribute("UserDict", "UserDict", "collections"),MovedAttribute("UserList", "UserList", "collections"),MovedAttribute("UserString", "UserString", "collections"),MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),MovedModule("builtins", "__builtin__"),MovedModule("configparser", "ConfigParser"),MovedModule("copyreg", "copy_reg"),MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),MovedModule("http_cookies", "Cookie", "http.cookies"),MovedModule("html_entities", "htmlentitydefs", "html.entities"),MovedModule("html_parser", "HTMLParser", "html.parser"),MovedModule("http_client", "httplib", "http.client"),MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),MovedModule("cPickle", "cPickle", "pickle"),MovedModule("queue", "Queue"),MovedModule("reprlib", "repr"),MovedModule("socketserver", "SocketServer"),MovedModule("_thread", "thread", "_thread"),MovedModule("tkinter", "Tkinter"),MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),MovedModule("tkinter_filedialog", 
"FileDialog", "tkinter.filedialog"),MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),MovedModule("tkinter_tix", "Tix", "tkinter.tix"),MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),MovedModule("tkinter_colorchooser", "tkColorChooser","tkinter.colorchooser"),MovedModule("tkinter_commondialog", "tkCommonDialog","tkinter.commondialog"),MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),MovedModule("tkinter_font", "tkFont", "tkinter.font"),MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),MovedModule("tkinter_tksimpledialog", "tkSimpleDialog","tkinter.simpledialog"),MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),]# Add windows specific modules.if sys.platform == "win32":_moved_attributes += [MovedModule("winreg", "_winreg"),]for attr in _moved_attributes:setattr(_MovedItems, attr.name, attr)if isinstance(attr, MovedModule):_importer._add_module(attr, "moves." 
+ attr.name)del attr_MovedItems._moved_attributes = _moved_attributesmoves = _MovedItems(__name__ + ".moves")_importer._add_module(moves, "moves")class Module_six_moves_urllib_parse(_LazyModule):"""Lazy loading of moved objects in six.moves.urllib_parse"""_urllib_parse_moved_attributes = [MovedAttribute("ParseResult", "urlparse", "urllib.parse"),MovedAttribute("SplitResult", "urlparse", "urllib.parse"),MovedAttribute("parse_qs", "urlparse", "urllib.parse"),MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),MovedAttribute("urldefrag", "urlparse", "urllib.parse"),MovedAttribute("urljoin", "urlparse", "urllib.parse"),MovedAttribute("urlparse", "urlparse", "urllib.parse"),MovedAttribute("urlsplit", "urlparse", "urllib.parse"),MovedAttribute("urlunparse", "urlparse", "urllib.parse"),MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),MovedAttribute("quote", "urllib", "urllib.parse"),MovedAttribute("quote_plus", "urllib", "urllib.parse"),MovedAttribute("unquote", "urllib", "urllib.parse"),MovedAttribute("unquote_plus", "urllib", "urllib.parse"),MovedAttribute("urlencode", "urllib", "urllib.parse"),MovedAttribute("splitquery", "urllib", "urllib.parse"),MovedAttribute("splittag", "urllib", "urllib.parse"),MovedAttribute("splituser", "urllib", "urllib.parse"),MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),MovedAttribute("uses_params", "urlparse", "urllib.parse"),MovedAttribute("uses_query", "urlparse", "urllib.parse"),MovedAttribute("uses_relative", "urlparse", "urllib.parse"),]for attr in _urllib_parse_moved_attributes:setattr(Module_six_moves_urllib_parse, attr.name, attr)del attrModule_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),"moves.urllib_parse", "moves.urllib.parse")class Module_six_moves_urllib_error(_LazyModule):"""Lazy loading of moved objects in 
six.moves.urllib_error"""_urllib_error_moved_attributes = [MovedAttribute("URLError", "urllib2", "urllib.error"),MovedAttribute("HTTPError", "urllib2", "urllib.error"),MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),]for attr in _urllib_error_moved_attributes:setattr(Module_six_moves_urllib_error, attr.name, attr)del attrModule_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),"moves.urllib_error", "moves.urllib.error")class Module_six_moves_urllib_request(_LazyModule):"""Lazy loading of moved objects in six.moves.urllib_request"""_urllib_request_moved_attributes = [MovedAttribute("urlopen", "urllib2", "urllib.request"),MovedAttribute("install_opener", "urllib2", "urllib.request"),MovedAttribute("build_opener", "urllib2", "urllib.request"),MovedAttribute("pathname2url", "urllib", "urllib.request"),MovedAttribute("url2pathname", "urllib", "urllib.request"),MovedAttribute("getproxies", "urllib", "urllib.request"),MovedAttribute("Request", "urllib2", "urllib.request"),MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),MovedAttribute("BaseHandler", "urllib2", "urllib.request"),MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),MovedAttribute("HTTPDigestAuthHandler", "urllib2", 
"urllib.request"),MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),MovedAttribute("FileHandler", "urllib2", "urllib.request"),MovedAttribute("FTPHandler", "urllib2", "urllib.request"),MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),MovedAttribute("urlretrieve", "urllib", "urllib.request"),MovedAttribute("urlcleanup", "urllib", "urllib.request"),MovedAttribute("URLopener", "urllib", "urllib.request"),MovedAttribute("FancyURLopener", "urllib", "urllib.request"),MovedAttribute("proxy_bypass", "urllib", "urllib.request"),]for attr in _urllib_request_moved_attributes:setattr(Module_six_moves_urllib_request, attr.name, attr)del attrModule_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),"moves.urllib_request", "moves.urllib.request")class Module_six_moves_urllib_response(_LazyModule):"""Lazy loading of moved objects in six.moves.urllib_response"""_urllib_response_moved_attributes = [MovedAttribute("addbase", "urllib", "urllib.response"),MovedAttribute("addclosehook", "urllib", "urllib.response"),MovedAttribute("addinfo", "urllib", "urllib.response"),MovedAttribute("addinfourl", "urllib", "urllib.response"),]for attr in _urllib_response_moved_attributes:setattr(Module_six_moves_urllib_response, attr.name, attr)del attrModule_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),"moves.urllib_response", "moves.urllib.response")class Module_six_moves_urllib_robotparser(_LazyModule):"""Lazy loading of moved objects in 
six.moves.urllib_robotparser"""_urllib_robotparser_moved_attributes = [MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),]for attr in _urllib_robotparser_moved_attributes:setattr(Module_six_moves_urllib_robotparser, attr.name, attr)del attrModule_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),"moves.urllib_robotparser", "moves.urllib.robotparser")class Module_six_moves_urllib(types.ModuleType):"""Create a six.moves.urllib namespace that resembles the Python 3 namespace"""__path__ = [] # mark as packageparse = _importer._get_module("moves.urllib_parse")error = _importer._get_module("moves.urllib_error")request = _importer._get_module("moves.urllib_request")response = _importer._get_module("moves.urllib_response")robotparser = _importer._get_module("moves.urllib_robotparser")def __dir__(self):return ['parse', 'error', 'request', 'response', 'robotparser']_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),"moves.urllib")def add_move(move):"""Add an item to six.moves."""setattr(_MovedItems, move.name, move)def remove_move(name):"""Remove item from six.moves."""try:delattr(_MovedItems, name)except AttributeError:try:del moves.__dict__[name]except KeyError:raise AttributeError("no such move, %r" % (name,))if PY3:_meth_func = "__func__"_meth_self = "__self__"_func_closure = "__closure__"_func_code = "__code__"_func_defaults = "__defaults__"_func_globals = "__globals__"else:_meth_func = "im_func"_meth_self = "im_self"_func_closure = "func_closure"_func_code = "func_code"_func_defaults = "func_defaults"_func_globals = "func_globals"try:advance_iterator = nextexcept NameError:def advance_iterator(it):return it.next()next = advance_iteratortry:callable = callableexcept NameError:def callable(obj):return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)if PY3:def 
get_unbound_function(unbound):return unboundcreate_bound_method = types.MethodTypedef create_unbound_method(func, cls):return funcIterator = objectelse:def get_unbound_function(unbound):return unbound.im_funcdef create_bound_method(func, obj):return types.MethodType(func, obj, obj.__class__)def create_unbound_method(func, cls):return types.MethodType(func, None, cls)class Iterator(object):def next(self):return type(self).__next__(self)callable = callable_add_doc(get_unbound_function,"""Get the function out of a possibly unbound function""")get_method_function = operator.attrgetter(_meth_func)get_method_self = operator.attrgetter(_meth_self)get_function_closure = operator.attrgetter(_func_closure)get_function_code = operator.attrgetter(_func_code)get_function_defaults = operator.attrgetter(_func_defaults)get_function_globals = operator.attrgetter(_func_globals)if PY3:def iterkeys(d, **kw):return iter(d.keys(**kw))def itervalues(d, **kw):return iter(d.values(**kw))def iteritems(d, **kw):return iter(d.items(**kw))def iterlists(d, **kw):return iter(d.lists(**kw))viewkeys = operator.methodcaller("keys")viewvalues = operator.methodcaller("values")viewitems = operator.methodcaller("items")else:def iterkeys(d, **kw):return d.iterkeys(**kw)def itervalues(d, **kw):return d.itervalues(**kw)def iteritems(d, **kw):return d.iteritems(**kw)def iterlists(d, **kw):return d.iterlists(**kw)viewkeys = operator.methodcaller("viewkeys")viewvalues = operator.methodcaller("viewvalues")viewitems = operator.methodcaller("viewitems")_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")_add_doc(itervalues, "Return an iterator over the values of a dictionary.")_add_doc(iteritems,"Return an iterator over the (key, value) pairs of a dictionary.")_add_doc(iterlists,"Return an iterator over the (key, [values]) pairs of a dictionary.")if PY3:def b(s):return s.encode("latin-1")def u(s):return sunichr = chrimport structint2byte = struct.Struct(">B").packdel structbyte2int = 
operator.itemgetter(0)indexbytes = operator.getitemiterbytes = iterimport ioStringIO = io.StringIOBytesIO = io.BytesIO_assertCountEqual = "assertCountEqual"if sys.version_info[1] <= 1:_assertRaisesRegex = "assertRaisesRegexp"_assertRegex = "assertRegexpMatches"else:_assertRaisesRegex = "assertRaisesRegex"_assertRegex = "assertRegex"else:def b(s):return s# Workaround for standalone backslashdef u(s):return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")unichr = unichrint2byte = chrdef byte2int(bs):return ord(bs[0])def indexbytes(buf, i):return ord(buf[i])iterbytes = functools.partial(itertools.imap, ord)import StringIOStringIO = BytesIO = StringIO.StringIO_assertCountEqual = "assertItemsEqual"_assertRaisesRegex = "assertRaisesRegexp"_assertRegex = "assertRegexpMatches"_add_doc(b, """Byte literal""")_add_doc(u, """Text literal""")def assertCountEqual(self, *args, **kwargs):return getattr(self, _assertCountEqual)(*args, **kwargs)def assertRaisesRegex(self, *args, **kwargs):return getattr(self, _assertRaisesRegex)(*args, **kwargs)def assertRegex(self, *args, **kwargs):return getattr(self, _assertRegex)(*args, **kwargs)if PY3:exec_ = getattr(moves.builtins, "exec")def reraise(tp, value, tb=None):if value is None:value = tp()if value.__traceback__ is not tb:raise value.with_traceback(tb)raise valueelse:def exec_(_code_, _globs_=None, _locs_=None):"""Execute code in a namespace."""if _globs_ is None:frame = sys._getframe(1)_globs_ = frame.f_globalsif _locs_ is None:_locs_ = frame.f_localsdel frameelif _locs_ is None:_locs_ = _globs_exec("""exec _code_ in _globs_, _locs_""")exec_("""def reraise(tp, value, tb=None):raise tp, value, tb""")if sys.version_info[:2] == (3, 2):exec_("""def raise_from(value, from_value):if from_value is None:raise valueraise value from from_value""")elif sys.version_info[:2] > (3, 2):exec_("""def raise_from(value, from_value):raise value from from_value""")else:def raise_from(value, from_value):raise valueprint_ = getattr(moves.builtins, 
"print", None)if print_ is None:def print_(*args, **kwargs):"""The new-style print function for Python 2.4 and 2.5."""fp = kwargs.pop("file", sys.stdout)if fp is None:returndef write(data):if not isinstance(data, basestring):data = str(data)# If the file has an encoding, encode unicode with it.if (isinstance(fp, file) andisinstance(data, unicode) andfp.encoding is not None):errors = getattr(fp, "errors", None)if errors is None:errors = "strict"data = data.encode(fp.encoding, errors)fp.write(data)want_unicode = Falsesep = kwargs.pop("sep", None)if sep is not None:if isinstance(sep, unicode):want_unicode = Trueelif not isinstance(sep, str):raise TypeError("sep must be None or a string")end = kwargs.pop("end", None)if end is not None:if isinstance(end, unicode):want_unicode = Trueelif not isinstance(end, str):raise TypeError("end must be None or a string")if kwargs:raise TypeError("invalid keyword arguments to print()")if not want_unicode:for arg in args:if isinstance(arg, unicode):want_unicode = Truebreakif want_unicode:newline = unicode("\n")space = unicode(" ")else:newline = "\n"space = " "if sep is None:sep = spaceif end is None:end = newlinefor i, arg in enumerate(args):if i:write(sep)write(arg)write(end)if sys.version_info[:2] < (3, 3):_print = print_def print_(*args, **kwargs):fp = kwargs.get("file", sys.stdout)flush = kwargs.pop("flush", False)_print(*args, **kwargs)if flush and fp is not None:fp.flush()_add_doc(reraise, """Reraise an exception.""")if sys.version_info[0:2] < (3, 4):def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,updated=functools.WRAPPER_UPDATES):def wrapper(f):f = functools.wraps(wrapped, assigned, updated)(f)f.__wrapped__ = wrappedreturn freturn wrapperelse:wraps = functools.wrapsdef with_metaclass(meta, *bases):"""Create a base class with a metaclass."""# This requires a bit of explanation: the basic idea is to make a dummy# metaclass for one level of class instantiation that replaces itself with# the actual metaclass.class 
metaclass(meta):def __new__(cls, name, this_bases, d):return meta(name, bases, d)return type.__new__(metaclass, 'temporary_class', (), {})def add_metaclass(metaclass):"""Class decorator for creating a class with a metaclass."""def wrapper(cls):orig_vars = cls.__dict__.copy()slots = orig_vars.get('__slots__')if slots is not None:if isinstance(slots, str):slots = [slots]for slots_var in slots:orig_vars.pop(slots_var)orig_vars.pop('__dict__', None)orig_vars.pop('__weakref__', None)return metaclass(cls.__name__, cls.__bases__, orig_vars)return wrapperdef python_2_unicode_compatible(klass):"""A decorator that defines __unicode__ and __str__ methods under Python 2.Under Python 3 it does nothing.To support Python 2 and 3 with a single code base, define a __str__ methodreturning text and apply this decorator to the class."""if PY2:if '__str__' not in klass.__dict__:raise ValueError("@python_2_unicode_compatible cannot be applied ""to %s because it doesn't define __str__()." %klass.__name__)klass.__unicode__ = klass.__str__klass.__str__ = lambda self: self.__unicode__().encode('utf-8')return klass# Complete the moves implementation.# This code is at the end of this module to speed up module loading.# Turn this module into a package.__path__ = [] # required for PEP 302 and PEP 451__package__ = __name__ # see PEP 366 @ReservedAssignmentif globals().get("__spec__") is not None:__spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable# Remove other six meta path importers, since they cause problems. This can# happen if six is removed from sys.modules and then reloaded. (Setuptools does# this for some reason.)if sys.meta_path:for i, importer in enumerate(sys.meta_path):# Here's some real nastiness: Another "instance" of the six module might# be floating around. 
Therefore, we can't use isinstance() to check for# the six meta path importer, since the other six instance will have# inserted an importer with different class.if (type(importer).__name__ == "_SixMetaPathImporter" andimporter.name == __name__):del sys.meta_path[i]breakdel i, importer# Finally, add the importer to the meta path import hook.sys.meta_path.append(_importer)
# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy.
# Passes Python2.7's test suite and incorporates all the latest updates.
# Copyright 2009 Raymond Hettinger, released under the MIT License.
# http://code.activestate.com/recipes/576693/

try:
    from thread import get_ident as _get_ident
except ImportError:
    try:
        from dummy_thread import get_ident as _get_ident
    except ImportError:
        # Python 3: the py2-era module names are gone; threading provides it.
        from threading import get_ident as _get_ident

try:
    from _abcoll import KeysView, ValuesView, ItemsView
except ImportError:
    try:
        # Python 3 location of the dict-view ABCs.
        from collections.abc import KeysView, ValuesView, ItemsView
    except ImportError:
        pass


class OrderedDict(dict):
    'Dictionary that remembers insertion order'
    # An inherited dict maps keys to values.
    # The inherited dict provides __getitem__, __len__, __contains__, and get.
    # The remaining methods are order-aware.
    # Big-O running times for all methods are the same as for regular dictionaries.

    # The internal self.__map dictionary maps keys to links in a doubly linked list.
    # The circular doubly linked list starts and ends with a sentinel element.
    # The sentinel element never gets deleted (this simplifies the algorithm).
    # Each link is stored as a list of length three:  [PREV, NEXT, KEY].

    def __init__(self, *args, **kwds):
        '''Initialize an ordered dictionary.  Signature is the same as for
        regular dictionaries, but keyword arguments are not recommended
        because their insertion order is arbitrary.

        '''
        if len(args) > 1:
            raise TypeError('expected at most 1 arguments, got %d' % len(args))
        try:
            self.__root
        except AttributeError:
            self.__root = root = []                     # sentinel node
            root[:] = [root, root, None]
            self.__map = {}
        self.__update(*args, **kwds)

    def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
        'od.__setitem__(i, y) <==> od[i]=y'
        # Setting a new item creates a new link which goes at the end of the linked
        # list, and the inherited dictionary is updated with the new key/value pair.
        if key not in self:
            root = self.__root
            last = root[0]
            last[1] = root[0] = self.__map[key] = [last, root, key]
        dict_setitem(self, key, value)

    def __delitem__(self, key, dict_delitem=dict.__delitem__):
        'od.__delitem__(y) <==> del od[y]'
        # Deleting an existing item uses self.__map to find the link which is
        # then removed by updating the links in the predecessor and successor nodes.
        dict_delitem(self, key)
        link_prev, link_next, key = self.__map.pop(key)
        link_prev[1] = link_next
        link_next[0] = link_prev

    def __iter__(self):
        'od.__iter__() <==> iter(od)'
        root = self.__root
        curr = root[1]
        while curr is not root:
            yield curr[2]
            curr = curr[1]

    def __reversed__(self):
        'od.__reversed__() <==> reversed(od)'
        root = self.__root
        curr = root[0]
        while curr is not root:
            yield curr[2]
            curr = curr[0]

    def clear(self):
        'od.clear() -> None.  Remove all items from od.'
        try:
            # Unlink every node explicitly to break the reference cycles.
            # (The original called self.__map.itervalues() unconditionally,
            # which raised AttributeError on Python 3 and silently left stale
            # links behind; fall back to .values() there.)
            try:
                node_iter = self.__map.itervalues()        # Python 2
            except AttributeError:
                node_iter = iter(self.__map.values())      # Python 3
            for node in node_iter:
                del node[:]
            root = self.__root
            root[:] = [root, root, None]
            self.__map.clear()
        except AttributeError:
            # clear() before __init__ has run: no linked list exists yet.
            pass
        dict.clear(self)

    def popitem(self, last=True):
        '''od.popitem() -> (k, v), return and remove a (key, value) pair.
        Pairs are returned in LIFO order if last is true or FIFO order if false.

        '''
        if not self:
            raise KeyError('dictionary is empty')
        root = self.__root
        if last:
            link = root[0]
            link_prev = link[0]
            link_prev[1] = root
            root[0] = link_prev
        else:
            link = root[1]
            link_next = link[1]
            root[1] = link_next
            link_next[0] = root
        key = link[2]
        del self.__map[key]
        value = dict.pop(self, key)
        return key, value

    # -- the following methods do not depend on the internal structure --

    def keys(self):
        'od.keys() -> list of keys in od'
        return list(self)

    def values(self):
        'od.values() -> list of values in od'
        return [self[key] for key in self]

    def items(self):
        'od.items() -> list of (key, value) pairs in od'
        return [(key, self[key]) for key in self]

    def iterkeys(self):
        'od.iterkeys() -> an iterator over the keys in od'
        return iter(self)

    def itervalues(self):
        'od.itervalues -> an iterator over the values in od'
        for k in self:
            yield self[k]

    def iteritems(self):
        'od.iteritems -> an iterator over the (key, value) items in od'
        for k in self:
            yield (k, self[k])

    def update(*args, **kwds):
        '''od.update(E, **F) -> None.  Update od from dict/iterable E and F.

        If E is a dict instance, does:           for k in E: od[k] = E[k]
        If E has a .keys() method, does:         for k in E.keys(): od[k] = E[k]
        Or if E is an iterable of items, does:   for k, v in E: od[k] = v
        In either case, this is followed by:     for k, v in F.items(): od[k] = v

        '''
        if len(args) > 2:
            raise TypeError('update() takes at most 2 positional '
                            'arguments (%d given)' % (len(args),))
        elif not args:
            raise TypeError('update() takes at least 1 argument (0 given)')
        self = args[0]
        # Make progressively weaker assumptions about "other"
        other = ()
        if len(args) == 2:
            other = args[1]
        if isinstance(other, dict):
            for key in other:
                self[key] = other[key]
        elif hasattr(other, 'keys'):
            for key in other.keys():
                self[key] = other[key]
        else:
            for key, value in other:
                self[key] = value
        for key, value in kwds.items():
            self[key] = value

    __update = update  # let subclasses override update without breaking __init__

    __marker = object()

    def pop(self, key, default=__marker):
        '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value.
        If key is not found, d is returned if given, otherwise KeyError is raised.

        '''
        if key in self:
            result = self[key]
            del self[key]
            return result
        if default is self.__marker:
            raise KeyError(key)
        return default

    def setdefault(self, key, default=None):
        'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
        if key in self:
            return self[key]
        self[key] = default
        return default

    def __repr__(self, _repr_running={}):
        'od.__repr__() <==> repr(od)'
        # _repr_running is a deliberately shared mutable default: it guards
        # against infinite recursion when an od (indirectly) contains itself.
        call_key = id(self), _get_ident()
        if call_key in _repr_running:
            return '...'
        _repr_running[call_key] = 1
        try:
            if not self:
                return '%s()' % (self.__class__.__name__,)
            return '%s(%r)' % (self.__class__.__name__, self.items())
        finally:
            del _repr_running[call_key]

    def __reduce__(self):
        'Return state information for pickling'
        items = [[k, self[k]] for k in self]
        inst_dict = vars(self).copy()
        for k in vars(OrderedDict()):
            inst_dict.pop(k, None)
        if inst_dict:
            return (self.__class__, (items,), inst_dict)
        return self.__class__, (items,)

    def copy(self):
        'od.copy() -> a shallow copy of od'
        return self.__class__(self)

    @classmethod
    def fromkeys(cls, iterable, value=None):
        '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S
        and values equal to v (which defaults to None).

        '''
        d = cls()
        for key in iterable:
            d[key] = value
        return d

    def __eq__(self, other):
        '''od.__eq__(y) <==> od==y.  Comparison to another OD is order-sensitive
        while comparison to a regular mapping is order-insensitive.

        '''
        if isinstance(other, OrderedDict):
            return len(self) == len(other) and self.items() == other.items()
        return dict.__eq__(self, other)

    def __ne__(self, other):
        return not self == other

    # -- the following methods are only used in Python 2.7 --

    def viewkeys(self):
        "od.viewkeys() -> a set-like object providing a view on od's keys"
        return KeysView(self)

    def viewvalues(self):
        "od.viewvalues() -> an object providing a view on od's values"
        return ValuesView(self)

    def viewitems(self):
        "od.viewitems() -> a set-like object providing a view on od's items"
        return ItemsView(self)
"""Namespace package exposing the modules vendored under ``packages``."""
from __future__ import absolute_import

from . import ssl_match_hostname

# The only vendored member re-exported from this package.
__all__ = ('ssl_match_hostname',)
from __future__ import absolute_importimport codecsfrom uuid import uuid4from io import BytesIOfrom .packages import sixfrom .packages.six import bfrom .fields import RequestFieldwriter = codecs.lookup('utf-8')[3]def choose_boundary():"""Our embarassingly-simple replacement for mimetools.choose_boundary."""return uuid4().hexdef iter_field_objects(fields):"""Iterate over fields.Supports list of (k, v) tuples and dicts, and lists of:class:`~urllib3.fields.RequestField`."""if isinstance(fields, dict):i = six.iteritems(fields)else:i = iter(fields)for field in i:if isinstance(field, RequestField):yield fieldelse:yield RequestField.from_tuples(*field)def iter_fields(fields):""".. deprecated:: 1.6Iterate over fields.The addition of :class:`~urllib3.fields.RequestField` makes this functionobsolete. Instead, use :func:`iter_field_objects`, which returns:class:`~urllib3.fields.RequestField` objects.Supports list of (k, v) tuples and dicts."""if isinstance(fields, dict):return ((k, v) for k, v in six.iteritems(fields))return ((k, v) for k, v in fields)def encode_multipart_formdata(fields, boundary=None):"""Encode a dictionary of ``fields`` using the multipart/form-data MIME format.:param fields:Dictionary of fields or list of (key, :class:`~urllib3.fields.RequestField`).:param boundary:If not specified, then a random boundary will be generated using:func:`mimetools.choose_boundary`."""body = BytesIO()if boundary is None:boundary = choose_boundary()for field in iter_field_objects(fields):body.write(b('--%s\r\n' % (boundary)))writer(body).write(field.render_headers())data = field.dataif isinstance(data, int):data = str(data) # Backwards compatibilityif isinstance(data, six.text_type):writer(body).write(data)else:body.write(data)body.write(b'\r\n')body.write(b('--%s--\r\n' % (boundary)))content_type = str('multipart/form-data; boundary=%s' % boundary)return body.getvalue(), content_type
from __future__ import absolute_importimport email.utilsimport mimetypesfrom .packages import sixdef guess_content_type(filename, default='application/octet-stream'):"""Guess the "Content-Type" of a file.:param filename:The filename to guess the "Content-Type" of using :mod:`mimetypes`.:param default:If no "Content-Type" can be guessed, default to `default`."""if filename:return mimetypes.guess_type(filename)[0] or defaultreturn defaultdef format_header_param(name, value):"""Helper function to format and quote a single header parameter.Particularly useful for header parameters which might containnon-ASCII values, like file names. This follows RFC 2231, assuggested by RFC 2388 Section 4.4.:param name:The name of the parameter, a string expected to be ASCII only.:param value:The value of the parameter, provided as a unicode string."""if not any(ch in value for ch in '"\\\r\n'):result = '%s="%s"' % (name, value)try:result.encode('ascii')except (UnicodeEncodeError, UnicodeDecodeError):passelse:return resultif not six.PY3 and isinstance(value, six.text_type): # Python 2:value = value.encode('utf-8')value = email.utils.encode_rfc2231(value, 'utf-8')value = '%s*=%s' % (name, value)return valueclass RequestField(object):"""A data container for request body parameters.:param name:The name of this request field.:param data:The data/value body.:param filename:An optional filename of the request field.:param headers:An optional dict-like object of headers to initially use for the field."""def __init__(self, name, data, filename=None, headers=None):self._name = nameself._filename = filenameself.data = dataself.headers = {}if headers:self.headers = dict(headers)@classmethoddef from_tuples(cls, fieldname, value):"""A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters.Supports constructing :class:`~urllib3.fields.RequestField` fromparameter of key/value strings AND key/filetuple. 
A filetuple is a(filename, data, MIME type) tuple where the MIME type is optional.For example::'foo': 'bar','fakefile': ('foofile.txt', 'contents of foofile'),'realfile': ('barfile.txt', open('realfile').read()),'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'),'nonamefile': 'contents of nonamefile field',Field names and filenames must be unicode."""if isinstance(value, tuple):if len(value) == 3:filename, data, content_type = valueelse:filename, data = valuecontent_type = guess_content_type(filename)else:filename = Nonecontent_type = Nonedata = valuerequest_param = cls(fieldname, data, filename=filename)request_param.make_multipart(content_type=content_type)return request_paramdef _render_part(self, name, value):"""Overridable helper function to format a single header parameter.:param name:The name of the parameter, a string expected to be ASCII only.:param value:The value of the parameter, provided as a unicode string."""return format_header_param(name, value)def _render_parts(self, header_parts):"""Helper function to format and quote a single header.Useful for single headers that are composed of multiple items. 
E.g.,'Content-Disposition' fields.:param header_parts:A sequence of (k, v) typles or a :class:`dict` of (k, v) to formatas `k1="v1"; k2="v2"; ...`."""parts = []iterable = header_partsif isinstance(header_parts, dict):iterable = header_parts.items()for name, value in iterable:if value:parts.append(self._render_part(name, value))return '; '.join(parts)def render_headers(self):"""Renders the headers for this request field."""lines = []sort_keys = ['Content-Disposition', 'Content-Type', 'Content-Location']for sort_key in sort_keys:if self.headers.get(sort_key, False):lines.append('%s: %s' % (sort_key, self.headers[sort_key]))for header_name, header_value in self.headers.items():if header_name not in sort_keys:if header_value:lines.append('%s: %s' % (header_name, header_value))lines.append('\r\n')return '\r\n'.join(lines)def make_multipart(self, content_disposition=None, content_type=None,content_location=None):"""Makes this request field into a multipart request field.This method overrides "Content-Disposition", "Content-Type" and"Content-Location" headers to the request parameter.:param content_type:The 'Content-Type' of the request body.:param content_location:The 'Content-Location' of the request body."""self.headers['Content-Disposition'] = content_disposition or 'form-data'self.headers['Content-Disposition'] += '; '.join(['', self._render_parts((('name', self._name), ('filename', self._filename)))])self.headers['Content-Type'] = content_typeself.headers['Content-Location'] = content_location
"""Exception and warning hierarchy used throughout urllib3.

All exceptions derive from :class:`HTTPError`; all warnings derive from
:class:`HTTPWarning`, so callers can catch the whole library with one clause.
"""
from __future__ import absolute_import


# Base Exceptions

class HTTPError(Exception):
    "Base exception used by this module."
    pass


class HTTPWarning(Warning):
    "Base warning used by this module."
    pass


class PoolError(HTTPError):
    "Base exception for errors caused within a pool."
    def __init__(self, pool, message):
        self.pool = pool
        HTTPError.__init__(self, "%s: %s" % (pool, message))

    def __reduce__(self):
        # For pickling purposes. The pool itself is not picklable, so it is
        # dropped and replaced with None on round-trip.
        return self.__class__, (None, None)


class RequestError(PoolError):
    "Base exception for PoolErrors that have associated URLs."
    def __init__(self, pool, url, message):
        self.url = url
        PoolError.__init__(self, pool, message)

    def __reduce__(self):
        # For pickling purposes. Keeps the URL but drops the unpicklable pool.
        return self.__class__, (None, self.url, None)


class SSLError(HTTPError):
    "Raised when SSL certificate fails in an HTTPS connection."
    pass


class ProxyError(HTTPError):
    "Raised when the connection to a proxy fails."
    pass


class DecodeError(HTTPError):
    "Raised when automatic decoding based on Content-Type fails."
    pass


class ProtocolError(HTTPError):
    "Raised when something unexpected happens mid-request/response."
    pass


#: Renamed to ProtocolError but aliased for backwards compatibility.
ConnectionError = ProtocolError


# Leaf Exceptions

class MaxRetryError(RequestError):
    """Raised when the maximum number of retries is exceeded.

    :param pool: The connection pool
    :type pool: :class:`~urllib3.connectionpool.HTTPConnectionPool`
    :param string url: The requested Url
    :param exceptions.Exception reason: The underlying error

    """

    def __init__(self, pool, url, reason=None):
        self.reason = reason

        message = "Max retries exceeded with url: %s (Caused by %r)" % (
            url, reason)

        RequestError.__init__(self, pool, url, message)


class HostChangedError(RequestError):
    "Raised when an existing pool gets a request for a foreign host."

    def __init__(self, pool, url, retries=3):
        message = "Tried to open a foreign host with url: %s" % url
        RequestError.__init__(self, pool, url, message)
        self.retries = retries


class TimeoutStateError(HTTPError):
    """ Raised when passing an invalid state to a timeout """
    pass


class TimeoutError(HTTPError):
    """ Raised when a socket timeout error occurs.

    Catching this error will catch both :exc:`ReadTimeoutErrors
    <ReadTimeoutError>` and :exc:`ConnectTimeoutErrors <ConnectTimeoutError>`.
    """
    pass


class ReadTimeoutError(TimeoutError, RequestError):
    "Raised when a socket timeout occurs while receiving data from a server"
    pass


# This timeout error does not have a URL attached and needs to inherit from the
# base HTTPError
class ConnectTimeoutError(TimeoutError):
    "Raised when a socket timeout occurs while connecting to a server"
    pass


class NewConnectionError(ConnectTimeoutError, PoolError):
    "Raised when we fail to establish a new connection. Usually ECONNREFUSED."
    pass


class EmptyPoolError(PoolError):
    "Raised when a pool runs out of connections and no more are allowed."
    pass


class ClosedPoolError(PoolError):
    "Raised when a request enters a pool after the pool has been closed."
    pass


class LocationValueError(ValueError, HTTPError):
    "Raised when there is something wrong with a given URL input."
    pass


class LocationParseError(LocationValueError):
    "Raised when get_host or similar fails to parse the URL input."

    def __init__(self, location):
        message = "Failed to parse: %s" % location
        HTTPError.__init__(self, message)

        self.location = location


class ResponseError(HTTPError):
    "Used as a container for an error reason supplied in a MaxRetryError."
    GENERIC_ERROR = 'too many error responses'
    SPECIFIC_ERROR = 'too many {status_code} error responses'


class SecurityWarning(HTTPWarning):
    "Warned when performing security reducing actions"
    pass


class SubjectAltNameWarning(SecurityWarning):
    "Warned when connecting to a host with a certificate missing a SAN."
    pass


class InsecureRequestWarning(SecurityWarning):
    "Warned when making an unverified HTTPS request."
    pass


class SystemTimeWarning(SecurityWarning):
    "Warned when system time is suspected to be wrong"
    pass


class InsecurePlatformWarning(SecurityWarning):
    "Warned when certain SSL configuration is not available on a platform."
    pass


class SNIMissingWarning(HTTPWarning):
    "Warned when making a HTTPS request without SNI available."
    pass


class DependencyWarning(HTTPWarning):
    """
    Warned when an attempt is made to import a module with missing optional
    dependencies.
    """
    pass


class ResponseNotChunked(ProtocolError, ValueError):
    "Response needs to be chunked in order to read it as chunks."
    pass


class ProxySchemeUnknown(AssertionError, ValueError):
    "ProxyManager does not support the supplied scheme"
    # TODO(t-8ch): Stop inheriting from AssertionError in v2.0.

    def __init__(self, scheme):
        message = "Not supported proxy scheme %s" % scheme
        super(ProxySchemeUnknown, self).__init__(message)


class HeaderParsingError(HTTPError):
    "Raised by assert_header_parsing, but we convert it to a log.warning statement."
    def __init__(self, defects, unparsed_data):
        message = '%s, unparsed data: %r' % (defects or 'Unknown', unparsed_data)
        super(HeaderParsingError, self).__init__(message)
# -*- coding: utf-8 -*-"""SOCKS support for urllib3~~~~~~~~~~~~~~~~~~~~~~~~~This contrib module contains provisional support for SOCKS proxies from withinurllib3. This module supports SOCKS4 (specifically the SOCKS4A variant) andSOCKS5. To enable its functionality, either install PySocks or install thismodule with the ``socks`` extra.Known Limitations:- Currently PySocks does not support contacting remote websites via literalIPv6 addresses. Any such connection attempt will fail.- Currently PySocks does not support IPv6 connections to the SOCKS proxy. Anysuch connection attempt will fail."""from __future__ import absolute_importtry:import socksexcept ImportError:import warningsfrom ..exceptions import DependencyWarningwarnings.warn(('SOCKS support in urllib3 requires the installation of optional ''dependencies: specifically, PySocks. For more information, see ''https://urllib3.readthedocs.io/en/latest/contrib.html#socks-proxies'),DependencyWarning)raisefrom socket import error as SocketError, timeout as SocketTimeoutfrom ..connection import (HTTPConnection, HTTPSConnection)from ..connectionpool import (HTTPConnectionPool, HTTPSConnectionPool)from ..exceptions import ConnectTimeoutError, NewConnectionErrorfrom ..poolmanager import PoolManagerfrom ..util.url import parse_urltry:import sslexcept ImportError:ssl = Noneclass SOCKSConnection(HTTPConnection):"""A plain-text HTTP connection that connects via a SOCKS proxy."""def __init__(self, *args, **kwargs):self._socks_options = kwargs.pop('_socks_options')super(SOCKSConnection, self).__init__(*args, **kwargs)def _new_conn(self):"""Establish a new connection via the SOCKS proxy."""extra_kw = {}if self.source_address:extra_kw['source_address'] = self.source_addressif self.socket_options:extra_kw['socket_options'] = self.socket_optionstry:conn = socks.create_connection((self.host, 
self.port),proxy_type=self._socks_options['socks_version'],proxy_addr=self._socks_options['proxy_host'],proxy_port=self._socks_options['proxy_port'],proxy_username=self._socks_options['username'],proxy_password=self._socks_options['password'],timeout=self.timeout,**extra_kw)except SocketTimeout as e:raise ConnectTimeoutError(self, "Connection to %s timed out. (connect timeout=%s)" %(self.host, self.timeout))except socks.ProxyError as e:# This is fragile as hell, but it seems to be the only way to raise# useful errors here.if e.socket_err:error = e.socket_errif isinstance(error, SocketTimeout):raise ConnectTimeoutError(self,"Connection to %s timed out. (connect timeout=%s)" %(self.host, self.timeout))else:raise NewConnectionError(self,"Failed to establish a new connection: %s" % error)else:raise NewConnectionError(self,"Failed to establish a new connection: %s" % e)except SocketError as e: # Defensive: PySocks should catch all these.raise NewConnectionError(self, "Failed to establish a new connection: %s" % e)return conn# We don't need to duplicate the Verified/Unverified distinction from# urllib3/connection.py here because the HTTPSConnection will already have been# correctly set to either the Verified or Unverified form by that module. 
This# means the SOCKSHTTPSConnection will automatically be the correct type.class SOCKSHTTPSConnection(SOCKSConnection, HTTPSConnection):passclass SOCKSHTTPConnectionPool(HTTPConnectionPool):ConnectionCls = SOCKSConnectionclass SOCKSHTTPSConnectionPool(HTTPSConnectionPool):ConnectionCls = SOCKSHTTPSConnectionclass SOCKSProxyManager(PoolManager):"""A version of the urllib3 ProxyManager that routes connections via thedefined SOCKS proxy."""pool_classes_by_scheme = {'http': SOCKSHTTPConnectionPool,'https': SOCKSHTTPSConnectionPool,}def __init__(self, proxy_url, username=None, password=None,num_pools=10, headers=None, **connection_pool_kw):parsed = parse_url(proxy_url)if parsed.scheme == 'socks5':socks_version = socks.PROXY_TYPE_SOCKS5elif parsed.scheme == 'socks4':socks_version = socks.PROXY_TYPE_SOCKS4else:raise ValueError("Unable to determine SOCKS version from %s" % proxy_url)self.proxy_url = proxy_urlsocks_options = {'socks_version': socks_version,'proxy_host': parsed.host,'proxy_port': parsed.port,'username': username,'password': password,}connection_pool_kw['_socks_options'] = socks_optionssuper(SOCKSProxyManager, self).__init__(num_pools, headers, **connection_pool_kw)self.pool_classes_by_scheme = SOCKSProxyManager.pool_classes_by_scheme
'''SSL with SNI_-support for Python 2. Follow these instructions if you would
like to verify SSL certificates in Python 2. Note, the default libraries do
*not* do certificate checking; you need to do additional work to validate
certificates yourself.

This needs the following packages installed:

* pyOpenSSL (tested with 0.13)
* ndg-httpsclient (tested with 0.3.2)
* pyasn1 (tested with 0.1.6)

You can install them with the following command:

    pip install pyopenssl ndg-httpsclient pyasn1

To activate certificate checking, call
:func:`~urllib3.contrib.pyopenssl.inject_into_urllib3` from your Python code
before you begin making HTTP requests. This can be done in a ``sitecustomize``
module, or at any other time before your application begins using ``urllib3``,
like this::

    try:
        import urllib3.contrib.pyopenssl
        urllib3.contrib.pyopenssl.inject_into_urllib3()
    except ImportError:
        pass

Now you can use :mod:`urllib3` as you normally would, and it will support SNI
when the required modules are installed.

Activating this module also has the positive side effect of disabling SSL/TLS
compression in Python 2 (see `CRIME attack`_).

If you want to configure the default list of supported cipher suites, you can
set the ``urllib3.contrib.pyopenssl.DEFAULT_SSL_CIPHER_LIST`` variable.

Module Variables
----------------

:var DEFAULT_SSL_CIPHER_LIST: The list of supported SSL/TLS cipher suites.

.. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication
.. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit)

'''
from __future__ import absolute_import

try:
    from ndg.httpsclient.ssl_peer_verification import SUBJ_ALT_NAME_SUPPORT
    from ndg.httpsclient.subj_alt_name import SubjectAltName as BaseSubjectAltName
except SyntaxError as e:
    # ndg-httpsclient can fail with a SyntaxError on unsupported interpreters;
    # surface that as the ImportError callers of this optional module expect.
    raise ImportError(e)

import OpenSSL.SSL
from pyasn1.codec.der import decoder as der_decoder
from pyasn1.type import univ, constraint
from socket import timeout, error as SocketError

try:  # Platform-specific: Python 2
    from socket import _fileobject
except ImportError:  # Platform-specific: Python 3
    _fileobject = None
    from urllib3.packages.backports.makefile import backport_makefile

import ssl
import select
import six

from .. import connection
from .. import util

__all__ = ['inject_into_urllib3', 'extract_from_urllib3']

# SNI only *really* works if we can read the subjectAltName of certificates.
HAS_SNI = SUBJ_ALT_NAME_SUPPORT

# Map from urllib3 to PyOpenSSL compatible parameter-values.
_openssl_versions = {
    ssl.PROTOCOL_SSLv23: OpenSSL.SSL.SSLv23_METHOD,
    ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD,
}

# TLS 1.1/1.2 are only mapped when both the stdlib ssl module and the
# installed PyOpenSSL expose them.
if hasattr(ssl, 'PROTOCOL_TLSv1_1') and hasattr(OpenSSL.SSL, 'TLSv1_1_METHOD'):
    _openssl_versions[ssl.PROTOCOL_TLSv1_1] = OpenSSL.SSL.TLSv1_1_METHOD

if hasattr(ssl, 'PROTOCOL_TLSv1_2') and hasattr(OpenSSL.SSL, 'TLSv1_2_METHOD'):
    _openssl_versions[ssl.PROTOCOL_TLSv1_2] = OpenSSL.SSL.TLSv1_2_METHOD

try:
    # SSLv3 may be compiled out of either library; ignore if absent.
    _openssl_versions.update({ssl.PROTOCOL_SSLv3: OpenSSL.SSL.SSLv3_METHOD})
except AttributeError:
    pass

_openssl_verify = {
    ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE,
    ssl.CERT_OPTIONAL: OpenSSL.SSL.VERIFY_PEER,
    ssl.CERT_REQUIRED:
        OpenSSL.SSL.VERIFY_PEER + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT,
}

DEFAULT_SSL_CIPHER_LIST = util.ssl_.DEFAULT_CIPHERS.encode('ascii')

# OpenSSL will only write 16K at a time
SSL_WRITE_BLOCKSIZE = 16384

# Saved originals so extract_from_urllib3() can undo the monkey-patching.
orig_util_HAS_SNI = util.HAS_SNI
orig_connection_ssl_wrap_socket = connection.ssl_wrap_socket


def inject_into_urllib3():
    'Monkey-patch urllib3 with PyOpenSSL-backed SSL-support.'

    connection.ssl_wrap_socket = ssl_wrap_socket
    util.HAS_SNI = HAS_SNI
    util.IS_PYOPENSSL = True


def extract_from_urllib3():
    'Undo monkey-patching by :func:`inject_into_urllib3`.'

    connection.ssl_wrap_socket = orig_connection_ssl_wrap_socket
    util.HAS_SNI = orig_util_HAS_SNI
    util.IS_PYOPENSSL = False


# Note: This is a slightly bug-fixed version of same from ndg-httpsclient.
class SubjectAltName(BaseSubjectAltName):
    '''ASN.1 implementation for subjectAltNames support'''

    # There is no limit to how many SAN certificates a certificate may have,
    # however this needs to have some limit so we'll set an arbitrarily high
    # limit.
    sizeSpec = univ.SequenceOf.sizeSpec + \
        constraint.ValueSizeConstraint(1, 1024)


# Note: This is a slightly bug-fixed version of same from ndg-httpsclient.
def get_subj_alt_name(peer_cert):
    # Return the list of dNSName entries from the certificate's
    # subjectAltName extension (empty if SAN reading is unsupported).
    # Search through extensions
    dns_name = []
    if not SUBJ_ALT_NAME_SUPPORT:
        return dns_name

    general_names = SubjectAltName()
    for i in range(peer_cert.get_extension_count()):
        ext = peer_cert.get_extension(i)
        ext_name = ext.get_short_name()
        if ext_name != b'subjectAltName':
            continue

        # PyOpenSSL returns extension data in ASN.1 encoded form
        ext_dat = ext.get_data()
        decoded_dat = der_decoder.decode(ext_dat,
                                         asn1Spec=general_names)

        for name in decoded_dat:
            if not isinstance(name, SubjectAltName):
                continue
            for entry in range(len(name)):
                component = name.getComponentByPosition(entry)
                if component.getName() != 'dNSName':
                    continue
                dns_name.append(str(component.getComponent()))

    return dns_name


class WrappedSocket(object):
    '''API-compatibility wrapper for Python OpenSSL's Connection-class.

    Note: _makefile_refs, _drop() and _reuse() are needed for the garbage
    collector of pypy.
    '''

    def __init__(self, connection, socket, suppress_ragged_eofs=True):
        self.connection = connection
        self.socket = socket
        self.suppress_ragged_eofs = suppress_ragged_eofs
        # Reference count of makefile() file objects; close() is deferred
        # until the count drops below 1.
        self._makefile_refs = 0
        self._closed = False

    def fileno(self):
        return self.socket.fileno()

    # Copy-pasted from Python 3.5 source code
    def _decref_socketios(self):
        if self._makefile_refs > 0:
            self._makefile_refs -= 1
        if self._closed:
            self.close()

    def recv(self, *args, **kwargs):
        try:
            data = self.connection.recv(*args, **kwargs)
        except OpenSSL.SSL.SysCallError as e:
            # A ragged EOF is treated as a clean close when suppressed.
            if self.suppress_ragged_eofs and e.args == (-1, 'Unexpected EOF'):
                return b''
            else:
                raise SocketError(str(e))
        except OpenSSL.SSL.ZeroReturnError as e:
            if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN:
                return b''
            else:
                raise
        except OpenSSL.SSL.WantReadError:
            # Not enough TLS data buffered: wait for the socket and retry.
            rd, wd, ed = select.select(
                [self.socket], [], [], self.socket.gettimeout())
            if not rd:
                raise timeout('The read operation timed out')
            else:
                return self.recv(*args, **kwargs)
        else:
            return data

    def recv_into(self, *args, **kwargs):
        try:
            return self.connection.recv_into(*args, **kwargs)
        except OpenSSL.SSL.SysCallError as e:
            if self.suppress_ragged_eofs and e.args == (-1, 'Unexpected EOF'):
                return 0
            else:
                raise SocketError(str(e))
        except OpenSSL.SSL.ZeroReturnError as e:
            if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN:
                return 0
            else:
                raise
        except OpenSSL.SSL.WantReadError:
            rd, wd, ed = select.select(
                [self.socket], [], [], self.socket.gettimeout())
            if not rd:
                raise timeout('The read operation timed out')
            else:
                return self.recv_into(*args, **kwargs)

    def settimeout(self, timeout):
        return self.socket.settimeout(timeout)

    def _send_until_done(self, data):
        # Retry the write until OpenSSL accepts it or the socket times out.
        while True:
            try:
                return self.connection.send(data)
            except OpenSSL.SSL.WantWriteError:
                _, wlist, _ = select.select([], [self.socket], [],
                                            self.socket.gettimeout())
                if not wlist:
                    raise timeout()
                continue

    def sendall(self, data):
        # Write in SSL_WRITE_BLOCKSIZE chunks; OpenSSL caps single writes.
        total_sent = 0
        while total_sent < len(data):
            sent = self._send_until_done(
                data[total_sent:total_sent + SSL_WRITE_BLOCKSIZE])
            total_sent += sent

    def shutdown(self):
        # FIXME rethrow compatible exceptions should we ever use this
        self.connection.shutdown()

    def close(self):
        if self._makefile_refs < 1:
            try:
                self._closed = True
                return self.connection.close()
            except OpenSSL.SSL.Error:
                return
        else:
            self._makefile_refs -= 1

    def getpeercert(self, binary_form=False):
        x509 = self.connection.get_peer_certificate()

        if not x509:
            return x509

        if binary_form:
            return OpenSSL.crypto.dump_certificate(
                OpenSSL.crypto.FILETYPE_ASN1,
                x509)

        # Mimic the dict shape returned by the stdlib ssl module's
        # getpeercert() for the fields urllib3 needs.
        return {
            'subject': (
                (('commonName', x509.get_subject().CN),),
            ),
            'subjectAltName': [
                ('DNS', value)
                for value in get_subj_alt_name(x509)
            ]
        }

    def _reuse(self):
        self._makefile_refs += 1

    def _drop(self):
        if self._makefile_refs < 1:
            self.close()
        else:
            self._makefile_refs -= 1


if _fileobject:  # Platform-specific: Python 2
    def makefile(self, mode, bufsize=-1):
        self._makefile_refs += 1
        return _fileobject(self, mode, bufsize, close=True)
else:  # Platform-specific: Python 3
    makefile = backport_makefile

WrappedSocket.makefile = makefile


def _verify_callback(cnx, x509, err_no, err_depth, return_code):
    # OpenSSL verify callback: accept the certificate iff err_no is 0.
    return err_no == 0


def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
                    ca_certs=None, server_hostname=None,
                    ssl_version=None, ca_cert_dir=None):
    ctx = OpenSSL.SSL.Context(_openssl_versions[ssl_version])
    if certfile:
        keyfile = keyfile or certfile  # Match behaviour of the normal python ssl library
        ctx.use_certificate_file(certfile)
    if keyfile:
        ctx.use_privatekey_file(keyfile)
    if cert_reqs != ssl.CERT_NONE:
        ctx.set_verify(_openssl_verify[cert_reqs], _verify_callback)
    if ca_certs or ca_cert_dir:
        try:
            ctx.load_verify_locations(ca_certs, ca_cert_dir)
        except OpenSSL.SSL.Error as e:
            raise ssl.SSLError('bad ca_certs: %r' % ca_certs, e)
    else:
        ctx.set_default_verify_paths()

    # Disable TLS compression to mitigate CRIME attack (issue #309)
    OP_NO_COMPRESSION = 0x20000
    ctx.set_options(OP_NO_COMPRESSION)

    # Set list of supported ciphersuites.
    ctx.set_cipher_list(DEFAULT_SSL_CIPHER_LIST)

    cnx = OpenSSL.SSL.Connection(ctx, sock)
    if isinstance(server_hostname, six.text_type):  # Platform-specific: Python 3
        server_hostname = server_hostname.encode('utf-8')
    cnx.set_tlsext_host_name(server_hostname)
    cnx.set_connect_state()
    # Drive the handshake manually so we can honour the socket timeout.
    while True:
        try:
            cnx.do_handshake()
        except OpenSSL.SSL.WantReadError:
            rd, _, _ = select.select([sock], [], [], sock.gettimeout())
            if not rd:
                raise timeout('select timed out')
            continue
        except OpenSSL.SSL.Error as e:
            raise ssl.SSLError('bad handshake: %r' % e)
        break

    return WrappedSocket(cnx, sock)
"""NTLM authenticating pool, contributed by erikcederstranIssue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10"""from __future__ import absolute_importtry:from http.client import HTTPSConnectionexcept ImportError:from httplib import HTTPSConnectionfrom logging import getLoggerfrom ntlm import ntlmfrom urllib3 import HTTPSConnectionPoollog = getLogger(__name__)class NTLMConnectionPool(HTTPSConnectionPool):"""Implements an NTLM authentication version of an urllib3 connection pool"""scheme = 'https'def __init__(self, user, pw, authurl, *args, **kwargs):"""authurl is a random URL on the server that is protected by NTLM.user is the Windows user, probably in the DOMAIN\\username format.pw is the password for the user."""super(NTLMConnectionPool, self).__init__(*args, **kwargs)self.authurl = authurlself.rawuser = useruser_parts = user.split('\\', 1)self.domain = user_parts[0].upper()self.user = user_parts[1]self.pw = pwdef _new_conn(self):# Performs the NTLM handshake that secures the connection. The socket# must be kept open while requests are performed.self.num_connections += 1log.debug('Starting NTLM HTTPS connection no. 
%d: https://%s%s',self.num_connections, self.host, self.authurl)headers = {}headers['Connection'] = 'Keep-Alive'req_header = 'Authorization'resp_header = 'www-authenticate'conn = HTTPSConnection(host=self.host, port=self.port)# Send negotiation messageheaders[req_header] = ('NTLM %s' % ntlm.create_NTLM_NEGOTIATE_MESSAGE(self.rawuser))log.debug('Request headers: %s', headers)conn.request('GET', self.authurl, None, headers)res = conn.getresponse()reshdr = dict(res.getheaders())log.debug('Response status: %s %s', res.status, res.reason)log.debug('Response headers: %s', reshdr)log.debug('Response data: %s [...]', res.read(100))# Remove the reference to the socket, so that it can not be closed by# the response object (we want to keep the socket open)res.fp = None# Server should respond with a challenge messageauth_header_values = reshdr[resp_header].split(', ')auth_header_value = Nonefor s in auth_header_values:if s[:5] == 'NTLM ':auth_header_value = s[5:]if auth_header_value is None:raise Exception('Unexpected %s response header: %s' %(resp_header, reshdr[resp_header]))# Send authentication messageServerChallenge, NegotiateFlags = \ntlm.parse_NTLM_CHALLENGE_MESSAGE(auth_header_value)auth_msg = ntlm.create_NTLM_AUTHENTICATE_MESSAGE(ServerChallenge,self.user,self.domain,self.pw,NegotiateFlags)headers[req_header] = 'NTLM %s' % auth_msglog.debug('Request headers: %s', headers)conn.request('GET', self.authurl, None, headers)res = conn.getresponse()log.debug('Response status: %s %s', res.status, res.reason)log.debug('Response headers: %s', dict(res.getheaders()))log.debug('Response data: %s [...]', res.read()[:100])if res.status != 200:if res.status == 401:raise Exception('Server rejected request: wrong ''username or password')raise Exception('Wrong server response: %s %s' %(res.status, res.reason))res.fp = Nonelog.debug('Connection established')return conndef urlopen(self, method, url, body=None, headers=None, retries=3,redirect=True, assert_same_host=True):if headers is 
None:headers = {}headers['Connection'] = 'Keep-Alive'return super(NTLMConnectionPool, self).urlopen(method, url, body,headers, retries,redirect,assert_same_host)
from __future__ import absolute_import
import logging
import os
import warnings

from ..exceptions import (
    HTTPError,
    HTTPWarning,
    MaxRetryError,
    ProtocolError,
    TimeoutError,
    SSLError
)
from ..packages.six import BytesIO
from ..request import RequestMethods
from ..response import HTTPResponse
from ..util.timeout import Timeout
from ..util.retry import Retry

try:
    # urlfetch only exists inside the App Engine runtime; None elsewhere.
    from google.appengine.api import urlfetch
except ImportError:
    urlfetch = None


log = logging.getLogger(__name__)


class AppEnginePlatformWarning(HTTPWarning):
    # Warning category for URLFetch-specific limitations.
    pass


class AppEnginePlatformError(HTTPError):
    # Raised when URLFetch cannot satisfy the request at all.
    pass


class AppEngineManager(RequestMethods):
    """
    Connection manager for Google App Engine sandbox applications.

    This manager uses the URLFetch service directly instead of using the
    emulated httplib, and is subject to URLFetch limitations as described in
    the App Engine documentation here:

        https://cloud.google.com/appengine/docs/python/urlfetch

    Notably it will raise an AppEnginePlatformError if:
        * URLFetch is not available.
        * If you attempt to use this on GAEv2 (Managed VMs), as full socket
          support is available.
        * If a request size is more than 10 megabytes.
        * If a response size is more than 32 megabtyes.
        * If you use an unsupported request method such as OPTIONS.

    Beyond those cases, it will raise normal urllib3 errors.
    """

    def __init__(self, headers=None, retries=None, validate_certificate=True):
        if not urlfetch:
            raise AppEnginePlatformError(
                "URLFetch is not available in this environment.")

        if is_prod_appengine_mvms():
            raise AppEnginePlatformError(
                "Use normal urllib3.PoolManager instead of AppEngineManager"
                "on Managed VMs, as using URLFetch is not necessary in "
                "this environment.")

        warnings.warn(
            "urllib3 is using URLFetch on Google App Engine sandbox instead "
            "of sockets. To use sockets directly instead of URLFetch see "
            "https://urllib3.readthedocs.io/en/latest/contrib.html.",
            AppEnginePlatformWarning)

        RequestMethods.__init__(self, headers)
        self.validate_certificate = validate_certificate

        self.retries = retries or Retry.DEFAULT

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Return False to re-raise any potential exceptions
        return False

    def urlopen(self, method, url, body=None, headers=None,
                retries=None, redirect=True, timeout=Timeout.DEFAULT_TIMEOUT,
                **response_kw):
        # Issue the request through urlfetch.fetch() and translate its
        # service-specific exceptions into urllib3's exception hierarchy.

        retries = self._get_retries(retries, redirect)

        try:
            response = urlfetch.fetch(
                url,
                payload=body,
                method=method,
                headers=headers or {},
                allow_truncated=False,
                # Let URLFetch follow redirects only when both the caller and
                # the retry policy allow it.
                follow_redirects=(
                    redirect and
                    retries.redirect != 0 and
                    retries.total),
                deadline=self._get_absolute_timeout(timeout),
                validate_certificate=self.validate_certificate,
            )
        except urlfetch.DeadlineExceededError as e:
            raise TimeoutError(self, e)

        except urlfetch.InvalidURLError as e:
            if 'too large' in str(e):
                raise AppEnginePlatformError(
                    "URLFetch request too large, URLFetch only "
                    "supports requests up to 10mb in size.", e)
            raise ProtocolError(e)

        except urlfetch.DownloadError as e:
            if 'Too many redirects' in str(e):
                raise MaxRetryError(self, url, reason=e)
            raise ProtocolError(e)

        except urlfetch.ResponseTooLargeError as e:
            raise AppEnginePlatformError(
                "URLFetch response too large, URLFetch only supports"
                "responses up to 32mb in size.", e)

        except urlfetch.SSLCertificateError as e:
            raise SSLError(e)

        except urlfetch.InvalidMethodError as e:
            raise AppEnginePlatformError(
                "URLFetch does not support method: %s" % method, e)

        http_response = self._urlfetch_response_to_http_response(
            response, **response_kw)

        # Check for redirect response
        if (http_response.get_redirect_location() and
                retries.raise_on_redirect and redirect):
            raise MaxRetryError(self, url, "too many redirects")

        # Check if we should retry the HTTP response.
        if retries.is_forced_retry(method, status_code=http_response.status):
            retries = retries.increment(
                method, url, response=http_response, _pool=self)
            log.info("Forced retry: %s", url)
            retries.sleep()
            # Recurse with the decremented retry policy.
            return self.urlopen(
                method, url,
                body=body, headers=headers,
                retries=retries, redirect=redirect,
                timeout=timeout, **response_kw)

        return http_response

    def _urlfetch_response_to_http_response(self, urlfetch_resp, **response_kw):
        # Adapt a urlfetch response object into an urllib3 HTTPResponse.

        if is_prod_appengine():
            # Production GAE handles deflate encoding automatically, but does
            # not remove the encoding header.
            content_encoding = urlfetch_resp.headers.get('content-encoding')

            if content_encoding == 'deflate':
                del urlfetch_resp.headers['content-encoding']

        transfer_encoding = urlfetch_resp.headers.get('transfer-encoding')
        # We have a full response's content,
        # so let's make sure we don't report ourselves as chunked data.
        if transfer_encoding == 'chunked':
            encodings = transfer_encoding.split(",")
            encodings.remove('chunked')
            urlfetch_resp.headers['transfer-encoding'] = ','.join(encodings)

        return HTTPResponse(
            # In order for decoding to work, we must present the content as
            # a file-like object.
            body=BytesIO(urlfetch_resp.content),
            headers=urlfetch_resp.headers,
            status=urlfetch_resp.status_code,
            **response_kw
        )

    def _get_absolute_timeout(self, timeout):
        # Collapse urllib3's Timeout object into the single deadline value
        # (in seconds) that urlfetch understands.
        if timeout is Timeout.DEFAULT_TIMEOUT:
            return 5  # 5s is the default timeout for URLFetch.
        if isinstance(timeout, Timeout):
            if timeout._read is not timeout._connect:
                warnings.warn(
                    "URLFetch does not support granular timeout settings, "
                    "reverting to total timeout.", AppEnginePlatformWarning)
            return timeout.total
        return timeout

    def _get_retries(self, retries, redirect):
        # Normalize the retries argument into a Retry instance, warning about
        # granular retry options urlfetch cannot honour.
        if not isinstance(retries, Retry):
            retries = Retry.from_int(
                retries, redirect=redirect, default=self.retries)

        if retries.connect or retries.read or retries.redirect:
            warnings.warn(
                "URLFetch only supports total retries and does not "
                "recognize connect, read, or redirect retry parameters.",
                AppEnginePlatformWarning)

        return retries


def is_appengine():
    # True in any App Engine environment (local dev, prod sandbox, or MVM).
    return (is_local_appengine() or
            is_prod_appengine() or
            is_prod_appengine_mvms())


def is_appengine_sandbox():
    # Sandboxed environments lack full socket support; MVMs have it.
    return is_appengine() and not is_prod_appengine_mvms()


def is_local_appengine():
    return ('APPENGINE_RUNTIME' in os.environ and
            'Development/' in os.environ['SERVER_SOFTWARE'])


def is_prod_appengine():
    return ('APPENGINE_RUNTIME' in os.environ and
            'Google App Engine/' in os.environ['SERVER_SOFTWARE'] and
            not is_prod_appengine_mvms())


def is_prod_appengine_mvms():
    # GAE_VM is set to the string 'true' on Managed VMs.
    return os.environ.get('GAE_VM', False) == 'true'
from __future__ import absolute_importimport errnoimport loggingimport sysimport warningsfrom socket import error as SocketError, timeout as SocketTimeoutimport sockettry: # Python 3from queue import LifoQueue, Empty, Fullexcept ImportError:from Queue import LifoQueue, Empty, Full# Queue is imported for side effects on MS Windowsimport Queue as _unused_module_Queue # noqa: unusedfrom .exceptions import (ClosedPoolError,ProtocolError,EmptyPoolError,HeaderParsingError,HostChangedError,LocationValueError,MaxRetryError,ProxyError,ReadTimeoutError,SSLError,TimeoutError,InsecureRequestWarning,NewConnectionError,)from .packages.ssl_match_hostname import CertificateErrorfrom .packages import sixfrom .connection import (port_by_scheme,DummyConnection,HTTPConnection, HTTPSConnection, VerifiedHTTPSConnection,HTTPException, BaseSSLError,)from .request import RequestMethodsfrom .response import HTTPResponsefrom .util.connection import is_connection_droppedfrom .util.response import assert_header_parsingfrom .util.retry import Retryfrom .util.timeout import Timeoutfrom .util.url import get_host, Urlxrange = six.moves.xrangelog = logging.getLogger(__name__)_Default = object()# Pool objectsclass ConnectionPool(object):"""Base class for all connection pools, such as:class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`."""scheme = NoneQueueCls = LifoQueuedef __init__(self, host, port=None):if not host:raise LocationValueError("No host specified.")# httplib doesn't like it when we include brackets in ipv6 addresses# Specifically, if we include brackets but also pass the port then# httplib crazily doubles up the square brackets on the Host header.# Instead, we need to make sure we never pass ``None`` as the port.# However, for backward compatibility reasons we can't actually# *assert* that.self.host = host.strip('[]')self.port = portdef __str__(self):return '%s(host=%r, port=%r)' % (type(self).__name__,self.host, self.port)def __enter__(self):return selfdef __exit__(self, 
exc_type, exc_val, exc_tb):self.close()# Return False to re-raise any potential exceptionsreturn Falsedef close(self):"""Close all pooled connections and disable the pool."""pass# This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252_blocking_errnos = set([errno.EAGAIN, errno.EWOULDBLOCK])class HTTPConnectionPool(ConnectionPool, RequestMethods):"""Thread-safe connection pool for one host.:param host:Host used for this HTTP Connection (e.g. "localhost"), passed into:class:`httplib.HTTPConnection`.:param port:Port used for this HTTP Connection (None is equivalent to 80), passedinto :class:`httplib.HTTPConnection`.:param strict:Causes BadStatusLine to be raised if the status line can't be parsedas a valid HTTP/1.0 or 1.1 status line, passed into:class:`httplib.HTTPConnection`... note::Only works in Python 2. This parameter is ignored in Python 3.:param timeout:Socket timeout in seconds for each individual connection. This canbe a float or integer, which sets the timeout for the HTTP request,or an instance of :class:`urllib3.util.Timeout` which gives you morefine-grained control over request timeouts. After the constructor hasbeen parsed, this is always a `urllib3.util.Timeout` object.:param maxsize:Number of connections to save that can be reused. More than 1 is usefulin multithreaded situations. If ``block`` is set to False, moreconnections will be created but they will not be saved once they'vebeen used.:param block:If set to True, no more than ``maxsize`` connections will be used ata time. When no free connections are available, the call will blockuntil a connection has been released. 
This is a useful side effect forparticular multithreaded situations where one does not want to use morethan maxsize connections per host to prevent flooding.:param headers:Headers to include with all requests, unless other headers are givenexplicitly.:param retries:Retry configuration to use by default with requests in this pool.:param _proxy:Parsed proxy URL, should not be used directly, instead, see:class:`urllib3.connectionpool.ProxyManager`":param _proxy_headers:A dictionary with proxy headers, should not be used directly,instead, see :class:`urllib3.connectionpool.ProxyManager`":param \**conn_kw:Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`,:class:`urllib3.connection.HTTPSConnection` instances."""scheme = 'http'ConnectionCls = HTTPConnectionResponseCls = HTTPResponsedef __init__(self, host, port=None, strict=False,timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1, block=False,headers=None, retries=None,_proxy=None, _proxy_headers=None,**conn_kw):ConnectionPool.__init__(self, host, port)RequestMethods.__init__(self, headers)self.strict = strictif not isinstance(timeout, Timeout):timeout = Timeout.from_float(timeout)if retries is None:retries = Retry.DEFAULTself.timeout = timeoutself.retries = retriesself.pool = self.QueueCls(maxsize)self.block = blockself.proxy = _proxyself.proxy_headers = _proxy_headers or {}# Fill the queue up so that doing get() on it will block properlyfor _ in xrange(maxsize):self.pool.put(None)# These are mostly for testing and debugging purposes.self.num_connections = 0self.num_requests = 0self.conn_kw = conn_kwif self.proxy:# Enable Nagle's algorithm for proxies, to avoid packet fragmentation.# We cannot know if the user has added default socket options, so we cannot replace the# list.self.conn_kw.setdefault('socket_options', [])def _new_conn(self):"""Return a fresh :class:`HTTPConnection`."""self.num_connections += 1log.info("Starting new HTTP connection (%d): %s",self.num_connections, 
self.host)conn = self.ConnectionCls(host=self.host, port=self.port,timeout=self.timeout.connect_timeout,strict=self.strict, **self.conn_kw)return conndef _get_conn(self, timeout=None):"""Get a connection. Will return a pooled connection if one is available.If no connections are available and :prop:`.block` is ``False``, then afresh connection is returned.:param timeout:Seconds to wait before giving up and raising:class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and:prop:`.block` is ``True``."""conn = Nonetry:conn = self.pool.get(block=self.block, timeout=timeout)except AttributeError: # self.pool is Noneraise ClosedPoolError(self, "Pool is closed.")except Empty:if self.block:raise EmptyPoolError(self,"Pool reached maximum size and no more ""connections are allowed.")pass # Oh well, we'll create a new connection then# If this is a persistent connection, check if it got disconnectedif conn and is_connection_dropped(conn):log.info("Resetting dropped connection: %s", self.host)conn.close()if getattr(conn, 'auto_open', 1) == 0:# This is a proxied connection that has been mutated by# httplib._tunnel() and cannot be reused (since it would# attempt to bypass the proxy)conn = Nonereturn conn or self._new_conn()def _put_conn(self, conn):"""Put a connection back into the pool.:param conn:Connection object for the current host and port as returned by:meth:`._new_conn` or :meth:`._get_conn`.If the pool is already full, the connection is closed and discardedbecause we exceeded maxsize. 
If connections are discarded frequently,then maxsize should be increased.If the pool is closed, then the connection will be closed and discarded."""try:self.pool.put(conn, block=False)return # Everything is dandy, done.except AttributeError:# self.pool is None.passexcept Full:# This should never happen if self.block == Truelog.warning("Connection pool is full, discarding connection: %s",self.host)# Connection never got put back into the pool, close it.if conn:conn.close()def _validate_conn(self, conn):"""Called right before a request is made, after the socket is created."""passdef _prepare_proxy(self, conn):# Nothing to do for HTTP connections.passdef _get_timeout(self, timeout):""" Helper that always returns a :class:`urllib3.util.Timeout` """if timeout is _Default:return self.timeout.clone()if isinstance(timeout, Timeout):return timeout.clone()else:# User passed us an int/float. This is for backwards compatibility,# can be removed laterreturn Timeout.from_float(timeout)def _raise_timeout(self, err, url, timeout_value):"""Is the error actually a timeout? Will raise a ReadTimeout or pass"""if isinstance(err, SocketTimeout):raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value)# See the above comment about EAGAIN in Python 3. In Python 2 we have# to specifically catch it and throw the timeout errorif hasattr(err, 'errno') and err.errno in _blocking_errnos:raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value)# Catch possible read timeouts thrown as SSL errors. If not the# case, rethrow the original. We need to do this because of:# http://bugs.python.org/issue10272if 'timed out' in str(err) or 'did not complete (read)' in str(err): # Python 2.6raise ReadTimeoutError(self, url, "Read timed out. 
(read timeout=%s)" % timeout_value)def _make_request(self, conn, method, url, timeout=_Default, chunked=False,**httplib_request_kw):"""Perform a request on a given urllib connection object taken from ourpool.:param conn:a connection from one of our connection pools:param timeout:Socket timeout in seconds for the request. This can be afloat or integer, which will set the same timeout value forthe socket connect and the socket read, or an instance of:class:`urllib3.util.Timeout`, which gives you more fine-grainedcontrol over your timeouts."""self.num_requests += 1timeout_obj = self._get_timeout(timeout)timeout_obj.start_connect()conn.timeout = timeout_obj.connect_timeout# Trigger any extra validation we need to do.try:self._validate_conn(conn)except (SocketTimeout, BaseSSLError) as e:# Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout.self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)raise# conn.request() calls httplib.*.request, not the method in# urllib3.request. It also calls makefile (recv) on the socket.if chunked:conn.request_chunked(method, url, **httplib_request_kw)else:conn.request(method, url, **httplib_request_kw)# Reset the timeout for the recv() on the socketread_timeout = timeout_obj.read_timeout# App Engine doesn't have a sock attrif getattr(conn, 'sock', None):# In Python 3 socket.py will catch EAGAIN and return None when you# try and read into the file pointer created by http.client, which# instead raises a BadStatusLine exception. Instead of catching# the exception and assuming all BadStatusLine exceptions are read# timeouts, check for a zero timeout before making the request.if read_timeout == 0:raise ReadTimeoutError(self, url, "Read timed out. 
(read timeout=%s)" % read_timeout)if read_timeout is Timeout.DEFAULT_TIMEOUT:conn.sock.settimeout(socket.getdefaulttimeout())else: # None or a valueconn.sock.settimeout(read_timeout)# Receive the response from the servertry:try: # Python 2.7, use buffering of HTTP responseshttplib_response = conn.getresponse(buffering=True)except TypeError: # Python 2.6 and older, Python 3try:httplib_response = conn.getresponse()except Exception as e:# Remove the TypeError from the exception chain in Python 3;# otherwise it looks like a programming error was the cause.six.raise_from(e, None)except (SocketTimeout, BaseSSLError, SocketError) as e:self._raise_timeout(err=e, url=url, timeout_value=read_timeout)raise# AppEngine doesn't have a version attr.http_version = getattr(conn, '_http_vsn_str', 'HTTP/?')log.debug("\"%s %s %s\" %s %s", method, url, http_version,httplib_response.status, httplib_response.length)try:assert_header_parsing(httplib_response.msg)except HeaderParsingError as hpe: # Platform-specific: Python 3log.warning('Failed to parse headers (url=%s): %s',self._absolute_url(url), hpe, exc_info=True)return httplib_responsedef _absolute_url(self, path):return Url(scheme=self.scheme, host=self.host, port=self.port, path=path).urldef close(self):"""Close all pooled connections and disable the pool."""# Disable access to the poolold_pool, self.pool = self.pool, Nonetry:while True:conn = old_pool.get(block=False)if conn:conn.close()except Empty:pass # Done.def is_same_host(self, url):"""Check if the given ``url`` is a member of the same host as thisconnection pool."""if url.startswith('/'):return True# TODO: Add optional support for socket.gethostbyname checking.scheme, host, port = get_host(url)# Use explicit default port for comparison when none is givenif self.port and not port:port = port_by_scheme.get(scheme)elif not self.port and port == port_by_scheme.get(scheme):port = Nonereturn (scheme, host, port) == (self.scheme, self.host, self.port)def urlopen(self, method, url, 
body=None, headers=None, retries=None,redirect=True, assert_same_host=True, timeout=_Default,pool_timeout=None, release_conn=None, chunked=False,**response_kw):"""Get a connection from the pool and perform an HTTP request. This is thelowest level call for making a request, so you'll need to specify allthe raw details... note::More commonly, it's appropriate to use a convenience method providedby :class:`.RequestMethods`, such as :meth:`request`... note::`release_conn` will only behave as expected if`preload_content=False` because we want to make`preload_content=False` the default behaviour someday soon withoutbreaking backwards compatibility.:param method:HTTP request method (such as GET, POST, PUT, etc.):param body:Data to send in the request body (useful for creatingPOST requests, see HTTPConnectionPool.post_url formore convenience).:param headers:Dictionary of custom headers to send, such as User-Agent,If-None-Match, etc. If None, pool headers are used. If provided,these headers completely replace any pool-specific headers.:param retries:Configure the number of retries to allow before raising a:class:`~urllib3.exceptions.MaxRetryError` exception.Pass ``None`` to retry until you receive a response. Pass a:class:`~urllib3.util.retry.Retry` object for fine-grained controlover different types of retries.Pass an integer number to retry connection errors that many times,but no other types of errors. Pass zero to never retry.If ``False``, then retries are disabled and any exception is raisedimmediately. Also, instead of raising a MaxRetryError on redirects,the redirect response will be returned.:type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.:param redirect:If True, automatically handle redirects (status codes 301, 302,303, 307, 308). Each redirect counts as a retry. Disabling retrieswill disable redirect, too.:param assert_same_host:If ``True``, will make sure that the host of the pool requests isconsistent else will raise HostChangedError. 
When False, you canuse the pool on an HTTP proxy and request foreign hosts.:param timeout:If specified, overrides the default timeout for this onerequest. It may be a float (in seconds) or an instance of:class:`urllib3.util.Timeout`.:param pool_timeout:If set and the pool is set to block=True, then this method willblock for ``pool_timeout`` seconds and raise EmptyPoolError if noconnection is available within the time period.:param release_conn:If False, then the urlopen call will not release the connectionback into the pool once a response is received (but will release ifyou read the entire contents of the response such as when`preload_content=True`). This is useful if you're not preloadingthe response's content immediately. You will need to call``r.release_conn()`` on the response ``r`` to return the connectionback into the pool. If None, it takes the value of``response_kw.get('preload_content', True)``.:param chunked:If True, urllib3 will send the body using chunked transferencoding. Otherwise, urllib3 will send the body using the standardcontent-length form. Defaults to False.:param \**response_kw:Additional parameters are passed to:meth:`urllib3.response.HTTPResponse.from_httplib`"""if headers is None:headers = self.headersif not isinstance(retries, Retry):retries = Retry.from_int(retries, redirect=redirect, default=self.retries)if release_conn is None:release_conn = response_kw.get('preload_content', True)# Check hostif assert_same_host and not self.is_same_host(url):raise HostChangedError(self, url, retries)conn = None# Track whether `conn` needs to be released before# returning/raising/recursing. Update this variable if necessary, and# leave `release_conn` constant throughout the function. 
That way, if# the function recurses, the original value of `release_conn` will be# passed down into the recursive call, and its value will be respected.## See issue #651 [1] for details.## [1] <https://github.com/shazow/urllib3/issues/651>release_this_conn = release_conn# Merge the proxy headers. Only do this in HTTP. We have to copy the# headers dict so we can safely change it without those changes being# reflected in anyone else's copy.if self.scheme == 'http':headers = headers.copy()headers.update(self.proxy_headers)# Must keep the exception bound to a separate variable or else Python 3# complains about UnboundLocalError.err = None# Keep track of whether we cleanly exited the except block. This# ensures we do proper cleanup in finally.clean_exit = Falsetry:# Request a connection from the queue.timeout_obj = self._get_timeout(timeout)conn = self._get_conn(timeout=pool_timeout)conn.timeout = timeout_obj.connect_timeoutis_new_proxy_conn = self.proxy is not None and not getattr(conn, 'sock', None)if is_new_proxy_conn:self._prepare_proxy(conn)# Make the request on the httplib connection object.httplib_response = self._make_request(conn, method, url,timeout=timeout_obj,body=body, headers=headers,chunked=chunked)# If we're going to release the connection in ``finally:``, then# the response doesn't need to know about the connection. Otherwise# it will also try to release it and we'll have a double-release# mess.response_conn = conn if not release_conn else None# Import httplib's response into our own wrapper objectresponse = self.ResponseCls.from_httplib(httplib_response,pool=self,connection=response_conn,**response_kw)# Everything went great!clean_exit = Trueexcept Empty:# Timed out by queue.raise EmptyPoolError(self, "No pool connections are available.")except (BaseSSLError, CertificateError) as e:# Close the connection. 
If a connection is reused on which there# was a Certificate error, the next request will certainly raise# another Certificate error.clean_exit = Falseraise SSLError(e)except SSLError:# Treat SSLError separately from BaseSSLError to preserve# traceback.clean_exit = Falseraiseexcept (TimeoutError, HTTPException, SocketError, ProtocolError) as e:# Discard the connection for these exceptions. It will be# be replaced during the next _get_conn() call.clean_exit = Falseif isinstance(e, (SocketError, NewConnectionError)) and self.proxy:e = ProxyError('Cannot connect to proxy.', e)elif isinstance(e, (SocketError, HTTPException)):e = ProtocolError('Connection aborted.', e)retries = retries.increment(method, url, error=e, _pool=self,_stacktrace=sys.exc_info()[2])retries.sleep()# Keep track of the error for the retry warning.err = efinally:if not clean_exit:# We hit some kind of exception, handled or otherwise. We need# to throw the connection away unless explicitly told not to.# Close the connection, set the variable to None, and make sure# we put the None back in the pool to avoid leaking it.conn = conn and conn.close()release_this_conn = Trueif release_this_conn:# Put the connection back to be reused. 
If the connection is# expired then it will be None, which will get replaced with a# fresh connection during _get_conn.self._put_conn(conn)if not conn:# Try againlog.warning("Retrying (%r) after connection ""broken by '%r': %s", retries, err, url)return self.urlopen(method, url, body, headers, retries,redirect, assert_same_host,timeout=timeout, pool_timeout=pool_timeout,release_conn=release_conn, **response_kw)# Handle redirect?redirect_location = redirect and response.get_redirect_location()if redirect_location:if response.status == 303:method = 'GET'try:retries = retries.increment(method, url, response=response, _pool=self)except MaxRetryError:if retries.raise_on_redirect:# Release the connection for this response, since we're not# returning it to be released manually.response.release_conn()raisereturn responselog.info("Redirecting %s -> %s", url, redirect_location)return self.urlopen(method, redirect_location, body, headers,retries=retries, redirect=redirect,assert_same_host=assert_same_host,timeout=timeout, pool_timeout=pool_timeout,release_conn=release_conn, **response_kw)# Check if we should retry the HTTP response.if retries.is_forced_retry(method, status_code=response.status):try:retries = retries.increment(method, url, response=response, _pool=self)except MaxRetryError:if retries.raise_on_status:# Release the connection for this response, since we're not# returning it to be released manually.response.release_conn()raisereturn responseretries.sleep()log.info("Forced retry: %s", url)return self.urlopen(method, url, body, headers,retries=retries, redirect=redirect,assert_same_host=assert_same_host,timeout=timeout, pool_timeout=pool_timeout,release_conn=release_conn, **response_kw)return responseclass HTTPSConnectionPool(HTTPConnectionPool):"""Same as :class:`.HTTPConnectionPool`, but HTTPS.When Python is compiled with the :mod:`ssl` module, then:class:`.VerifiedHTTPSConnection` is used, which *can* verify certificates,instead of 
:class:`.HTTPSConnection`.:class:`.VerifiedHTTPSConnection` uses one of ``assert_fingerprint``,``assert_hostname`` and ``host`` in this order to verify connections.If ``assert_hostname`` is False, no verification is done.The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``,``ca_cert_dir``, and ``ssl_version`` are only used if :mod:`ssl` isavailable and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgradethe connection socket into an SSL socket."""scheme = 'https'ConnectionCls = HTTPSConnectiondef __init__(self, host, port=None,strict=False, timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1,block=False, headers=None, retries=None,_proxy=None, _proxy_headers=None,key_file=None, cert_file=None, cert_reqs=None,ca_certs=None, ssl_version=None,assert_hostname=None, assert_fingerprint=None,ca_cert_dir=None, **conn_kw):HTTPConnectionPool.__init__(self, host, port, strict, timeout, maxsize,block, headers, retries, _proxy, _proxy_headers,**conn_kw)if ca_certs and cert_reqs is None:cert_reqs = 'CERT_REQUIRED'self.key_file = key_fileself.cert_file = cert_fileself.cert_reqs = cert_reqsself.ca_certs = ca_certsself.ca_cert_dir = ca_cert_dirself.ssl_version = ssl_versionself.assert_hostname = assert_hostnameself.assert_fingerprint = assert_fingerprintdef _prepare_conn(self, conn):"""Prepare the ``connection`` for :meth:`urllib3.util.ssl_wrap_socket`and establish the tunnel if proxy is used."""if isinstance(conn, VerifiedHTTPSConnection):conn.set_cert(key_file=self.key_file,cert_file=self.cert_file,cert_reqs=self.cert_reqs,ca_certs=self.ca_certs,ca_cert_dir=self.ca_cert_dir,assert_hostname=self.assert_hostname,assert_fingerprint=self.assert_fingerprint)conn.ssl_version = self.ssl_versionreturn conndef _prepare_proxy(self, conn):"""Establish tunnel connection early, because otherwise httplibwould improperly set Host: header to proxy's IP:port."""# Python 2.7+try:set_tunnel = conn.set_tunnelexcept AttributeError: # Platform-specific: Python 2.6set_tunnel = conn._set_tunnelif 
sys.version_info <= (2, 6, 4) and not self.proxy_headers: # Python 2.6.4 and olderset_tunnel(self.host, self.port)else:set_tunnel(self.host, self.port, self.proxy_headers)conn.connect()def _new_conn(self):"""Return a fresh :class:`httplib.HTTPSConnection`."""self.num_connections += 1log.info("Starting new HTTPS connection (%d): %s",self.num_connections, self.host)if not self.ConnectionCls or self.ConnectionCls is DummyConnection:raise SSLError("Can't connect to HTTPS URL because the SSL ""module is not available.")actual_host = self.hostactual_port = self.portif self.proxy is not None:actual_host = self.proxy.hostactual_port = self.proxy.portconn = self.ConnectionCls(host=actual_host, port=actual_port,timeout=self.timeout.connect_timeout,strict=self.strict, **self.conn_kw)return self._prepare_conn(conn)def _validate_conn(self, conn):"""Called right before a request is made, after the socket is created."""super(HTTPSConnectionPool, self)._validate_conn(conn)# Force connect early to allow us to validate the connection.if not getattr(conn, 'sock', None): # AppEngine might not have `.sock`conn.connect()if not conn.is_verified:warnings.warn(('Unverified HTTPS request is being made. ''Adding certificate verification is strongly advised. See: ''https://urllib3.readthedocs.io/en/latest/security.html'),InsecureRequestWarning)def connection_from_url(url, **kw):"""Given a url, return an :class:`.ConnectionPool` instance of its host.This is a shortcut for not having to parse out the scheme, host, and portof the url before creating an :class:`.ConnectionPool` instance.:param url:Absolute URL string that must include the scheme. Port is optional.:param \**kw:Passes additional parameters to the constructor of the appropriate:class:`.ConnectionPool`. 
Useful for specifying things liketimeout, maxsize, headers, etc.Example::>>> conn = connection_from_url('http://google.com/')>>> r = conn.request('GET', '/')"""scheme, host, port = get_host(url)port = port or port_by_scheme.get(scheme, 80)if scheme == 'https':return HTTPSConnectionPool(host, port=port, **kw)else:return HTTPConnectionPool(host, port=port, **kw)
from __future__ import absolute_importimport datetimeimport loggingimport osimport sysimport socketfrom socket import error as SocketError, timeout as SocketTimeoutimport warningsfrom .packages import sixtry: # Python 3from http.client import HTTPConnection as _HTTPConnectionfrom http.client import HTTPException # noqa: unused in this moduleexcept ImportError:from httplib import HTTPConnection as _HTTPConnectionfrom httplib import HTTPException # noqa: unused in this moduletry: # Compiled with SSL?import sslBaseSSLError = ssl.SSLErrorexcept (ImportError, AttributeError): # Platform-specific: No SSL.ssl = Noneclass BaseSSLError(BaseException):passtry: # Python 3:# Not a no-op, we're adding this to the namespace so it can be imported.ConnectionError = ConnectionErrorexcept NameError: # Python 2:class ConnectionError(Exception):passfrom .exceptions import (NewConnectionError,ConnectTimeoutError,SubjectAltNameWarning,SystemTimeWarning,)from .packages.ssl_match_hostname import match_hostname, CertificateErrorfrom .util.ssl_ import (resolve_cert_reqs,resolve_ssl_version,ssl_wrap_socket,assert_fingerprint,)from .util import connectionfrom ._collections import HTTPHeaderDictlog = logging.getLogger(__name__)port_by_scheme = {'http': 80,'https': 443,}RECENT_DATE = datetime.date(2014, 1, 1)class DummyConnection(object):"""Used to detect a failed ConnectionCls import."""passclass HTTPConnection(_HTTPConnection, object):"""Based on httplib.HTTPConnection but provides an extra constructorbackwards-compatibility layer between older and newer Pythons.Additional keyword parameters are used to configure attributes of the connection.Accepted parameters include:- ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool`- ``source_address``: Set the source address for the current connection... note:: This is ignored for Python 2.6. It is only applied for 2.7 and 3.x- ``socket_options``: Set specific options on the underlying socket. 
If not specified, thendefaults are loaded from ``HTTPConnection.default_socket_options`` which includes disablingNagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy.For example, if you wish to enable TCP Keep Alive in addition to the defaults,you might pass::HTTPConnection.default_socket_options + [(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),]Or you may want to disable the defaults by passing an empty list (e.g., ``[]``)."""default_port = port_by_scheme['http']#: Disable Nagle's algorithm by default.#: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]``default_socket_options = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]#: Whether this connection verifies the host's certificate.is_verified = Falsedef __init__(self, *args, **kw):if six.PY3: # Python 3kw.pop('strict', None)# Pre-set source_address in case we have an older Python like 2.6.self.source_address = kw.get('source_address')if sys.version_info < (2, 7): # Python 2.6# _HTTPConnection on Python 2.6 will balk at this keyword arg, but# not newer versions. We can still use it when creating a# connection though, so we pop it *after* we have saved it as# self.source_address.kw.pop('source_address', None)#: The socket options provided by the user. If no options are#: provided, we use the default options.self.socket_options = kw.pop('socket_options', self.default_socket_options)# Superclass also sets self.source_address in Python 2.7+._HTTPConnection.__init__(self, *args, **kw)def _new_conn(self):""" Establish a socket connection and set nodelay settings on it.:return: New socket connection."""extra_kw = {}if self.source_address:extra_kw['source_address'] = self.source_addressif self.socket_options:extra_kw['socket_options'] = self.socket_optionstry:conn = connection.create_connection((self.host, self.port), self.timeout, **extra_kw)except SocketTimeout as e:raise ConnectTimeoutError(self, "Connection to %s timed out. 
(connect timeout=%s)" %(self.host, self.timeout))except SocketError as e:raise NewConnectionError(self, "Failed to establish a new connection: %s" % e)return conndef _prepare_conn(self, conn):self.sock = conn# the _tunnel_host attribute was added in python 2.6.3 (via# http://hg.python.org/cpython/rev/0f57b30a152f) so pythons 2.6(0-2) do# not have them.if getattr(self, '_tunnel_host', None):# TODO: Fix tunnel so it doesn't depend on self.sock state.self._tunnel()# Mark this connection as not reusableself.auto_open = 0def connect(self):conn = self._new_conn()self._prepare_conn(conn)def request_chunked(self, method, url, body=None, headers=None):"""Alternative to the common request method, which sends thebody with chunked encoding and not as one block"""headers = HTTPHeaderDict(headers if headers is not None else {})skip_accept_encoding = 'accept-encoding' in headersself.putrequest(method, url, skip_accept_encoding=skip_accept_encoding)for header, value in headers.items():self.putheader(header, value)if 'transfer-encoding' not in headers:self.putheader('Transfer-Encoding', 'chunked')self.endheaders()if body is not None:stringish_types = six.string_types + (six.binary_type,)if isinstance(body, stringish_types):body = (body,)for chunk in body:if not chunk:continueif not isinstance(chunk, six.binary_type):chunk = chunk.encode('utf8')len_str = hex(len(chunk))[2:]self.send(len_str.encode('utf-8'))self.send(b'\r\n')self.send(chunk)self.send(b'\r\n')# After the if clause, to always have a closed bodyself.send(b'0\r\n\r\n')class HTTPSConnection(HTTPConnection):default_port = port_by_scheme['https']def __init__(self, host, port=None, key_file=None, cert_file=None,strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, **kw):HTTPConnection.__init__(self, host, port, strict=strict,timeout=timeout, **kw)self.key_file = key_fileself.cert_file = cert_file# Required property for Google AppEngine 1.9.0 which otherwise causes# HTTPS requests to go out as HTTP. 
(See Issue #356)self._protocol = 'https'def connect(self):conn = self._new_conn()self._prepare_conn(conn)self.sock = ssl.wrap_socket(conn, self.key_file, self.cert_file)class VerifiedHTTPSConnection(HTTPSConnection):"""Based on httplib.HTTPSConnection but wraps the socket withSSL certification."""cert_reqs = Noneca_certs = Noneca_cert_dir = Nonessl_version = Noneassert_fingerprint = Nonedef set_cert(self, key_file=None, cert_file=None,cert_reqs=None, ca_certs=None,assert_hostname=None, assert_fingerprint=None,ca_cert_dir=None):if (ca_certs or ca_cert_dir) and cert_reqs is None:cert_reqs = 'CERT_REQUIRED'self.key_file = key_fileself.cert_file = cert_fileself.cert_reqs = cert_reqsself.assert_hostname = assert_hostnameself.assert_fingerprint = assert_fingerprintself.ca_certs = ca_certs and os.path.expanduser(ca_certs)self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir)def connect(self):# Add certificate verificationconn = self._new_conn()resolved_cert_reqs = resolve_cert_reqs(self.cert_reqs)resolved_ssl_version = resolve_ssl_version(self.ssl_version)hostname = self.hostif getattr(self, '_tunnel_host', None):# _tunnel_host was added in Python 2.6.3# (See: http://hg.python.org/cpython/rev/0f57b30a152f)self.sock = conn# Calls self._set_hostport(), so self.host is# self._tunnel_host below.self._tunnel()# Mark this connection as not reusableself.auto_open = 0# Override the host with the one we're requesting data from.hostname = self._tunnel_hostis_time_off = datetime.date.today() < RECENT_DATEif is_time_off:warnings.warn(('System time is way off (before {0}). 
This will probably ''lead to SSL verification errors').format(RECENT_DATE),SystemTimeWarning)# Wrap socket using verification with the root certs in# trusted_root_certsself.sock = ssl_wrap_socket(conn, self.key_file, self.cert_file,cert_reqs=resolved_cert_reqs,ca_certs=self.ca_certs,ca_cert_dir=self.ca_cert_dir,server_hostname=hostname,ssl_version=resolved_ssl_version)if self.assert_fingerprint:assert_fingerprint(self.sock.getpeercert(binary_form=True),self.assert_fingerprint)elif resolved_cert_reqs != ssl.CERT_NONE \and self.assert_hostname is not False:cert = self.sock.getpeercert()if not cert.get('subjectAltName', ()):warnings.warn(('Certificate for {0} has no `subjectAltName`, falling back to check for a ''`commonName` for now. This feature is being removed by major browsers and ''deprecated by RFC 2818. (See https://github.com/shazow/urllib3/issues/497 ''for details.)'.format(hostname)),SubjectAltNameWarning)_match_hostname(cert, self.assert_hostname or hostname)self.is_verified = (resolved_cert_reqs == ssl.CERT_REQUIRED orself.assert_fingerprint is not None)def _match_hostname(cert, asserted_hostname):try:match_hostname(cert, asserted_hostname)except CertificateError as e:log.error('Certificate did not match expected hostname: %s. ''Certificate: %s', asserted_hostname, cert)# Add cert to exception and reraise so client code can inspect# the cert when catching the exception, if they want toe._peer_cert = certraiseif ssl:# Make a copy for testing.UnverifiedHTTPSConnection = HTTPSConnectionHTTPSConnection = VerifiedHTTPSConnectionelse:HTTPSConnection = DummyConnection
"""
Thread-safe LRU container and a case-insensitive HTTP header mapping.
"""
from __future__ import absolute_import

# Fix: the abstract base classes live in collections.abc on Python 3.3+,
# and the old ``collections`` aliases were removed in Python 3.10 — the
# plain ``from collections import Mapping, MutableMapping`` raises
# ImportError there.  Fall back to the old location for Python 2.
try:
    from collections.abc import Mapping, MutableMapping
except ImportError:  # Python 2.7
    from collections import Mapping, MutableMapping

try:
    from threading import RLock
except ImportError:  # Platform-specific: No threads available
    class RLock:
        # No-op context-manager stand-in so the containers below can use
        # ``with self.lock:`` unconditionally.
        def __enter__(self):
            pass

        def __exit__(self, exc_type, exc_value, traceback):
            pass


try:  # Python 2.7+
    from collections import OrderedDict
except ImportError:
    from .packages.ordered_dict import OrderedDict
from .packages.six import iterkeys, itervalues, PY3


__all__ = ['RecentlyUsedContainer', 'HTTPHeaderDict']


# Sentinel distinguishing "no value" from a stored None.
_Null = object()


class RecentlyUsedContainer(MutableMapping):
    """
    Provides a thread-safe dict-like container which maintains up to
    ``maxsize`` keys while throwing away the least-recently-used keys beyond
    ``maxsize``.

    :param maxsize:
        Maximum number of recent elements to retain.

    :param dispose_func:
        Callback which will get called every time an item is evicted from
        the container, as ``dispose_func(value)``.
    """

    ContainerCls = OrderedDict

    def __init__(self, maxsize=10, dispose_func=None):
        self._maxsize = maxsize
        self.dispose_func = dispose_func

        self._container = self.ContainerCls()
        self.lock = RLock()

    def __getitem__(self, key):
        # Re-insert the item, moving it to the end of the eviction line.
        with self.lock:
            item = self._container.pop(key)
            self._container[key] = item
            return item

    def __setitem__(self, key, value):
        evicted_value = _Null
        with self.lock:
            # Possibly evict the existing value of 'key'
            evicted_value = self._container.get(key, _Null)
            self._container[key] = value

            # If we didn't evict an existing value, we might have to evict the
            # least recently used item from the beginning of the container.
            if len(self._container) > self._maxsize:
                _key, evicted_value = self._container.popitem(last=False)

        # Dispose outside the lock: dispose_func is user code.
        if self.dispose_func and evicted_value is not _Null:
            self.dispose_func(evicted_value)

    def __delitem__(self, key):
        with self.lock:
            value = self._container.pop(key)

        if self.dispose_func:
            self.dispose_func(value)

    def __len__(self):
        with self.lock:
            return len(self._container)

    def __iter__(self):
        raise NotImplementedError('Iteration over this class is unlikely to be threadsafe.')

    def clear(self):
        with self.lock:
            # Copy pointers to all values, then wipe the mapping
            values = list(itervalues(self._container))
            self._container.clear()

        if self.dispose_func:
            for value in values:
                self.dispose_func(value)

    def keys(self):
        with self.lock:
            return list(iterkeys(self._container))


class HTTPHeaderDict(MutableMapping):
    """
    :param headers:
        An iterable of field-value pairs. Must not contain multiple field names
        when compared case-insensitively.

    :param kwargs:
        Additional field-value pairs to pass in to ``dict.update``.

    A ``dict`` like container for storing HTTP Headers.

    Field names are stored and compared case-insensitively in compliance with
    RFC 7230. Iteration provides the first case-sensitive key seen for each
    case-insensitive pair.

    Using ``__setitem__`` syntax overwrites fields that compare equal
    case-insensitively in order to maintain ``dict``'s api. For fields that
    compare equal, instead create a new ``HTTPHeaderDict`` and use ``.add``
    in a loop.

    If multiple fields that are equal case-insensitively are passed to the
    constructor or ``.update``, the behavior is undefined and some will be
    lost.

    >>> headers = HTTPHeaderDict()
    >>> headers.add('Set-Cookie', 'foo=bar')
    >>> headers.add('set-cookie', 'baz=quxx')
    >>> headers['content-length'] = '7'
    >>> headers['SET-cookie']
    'foo=bar, baz=quxx'
    >>> headers['Content-Length']
    '7'
    """

    def __init__(self, headers=None, **kwargs):
        super(HTTPHeaderDict, self).__init__()
        # Values are stored as (original_key, v1[, v2, ...]) keyed by the
        # lowercased name: a tuple for a single value, a list once .add()
        # has appended more.
        self._container = OrderedDict()
        if headers is not None:
            if isinstance(headers, HTTPHeaderDict):
                self._copy_from(headers)
            else:
                self.extend(headers)
        if kwargs:
            self.extend(kwargs)

    def __setitem__(self, key, val):
        self._container[key.lower()] = (key, val)
        return self._container[key.lower()]

    def __getitem__(self, key):
        val = self._container[key.lower()]
        # Multiple values for one field are joined per RFC 7230.
        return ', '.join(val[1:])

    def __delitem__(self, key):
        del self._container[key.lower()]

    def __contains__(self, key):
        return key.lower() in self._container

    def __eq__(self, other):
        if not isinstance(other, Mapping) and not hasattr(other, 'keys'):
            return False
        if not isinstance(other, type(self)):
            other = type(self)(other)
        # Compare case-insensitively on merged values.
        return (dict((k.lower(), v) for k, v in self.itermerged()) ==
                dict((k.lower(), v) for k, v in other.itermerged()))

    def __ne__(self, other):
        return not self.__eq__(other)

    if not PY3:  # Python 2
        iterkeys = MutableMapping.iterkeys
        itervalues = MutableMapping.itervalues

    __marker = object()

    def __len__(self):
        return len(self._container)

    def __iter__(self):
        # Only provide the originally cased names
        for vals in self._container.values():
            yield vals[0]

    def pop(self, key, default=__marker):
        '''D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
        If key is not found, d is returned if given, otherwise KeyError is raised.
        '''
        # Using the MutableMapping function directly fails due to the private marker.
        # Using ordinary dict.pop would expose the internal structures.
        # So let's reinvent the wheel.
        try:
            value = self[key]
        except KeyError:
            if default is self.__marker:
                raise
            return default
        else:
            del self[key]
            return value

    def discard(self, key):
        # Delete if present; silently do nothing otherwise.
        try:
            del self[key]
        except KeyError:
            pass

    def add(self, key, val):
        """Adds a (name, value) pair, doesn't overwrite the value if it already
        exists.

        >>> headers = HTTPHeaderDict(foo='bar')
        >>> headers.add('Foo', 'baz')
        >>> headers['foo']
        'bar, baz'
        """
        key_lower = key.lower()
        new_vals = key, val
        # Keep the common case aka no item present as fast as possible
        vals = self._container.setdefault(key_lower, new_vals)
        if new_vals is not vals:
            # new_vals was not inserted, as there was a previous one
            if isinstance(vals, list):
                # If already several items got inserted, we have a list
                vals.append(val)
            else:
                # vals should be a tuple then, i.e. only one item so far
                # Need to convert the tuple to list for further extension
                self._container[key_lower] = [vals[0], vals[1], val]

    def extend(self, *args, **kwargs):
        """Generic import function for any type of header-like object.
        Adapted version of MutableMapping.update in order to insert items
        with self.add instead of self.__setitem__
        """
        if len(args) > 1:
            raise TypeError("extend() takes at most 1 positional "
                            "arguments ({0} given)".format(len(args)))
        other = args[0] if len(args) >= 1 else ()

        if isinstance(other, HTTPHeaderDict):
            for key, val in other.iteritems():
                self.add(key, val)
        elif isinstance(other, Mapping):
            for key in other:
                self.add(key, other[key])
        elif hasattr(other, "keys"):
            for key in other.keys():
                self.add(key, other[key])
        else:
            for key, value in other:
                self.add(key, value)

        for key, value in kwargs.items():
            self.add(key, value)

    def getlist(self, key):
        """Returns a list of all the values for the named field. Returns an
        empty list if the key doesn't exist."""
        try:
            vals = self._container[key.lower()]
        except KeyError:
            return []
        else:
            if isinstance(vals, tuple):
                return [vals[1]]
            else:
                return vals[1:]

    # Backwards compatibility for httplib
    getheaders = getlist
    getallmatchingheaders = getlist
    iget = getlist

    def __repr__(self):
        return "%s(%s)" % (type(self).__name__, dict(self.itermerged()))

    def _copy_from(self, other):
        # Internal bulk copy preserving multi-valued entries.
        for key in other:
            val = other.getlist(key)
            if isinstance(val, list):
                # Don't need to convert tuples
                val = list(val)
            self._container[key.lower()] = [key] + val

    def copy(self):
        clone = type(self)()
        clone._copy_from(self)
        return clone

    def iteritems(self):
        """Iterate over all header lines, including duplicate ones."""
        for key in self:
            vals = self._container[key.lower()]
            for val in vals[1:]:
                yield vals[0], val

    def itermerged(self):
        """Iterate over all headers, merging duplicate ones together."""
        for key in self:
            val = self._container[key.lower()]
            yield val[0], ', '.join(val[1:])

    def items(self):
        return list(self.iteritems())

    @classmethod
    def from_httplib(cls, message):  # Python 2
        """Read headers from a Python 2 httplib message object."""
        # python2.7 does not expose a proper API for exporting multiheaders
        # efficiently. This function re-reads raw lines from the message
        # object and extracts the multiheaders properly.
        headers = []

        for line in message.headers:
            if line.startswith((' ', '\t')):
                # Continuation line: fold into the previous header value.
                key, value = headers[-1]
                headers[-1] = (key, value + '\r\n' + line.rstrip())
                continue

            key, value = line.split(':', 1)
            headers.append((key, value.strip()))

        return cls(headers)
"""urllib3 - Thread-safe connection pooling and re-using."""from __future__ import absolute_importimport warningsfrom .connectionpool import (HTTPConnectionPool,HTTPSConnectionPool,connection_from_url)from . import exceptionsfrom .filepost import encode_multipart_formdatafrom .poolmanager import PoolManager, ProxyManager, proxy_from_urlfrom .response import HTTPResponsefrom .util.request import make_headersfrom .util.url import get_hostfrom .util.timeout import Timeoutfrom .util.retry import Retry# Set default logging handler to avoid "No handler found" warnings.import loggingtry: # Python 2.7+from logging import NullHandlerexcept ImportError:class NullHandler(logging.Handler):def emit(self, record):pass__author__ = 'Andrey Petrov (andrey.petrov@shazow.net)'__license__ = 'MIT'__version__ = '1.16'__all__ = ('HTTPConnectionPool','HTTPSConnectionPool','PoolManager','ProxyManager','HTTPResponse','Retry','Timeout','add_stderr_logger','connection_from_url','disable_warnings','encode_multipart_formdata','get_host','make_headers','proxy_from_url',)logging.getLogger(__name__).addHandler(NullHandler())def add_stderr_logger(level=logging.DEBUG):"""Helper for quickly adding a StreamHandler to the logger. Useful fordebugging.Returns the handler after adding it."""# This method needs to be in this __init__.py to get the __name__ correct# even if urllib3 is vendored within another package.logger = logging.getLogger(__name__)handler = logging.StreamHandler()handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s'))logger.addHandler(handler)logger.setLevel(level)logger.debug('Added a stderr logging handler to logger: %s', __name__)return handler# ... 
Clean up.del NullHandler# All warning filters *must* be appended unless you're really certain that they# shouldn't be: otherwise, it's very hard for users to use most Python# mechanisms to silence them.# SecurityWarning's always go off by default.warnings.simplefilter('always', exceptions.SecurityWarning, append=True)# SubjectAltNameWarning's should go off once per hostwarnings.simplefilter('default', exceptions.SubjectAltNameWarning, append=True)# InsecurePlatformWarning's don't vary between requests, so we keep it default.warnings.simplefilter('default', exceptions.InsecurePlatformWarning,append=True)# SNIMissingWarnings should go off only once.warnings.simplefilter('default', exceptions.SNIMissingWarning, append=True)def disable_warnings(category=exceptions.HTTPWarning):"""Helper for quickly disabling all urllib3 warnings."""warnings.simplefilter('ignore', category)
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

from . import constants
from .charsetprober import CharSetProber
from .codingstatemachine import CodingStateMachine
from .mbcssm import UTF8SMModel

# Per-character weight folded into the confidence estimate below: each
# observed multi-byte character halves the "not UTF-8" probability.
ONE_CHAR_PROB = 0.5


class UTF8Prober(CharSetProber):
    """Detect UTF-8 by driving every byte through the UTF-8 coding state
    machine and counting well-formed multi-byte characters."""

    def __init__(self):
        CharSetProber.__init__(self)
        self._mCodingSM = CodingStateMachine(UTF8SMModel)
        self.reset()

    def reset(self):
        # Reset base-class state, the state machine, and the multi-byte
        # character count.
        CharSetProber.reset(self)
        self._mCodingSM.reset()
        self._mNumOfMBChar = 0

    def get_charset_name(self):
        return "utf-8"

    def feed(self, aBuf):
        """Feed a chunk of bytes; returns the current detection state."""
        for c in aBuf:
            codingState = self._mCodingSM.next_state(c)
            if codingState == constants.eError:
                # Byte sequence illegal in UTF-8: definitely not UTF-8.
                self._mState = constants.eNotMe
                break
            elif codingState == constants.eItsMe:
                self._mState = constants.eFoundIt
                break
            elif codingState == constants.eStart:
                # A complete character was decoded; count it if multi-byte.
                if self._mCodingSM.get_current_charlen() >= 2:
                    self._mNumOfMBChar += 1

        if self.get_state() == constants.eDetecting:
            if self.get_confidence() > constants.SHORTCUT_THRESHOLD:
                self._mState = constants.eFoundIt

        return self.get_state()

    def get_confidence(self):
        # Confidence grows with each valid multi-byte character seen; once
        # six have been seen, the (high) raw value is returned directly.
        unlike = 0.99
        if self._mNumOfMBChar < 6:
            for i in range(0, self._mNumOfMBChar):
                unlike = unlike * ONE_CHAR_PROB
            return 1.0 - unlike
        else:
            return unlike
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#   Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

from . import constants
import sys
import codecs
from .latin1prober import Latin1Prober  # windows-1252
from .mbcsgroupprober import MBCSGroupProber  # multi-byte character sets
from .sbcsgroupprober import SBCSGroupProber  # single-byte character sets
from .escprober import EscCharSetProber  # ISO-2122, etc.
import re

# Probers scoring below this confidence are ignored in close().
MINIMUM_THRESHOLD = 0.20
# Input-state values: pure 7-bit ASCII, ASCII with escape sequences,
# or bytes with the high bit set.
ePureAscii = 0
eEscAscii = 1
eHighbyte = 2


class UniversalDetector:
    # Top-level detector: feed() it byte chunks, then close() for a result
    # dict of {'encoding': ..., 'confidence': ...}.

    def __init__(self):
        # Matches any byte with the high bit set.
        self._highBitDetector = re.compile(b'[\x80-\xFF]')
        # Matches ESC or '~{' — markers of escape-based encodings.
        self._escDetector = re.compile(b'(\033|~{)')
        self._mEscCharSetProber = None
        self._mCharSetProbers = []
        self.reset()

    def reset(self):
        """Reset to the initial state so the detector can be reused."""
        self.result = {'encoding': None, 'confidence': 0.0}
        self.done = False
        self._mStart = True
        self._mGotData = False
        self._mInputState = ePureAscii
        self._mLastChar = b''
        if self._mEscCharSetProber:
            self._mEscCharSetProber.reset()
        for prober in self._mCharSetProbers:
            prober.reset()

    def feed(self, aBuf):
        """Feed a chunk of bytes; sets self.done once a verdict is reached."""
        if self.done:
            return

        aLen = len(aBuf)
        if not aLen:
            return

        if not self._mGotData:
            # If the data starts with BOM, we know it is UTF
            if aBuf[:3] == codecs.BOM_UTF8:
                # EF BB BF  UTF-8 with BOM
                self.result = {'encoding': "UTF-8-SIG", 'confidence': 1.0}
            elif aBuf[:4] == codecs.BOM_UTF32_LE:
                # FF FE 00 00  UTF-32, little-endian BOM
                self.result = {'encoding': "UTF-32LE", 'confidence': 1.0}
            elif aBuf[:4] == codecs.BOM_UTF32_BE:
                # 00 00 FE FF  UTF-32, big-endian BOM
                self.result = {'encoding': "UTF-32BE", 'confidence': 1.0}
            elif aBuf[:4] == b'\xFE\xFF\x00\x00':
                # FE FF 00 00  UCS-4, unusual octet order BOM (3412)
                self.result = {'encoding': "X-ISO-10646-UCS-4-3412",
                               'confidence': 1.0}
            elif aBuf[:4] == b'\x00\x00\xFF\xFE':
                # 00 00 FF FE  UCS-4, unusual octet order BOM (2143)
                self.result = {'encoding': "X-ISO-10646-UCS-4-2143",
                               'confidence': 1.0}
            elif aBuf[:2] == codecs.BOM_LE:
                # FF FE  UTF-16, little endian BOM
                self.result = {'encoding': "UTF-16LE", 'confidence': 1.0}
            elif aBuf[:2] == codecs.BOM_BE:
                # FE FF  UTF-16, big endian BOM
                self.result = {'encoding': "UTF-16BE", 'confidence': 1.0}

        self._mGotData = True
        if self.result['encoding'] and (self.result['confidence'] > 0.0):
            # BOM gave a definitive answer; no probing needed.
            self.done = True
            return

        # Escalate the input state; it only moves forward
        # (pure ASCII -> esc ASCII / high-byte), never back.
        if self._mInputState == ePureAscii:
            if self._highBitDetector.search(aBuf):
                self._mInputState = eHighbyte
            elif ((self._mInputState == ePureAscii) and
                    self._escDetector.search(self._mLastChar + aBuf)):
                self._mInputState = eEscAscii

        # Keep the final byte so an escape split across chunks is still seen.
        self._mLastChar = aBuf[-1:]

        if self._mInputState == eEscAscii:
            # Lazily create the escape-sequence prober.
            if not self._mEscCharSetProber:
                self._mEscCharSetProber = EscCharSetProber()
            if self._mEscCharSetProber.feed(aBuf) == constants.eFoundIt:
                self.result = {'encoding': self._mEscCharSetProber.get_charset_name(),
                               'confidence': self._mEscCharSetProber.get_confidence()}
                self.done = True
        elif self._mInputState == eHighbyte:
            # Lazily create the full prober set the first time high-bit
            # bytes appear.
            if not self._mCharSetProbers:
                self._mCharSetProbers = [MBCSGroupProber(), SBCSGroupProber(),
                                         Latin1Prober()]
            for prober in self._mCharSetProbers:
                if prober.feed(aBuf) == constants.eFoundIt:
                    self.result = {'encoding': prober.get_charset_name(),
                                   'confidence': prober.get_confidence()}
                    self.done = True
                    break

    def close(self):
        """Finish detection and return the best result found (or None fields)."""
        if self.done:
            return
        if not self._mGotData:
            if constants._debug:
                sys.stderr.write('no data received!\n')
            return
        self.done = True

        if self._mInputState == ePureAscii:
            # Nothing but 7-bit bytes seen: plain ASCII.
            self.result = {'encoding': 'ascii', 'confidence': 1.0}
            return self.result

        if self._mInputState == eHighbyte:
            # Pick the highest-confidence prober above the minimum threshold.
            proberConfidence = None
            maxProberConfidence = 0.0
            maxProber = None
            for prober in self._mCharSetProbers:
                if not prober:
                    continue
                proberConfidence = prober.get_confidence()
                if proberConfidence > maxProberConfidence:
                    maxProberConfidence = proberConfidence
                    maxProber = prober
            if maxProber and (maxProberConfidence > MINIMUM_THRESHOLD):
                self.result = {'encoding': maxProber.get_charset_name(),
                               'confidence': maxProber.get_confidence()}
                return self.result

        if constants._debug:
            sys.stderr.write('no probers hit minimum threshhold\n')
            for prober in self._mCharSetProbers[0].mProbers:
                if not prober:
                    continue
                sys.stderr.write('%s confidence = %s\n' %
                                 (prober.get_charset_name(),
                                  prober.get_confidence()))
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

import sys
from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import SJISDistributionAnalysis
from .jpcntx import SJISContextAnalysis
from .mbcssm import SJISSMModel
from . import constants


class SJISProber(MultiByteCharSetProber):
    """Shift-JIS prober combining a coding state machine with character
    distribution and context analysis."""

    def __init__(self):
        MultiByteCharSetProber.__init__(self)
        self._mCodingSM = CodingStateMachine(SJISSMModel)
        self._mDistributionAnalyzer = SJISDistributionAnalysis()
        self._mContextAnalyzer = SJISContextAnalysis()
        self.reset()

    def reset(self):
        MultiByteCharSetProber.reset(self)
        self._mContextAnalyzer.reset()

    def get_charset_name(self):
        # The context analyzer distinguishes SHIFT_JIS from CP932 variants.
        return self._mContextAnalyzer.get_charset_name()

    def feed(self, aBuf):
        """Feed a chunk of bytes; returns the current detection state."""
        aLen = len(aBuf)
        for i in range(0, aLen):
            codingState = self._mCodingSM.next_state(aBuf[i])
            if codingState == constants.eError:
                if constants._debug:
                    sys.stderr.write(self.get_charset_name()
                                     + ' prober hit error at byte ' + str(i)
                                     + '\n')
                self._mState = constants.eNotMe
                break
            elif codingState == constants.eItsMe:
                self._mState = constants.eFoundIt
                break
            elif codingState == constants.eStart:
                charLen = self._mCodingSM.get_current_charlen()
                if i == 0:
                    # Character may straddle the chunk boundary: combine the
                    # carried-over byte with the first byte of this chunk.
                    self._mLastChar[1] = aBuf[0]
                    self._mContextAnalyzer.feed(self._mLastChar[2 - charLen:],
                                                charLen)
                    self._mDistributionAnalyzer.feed(self._mLastChar, charLen)
                else:
                    # NOTE(review): the +3 offset in the context slice looks
                    # Shift-JIS-specific (skips the lead byte) — inherited
                    # from the reference implementation; verify before changing.
                    self._mContextAnalyzer.feed(aBuf[i + 1 - charLen:i + 3
                                                     - charLen], charLen)
                    self._mDistributionAnalyzer.feed(aBuf[i - 1:i + 1],
                                                     charLen)

        # Remember the last byte for cross-chunk characters.
        self._mLastChar[0] = aBuf[aLen - 1]

        if self.get_state() == constants.eDetecting:
            if (self._mContextAnalyzer.got_enough_data() and
                    (self.get_confidence() > constants.SHORTCUT_THRESHOLD)):
                self._mState = constants.eFoundIt

        return self.get_state()

    def get_confidence(self):
        # Best of the two independent analyses.
        contxtCf = self._mContextAnalyzer.get_confidence()
        distribCf = self._mDistributionAnalyzer.get_confidence()
        return max(contxtCf, distribCf)
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#   Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

from .charsetgroupprober import CharSetGroupProber
from .sbcharsetprober import SingleByteCharSetProber
from .langcyrillicmodel import (Win1251CyrillicModel, Koi8rModel,
                                Latin5CyrillicModel, MacCyrillicModel,
                                Ibm866Model, Ibm855Model)
from .langgreekmodel import Latin7GreekModel, Win1253GreekModel
from .langbulgarianmodel import Latin5BulgarianModel, Win1251BulgarianModel
from .langhungarianmodel import Latin2HungarianModel, Win1250HungarianModel
from .langthaimodel import TIS620ThaiModel
from .langhebrewmodel import Win1255HebrewModel
from .hebrewprober import HebrewProber


class SBCSGroupProber(CharSetGroupProber):
    """Group prober bundling one SingleByteCharSetProber per supported
    single-byte language model, plus the Hebrew logical/visual pair."""

    def __init__(self):
        CharSetGroupProber.__init__(self)
        # One plain single-byte prober per language model.  Order matters:
        # it mirrors the reference implementation.
        self._mProbers = [
            SingleByteCharSetProber(model)
            for model in (
                Win1251CyrillicModel,
                Koi8rModel,
                Latin5CyrillicModel,
                MacCyrillicModel,
                Ibm866Model,
                Ibm855Model,
                Latin7GreekModel,
                Win1253GreekModel,
                Latin5BulgarianModel,
                Win1251BulgarianModel,
                Latin2HungarianModel,
                Win1250HungarianModel,
                TIS620ThaiModel,
            )
        ]
        # Hebrew needs a coordinating prober that arbitrates between the
        # logical (reversed=False) and visual (reversed=True) orderings.
        hebrew_prober = HebrewProber()
        logical_hebrew = SingleByteCharSetProber(Win1255HebrewModel,
                                                 False, hebrew_prober)
        visual_hebrew = SingleByteCharSetProber(Win1255HebrewModel,
                                                True, hebrew_prober)
        hebrew_prober.set_model_probers(logical_hebrew, visual_hebrew)
        self._mProbers.extend([hebrew_prober, logical_hebrew, visual_hebrew])
        self.reset()
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#   Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

import sys
from . import constants
from .charsetprober import CharSetProber
from .compat import wrap_ord

# Only the SAMPLE_SIZE most frequent characters take part in sequence
# (bigram) statistics.
SAMPLE_SIZE = 64
# Minimum number of observed sequences before shortcut decisions kick in.
SB_ENOUGH_REL_THRESHOLD = 1024
POSITIVE_SHORTCUT_THRESHOLD = 0.95
NEGATIVE_SHORTCUT_THRESHOLD = 0.05
# Character orders at or above this are symbols, ignored for statistics.
SYMBOL_CAT_ORDER = 250
NUMBER_OF_SEQ_CAT = 4
POSITIVE_CAT = NUMBER_OF_SEQ_CAT - 1
#NEGATIVE_CAT = 0


class SingleByteCharSetProber(CharSetProber):
    """Scores one single-byte charset model by bigram (character-pair)
    frequency statistics over the input."""

    def __init__(self, model, reversed=False, nameProber=None):
        # NOTE: the ``reversed`` parameter shadows the builtin; kept because
        # it is part of the constructor's public signature.
        CharSetProber.__init__(self)
        self._mModel = model
        # TRUE if we need to reverse every pair in the model lookup
        self._mReversed = reversed
        # Optional auxiliary prober for name decision
        self._mNameProber = nameProber
        self.reset()

    def reset(self):
        CharSetProber.reset(self)
        # char order of last character
        self._mLastOrder = 255
        self._mSeqCounters = [0] * NUMBER_OF_SEQ_CAT
        self._mTotalSeqs = 0
        self._mTotalChar = 0
        # characters that fall in our sampling range
        self._mFreqChar = 0

    def get_charset_name(self):
        # Defer to the name prober (e.g. HebrewProber) when one is attached.
        if self._mNameProber:
            return self._mNameProber.get_charset_name()
        else:
            return self._mModel['charsetName']

    def feed(self, aBuf):
        """Feed a chunk of bytes; returns the current detection state."""
        if not self._mModel['keepEnglishLetter']:
            aBuf = self.filter_without_english_letters(aBuf)
        aLen = len(aBuf)
        if not aLen:
            return self.get_state()
        for c in aBuf:
            # Map the byte to its frequency order in this model's alphabet.
            order = self._mModel['charToOrderMap'][wrap_ord(c)]
            if order < SYMBOL_CAT_ORDER:
                self._mTotalChar += 1
            if order < SAMPLE_SIZE:
                self._mFreqChar += 1
                if self._mLastOrder < SAMPLE_SIZE:
                    # Both characters are in the sampled range: record the
                    # bigram's category from the precedence matrix.
                    self._mTotalSeqs += 1
                    if not self._mReversed:
                        i = (self._mLastOrder * SAMPLE_SIZE) + order
                        model = self._mModel['precedenceMatrix'][i]
                    else:  # reverse the order of the letters in the lookup
                        i = (order * SAMPLE_SIZE) + self._mLastOrder
                        model = self._mModel['precedenceMatrix'][i]
                    self._mSeqCounters[model] += 1
            self._mLastOrder = order

        if self.get_state() == constants.eDetecting:
            if self._mTotalSeqs > SB_ENOUGH_REL_THRESHOLD:
                cf = self.get_confidence()
                if cf > POSITIVE_SHORTCUT_THRESHOLD:
                    if constants._debug:
                        sys.stderr.write('%s confidence = %s, we have a'
                                         'winner\n' %
                                         (self._mModel['charsetName'], cf))
                    self._mState = constants.eFoundIt
                elif cf < NEGATIVE_SHORTCUT_THRESHOLD:
                    if constants._debug:
                        sys.stderr.write('%s confidence = %s, below negative'
                                         'shortcut threshhold %s\n' %
                                         (self._mModel['charsetName'], cf,
                                          NEGATIVE_SHORTCUT_THRESHOLD))
                    self._mState = constants.eNotMe

        return self.get_state()

    def get_confidence(self):
        # Ratio of "positive" bigrams normalized by the model's typical
        # positive ratio, scaled by the share of frequent characters.
        r = 0.01
        if self._mTotalSeqs > 0:
            r = ((1.0 * self._mSeqCounters[POSITIVE_CAT]) / self._mTotalSeqs
                 / self._mModel['mTypicalPositiveRatio'])
            r = r * self._mFreqChar / self._mTotalChar
        if r >= 1.0:
            r = 0.99
        return r
######################## BEGIN LICENSE BLOCK ######################### The Original Code is mozilla.org code.## The Initial Developer of the Original Code is# Netscape Communications Corporation.# Portions created by the Initial Developer are Copyright (C) 1998# the Initial Developer. All Rights Reserved.## Contributor(s):# Mark Pilgrim - port to Python## This library is free software; you can redistribute it and/or# modify it under the terms of the GNU Lesser General Public# License as published by the Free Software Foundation; either# version 2.1 of the License, or (at your option) any later version.## This library is distributed in the hope that it will be useful,# but WITHOUT ANY WARRANTY; without even the implied warranty of# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU# Lesser General Public License for more details.## You should have received a copy of the GNU Lesser General Public# License along with this library; if not, write to the Free Software# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA# 02110-1301 USA######################### END LICENSE BLOCK #########################from .constants import eStart, eError, eItsMe# BIG5BIG5_cls = (1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as legal value1,1,1,1,1,1,0,0, # 08 - 0f1,1,1,1,1,1,1,1, # 10 - 171,1,1,0,1,1,1,1, # 18 - 1f1,1,1,1,1,1,1,1, # 20 - 271,1,1,1,1,1,1,1, # 28 - 2f1,1,1,1,1,1,1,1, # 30 - 371,1,1,1,1,1,1,1, # 38 - 3f2,2,2,2,2,2,2,2, # 40 - 472,2,2,2,2,2,2,2, # 48 - 4f2,2,2,2,2,2,2,2, # 50 - 572,2,2,2,2,2,2,2, # 58 - 5f2,2,2,2,2,2,2,2, # 60 - 672,2,2,2,2,2,2,2, # 68 - 6f2,2,2,2,2,2,2,2, # 70 - 772,2,2,2,2,2,2,1, # 78 - 7f4,4,4,4,4,4,4,4, # 80 - 874,4,4,4,4,4,4,4, # 88 - 8f4,4,4,4,4,4,4,4, # 90 - 974,4,4,4,4,4,4,4, # 98 - 9f4,3,3,3,3,3,3,3, # a0 - a73,3,3,3,3,3,3,3, # a8 - af3,3,3,3,3,3,3,3, # b0 - b73,3,3,3,3,3,3,3, # b8 - bf3,3,3,3,3,3,3,3, # c0 - c73,3,3,3,3,3,3,3, # c8 - cf3,3,3,3,3,3,3,3, # d0 - d73,3,3,3,3,3,3,3, # d8 - df3,3,3,3,3,3,3,3, # e0 - e73,3,3,3,3,3,3,3, # e8 
- ef3,3,3,3,3,3,3,3, # f0 - f73,3,3,3,3,3,3,0 # f8 - ff)BIG5_st = (eError,eStart,eStart, 3,eError,eError,eError,eError,#00-07eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eError,#08-0feError,eStart,eStart,eStart,eStart,eStart,eStart,eStart#10-17)Big5CharLenTable = (0, 1, 1, 2, 0)Big5SMModel = {'classTable': BIG5_cls,'classFactor': 5,'stateTable': BIG5_st,'charLenTable': Big5CharLenTable,'name': 'Big5'}# CP949CP949_cls = (1,1,1,1,1,1,1,1, 1,1,1,1,1,1,0,0, # 00 - 0f1,1,1,1,1,1,1,1, 1,1,1,0,1,1,1,1, # 10 - 1f1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 20 - 2f1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 30 - 3f1,4,4,4,4,4,4,4, 4,4,4,4,4,4,4,4, # 40 - 4f4,4,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 50 - 5f1,5,5,5,5,5,5,5, 5,5,5,5,5,5,5,5, # 60 - 6f5,5,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 70 - 7f0,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 80 - 8f6,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 90 - 9f6,7,7,7,7,7,7,7, 7,7,7,7,7,8,8,8, # a0 - af7,7,7,7,7,7,7,7, 7,7,7,7,7,7,7,7, # b0 - bf7,7,7,7,7,7,9,2, 2,3,2,2,2,2,2,2, # c0 - cf2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # d0 - df2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # e0 - ef2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,0, # f0 - ff)CP949_st = (#cls= 0 1 2 3 4 5 6 7 8 9 # previous state =eError,eStart, 3,eError,eStart,eStart, 4, 5,eError, 6, # eStarteError,eError,eError,eError,eError,eError,eError,eError,eError,eError, # eErroreItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe, # eItsMeeError,eError,eStart,eStart,eError,eError,eError,eStart,eStart,eStart, # 3eError,eError,eStart,eStart,eStart,eStart,eStart,eStart,eStart,eStart, # 4eError,eStart,eStart,eStart,eStart,eStart,eStart,eStart,eStart,eStart, # 5eError,eStart,eStart,eStart,eStart,eError,eError,eStart,eStart,eStart, # 6)CP949CharLenTable = (0, 1, 2, 0, 1, 1, 2, 2, 0, 2)CP949SMModel = {'classTable': CP949_cls,'classFactor': 10,'stateTable': CP949_st,'charLenTable': CP949CharLenTable,'name': 'CP949'}# EUC-JPEUCJP_cls = (4,4,4,4,4,4,4,4, # 00 - 074,4,4,4,4,4,5,5, # 08 - 0f4,4,4,4,4,4,4,4, # 10 - 174,4,4,5,4,4,4,4, # 18 - 
1f4,4,4,4,4,4,4,4, # 20 - 274,4,4,4,4,4,4,4, # 28 - 2f4,4,4,4,4,4,4,4, # 30 - 374,4,4,4,4,4,4,4, # 38 - 3f4,4,4,4,4,4,4,4, # 40 - 474,4,4,4,4,4,4,4, # 48 - 4f4,4,4,4,4,4,4,4, # 50 - 574,4,4,4,4,4,4,4, # 58 - 5f4,4,4,4,4,4,4,4, # 60 - 674,4,4,4,4,4,4,4, # 68 - 6f4,4,4,4,4,4,4,4, # 70 - 774,4,4,4,4,4,4,4, # 78 - 7f5,5,5,5,5,5,5,5, # 80 - 875,5,5,5,5,5,1,3, # 88 - 8f5,5,5,5,5,5,5,5, # 90 - 975,5,5,5,5,5,5,5, # 98 - 9f5,2,2,2,2,2,2,2, # a0 - a72,2,2,2,2,2,2,2, # a8 - af2,2,2,2,2,2,2,2, # b0 - b72,2,2,2,2,2,2,2, # b8 - bf2,2,2,2,2,2,2,2, # c0 - c72,2,2,2,2,2,2,2, # c8 - cf2,2,2,2,2,2,2,2, # d0 - d72,2,2,2,2,2,2,2, # d8 - df0,0,0,0,0,0,0,0, # e0 - e70,0,0,0,0,0,0,0, # e8 - ef0,0,0,0,0,0,0,0, # f0 - f70,0,0,0,0,0,0,5 # f8 - ff)EUCJP_st = (3, 4, 3, 5,eStart,eError,eError,eError,#00-07eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,#08-0feItsMe,eItsMe,eStart,eError,eStart,eError,eError,eError,#10-17eError,eError,eStart,eError,eError,eError, 3,eError,#18-1f3,eError,eError,eError,eStart,eStart,eStart,eStart#20-27)EUCJPCharLenTable = (2, 2, 2, 3, 1, 0)EUCJPSMModel = {'classTable': EUCJP_cls,'classFactor': 6,'stateTable': EUCJP_st,'charLenTable': EUCJPCharLenTable,'name': 'EUC-JP'}# EUC-KREUCKR_cls = (1,1,1,1,1,1,1,1, # 00 - 071,1,1,1,1,1,0,0, # 08 - 0f1,1,1,1,1,1,1,1, # 10 - 171,1,1,0,1,1,1,1, # 18 - 1f1,1,1,1,1,1,1,1, # 20 - 271,1,1,1,1,1,1,1, # 28 - 2f1,1,1,1,1,1,1,1, # 30 - 371,1,1,1,1,1,1,1, # 38 - 3f1,1,1,1,1,1,1,1, # 40 - 471,1,1,1,1,1,1,1, # 48 - 4f1,1,1,1,1,1,1,1, # 50 - 571,1,1,1,1,1,1,1, # 58 - 5f1,1,1,1,1,1,1,1, # 60 - 671,1,1,1,1,1,1,1, # 68 - 6f1,1,1,1,1,1,1,1, # 70 - 771,1,1,1,1,1,1,1, # 78 - 7f0,0,0,0,0,0,0,0, # 80 - 870,0,0,0,0,0,0,0, # 88 - 8f0,0,0,0,0,0,0,0, # 90 - 970,0,0,0,0,0,0,0, # 98 - 9f0,2,2,2,2,2,2,2, # a0 - a72,2,2,2,2,3,3,3, # a8 - af2,2,2,2,2,2,2,2, # b0 - b72,2,2,2,2,2,2,2, # b8 - bf2,2,2,2,2,2,2,2, # c0 - c72,3,2,2,2,2,2,2, # c8 - cf2,2,2,2,2,2,2,2, # d0 - d72,2,2,2,2,2,2,2, # d8 - df2,2,2,2,2,2,2,2, # e0 - e72,2,2,2,2,2,2,2, # e8 - 
ef2,2,2,2,2,2,2,2, # f0 - f72,2,2,2,2,2,2,0 # f8 - ff)EUCKR_st = (eError,eStart, 3,eError,eError,eError,eError,eError,#00-07eItsMe,eItsMe,eItsMe,eItsMe,eError,eError,eStart,eStart #08-0f)EUCKRCharLenTable = (0, 1, 2, 0)EUCKRSMModel = {'classTable': EUCKR_cls,'classFactor': 4,'stateTable': EUCKR_st,'charLenTable': EUCKRCharLenTable,'name': 'EUC-KR'}# EUC-TWEUCTW_cls = (2,2,2,2,2,2,2,2, # 00 - 072,2,2,2,2,2,0,0, # 08 - 0f2,2,2,2,2,2,2,2, # 10 - 172,2,2,0,2,2,2,2, # 18 - 1f2,2,2,2,2,2,2,2, # 20 - 272,2,2,2,2,2,2,2, # 28 - 2f2,2,2,2,2,2,2,2, # 30 - 372,2,2,2,2,2,2,2, # 38 - 3f2,2,2,2,2,2,2,2, # 40 - 472,2,2,2,2,2,2,2, # 48 - 4f2,2,2,2,2,2,2,2, # 50 - 572,2,2,2,2,2,2,2, # 58 - 5f2,2,2,2,2,2,2,2, # 60 - 672,2,2,2,2,2,2,2, # 68 - 6f2,2,2,2,2,2,2,2, # 70 - 772,2,2,2,2,2,2,2, # 78 - 7f0,0,0,0,0,0,0,0, # 80 - 870,0,0,0,0,0,6,0, # 88 - 8f0,0,0,0,0,0,0,0, # 90 - 970,0,0,0,0,0,0,0, # 98 - 9f0,3,4,4,4,4,4,4, # a0 - a75,5,1,1,1,1,1,1, # a8 - af1,1,1,1,1,1,1,1, # b0 - b71,1,1,1,1,1,1,1, # b8 - bf1,1,3,1,3,3,3,3, # c0 - c73,3,3,3,3,3,3,3, # c8 - cf3,3,3,3,3,3,3,3, # d0 - d73,3,3,3,3,3,3,3, # d8 - df3,3,3,3,3,3,3,3, # e0 - e73,3,3,3,3,3,3,3, # e8 - ef3,3,3,3,3,3,3,3, # f0 - f73,3,3,3,3,3,3,0 # f8 - ff)EUCTW_st = (eError,eError,eStart, 3, 3, 3, 4,eError,#00-07eError,eError,eError,eError,eError,eError,eItsMe,eItsMe,#08-0feItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eError,eStart,eError,#10-17eStart,eStart,eStart,eError,eError,eError,eError,eError,#18-1f5,eError,eError,eError,eStart,eError,eStart,eStart,#20-27eStart,eError,eStart,eStart,eStart,eStart,eStart,eStart #28-2f)EUCTWCharLenTable = (0, 0, 1, 2, 2, 2, 3)EUCTWSMModel = {'classTable': EUCTW_cls,'classFactor': 7,'stateTable': EUCTW_st,'charLenTable': EUCTWCharLenTable,'name': 'x-euc-tw'}# GB2312GB2312_cls = (1,1,1,1,1,1,1,1, # 00 - 071,1,1,1,1,1,0,0, # 08 - 0f1,1,1,1,1,1,1,1, # 10 - 171,1,1,0,1,1,1,1, # 18 - 1f1,1,1,1,1,1,1,1, # 20 - 271,1,1,1,1,1,1,1, # 28 - 2f3,3,3,3,3,3,3,3, # 30 - 373,3,1,1,1,1,1,1, # 38 - 3f2,2,2,2,2,2,2,2, # 40 - 
472,2,2,2,2,2,2,2, # 48 - 4f2,2,2,2,2,2,2,2, # 50 - 572,2,2,2,2,2,2,2, # 58 - 5f2,2,2,2,2,2,2,2, # 60 - 672,2,2,2,2,2,2,2, # 68 - 6f2,2,2,2,2,2,2,2, # 70 - 772,2,2,2,2,2,2,4, # 78 - 7f5,6,6,6,6,6,6,6, # 80 - 876,6,6,6,6,6,6,6, # 88 - 8f6,6,6,6,6,6,6,6, # 90 - 976,6,6,6,6,6,6,6, # 98 - 9f6,6,6,6,6,6,6,6, # a0 - a76,6,6,6,6,6,6,6, # a8 - af6,6,6,6,6,6,6,6, # b0 - b76,6,6,6,6,6,6,6, # b8 - bf6,6,6,6,6,6,6,6, # c0 - c76,6,6,6,6,6,6,6, # c8 - cf6,6,6,6,6,6,6,6, # d0 - d76,6,6,6,6,6,6,6, # d8 - df6,6,6,6,6,6,6,6, # e0 - e76,6,6,6,6,6,6,6, # e8 - ef6,6,6,6,6,6,6,6, # f0 - f76,6,6,6,6,6,6,0 # f8 - ff)GB2312_st = (eError,eStart,eStart,eStart,eStart,eStart, 3,eError,#00-07eError,eError,eError,eError,eError,eError,eItsMe,eItsMe,#08-0feItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eError,eError,eStart,#10-174,eError,eStart,eStart,eError,eError,eError,eError,#18-1feError,eError, 5,eError,eError,eError,eItsMe,eError,#20-27eError,eError,eStart,eStart,eStart,eStart,eStart,eStart #28-2f)# To be accurate, the length of class 6 can be either 2 or 4.# But it is not necessary to discriminate between the two since# it is used for frequency analysis only, and we are validing# each code range there as well. 
So it is safe to set it to be# 2 here.GB2312CharLenTable = (0, 1, 1, 1, 1, 1, 2)GB2312SMModel = {'classTable': GB2312_cls,'classFactor': 7,'stateTable': GB2312_st,'charLenTable': GB2312CharLenTable,'name': 'GB2312'}# Shift_JISSJIS_cls = (1,1,1,1,1,1,1,1, # 00 - 071,1,1,1,1,1,0,0, # 08 - 0f1,1,1,1,1,1,1,1, # 10 - 171,1,1,0,1,1,1,1, # 18 - 1f1,1,1,1,1,1,1,1, # 20 - 271,1,1,1,1,1,1,1, # 28 - 2f1,1,1,1,1,1,1,1, # 30 - 371,1,1,1,1,1,1,1, # 38 - 3f2,2,2,2,2,2,2,2, # 40 - 472,2,2,2,2,2,2,2, # 48 - 4f2,2,2,2,2,2,2,2, # 50 - 572,2,2,2,2,2,2,2, # 58 - 5f2,2,2,2,2,2,2,2, # 60 - 672,2,2,2,2,2,2,2, # 68 - 6f2,2,2,2,2,2,2,2, # 70 - 772,2,2,2,2,2,2,1, # 78 - 7f3,3,3,3,3,2,2,3, # 80 - 873,3,3,3,3,3,3,3, # 88 - 8f3,3,3,3,3,3,3,3, # 90 - 973,3,3,3,3,3,3,3, # 98 - 9f#0xa0 is illegal in sjis encoding, but some pages does#contain such byte. We need to be more error forgiven.2,2,2,2,2,2,2,2, # a0 - a72,2,2,2,2,2,2,2, # a8 - af2,2,2,2,2,2,2,2, # b0 - b72,2,2,2,2,2,2,2, # b8 - bf2,2,2,2,2,2,2,2, # c0 - c72,2,2,2,2,2,2,2, # c8 - cf2,2,2,2,2,2,2,2, # d0 - d72,2,2,2,2,2,2,2, # d8 - df3,3,3,3,3,3,3,3, # e0 - e73,3,3,3,3,4,4,4, # e8 - ef3,3,3,3,3,3,3,3, # f0 - f73,3,3,3,3,0,0,0) # f8 - ffSJIS_st = (eError,eStart,eStart, 3,eError,eError,eError,eError,#00-07eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,#08-0feItsMe,eItsMe,eError,eError,eStart,eStart,eStart,eStart #10-17)SJISCharLenTable = (0, 1, 1, 2, 0, 0)SJISSMModel = {'classTable': SJIS_cls,'classFactor': 6,'stateTable': SJIS_st,'charLenTable': SJISCharLenTable,'name': 'Shift_JIS'}# UCS2-BEUCS2BE_cls = (0,0,0,0,0,0,0,0, # 00 - 070,0,1,0,0,2,0,0, # 08 - 0f0,0,0,0,0,0,0,0, # 10 - 170,0,0,3,0,0,0,0, # 18 - 1f0,0,0,0,0,0,0,0, # 20 - 270,3,3,3,3,3,0,0, # 28 - 2f0,0,0,0,0,0,0,0, # 30 - 370,0,0,0,0,0,0,0, # 38 - 3f0,0,0,0,0,0,0,0, # 40 - 470,0,0,0,0,0,0,0, # 48 - 4f0,0,0,0,0,0,0,0, # 50 - 570,0,0,0,0,0,0,0, # 58 - 5f0,0,0,0,0,0,0,0, # 60 - 670,0,0,0,0,0,0,0, # 68 - 6f0,0,0,0,0,0,0,0, # 70 - 770,0,0,0,0,0,0,0, # 78 - 7f0,0,0,0,0,0,0,0, # 80 
- 870,0,0,0,0,0,0,0, # 88 - 8f0,0,0,0,0,0,0,0, # 90 - 970,0,0,0,0,0,0,0, # 98 - 9f0,0,0,0,0,0,0,0, # a0 - a70,0,0,0,0,0,0,0, # a8 - af0,0,0,0,0,0,0,0, # b0 - b70,0,0,0,0,0,0,0, # b8 - bf0,0,0,0,0,0,0,0, # c0 - c70,0,0,0,0,0,0,0, # c8 - cf0,0,0,0,0,0,0,0, # d0 - d70,0,0,0,0,0,0,0, # d8 - df0,0,0,0,0,0,0,0, # e0 - e70,0,0,0,0,0,0,0, # e8 - ef0,0,0,0,0,0,0,0, # f0 - f70,0,0,0,0,0,4,5 # f8 - ff)UCS2BE_st = (5, 7, 7,eError, 4, 3,eError,eError,#00-07eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,#08-0feItsMe,eItsMe, 6, 6, 6, 6,eError,eError,#10-176, 6, 6, 6, 6,eItsMe, 6, 6,#18-1f6, 6, 6, 6, 5, 7, 7,eError,#20-275, 8, 6, 6,eError, 6, 6, 6,#28-2f6, 6, 6, 6,eError,eError,eStart,eStart #30-37)UCS2BECharLenTable = (2, 2, 2, 0, 2, 2)UCS2BESMModel = {'classTable': UCS2BE_cls,'classFactor': 6,'stateTable': UCS2BE_st,'charLenTable': UCS2BECharLenTable,'name': 'UTF-16BE'}# UCS2-LEUCS2LE_cls = (0,0,0,0,0,0,0,0, # 00 - 070,0,1,0,0,2,0,0, # 08 - 0f0,0,0,0,0,0,0,0, # 10 - 170,0,0,3,0,0,0,0, # 18 - 1f0,0,0,0,0,0,0,0, # 20 - 270,3,3,3,3,3,0,0, # 28 - 2f0,0,0,0,0,0,0,0, # 30 - 370,0,0,0,0,0,0,0, # 38 - 3f0,0,0,0,0,0,0,0, # 40 - 470,0,0,0,0,0,0,0, # 48 - 4f0,0,0,0,0,0,0,0, # 50 - 570,0,0,0,0,0,0,0, # 58 - 5f0,0,0,0,0,0,0,0, # 60 - 670,0,0,0,0,0,0,0, # 68 - 6f0,0,0,0,0,0,0,0, # 70 - 770,0,0,0,0,0,0,0, # 78 - 7f0,0,0,0,0,0,0,0, # 80 - 870,0,0,0,0,0,0,0, # 88 - 8f0,0,0,0,0,0,0,0, # 90 - 970,0,0,0,0,0,0,0, # 98 - 9f0,0,0,0,0,0,0,0, # a0 - a70,0,0,0,0,0,0,0, # a8 - af0,0,0,0,0,0,0,0, # b0 - b70,0,0,0,0,0,0,0, # b8 - bf0,0,0,0,0,0,0,0, # c0 - c70,0,0,0,0,0,0,0, # c8 - cf0,0,0,0,0,0,0,0, # d0 - d70,0,0,0,0,0,0,0, # d8 - df0,0,0,0,0,0,0,0, # e0 - e70,0,0,0,0,0,0,0, # e8 - ef0,0,0,0,0,0,0,0, # f0 - f70,0,0,0,0,0,4,5 # f8 - ff)UCS2LE_st = (6, 6, 7, 6, 4, 3,eError,eError,#00-07eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,#08-0feItsMe,eItsMe, 5, 5, 5,eError,eItsMe,eError,#10-175, 5, 5,eError, 5,eError, 6, 6,#18-1f7, 6, 8, 8, 5, 5, 5,eError,#20-275, 5, 5,eError,eError,eError, 5, 
5,#28-2f5, 5, 5,eError, 5,eError,eStart,eStart #30-37)UCS2LECharLenTable = (2, 2, 2, 2, 2, 2)UCS2LESMModel = {'classTable': UCS2LE_cls,'classFactor': 6,'stateTable': UCS2LE_st,'charLenTable': UCS2LECharLenTable,'name': 'UTF-16LE'}# UTF-8UTF8_cls = (1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as a legal value1,1,1,1,1,1,0,0, # 08 - 0f1,1,1,1,1,1,1,1, # 10 - 171,1,1,0,1,1,1,1, # 18 - 1f1,1,1,1,1,1,1,1, # 20 - 271,1,1,1,1,1,1,1, # 28 - 2f1,1,1,1,1,1,1,1, # 30 - 371,1,1,1,1,1,1,1, # 38 - 3f1,1,1,1,1,1,1,1, # 40 - 471,1,1,1,1,1,1,1, # 48 - 4f1,1,1,1,1,1,1,1, # 50 - 571,1,1,1,1,1,1,1, # 58 - 5f1,1,1,1,1,1,1,1, # 60 - 671,1,1,1,1,1,1,1, # 68 - 6f1,1,1,1,1,1,1,1, # 70 - 771,1,1,1,1,1,1,1, # 78 - 7f2,2,2,2,3,3,3,3, # 80 - 874,4,4,4,4,4,4,4, # 88 - 8f4,4,4,4,4,4,4,4, # 90 - 974,4,4,4,4,4,4,4, # 98 - 9f5,5,5,5,5,5,5,5, # a0 - a75,5,5,5,5,5,5,5, # a8 - af5,5,5,5,5,5,5,5, # b0 - b75,5,5,5,5,5,5,5, # b8 - bf0,0,6,6,6,6,6,6, # c0 - c76,6,6,6,6,6,6,6, # c8 - cf6,6,6,6,6,6,6,6, # d0 - d76,6,6,6,6,6,6,6, # d8 - df7,8,8,8,8,8,8,8, # e0 - e78,8,8,8,8,9,8,8, # e8 - ef10,11,11,11,11,11,11,11, # f0 - f712,13,13,13,14,15,0,0 # f8 - ff)UTF8_st = (eError,eStart,eError,eError,eError,eError, 12, 10,#00-079, 11, 8, 7, 6, 5, 4, 3,#08-0feError,eError,eError,eError,eError,eError,eError,eError,#10-17eError,eError,eError,eError,eError,eError,eError,eError,#18-1feItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,#20-27eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,#28-2feError,eError, 5, 5, 5, 5,eError,eError,#30-37eError,eError,eError,eError,eError,eError,eError,eError,#38-3feError,eError,eError, 5, 5, 5,eError,eError,#40-47eError,eError,eError,eError,eError,eError,eError,eError,#48-4feError,eError, 7, 7, 7, 7,eError,eError,#50-57eError,eError,eError,eError,eError,eError,eError,eError,#58-5feError,eError,eError,eError, 7, 7,eError,eError,#60-67eError,eError,eError,eError,eError,eError,eError,eError,#68-6feError,eError, 9, 9, 9, 
9,eError,eError,#70-77eError,eError,eError,eError,eError,eError,eError,eError,#78-7feError,eError,eError,eError,eError, 9,eError,eError,#80-87eError,eError,eError,eError,eError,eError,eError,eError,#88-8feError,eError, 12, 12, 12, 12,eError,eError,#90-97eError,eError,eError,eError,eError,eError,eError,eError,#98-9feError,eError,eError,eError,eError, 12,eError,eError,#a0-a7eError,eError,eError,eError,eError,eError,eError,eError,#a8-afeError,eError, 12, 12, 12,eError,eError,eError,#b0-b7eError,eError,eError,eError,eError,eError,eError,eError,#b8-bfeError,eError,eStart,eStart,eStart,eStart,eError,eError,#c0-c7eError,eError,eError,eError,eError,eError,eError,eError #c8-cf)UTF8CharLenTable = (0, 1, 0, 0, 0, 0, 2, 3, 3, 3, 4, 4, 5, 5, 6, 6)UTF8SMModel = {'classTable': UTF8_cls,'classFactor': 16,'stateTable': UTF8_st,'charLenTable': UTF8CharLenTable,'name': 'UTF-8'}
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#   Shy Shalom - original C code
#   Proofpoint, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

from .charsetgroupprober import CharSetGroupProber
from .utf8prober import UTF8Prober
from .sjisprober import SJISProber
from .eucjpprober import EUCJPProber
from .gb2312prober import GB2312Prober
from .euckrprober import EUCKRProber
from .cp949prober import CP949Prober
from .big5prober import Big5Prober
from .euctwprober import EUCTWProber


class MBCSGroupProber(CharSetGroupProber):
    """Group prober bundling every multi-byte charset prober.

    The group base class feeds input to each member prober and reports
    the best-scoring candidate; this subclass only supplies the member
    list.
    """

    def __init__(self):
        CharSetGroupProber.__init__(self)
        # One prober instance per supported multi-byte encoding.
        self._mProbers = [
            UTF8Prober(),
            SJISProber(),
            EUCJPProber(),
            GB2312Prober(),
            EUCKRProber(),
            CP949Prober(),
            Big5Prober(),
            EUCTWProber(),
        ]
        self.reset()
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#   Shy Shalom - original C code
#   Proofpoint, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

import sys

from . import constants
from .charsetprober import CharSetProber


class MultiByteCharSetProber(CharSetProber):
    """Base class for multi-byte charset probers.

    Subclasses are expected to assign ``self._mCodingSM`` (a coding
    state machine that validates byte sequences) and
    ``self._mDistributionAnalyzer`` (a frequency analyzer that scores
    character pairs).
    """

    def __init__(self):
        CharSetProber.__init__(self)
        self._mDistributionAnalyzer = None  # assigned by subclass
        self._mCodingSM = None              # assigned by subclass
        # Two-byte window carried between feed() calls so a character
        # split across buffers is still analyzed.
        self._mLastChar = [0, 0]

    def reset(self):
        """Reset prober, state machine, and analyzer to initial state."""
        CharSetProber.reset(self)
        if self._mCodingSM:
            self._mCodingSM.reset()
        if self._mDistributionAnalyzer:
            self._mDistributionAnalyzer.reset()
        self._mLastChar = [0, 0]

    def get_charset_name(self):
        # Concrete subclasses return the charset name they probe for.
        pass

    def feed(self, aBuf):
        """Run the bytes in *aBuf* through the state machine and analyzer.

        Returns the prober state after consuming the buffer.
        """
        for pos in range(len(aBuf)):
            coding_state = self._mCodingSM.next_state(aBuf[pos])
            if coding_state == constants.eError:
                if constants._debug:
                    sys.stderr.write(self.get_charset_name()
                                     + ' prober hit error at byte ' + str(pos)
                                     + '\n')
                self._mState = constants.eNotMe
                break
            elif coding_state == constants.eItsMe:
                self._mState = constants.eFoundIt
                break
            elif coding_state == constants.eStart:
                char_len = self._mCodingSM.get_current_charlen()
                if pos == 0:
                    # Pair the first byte with the last byte of the
                    # previous buffer.
                    self._mLastChar[1] = aBuf[0]
                    self._mDistributionAnalyzer.feed(self._mLastChar,
                                                     char_len)
                else:
                    self._mDistributionAnalyzer.feed(aBuf[pos - 1:pos + 1],
                                                     char_len)

        # Remember the trailing byte for the next feed() call.
        self._mLastChar[0] = aBuf[len(aBuf) - 1]

        if self.get_state() == constants.eDetecting:
            if (self._mDistributionAnalyzer.got_enough_data() and
                    (self.get_confidence() > constants.SHORTCUT_THRESHOLD)):
                # Confident enough to shortcut further detection.
                self._mState = constants.eFoundIt

        return self.get_state()

    def get_confidence(self):
        """Delegate confidence scoring to the distribution analyzer."""
        return self._mDistributionAnalyzer.get_confidence()
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#   Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

from .charsetprober import CharSetProber
from .constants import eNotMe
from .compat import wrap_ord

FREQ_CAT_NUM = 4

# Character classes for Latin-1 analysis.
UDF = 0  # undefined
OTH = 1  # other
ASC = 2  # ascii capital letter
ASS = 3  # ascii small letter
ACV = 4  # accent capital vowel
ACO = 5  # accent capital other
ASV = 6  # accent small vowel
ASO = 7  # accent small other
CLASS_NUM = 8  # total classes

# Maps each byte value 0x00-0xFF to one of the classes above.
Latin1_CharToClass = (
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # 00 - 07
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # 08 - 0F
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # 10 - 17
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # 18 - 1F
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # 20 - 27
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # 28 - 2F
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # 30 - 37
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # 38 - 3F
    OTH, ASC, ASC, ASC, ASC, ASC, ASC, ASC,  # 40 - 47
    ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC,  # 48 - 4F
    ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC,  # 50 - 57
    ASC, ASC, ASC, OTH, OTH, OTH, OTH, OTH,  # 58 - 5F
    OTH, ASS, ASS, ASS, ASS, ASS, ASS, ASS,  # 60 - 67
    ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS,  # 68 - 6F
    ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS,  # 70 - 77
    ASS, ASS, ASS, OTH, OTH, OTH, OTH, OTH,  # 78 - 7F
    OTH, UDF, OTH, ASO, OTH, OTH, OTH, OTH,  # 80 - 87
    OTH, OTH, ACO, OTH, ACO, UDF, ACO, UDF,  # 88 - 8F
    UDF, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # 90 - 97
    OTH, OTH, ASO, OTH, ASO, UDF, ASO, ACO,  # 98 - 9F
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # A0 - A7
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # A8 - AF
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # B0 - B7
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # B8 - BF
    ACV, ACV, ACV, ACV, ACV, ACV, ACO, ACO,  # C0 - C7
    ACV, ACV, ACV, ACV, ACV, ACV, ACV, ACV,  # C8 - CF
    ACO, ACO, ACV, ACV, ACV, ACV, ACV, OTH,  # D0 - D7
    ACV, ACV, ACV, ACV, ACV, ACO, ACO, ACO,  # D8 - DF
    ASV, ASV, ASV, ASV, ASV, ASV, ASO, ASO,  # E0 - E7
    ASV, ASV, ASV, ASV, ASV, ASV, ASV, ASV,  # E8 - EF
    ASO, ASO, ASV, ASV, ASV, ASV, ASV, OTH,  # F0 - F7
    ASV, ASV, ASV, ASV, ASV, ASO, ASO, ASO,  # F8 - FF
)

# Likelihood of each (previous class, current class) pair:
# 0 : illegal
# 1 : very unlikely
# 2 : normal
# 3 : very likely
Latin1ClassModel = (
    # UDF OTH ASC ASS ACV ACO ASV ASO
    0, 0, 0, 0, 0, 0, 0, 0,  # UDF
    0, 3, 3, 3, 3, 3, 3, 3,  # OTH
    0, 3, 3, 3, 3, 3, 3, 3,  # ASC
    0, 3, 3, 3, 1, 1, 3, 3,  # ASS
    0, 3, 3, 3, 1, 2, 1, 2,  # ACV
    0, 3, 3, 3, 3, 3, 3, 3,  # ACO
    0, 3, 1, 3, 1, 1, 1, 3,  # ASV
    0, 3, 1, 3, 1, 1, 3, 3,  # ASO
)


class Latin1Prober(CharSetProber):
    """Prober for windows-1252 / Latin-1 text using class-pair frequencies."""

    def __init__(self):
        CharSetProber.__init__(self)
        self.reset()

    def reset(self):
        self._mLastCharClass = OTH
        # Counts of pair likelihood categories seen so far.
        self._mFreqCounter = [0] * FREQ_CAT_NUM
        CharSetProber.reset(self)

    def get_charset_name(self):
        return "windows-1252"

    def feed(self, aBuf):
        """Classify each byte and tally pair-likelihood frequencies."""
        aBuf = self.filter_with_english_letters(aBuf)
        for byte in aBuf:
            char_class = Latin1_CharToClass[wrap_ord(byte)]
            freq = Latin1ClassModel[(self._mLastCharClass * CLASS_NUM)
                                    + char_class]
            if freq == 0:
                # Illegal class transition: this is not Latin-1.
                self._mState = eNotMe
                break
            self._mFreqCounter[freq] += 1
            self._mLastCharClass = char_class

        return self.get_state()

    def get_confidence(self):
        """Score from the ratio of very-likely to very-unlikely pairs."""
        if self.get_state() == eNotMe:
            return 0.01

        total = sum(self._mFreqCounter)
        if total < 0.01:
            confidence = 0.0
        else:
            confidence = ((self._mFreqCounter[3]
                           - self._mFreqCounter[1] * 20.0)
                          / total)
        if confidence < 0.0:
            confidence = 0.0
        # Lower the confidence of latin1 so that other more accurate
        # detectors can take priority.
        confidence = confidence * 0.73
        return confidence
######################## BEGIN LICENSE BLOCK ######################### The Original Code is Mozilla Communicator client code.## The Initial Developer of the Original Code is# Netscape Communications Corporation.# Portions created by the Initial Developer are Copyright (C) 1998# the Initial Developer. All Rights Reserved.## Contributor(s):# Mark Pilgrim - port to Python## This library is free software; you can redistribute it and/or# modify it under the terms of the GNU Lesser General Public# License as published by the Free Software Foundation; either# version 2.1 of the License, or (at your option) any later version.## This library is distributed in the hope that it will be useful,# but WITHOUT ANY WARRANTY; without even the implied warranty of# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU# Lesser General Public License for more details.## You should have received a copy of the GNU Lesser General Public# License along with this library; if not, write to the Free Software# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA# 02110-1301 USA######################### END LICENSE BLOCK ########################## 255: Control characters that usually does not exist in any text# 254: Carriage/Return# 253: symbol (punctuation) that does not belong to word# 252: 0 - 9# The following result for thai was collected from a limited sample (1M).# Character Mapping Table:TIS620CharToOrderMap = (255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30253,182,106,107,100,183,184,185,101, 94,186,187,108,109,110,111, # 40188,189,190, 89, 95,112,113,191,192,193,194,253,253,253,253,253, # 50253, 64, 72, 73,114, 74,115,116,102, 81,201,117, 90,103, 78, 82, # 6096,202, 91, 79, 84,104,105, 97, 98, 92,203,253,253,253,253,253, # 70209,210,211,212,213, 
88,214,215,216,217,218,219,220,118,221,222,223,224, 99, 85, 83,225,226,227,228,229,230,231,232,233,234,235,236, 5, 30,237, 24,238, 75, 8, 26, 52, 34, 51,119, 47, 58, 57,49, 53, 55, 43, 20, 19, 44, 14, 48, 3, 17, 25, 39, 62, 31, 54,45, 9, 16, 2, 61, 15,239, 12, 42, 46, 18, 21, 76, 4, 66, 63,22, 10, 1, 36, 23, 13, 40, 27, 32, 35, 86,240,241,242,243,244,11, 28, 41, 29, 33,245, 50, 37, 6, 7, 67, 77, 38, 93,246,247,68, 56, 59, 65, 69, 60, 70, 80, 71, 87,248,249,250,251,252,253,)# Model Table:# total sequences: 100%# first 512 sequences: 92.6386%# first 1024 sequences:7.3177%# rest sequences: 1.0230%# negative sequences: 0.0436%ThaiLangModel = (0,1,3,3,3,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,0,0,3,3,3,0,3,3,3,3,0,3,3,0,0,0,1,3,0,3,3,2,3,3,0,1,2,3,3,3,3,0,2,0,2,0,0,3,2,1,2,2,3,0,3,3,2,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,0,3,2,3,0,2,2,2,3,0,2,3,0,0,0,0,1,0,1,2,3,1,1,3,2,2,0,1,1,0,0,1,0,0,0,0,0,0,0,1,1,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2,3,3,2,3,2,3,3,2,2,2,3,1,2,3,0,3,3,2,2,1,2,3,3,1,2,0,1,3,0,1,0,0,1,0,0,0,0,0,0,0,1,1,3,3,2,2,3,3,3,3,1,2,3,3,3,3,3,2,2,2,2,3,3,2,2,3,3,2,2,3,2,3,2,2,3,3,1,2,3,1,2,2,3,3,1,0,2,1,0,0,3,1,2,1,0,0,1,0,0,0,0,0,0,1,0,1,3,3,3,3,3,3,2,2,3,3,3,3,2,3,2,2,3,3,2,2,3,2,2,2,2,1,1,3,1,2,1,1,3,2,1,0,2,1,0,1,0,1,1,0,1,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,3,3,3,2,3,2,3,3,2,2,3,2,3,3,2,3,1,1,2,3,2,2,2,3,2,2,2,2,2,1,2,1,2,2,1,1,3,3,2,1,0,1,2,2,0,1,3,0,0,0,1,1,0,0,0,0,0,2,3,0,0,2,1,1,3,3,2,3,3,2,0,0,3,3,0,3,3,0,2,2,3,1,2,2,1,1,1,0,2,2,2,0,2,2,1,1,0,2,1,0,2,0,0,2,0,1,0,0,1,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,0,3,3,2,3,3,2,0,0,3,3,0,2,3,0,2,1,2,2,2,2,1,2,0,0,2,2,2,0,2,2,1,1,0,2,1,0,2,0,0,2,0,1,1,0,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,3,3,2,3,2,3,2,0,2,2,1,3,2,1,3,2,1,2,3,2,2,3,0,2,3,2,2,1,2,2,2,2,1,2,2,0,0,0,0,2,0,1,2,0,1,1,1,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,1,0,3,3,2,3,3,2,3,2,2,2,3,2,2,3,2,2,1,2,3,2,2,3,1,3,2,2,2,3,2,2,2,3,3,2,1,3,0,1,1,1,0,2,1,1,1,1,1,0,1,0,1,1,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,3,0,3,3,3,3,3,0,0,3,0,2,2,3,3,3,3,3,0,0,0,1,1,3,0,0,0,0,2,0,0,1,0,0
,0,0,0,0,0,2,3,0,0,0,3,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,2,0,3,3,3,3,0,0,2,3,0,0,3,0,3,3,2,3,3,3,3,3,0,0,3,3,3,0,0,0,3,3,0,0,3,0,0,0,0,2,0,0,2,1,1,3,0,0,1,0,0,2,3,0,1,0,0,0,0,0,0,0,1,0,3,3,3,3,2,3,3,3,3,3,3,3,1,2,1,3,3,2,2,1,2,2,2,3,1,1,2,0,2,1,2,1,2,2,1,0,0,0,1,1,0,1,0,1,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,3,0,2,1,2,3,3,3,0,2,0,2,2,0,2,1,3,2,2,1,2,1,0,0,2,2,1,0,2,1,2,2,0,1,1,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,3,3,3,3,2,1,3,3,1,1,3,0,2,3,1,1,3,2,1,1,2,0,2,2,3,2,1,1,1,1,1,2,3,0,0,1,3,1,2,1,2,0,3,0,0,0,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,3,3,1,1,3,2,3,3,3,1,3,2,1,3,2,1,3,2,2,2,2,1,3,3,1,2,1,3,1,2,3,0,2,1,1,3,2,2,2,1,2,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,3,2,3,2,3,3,2,3,2,3,2,3,3,2,1,0,3,2,2,2,1,2,2,2,1,2,2,1,2,1,1,2,2,2,3,0,1,3,1,1,1,1,0,1,1,0,2,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,3,3,3,3,2,3,2,2,1,1,3,2,3,2,3,2,0,3,2,2,1,2,0,2,2,2,1,2,2,2,2,1,3,2,1,2,2,1,0,2,0,1,0,0,1,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,1,3,3,3,3,3,2,3,1,2,3,3,2,2,3,0,1,1,2,0,3,3,2,2,3,0,1,1,3,0,0,0,0,3,1,0,3,3,0,2,0,2,1,0,0,3,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,3,3,2,3,2,3,3,0,1,3,1,1,2,1,2,1,1,3,1,1,0,2,3,1,1,1,1,1,1,1,1,3,1,1,2,2,2,2,1,1,1,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,3,2,2,1,1,2,1,3,3,2,3,2,2,3,2,2,3,1,2,2,1,2,0,3,2,1,2,2,2,2,2,1,3,2,1,2,2,2,1,1,1,1,0,0,1,1,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,3,3,3,3,3,3,3,3,1,3,3,0,2,1,0,3,2,0,0,3,1,0,1,1,0,1,0,0,0,0,0,1,1,0,0,1,0,3,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,2,2,2,3,0,0,1,3,0,3,2,0,3,2,2,3,3,3,3,3,1,0,2,2,2,0,2,2,1,2,0,2,3,0,0,0,0,1,0,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,3,0,2,3,1,3,3,2,3,3,0,3,3,0,3,2,2,3,2,3,3,3,0,0,2,2,3,0,1,1,1,3,0,0,3,0,0,0,2,2,0,1,3,0,1,2,2,2,3,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,3,2,3,3,2,0,3,3,2,2,3,1,3,2,1,3,2,0,1,2,2,0,2,3,2,1,0,3,0,0,0,0,3,0,0,2,3,1,3,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,1,3,2,2,2,1,2,0,1,3,1,1,3,1,3,0,0,2,1,1,1,1,2,1,1,1,0,2,1,0,1,1,2,0,0,0,3,1,1,0,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,0,3,1,0,0,0,1,0,3,3,3,3,2,2,2,2,2,1,3,1,1
,1,2,0,1,1,2,1,2,1,3,2,0,0,3,1,1,1,1,1,3,1,0,2,3,0,0,0,3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,0,3,3,0,2,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,1,3,0,0,1,2,0,0,2,0,3,3,2,3,3,3,2,3,0,0,2,2,2,0,0,0,2,2,0,0,1,0,0,0,0,3,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,1,2,3,1,3,3,0,0,1,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,3,1,2,3,1,2,3,1,0,3,0,2,2,1,0,2,1,1,2,0,1,0,0,1,1,1,1,0,1,0,0,1,0,0,0,0,1,1,0,3,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,3,3,3,2,1,0,1,1,1,3,1,2,2,2,2,2,2,1,1,1,1,0,3,1,0,1,3,1,1,1,1,1,1,0,2,0,1,3,1,1,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,1,3,0,2,2,1,3,3,2,3,3,0,1,1,0,2,2,1,2,1,3,3,1,0,0,3,2,0,0,0,0,2,1,0,1,0,0,0,0,1,2,0,1,1,3,1,1,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,3,0,0,3,0,3,1,0,1,1,1,3,2,0,0,0,3,0,0,0,0,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,3,3,1,3,2,1,3,3,1,2,2,0,1,2,1,0,1,2,0,0,0,0,0,3,0,0,0,3,0,0,0,0,3,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,1,2,0,3,3,3,2,2,0,1,1,0,1,3,0,0,0,2,2,0,0,0,0,3,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,2,3,1,2,0,0,2,1,0,3,1,0,1,2,0,1,1,1,1,3,0,0,3,1,1,0,2,2,1,1,0,2,0,0,0,0,0,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,3,1,2,0,0,2,2,0,1,2,0,1,0,1,3,1,2,1,0,0,0,2,0,3,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,1,1,2,2,0,0,0,2,0,2,1,0,1,1,0,1,1,1,2,1,0,0,1,1,1,0,2,1,1,1,0,1,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,0,2,0,1,3,1,1,1,1,0,0,0,0,3,2,0,1,0,0,0,1,2,0,0,0,1,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,2,3,2,2,0,0,0,1,0,0,0,0,2,3,2,1,2,2,3,0,0,0,2,3,1,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,1,1
,0,1,0,0,0,0,0,0,0,0,0,3,3,2,2,0,1,0,0,0,0,2,0,2,0,1,0,0,0,1,1,0,0,0,2,1,0,1,0,1,1,0,0,0,1,0,2,0,0,1,0,3,0,1,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,3,1,0,0,1,0,0,0,0,0,1,1,2,0,0,0,0,1,0,0,1,3,1,0,0,0,0,1,1,0,0,0,1,0,0,0,0,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,3,3,1,1,1,1,2,3,0,0,2,1,1,1,1,1,0,2,1,1,0,0,0,2,1,0,1,2,1,1,0,1,2,1,0,3,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,3,1,0,0,0,0,0,0,0,3,0,0,0,3,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,3,2,0,0,0,0,0,0,1,2,1,0,1,1,0,2,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,1,3,0,1,0,0,0,2,0,0,0,0,0,0,0,1,2,0,0,0,0,0,3,3,0,0,1,1,2,0,0,1,2,1,0,1,1,1,0,1,1,0,0,2,1,1,0,1,0,0,1,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,1,0,0,0,0,1,0,0,0,0,3,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,0,0,1,1,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,1,2,0,1,2,0,0,1,1,0,2,0,1,0,0,1,0,0,0,0,1,0,0,0,2,0,0,0,0,1,0,0,1,0,1,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,1,0,1,1,0,2,1,3,0,0,0,0,1,1,0,0,0,0,0,0,0,3,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,1,0,1,0,0,2,0,0,2,0,0,1,1,2,0,0,1,1,0,0,0,1,0,0,0,1,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0
,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,3,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,2,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,2,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,)TIS620ThaiModel = {'charToOrderMap': TIS620CharToOrderMap,'precedenceMatrix': ThaiLangModel,'mTypicalPositiveRatio': 0.926386,'keepEnglishLetter': False,'charsetName': "TIS-620"}# flake8: noqa
######################## BEGIN LICENSE BLOCK ######################### The Original Code is Mozilla Communicator client code.## The Initial Developer of the Original Code is# Netscape Communications Corporation.# Portions created by the Initial Developer are Copyright (C) 1998# the Initial Developer. All Rights Reserved.## Contributor(s):# Mark Pilgrim - port to Python## This library is free software; you can redistribute it and/or# modify it under the terms of the GNU Lesser General Public# License as published by the Free Software Foundation; either# version 2.1 of the License, or (at your option) any later version.## This library is distributed in the hope that it will be useful,# but WITHOUT ANY WARRANTY; without even the implied warranty of# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU# Lesser General Public License for more details.## You should have received a copy of the GNU Lesser General Public# License along with this library; if not, write to the Free Software# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA# 02110-1301 USA######################### END LICENSE BLOCK ########################## 255: Control characters that usually does not exist in any text# 254: Carriage/Return# 253: symbol (punctuation) that does not belong to word# 252: 0 - 9# Character Mapping Table:Latin2_HungarianCharToOrderMap = (255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47,46, 71, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253,253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8,23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 
11,253,253,253,253,253,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197, 75,198,199,200,201,202,203,204,205,79,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221, 51, 81,222, 78,223,224,225,226, 44,227,228,229, 61,230,231,232,233,234, 58,235, 66, 59,236,237,238, 60, 69, 63,239,240,241,82, 14, 74,242, 70, 80,243, 72,244, 15, 83, 77, 84, 30, 76, 85,245,246,247, 25, 73, 42, 24,248,249,250, 31, 56, 29,251,252,253,)win1250HungarianCharToOrderMap = (255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47,46, 72, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253,253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8,23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180, 78,181, 69,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197, 76,198,199,200,201,202,203,204,205,81,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221, 51, 83,222, 80,223,224,225,226, 44,227,228,229, 61,230,231,232,233,234, 58,235, 66, 59,236,237,238, 60, 70, 63,239,240,241,84, 14, 75,242, 71, 82,243, 73,244, 15, 85, 79, 86, 30, 77, 87,245,246,247, 25, 74, 42, 24,248,249,250, 31, 56, 29,251,252,253,)# Model Table:# total sequences: 100%# first 512 sequences: 94.7368%# first 1024 sequences:5.2623%# rest sequences: 0.8894%# negative sequences: 0.0009%HungarianLangModel = 
(0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,2,3,3,1,1,2,2,2,2,2,1,2,3,2,2,3,3,3,3,3,2,3,3,3,3,3,3,1,2,3,3,3,3,2,3,3,1,1,3,3,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,3,2,1,3,3,3,3,3,2,3,3,3,3,3,1,1,2,3,3,3,3,3,3,3,1,1,3,2,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,3,3,3,3,3,3,3,3,3,3,3,1,1,2,3,3,3,1,3,3,3,3,3,1,3,3,2,2,0,3,2,3,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,3,3,2,3,3,2,2,3,2,3,2,0,3,2,2,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,3,3,3,3,3,3,2,3,3,3,3,3,2,3,3,3,1,2,3,2,2,3,1,2,3,3,2,2,0,3,3,3,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,3,2,3,3,3,3,2,3,3,3,3,0,2,3,2,0,0,0,1,1,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,3,3,3,3,3,3,3,3,3,3,3,1,1,1,3,3,2,1,3,2,2,3,2,1,3,2,2,1,0,3,3,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,3,2,2,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,3,2,2,3,1,1,3,2,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,1,3,3,3,3,3,2,2,1,3,3,3,0,1,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,2,0,3,2,3,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,1,0,3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,1,3,2,2,2,3,1,1,3,3,1,1,0,3,3,2,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,3,3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,2,3,3,3,3,3,1,2,3,2,2,0,2,2,2,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,3,3,3,2,2,2,3,1,3,3,2,2,1,3,3,3,1,1,3,1,2,3,2,3,2,2,2,1,0,2,2,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,2,2,3,2,1,0,3,2,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,1,0,3,3,3,3,0,2,3,0,0,2,1,0,1,0,0,0,0,0,0,0,0,0,0
,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,3,3,3,3,3,2,2,3,3,2,2,2,2,3,3,0,1,2,3,2,3,2,2,3,2,1,2,0,2,2,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,3,3,3,3,3,3,1,2,3,3,3,2,1,2,3,3,2,2,2,3,2,3,3,1,3,3,1,1,0,2,3,2,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,3,3,3,1,2,2,2,2,3,3,3,1,1,1,3,3,1,1,3,1,1,3,2,1,2,3,1,1,0,2,2,2,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,3,3,3,2,1,2,1,1,3,3,1,1,1,1,3,3,1,1,2,2,1,2,1,1,2,2,1,1,0,2,2,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,3,3,3,1,1,2,1,1,3,3,1,0,1,1,3,3,2,0,1,1,2,3,1,0,2,2,1,0,0,1,3,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,3,2,1,3,3,3,3,3,1,2,3,2,3,3,2,1,1,3,2,3,2,1,2,2,0,1,2,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,3,3,3,3,2,2,2,2,3,1,2,2,1,1,3,3,0,3,2,1,2,3,2,1,3,3,1,1,0,2,1,3,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,3,3,3,2,2,2,3,2,3,3,3,2,1,1,3,3,1,1,1,2,2,3,2,3,2,2,2,1,0,2,2,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,3,3,3,3,3,0,0,3,3,2,3,0,0,0,2,3,3,1,0,1,2,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,1,2,3,3,3,3,3,1,2,3,3,2,2,1,1,0,3,3,2,2,1,2,2,1,0,2,2,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,3,2,2,1,3,1,2,3,3,2,2,1,1,2,2,1,1,1,1,3,2,1,1,1,1,2,1,0,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,3,3,1,1,1,1,1,3,3,3,0,1,1,3,3,1,1,1,1,1,2,2,0,3,1,1,2,0,2,1,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,3,1,0,1,2,1,2,2,0,1,2,3,1,2,0,0,0,2,1,1,1,1,1,2,0,0,1,1,0,0,0,0,1,2,1,2,2,2,1,2,1,2,0,2,0,2,2,1,1,2,1,1,2,1,1,1,0,1,0,0,0,1,1,0,1,1,1,2,3,2,3,3,0,1,2,2,3,1,0,1,0,2,1,2,2,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,3,3,2,2,1,0,0,3,2,3,2,0,0,0,1,1,3,0,0,1,1,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,1,1,2,2,3,3,1,0,1,3,2,3,1,1,1
,0,1,1,1,1,1,3,1,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,1,1,1,2,2,2,1,0,1,2,3,3,2,0,0,0,2,1,1,1,2,1,1,1,0,1,1,1,0,0,0,1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,2,1,1,1,1,1,1,0,1,1,1,0,0,1,1,3,2,2,1,0,0,1,1,2,2,0,3,0,1,2,1,1,0,0,1,1,1,0,1,1,1,1,0,2,1,1,1,2,2,1,1,1,2,1,2,1,1,1,1,1,1,1,2,1,1,1,2,3,1,1,1,1,1,1,1,1,1,0,1,2,3,3,0,1,0,0,0,3,3,1,0,0,1,2,2,1,0,0,0,0,2,0,0,1,1,1,0,2,1,1,1,2,1,1,1,1,1,1,2,1,1,0,1,1,0,1,1,1,0,1,2,1,1,0,1,1,1,1,1,1,1,0,1,2,3,3,0,1,0,0,0,2,2,0,0,0,0,1,2,2,0,0,0,0,1,0,0,1,1,0,0,2,0,1,0,2,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1,3,2,2,0,1,0,1,0,2,3,2,0,0,1,2,2,1,0,0,1,1,1,0,0,2,1,0,1,2,2,1,1,2,1,1,1,1,1,1,2,1,1,1,1,1,1,0,2,1,0,1,1,0,1,1,1,0,1,1,2,1,1,0,1,2,2,2,0,0,1,0,0,2,2,1,1,0,0,2,1,1,0,0,0,1,2,0,0,2,1,0,0,2,1,1,1,2,1,1,1,1,2,1,2,1,1,1,2,2,1,1,2,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1,1,2,3,0,0,0,1,0,3,2,1,0,0,1,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,2,1,1,1,0,0,0,1,0,1,1,1,1,1,2,0,0,1,0,0,0,2,0,0,1,1,1,1,1,1,1,1,0,1,3,0,0,2,1,2,2,1,0,0,2,1,2,2,0,0,0,2,1,1,1,0,1,1,0,0,1,1,2,0,0,0,1,2,1,2,2,1,1,2,1,2,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,0,0,1,1,3,2,0,0,0,1,0,2,2,2,0,0,0,2,2,1,0,0,0,0,3,1,1,1,1,0,0,2,1,1,1,2,1,0,1,1,1,0,1,1,1,1,1,1,1,0,2,1,0,0,1,0,1,1,0,1,1,1,1,1,1,0,1,2,3,2,0,0,0,1,0,2,2,0,0,0,0,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,1,0,2,1,1,1,1,2,1,2,1,2,0,1,1,1,0,2,1,1,1,2,1,1,1,1,0,1,1,1,1,1,0,1,3,1,1,2,2,2,3,2,1,1,2,2,1,1,0,1,0,2,2,1,1,1,1,1,0,0,1,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,0,0,0,0,0,2,2,0,0,0,0,2,2,1,0,0,0,1,1,0,0,1,2,0,0,2,1,1,1,2,2,1,1,1,2,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,1,1,0,1,2,1,1,1,0,1,1,0,0,1,2,3,2,1,0,0,2,0,1,1,0,0,0,1,1,1,1,0,1,1,0,0,1,0,0,0,0,0,1,2,1,2,1,2,1,1,1,2,0,2,1,1,1,0,1,2,0,0,1,1,1,0,0,0,0,0,0,0,0,0,2,3,2,0,0,0,0,0,1,1,2,1,0,0,1,1,1,0,0,0,0,2,0,0,1,1,0,0,2,1,1,1,2,1,1,1,1,1,1,2,1,0,1,1,1,1,0,2,1,1,1,1,1,1,0,1,0,1,1,1,1,1,0,1,1,2,2,0,1,1,1,0,2,2,2,0,0,0,3,2,1,0,0,0,1,1,0,0,1,1,0,1,1,1,0,0,1,1,0,1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,0
,0,1,1,1,0,1,0,1,2,1,0,2,1,1,2,2,1,1,2,1,1,1,0,0,0,1,1,0,1,1,1,1,0,0,1,1,1,0,0,0,1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,1,0,1,2,3,0,0,0,1,0,2,2,0,0,0,0,2,2,0,0,0,0,0,1,0,0,1,0,0,0,2,0,1,0,2,1,1,1,1,1,0,2,0,0,0,1,2,1,1,1,1,0,1,2,0,1,0,1,0,1,1,1,0,1,0,1,2,2,2,0,0,0,1,0,2,1,2,0,0,0,1,1,2,0,0,0,0,1,0,0,1,1,0,0,2,1,0,1,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1,1,2,2,0,0,0,1,0,2,2,2,0,0,0,1,1,0,0,0,0,0,1,1,0,2,0,0,1,1,1,0,1,1,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,0,0,1,1,0,1,0,1,1,1,1,1,0,0,0,1,1,0,0,1,0,1,2,1,0,0,1,1,1,2,0,0,0,1,1,0,1,0,1,1,0,0,1,0,0,0,0,0,0,2,1,2,1,1,1,1,1,2,0,2,0,1,1,0,1,2,1,0,1,1,1,0,0,0,0,0,0,1,0,0,2,1,1,0,1,2,0,0,1,1,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,2,1,0,1,2,2,1,1,1,1,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,0,1,0,1,1,1,1,1,0,1,1,2,2,0,0,0,0,0,1,1,0,0,0,0,2,1,0,0,0,0,0,2,0,0,2,2,0,0,2,0,0,1,2,1,1,1,1,1,1,1,0,1,1,0,1,1,0,1,0,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,2,0,0,3,1,0,2,1,1,1,0,0,1,1,1,0,0,0,1,1,0,0,0,1,0,0,1,0,1,0,1,2,1,0,1,1,1,2,1,1,0,1,1,1,1,1,0,0,0,1,1,1,1,1,0,1,0,0,0,1,0,0,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,2,0,0,0,2,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,1,0,1,2,1,1,1,2,1,1,1,0,1,1,2,1,0,0,0,0,1,1,1,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,1,1,1,1,1,0,0,1,1,2,1,0,0,0,1,1,0,0,0,1,1,0,0,1,0,1,0,0,0,1,2,1,1,1,1,1,1,1,1,0,1,0,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0,2,0,0,0,1,1,1,1,0,0,1,1,0,0,0,0,0,1,1,1,2,0,0,1,0,0,1,0,1,0,0,0,0,1,1,1,1,1,1,1,1,2,0,1,1,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,1,1,1,1,1,0,0,2,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,1,1,1,1,1,1,0,1,1,0,1,0,1,1,0,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,1,1,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,0,0,1,1,0,1,0,1,0,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,1,0,0,1,1,0,1,0,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0,2,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,0,1,0,0,1,0,1,0,1,1,1,0,0,1,0
,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,1,1,1,0,0,0,1,1,1,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,0,1,1,0,1,0,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,)Latin2HungarianModel = {'charToOrderMap': Latin2_HungarianCharToOrderMap,'precedenceMatrix': HungarianLangModel,'mTypicalPositiveRatio': 0.947368,'keepEnglishLetter': True,'charsetName': "ISO-8859-2"}Win1250HungarianModel = {'charToOrderMap': win1250HungarianCharToOrderMap,'precedenceMatrix': HungarianLangModel,'mTypicalPositiveRatio': 0.947368,'keepEnglishLetter': True,'charsetName': "windows-1250"}# flake8: noqa
######################## BEGIN LICENSE BLOCK ######################### The Original Code is Mozilla Universal charset detector code.## The Initial Developer of the Original Code is# Simon Montagu# Portions created by the Initial Developer are Copyright (C) 2005# the Initial Developer. All Rights Reserved.## Contributor(s):# Mark Pilgrim - port to Python# Shy Shalom - original C code# Shoshannah Forbes - original C code (?)## This library is free software; you can redistribute it and/or# modify it under the terms of the GNU Lesser General Public# License as published by the Free Software Foundation; either# version 2.1 of the License, or (at your option) any later version.## This library is distributed in the hope that it will be useful,# but WITHOUT ANY WARRANTY; without even the implied warranty of# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU# Lesser General Public License for more details.## You should have received a copy of the GNU Lesser General Public# License along with this library; if not, write to the Free Software# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA# 02110-1301 USA######################### END LICENSE BLOCK ########################## 255: Control characters that usually does not exist in any text# 254: Carriage/Return# 253: symbol (punctuation) that does not belong to word# 252: 0 - 9# Windows-1255 language model# Character Mapping Table:win1255_CharToOrderMap = (255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30253, 69, 91, 79, 80, 92, 89, 97, 90, 68,111,112, 82, 73, 95, 85, # 4078,121, 86, 71, 67,102,107, 84,114,103,115,253,253,253,253,253, # 50253, 50, 74, 60, 61, 42, 76, 70, 64, 53,105, 93, 56, 65, 54, 49, # 6066,110, 51, 43, 44, 63, 81, 77, 98, 75,108,253,253,253,253,253, # 
70124,202,203,204,205, 40, 58,206,207,208,209,210,211,212,213,214,215, 83, 52, 47, 46, 72, 32, 94,216,113,217,109,218,219,220,221,34,116,222,118,100,223,224,117,119,104,125,225,226, 87, 99,227,106,122,123,228, 55,229,230,101,231,232,120,233, 48, 39, 57,234,30, 59, 41, 88, 33, 37, 36, 31, 29, 35,235, 62, 28,236,126,237,238, 38, 45,239,240,241,242,243,127,244,245,246,247,248,249,250,9, 8, 20, 16, 3, 2, 24, 14, 22, 1, 25, 15, 4, 11, 6, 23,12, 19, 13, 26, 18, 27, 21, 17, 7, 10, 5,251,252,128, 96,253,)# Model Table:# total sequences: 100%# first 512 sequences: 98.4004%# first 1024 sequences: 1.5981%# rest sequences: 0.087%# negative sequences: 0.0015%HebrewLangModel = (0,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,3,2,1,2,0,1,0,0,3,0,3,1,0,0,1,3,2,0,1,1,2,0,2,2,2,1,1,1,1,2,1,1,1,2,0,0,2,2,0,1,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,1,2,1,2,1,2,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,1,2,1,3,1,1,0,0,2,0,0,0,1,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,1,2,2,1,3,1,2,1,1,2,2,0,0,2,2,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,1,1,0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,2,2,2,3,2,1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,3,2,2,3,2,2,2,1,2,2,2,2,1,2,1,1,2,2,0,1,2,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,0,2,2,2,2,2,0,2,0,2,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,0,2,2,2,0,2,1,2,2,2,0,0,2,1,0,0,0,0,1,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,2,1,2,3,2,2,2,1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,0,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,2,0,2,0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,2,0,0,1,0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,2
,2,3,2,1,2,1,1,1,0,1,1,1,1,1,3,0,1,0,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,1,1,0,0,1,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2,0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,3,3,3,3,3,3,3,3,3,2,3,3,3,2,1,2,3,3,2,3,3,3,3,2,3,2,1,2,0,2,1,2,0,2,0,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,3,3,3,3,3,3,3,3,3,2,3,3,3,1,2,2,3,3,2,3,2,3,2,2,3,1,2,2,0,2,2,2,0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,1,0,0,1,0,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,2,2,3,3,3,3,1,3,2,2,2,0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,2,3,2,2,2,1,2,2,0,2,2,2,2,0,2,0,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,1,3,2,3,3,2,3,3,2,2,1,2,2,2,2,2,2,0,2,1,2,1,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0,3,3,3,3,3,3,2,3,2,3,3,2,3,3,3,3,2,3,2,3,3,3,3,3,2,2,2,2,2,2,2,1,0,2,0,1,2,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,3,3,3,3,3,3,3,3,3,2,1,2,3,3,3,3,3,3,3,2,3,2,3,2,1,2,3,0,2,1,2,2,0,2,1,1,2,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,2,0,3,3,3,3,3,3,3,3,3,2,3,3,3,3,2,1,3,1,2,2,2,1,2,3,3,1,2,1,2,2,2,2,0,1,1,1,1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,3,3,3,3,3,3,3,3,3,3,0,2,3,3,3,1,3,3,3,1,2,2,2,2,1,1,2,2,2,2,2,2,0,2,0,1,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,3,3,3,3,3,3,2,3,3,3,2,2,3,3,3,2,1,2,3,2,3,2,2,2,2,1,2,1,1,1,2,2,0,2,1,1,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,1,0,0,0,0,0,1,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,3,3,3,3,2,3,3,2,3,1,2,2,2,2,3,2,3,1,1,2,2,1,2,2,1,1,0,2,2,2,2,0,1,0,1,2,2,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,3,0,0,1,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
,3,0,1,0,1,0,1,1,0,1,1,0,0,0,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,3,2,2,1,2,2,2,2,2,2,2,1,2,2,1,2,2,1,1,1,1,1,1,1,1,2,1,1,0,3,3,3,0,3,0,2,2,2,2,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,2,2,2,3,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,2,1,2,2,2,1,1,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,2,2,2,2,2,2,0,2,2,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,1,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,1,0,2,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,3,1,1,2,2,2,2,2,1,2,2,2,1,1,2,2,2,2,2,2,2,1,2,2,1,0,1,1,1,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,2,1,1,1,1,2,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,1,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0,1,0,0,2,1,1,2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,1,2,1,2,1,1,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,2,1,2,2,2,2,2,2,2,2,2,2,1,2,1,2,1,1,2,1,1,1,2,1,2,1,2,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,1,2,2,2,1,2,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,2,1,2,1,1,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,1,1,1,1,1,1,0,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,1,1,1,0,1,0,0,0,1,1,0,1,1,0,0,0,0,0,1,1,0,0,0,1,1,1,2,1,2,2,2,0,2,0,2,0,1,1,2,1,1,1,1,2,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0
,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,1,0,1,2,2,0,1,0,0,1,1,2,2,1,2,0,2,0,0,0,1,2,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,2,1,2,0,2,0,0,1,1,1,1,1,1,0,1,0,0,0,1,0,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,1,2,2,0,0,1,0,0,0,1,0,0,1,1,1,2,1,0,1,1,1,0,1,0,1,1,1,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,2,1,0,2,0,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,0,0,1,0,1,1,1,1,0,0,0,0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,1,0,0,0,1,1,0,1,2,0,1,0,1,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,1,1,0,1,0,0,1,1,2,1,1,2,0,1,0,0,0,1,1,0,1,1,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,0,0,2,1,1,2,0,2,0,0,0,1,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,2,2,1,2,1,1,0,1,0,0,0,1,1,0,1,2,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,2,1,1,1,0,2,1,1,0,0,0,2,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,0,2,1,1,0,1,0,0,0,1,1,0,1,2,2,1,1,1,0,1,1,0,1,1,0,1,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,0,1,2,1,0,2,0,0,0,1,1,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0,0,2,0,2,1,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,1,0,1,0,0,1,0,0,0,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,1,1,0,0,1,0,0,2,1,1,1,1,1,0,1,0,0,0,0,1,0,1,0,1,1,1,2,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,2,1,0,0,0,0,0,1,1,1,1,1,0,1,0,0,0,1,1,0,0,)Win1255HebrewModel = {'charToOrderMap': win1255_CharToOrderMap,'precedenceMatrix': HebrewLangModel,'mTypicalPositiveRatio': 0.984004,'keepEnglishLetter': False,'charsetName': "windows-1255"}# flake8: noqa
######################## BEGIN LICENSE BLOCK ######################### The Original Code is Mozilla Communicator client code.## The Initial Developer of the Original Code is# Netscape Communications Corporation.# Portions created by the Initial Developer are Copyright (C) 1998# the Initial Developer. All Rights Reserved.## Contributor(s):# Mark Pilgrim - port to Python## This library is free software; you can redistribute it and/or# modify it under the terms of the GNU Lesser General Public# License as published by the Free Software Foundation; either# version 2.1 of the License, or (at your option) any later version.## This library is distributed in the hope that it will be useful,# but WITHOUT ANY WARRANTY; without even the implied warranty of# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU# Lesser General Public License for more details.## You should have received a copy of the GNU Lesser General Public# License along with this library; if not, write to the Free Software# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA# 02110-1301 USA######################### END LICENSE BLOCK ########################## 255: Control characters that usually does not exist in any text# 254: Carriage/Return# 253: symbol (punctuation) that does not belong to word# 252: 0 - 9# Character Mapping Table:Latin7_CharToOrderMap = (255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30253, 82,100,104, 94, 98,101,116,102,111,187,117, 92, 88,113, 85, # 4079,118,105, 83, 67,114,119, 95, 99,109,188,253,253,253,253,253, # 50253, 72, 70, 80, 81, 60, 96, 93, 89, 68,120, 97, 77, 86, 69, 55, # 6078,115, 65, 66, 58, 76,106,103, 87,107,112,253,253,253,253,253, # 70255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 
80255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 90253,233, 90,253,253,253,253,253,253,253,253,253,253, 74,253,253, # a0253,253,253,253,247,248, 61, 36, 46, 71, 73,253, 54,253,108,123, # b0110, 31, 51, 43, 41, 34, 91, 40, 52, 47, 44, 53, 38, 49, 59, 39, # c035, 48,250, 37, 33, 45, 56, 50, 84, 57,120,121, 17, 18, 22, 15, # d0124, 1, 29, 20, 21, 3, 32, 13, 25, 5, 11, 16, 10, 6, 30, 4, # e09, 8, 14, 7, 2, 12, 28, 23, 42, 24, 64, 75, 19, 26, 27,253, # f0)win1253_CharToOrderMap = (255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30253, 82,100,104, 94, 98,101,116,102,111,187,117, 92, 88,113, 85, # 4079,118,105, 83, 67,114,119, 95, 99,109,188,253,253,253,253,253, # 50253, 72, 70, 80, 81, 60, 96, 93, 89, 68,120, 97, 77, 86, 69, 55, # 6078,115, 65, 66, 58, 76,106,103, 87,107,112,253,253,253,253,253, # 70255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 80255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 90253,233, 61,253,253,253,253,253,253,253,253,253,253, 74,253,253, # a0253,253,253,253,247,253,253, 36, 46, 71, 73,253, 54,253,108,123, # b0110, 31, 51, 43, 41, 34, 91, 40, 52, 47, 44, 53, 38, 49, 59, 39, # c035, 48,250, 37, 33, 45, 56, 50, 84, 57,120,121, 17, 18, 22, 15, # d0124, 1, 29, 20, 21, 3, 32, 13, 25, 5, 11, 16, 10, 6, 30, 4, # e09, 8, 14, 7, 2, 12, 28, 23, 42, 24, 64, 75, 19, 26, 27,253, # f0)# Model Table:# total sequences: 100%# first 512 sequences: 98.2851%# first 1024 sequences:1.7001%# rest sequences: 0.0359%# negative sequences: 0.0148%GreekLangModel = 
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,2,2,3,3,3,3,3,3,3,3,1,3,3,3,0,2,2,3,3,0,3,0,3,2,0,3,3,3,0,3,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,3,3,3,3,0,3,3,0,3,2,3,3,0,3,2,3,3,3,0,0,3,0,3,0,3,3,2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,3,2,2,3,3,3,3,3,3,3,3,0,3,3,3,3,0,2,3,3,0,3,3,3,3,2,3,3,3,0,2,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,2,1,3,3,3,3,2,3,3,2,3,3,2,0,0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,3,3,3,0,3,3,3,3,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,2,3,3,0,2,0,1,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,3,3,3,3,3,2,3,0,0,0,0,3,3,0,3,1,3,3,3,0,3,3,0,3,3,3,3,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,3,3,3,3,0,3,0,3,3,3,3,3,0,3,2,2,2,3,0,2,3,3,3,3,3,2,3,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,3,3,3,3,3,2,2,2,3,3,3,3,0,3,1,3,3,3,3,2,3,3,3,3,3,3,3,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,3,3,3,3,2,0,3,0,0,0,3,3,2,3,3,3,3,3,0,0,3,2,3,0,2,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,3,3,3,3,0,0,3,3,0,2,3,0,3,0,3,3,3,0,0,3,0,3,0,2,2,3,3,0,0,0,0,1,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,3,3,3,3,2,0,3,2,3,3,3,3,0,3,3,3,3,3,0,3,3,2,3,2,3,3,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,3,2,3,2,3,3,3,3,3,3,0,2,3,2,3,2,2,2,3,2,3,3,2,3,0,2,2,2,3,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,3,3,3,2,3,3,0,0,3,0,3,0,0,0,3,2,0,3,0,3,0,0,2,0,2,0,0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,3,3,3,0,3,3,3,3,3,3,0,3,3,0,3,0,0,0,3,3,0,3,3,3,0,0,1,2,3,0,3,0,0,0,0,0,0,0
,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,3,3,3,3,2,0,0,3,2,2,3,3,0,3,3,3,3,3,2,1,3,0,3,2,3,3,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,3,0,2,3,3,3,3,3,3,0,0,3,0,3,0,0,0,3,3,0,3,2,3,0,0,3,3,3,0,3,0,0,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,3,3,3,0,3,3,3,3,3,3,0,0,3,0,3,0,0,0,3,2,0,3,2,3,0,0,3,2,3,0,2,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,1,2,2,3,3,3,3,3,3,0,2,3,0,3,0,0,0,3,3,0,3,0,2,0,0,2,3,1,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,3,3,3,3,0,3,0,3,3,2,3,0,3,3,3,3,3,3,0,3,3,3,0,2,3,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,3,3,3,0,0,3,0,0,0,3,3,0,3,0,2,3,3,0,0,3,0,3,0,3,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,3,3,3,3,3,3,0,0,3,0,2,0,0,0,3,3,0,3,0,3,0,0,2,0,2,0,0,0,0,0,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,3,3,3,3,3,0,3,0,2,0,3,2,0,3,2,3,2,3,0,0,3,2,3,2,3,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,2,3,3,3,3,3,0,0,0,3,0,2,1,0,0,3,2,2,2,0,3,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,3,3,3,2,0,3,0,3,0,3,3,0,2,1,2,3,3,0,0,3,0,3,0,3,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,3,3,0,3,3,3,3,3,3,0,2,3,0,3,0,0,0,2,1,0,2,2,3,0,0,2,2,2,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,2,3,3,3,2,3,0,0,1,3,0,2,0,0,0,0,3,0,1,0,2,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,3,3,3,3,1,0,3,0,0,0,3,2,0,3,2,3,3,3,0,0,3,0,3,2,2,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,3,3,3,0,0,3,0,0,0,0,2,0,2,3,3,2,2,2,2,3,0,2,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,3,3,3,2,0,0,0,0,0,0,2,3,0,2,0,2,3,2,0,0,3,0,3,0,3,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,2,3,3,2,2,3,0,2,0
,3,0,0,0,2,0,0,0,0,1,2,0,2,0,2,0,0,2,0,2,0,2,2,0,0,1,0,2,2,2,0,2,2,2,0,2,2,2,0,0,2,0,0,1,0,0,0,0,0,2,0,3,3,2,0,0,0,0,0,0,1,3,0,2,0,2,2,2,0,0,2,0,3,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,2,3,2,0,2,2,0,2,0,2,2,0,2,0,2,2,2,0,0,0,0,0,0,2,3,0,0,0,2,0,1,2,0,0,0,0,2,2,0,0,0,2,1,0,2,2,0,0,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,2,1,0,2,3,2,2,3,2,3,2,0,0,3,3,3,0,0,3,2,0,0,0,1,1,0,2,0,2,2,0,2,0,2,0,2,2,0,0,2,0,2,2,2,0,2,2,2,2,0,0,2,0,0,0,2,0,1,0,0,0,0,0,3,0,3,3,2,2,0,3,0,0,0,2,2,0,2,2,2,1,2,0,0,1,2,2,0,0,3,0,0,0,2,0,1,2,0,0,0,1,2,0,0,0,0,0,0,0,2,2,0,1,0,0,2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,3,2,2,0,0,0,2,0,2,3,3,0,2,0,0,0,0,0,0,2,2,2,0,2,2,0,2,0,2,0,2,2,0,0,2,2,2,2,1,0,0,2,2,0,2,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,2,0,3,2,3,0,0,0,3,0,0,2,2,0,2,0,2,2,2,0,0,2,0,0,0,0,0,0,0,0,2,0,0,2,2,0,0,2,2,2,0,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,3,2,0,2,2,2,2,2,0,0,0,2,0,0,0,0,2,0,1,0,0,2,0,1,0,0,0,0,2,2,2,0,2,2,0,1,2,0,2,2,2,0,2,2,2,2,1,2,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,2,0,2,2,0,0,0,0,1,2,1,0,0,2,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,2,3,0,0,2,0,0,0,2,2,0,2,0,0,0,1,0,0,2,0,2,0,2,2,0,0,0,0,0,0,2,0,0,0,0,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,2,2,3,2,2,0,0,0,0,0,0,1,3,0,2,0,2,2,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,2,0,3,2,0,2,0,0,0,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,1,1,0,0,2,1,2,0,2,2,0,1,0,0,1,0,0,0,2,0,0,0,0,0,0,0,3,0,2,2,2,0,0,2,0,0,0,2,0,0,0,2,3,0,2,0,0,0,0,0,0,2,2,0,0,0,2,0,1,2,0,0,0,1,2,2,1,0,0,0,2,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,2,0,2,2,0,2,0,0,2,0,0,0,0,1,2,1,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,3,1,2,2,0,2,0,0,0,0,2,0,0,0,2,0,0,3,0,0,0,0,2,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
,0,0,0,0,0,0,0,0,0,2,1,0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,2,0,2,2,0,0,2,2,2,2,2,0,1,2,0,0,0,2,2,0,1,0,2,0,0,2,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,0,0,0,0,2,0,2,0,0,0,0,2,0,1,2,0,0,0,0,2,2,1,0,1,0,1,0,2,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,2,0,0,2,2,0,0,0,0,1,0,0,0,0,0,0,2,0,2,2,0,0,0,0,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0,0,2,2,2,2,0,0,0,3,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0,0,2,0,0,0,0,1,2,0,0,0,0,0,0,2,2,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,2,0,2,2,2,0,0,2,0,0,0,0,0,0,0,2,2,2,0,0,0,2,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,2,1,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,3,0,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,2,0,0,2,0,0,0,0,2,2,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,2,2,1,0,0,0,0,0,0,2,0,0,2,0,2,2,2,0,0,0,0,0,0,2,0,0,0,0,2,0,0,2,0,0,2,0,2,2,0,0,0,0,2,0,2,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0,0,0,3,0,0,0,2,2,0,2,2,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0,0,0,0,2,2,2,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,1,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,2,0,0,0,0,2,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,2,0,2,0,0,0,0,0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,2,0,0,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,)Latin7GreekModel = {'charToOrderMap': Latin7_CharToOrderMap,'precedenceMatrix': GreekLangModel,'mTypicalPositiveRatio': 0.982851,'keepEnglishLetter': False,'charsetName': "ISO-8859-7"}Win1253GreekModel = {'charToOrderMap': win1253_CharToOrderMap,'precedenceMatrix': GreekLangModel,'mTypicalPositiveRatio': 0.982851,'keepEnglishLetter': False,'charsetName': "windows-1253"}# flake8: noqa
######################## BEGIN LICENSE BLOCK ######################### The Original Code is Mozilla Communicator client code.## The Initial Developer of the Original Code is# Netscape Communications Corporation.# Portions created by the Initial Developer are Copyright (C) 1998# the Initial Developer. All Rights Reserved.## Contributor(s):# Mark Pilgrim - port to Python## This library is free software; you can redistribute it and/or# modify it under the terms of the GNU Lesser General Public# License as published by the Free Software Foundation; either# version 2.1 of the License, or (at your option) any later version.## This library is distributed in the hope that it will be useful,# but WITHOUT ANY WARRANTY; without even the implied warranty of# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU# Lesser General Public License for more details.## You should have received a copy of the GNU Lesser General Public# License along with this library; if not, write to the Free Software# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA# 02110-1301 USA######################### END LICENSE BLOCK ########################## KOI8-R language model# Character Mapping Table:KOI8R_CharToOrderMap = (255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 6067,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, # 80207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, # 90223,224,225, 68,226,227,228,229,230,231,232,233,234,235,236,237, # 
a0238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253, # b027, 3, 21, 28, 13, 2, 39, 19, 26, 4, 23, 11, 8, 12, 5, 1, # c015, 16, 9, 7, 6, 14, 24, 10, 17, 18, 20, 25, 30, 29, 22, 54, # d059, 37, 44, 58, 41, 48, 53, 46, 55, 42, 60, 36, 49, 38, 31, 34, # e035, 43, 45, 32, 40, 52, 56, 33, 61, 62, 51, 57, 47, 63, 50, 70, # f0)win1251_CharToOrderMap = (255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 6067,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246, 68,247,248,249,250,251,252,253,37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15,9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16,)latin5_CharToOrderMap = (255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 6067,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 
70191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15,9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16,239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255,)macCyrillic_CharToOrderMap = (255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 6067,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 7037, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252, 68, 16,3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15,9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27,255,)IBM855_CharToOrderMap = (255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 
40155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 6067,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70191,192,193,194, 68,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217, 27, 59, 54, 70,3, 37, 21, 44, 28, 58, 13, 41, 2, 48, 39, 53, 19, 46,218,219,220,221,222,223,224, 26, 55, 4, 42,225,226,227,228, 23, 60,229,230,231,232,233,234,235, 11, 36,236,237,238,239,240,241,242,243,8, 49, 12, 38, 5, 31, 1, 34, 15,244,245,246,247, 35, 16,248,43, 9, 45, 7, 32, 6, 40, 14, 52, 24, 56, 10, 33, 17, 61,249,250, 18, 62, 20, 51, 25, 57, 30, 47, 29, 63, 22, 50,251,252,255,)IBM866_CharToOrderMap = (255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 6067,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 7037, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16,239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255,)# Model Table:# total sequences: 100%# first 512 sequences: 97.6601%# first 1024 sequences: 2.3389%# rest sequences: 0.1237%# negative sequences: 0.0009%RussianLangModel = 
(0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,1,3,3,3,3,1,3,3,3,2,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,2,2,2,2,2,0,0,2,3,3,3,2,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,2,3,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,3,3,2,2,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,2,3,3,1,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,2,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1,0,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,3,3,3,3,3,3,3,2,2,2,3,1,3,3,1,3,3,3,3,2,2,3,0,2,2,2,3,3,2,1,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,3,3,3,3,3,3,2,3,3,3,3,3,2,2,3,2,3,3,3,2,1,2,2,0,1,2,2,2,2,2,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,3,0,2,2,3,3,2,1,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,3,3,3,3,3,3,2,3,3,1,2,3,2,2,3,2,3,3,3,3,2,2,3,0,3,2,2,3,1,1,1,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,3,3,3,3,2,2,2,0,3,3,3,2,2,2,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,2,3,2,2,0,1,3,2,1,2,2,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1,1,3,0,1,1,1,1,2,1,1,0,2,2,2,1,2,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,3,3,3,3,3,2,3,3,2,2,2,2,1,3,2,3,2,3,2,1,2,2,0,1,1,2,1,2,1,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,2,3,3,3,2,2,2,2,0,2,2,2,2,3,1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,3,2,3,2,2,3,3,3,3,3,3,3,3,3,1,3,2,0,0,3,3,3,3,2,3,3,3,3,2,3,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,3,3,3,3,2,2,3,3,0,2,1,0,3,2,3,2,3,0,0,1,2,0,0,1,0,1,2,1,1,0,0,0,0,0,0,0,0,1
,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,3,0,2,3,3,3,3,2,3,3,3,3,1,2,2,0,0,2,3,2,2,2,3,2,3,2,2,3,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,2,3,0,2,3,2,3,0,1,2,3,3,2,0,2,3,0,0,2,3,2,2,0,1,3,1,3,2,2,1,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,1,3,0,2,3,3,3,3,3,3,3,3,2,1,3,2,0,0,2,2,3,3,3,2,3,3,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,3,3,3,3,3,2,2,3,3,2,2,2,3,3,0,0,1,1,1,1,1,2,0,0,1,1,1,1,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,3,3,3,3,3,2,2,3,3,3,3,3,3,3,0,3,2,3,3,2,3,2,0,2,1,0,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,3,3,3,3,3,3,2,3,3,3,2,2,2,2,3,1,3,2,3,1,1,2,1,0,2,2,2,2,1,3,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,2,3,3,3,3,3,1,2,2,1,3,1,0,3,0,0,3,0,0,0,1,1,0,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,2,2,1,1,3,3,3,2,2,1,2,2,3,1,1,2,0,0,2,2,1,3,0,0,2,1,1,2,1,1,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,2,3,3,3,3,1,2,2,2,1,2,1,3,3,1,1,2,1,2,1,2,2,0,2,0,0,1,1,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,3,3,3,3,2,1,3,2,2,3,2,0,3,2,0,3,0,1,0,1,1,0,0,1,1,1,1,0,1,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,3,2,3,3,3,2,2,2,3,3,1,2,1,2,1,0,1,0,1,1,0,1,0,0,2,1,1,1,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,3,1,1,2,1,2,3,3,2,2,1,2,2,3,0,2,1,0,0,2,2,3,2,1,2,2,2,2,2,3,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,3,3,3,3,1,1,0,1,1,2,2,1,1,3,0,0,1,3,1,1,1,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,3,3,3,2,0,0,0,2,1,0,1,0,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,1,0,0,2,3,2,2,2,1,2,2,2,1,2,1,0,0,1,1,1,0,2,0,1,1,1,0,0,1,1,1,0,0,0,0,0,1,2,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,2,3,3,3,3,0,0,0,0,1,0,0,0,0,3,0
,1,2,1,0,0,0,0,0,0,0,1,1,0,0,1,1,1,0,1,0,1,2,0,0,1,1,2,1,0,1,1,1,1,0,1,1,1,1,0,1,0,0,1,0,0,1,1,0,2,2,3,2,2,2,3,1,2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,0,1,0,1,1,1,0,2,1,1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,0,1,1,0,3,3,3,2,2,2,2,3,2,2,1,1,2,2,2,2,1,1,3,1,2,1,2,0,0,1,1,0,1,0,2,1,1,1,1,1,1,2,1,0,1,1,1,1,0,1,0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,1,1,0,2,0,0,1,0,3,2,2,2,2,1,2,1,2,1,2,0,0,0,2,1,2,2,1,1,2,2,0,1,1,0,2,1,1,1,1,1,0,1,1,1,2,1,1,1,2,1,0,1,2,1,1,1,1,0,1,1,1,0,0,1,0,0,1,1,3,2,2,2,1,1,1,2,3,0,0,0,0,2,0,2,2,1,0,0,0,0,0,0,1,0,0,0,0,1,1,1,0,1,1,0,1,0,1,1,0,1,1,0,2,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,2,3,2,3,2,1,2,2,2,2,1,0,0,0,2,0,0,1,1,0,0,0,0,0,0,0,1,1,0,0,2,1,1,1,2,1,0,2,0,0,1,0,1,0,0,1,0,0,1,1,0,1,1,0,0,0,0,0,1,0,0,0,0,0,3,0,0,1,0,2,2,2,3,2,2,2,2,2,2,2,0,0,0,2,1,2,1,1,1,2,2,0,0,0,1,2,1,1,1,1,1,0,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,1,0,1,1,1,1,1,1,0,0,1,2,3,2,3,3,2,0,1,1,1,0,0,1,0,2,0,1,1,3,1,0,0,0,0,0,0,0,1,0,0,2,1,1,1,1,1,1,1,1,0,1,0,1,1,1,1,0,1,1,1,0,0,1,1,0,1,0,0,0,0,0,0,1,0,2,3,3,3,3,1,2,2,2,2,0,1,1,0,2,1,1,1,2,1,0,1,1,0,0,1,0,1,0,0,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,3,3,2,0,0,1,1,2,2,1,0,0,2,0,1,1,3,0,0,1,0,0,0,0,0,1,0,1,2,1,1,1,2,0,1,1,1,0,1,0,1,1,0,1,0,1,1,1,1,0,1,0,0,0,0,0,0,1,0,1,1,0,1,3,2,3,2,1,0,0,2,2,2,0,1,0,2,0,1,1,1,0,1,0,0,0,3,0,1,1,0,0,2,1,1,1,1,0,1,1,0,0,0,0,1,1,0,1,0,0,2,1,1,0,1,0,0,0,1,0,1,0,0,1,1,0,3,1,2,1,1,2,2,2,2,2,2,1,2,2,1,1,0,0,0,2,2,2,0,0,0,1,2,1,0,1,0,1,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,2,1,1,1,0,1,0,1,1,0,1,1,1,0,0,1,3,0,0,0,0,2,0,1,1,1,1,1,1,1,0,1,0,0,0,1,1,1,0,1,0,1,1,0,0,1,0,1,1,1,0,0,1,0,0,0,1,0,1,1,0,0,1,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,1,1,3,3,2,2,0,0,0,2,2,0,0,0,1,2,0,1,1,2,0,0,0,0,0,0,0,0,1,0,0,2,1,0,1,1,0,0,1,1,0,0,0,1,1,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,2,3,2,3,2,0,0,0,0,1,1,0,0,0,2,0,2,0,2,0,0,0,0,0,1,0,0,1,0,0,1,1,1,1,2,0,1,2,1,0,1,1,2,1,1,1,1,1,2,1,1,0,1,0,0,1,1,1,1,1,0,1,1,0,1,3,2,2,2,1,0,0,2,2,1,0,1,2,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,1,1,0,0,1,1,0,1,1,0,0,1,1,0,1,1,0,0,1,1,0,0,1,0,0,0
,0,0,0,0,0,0,0,0,1,0,0,1,0,2,3,1,2,2,2,2,2,2,1,1,0,0,0,1,0,1,0,2,1,1,1,0,0,0,0,1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,2,0,2,0,0,1,0,3,2,1,2,1,2,2,0,1,0,0,0,2,1,0,0,2,1,1,1,1,0,2,0,2,2,1,1,1,1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,0,0,0,1,1,1,1,0,1,0,0,1,1,2,2,2,2,1,0,0,1,0,0,0,0,0,2,0,1,1,1,1,0,0,0,0,1,0,1,2,0,0,2,0,1,0,1,1,1,2,1,0,1,0,1,1,0,0,1,0,1,1,1,0,1,0,0,0,1,0,0,1,0,1,1,0,2,1,2,2,2,0,3,0,1,1,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,1,1,0,0,1,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,1,2,2,3,2,2,0,0,1,1,2,0,1,2,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,2,2,1,1,2,1,2,2,2,2,2,1,2,2,0,1,0,0,0,1,2,2,2,1,2,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,0,1,1,2,2,2,2,0,1,0,2,2,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,1,0,0,1,0,0,0,0,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,2,2,2,2,0,0,0,2,2,2,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,1,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,2,2,2,2,0,0,0,0,1,0,0,1,1,2,0,0,0,0,1,0,1,0,0,1,0,0,2,0,0,0,1,0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,1,2,2,2,1,1,2,0,2,1,1,1,1,0,2,2,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,1,0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1,0,2,1,2,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,2,0,1,2,1,0,1,1,1,0,1,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,1,0,0,1,1,0,0,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,2,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,1,0,1,0,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1
,1,1,0,1,1,0,1,0,1,0,0,0,0,1,1,0,1,1,0,0,0,0,0,1,0,1,1,0,1,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,)Koi8rModel = {'charToOrderMap': KOI8R_CharToOrderMap,'precedenceMatrix': RussianLangModel,'mTypicalPositiveRatio': 0.976601,'keepEnglishLetter': False,'charsetName': "KOI8-R"}Win1251CyrillicModel = {'charToOrderMap': win1251_CharToOrderMap,'precedenceMatrix': RussianLangModel,'mTypicalPositiveRatio': 0.976601,'keepEnglishLetter': False,'charsetName': "windows-1251"}Latin5CyrillicModel = {'charToOrderMap': latin5_CharToOrderMap,'precedenceMatrix': RussianLangModel,'mTypicalPositiveRatio': 0.976601,'keepEnglishLetter': False,'charsetName': "ISO-8859-5"}MacCyrillicModel = {'charToOrderMap': macCyrillic_CharToOrderMap,'precedenceMatrix': RussianLangModel,'mTypicalPositiveRatio': 0.976601,'keepEnglishLetter': False,'charsetName': "MacCyrillic"};Ibm866Model = {'charToOrderMap': IBM866_CharToOrderMap,'precedenceMatrix': RussianLangModel,'mTypicalPositiveRatio': 0.976601,'keepEnglishLetter': False,'charsetName': "IBM866"}Ibm855Model = {'charToOrderMap': IBM855_CharToOrderMap,'precedenceMatrix': RussianLangModel,'mTypicalPositiveRatio': 0.976601,'keepEnglishLetter': False,'charsetName': "IBM855"}# flake8: noqa
######################## BEGIN LICENSE BLOCK ######################### The Original Code is Mozilla Communicator client code.## The Initial Developer of the Original Code is# Netscape Communications Corporation.# Portions created by the Initial Developer are Copyright (C) 1998# the Initial Developer. All Rights Reserved.## Contributor(s):# Mark Pilgrim - port to Python## This library is free software; you can redistribute it and/or# modify it under the terms of the GNU Lesser General Public# License as published by the Free Software Foundation; either# version 2.1 of the License, or (at your option) any later version.## This library is distributed in the hope that it will be useful,# but WITHOUT ANY WARRANTY; without even the implied warranty of# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU# Lesser General Public License for more details.## You should have received a copy of the GNU Lesser General Public# License along with this library; if not, write to the Free Software# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA# 02110-1301 USA######################### END LICENSE BLOCK ########################## 255: Control characters that usually does not exist in any text# 254: Carriage/Return# 253: symbol (punctuation) that does not belong to word# 252: 0 - 9# Character Mapping Table:# this table is modified base on win1251BulgarianCharToOrderMap, so# only number <64 is sure validLatin5_BulgarianCharToOrderMap = (255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60116,195, 85, 93, 
97,113,196,197,198,199,200,253,253,253,253,253, # 70194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209, # 80210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225, # 9081,226,227,228,229,230,105,231,232,233,234,235,236, 45,237,238, # a031, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # b039, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,239, 67,240, 60, 56, # c01, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # d07, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,241, 42, 16, # e062,242,243,244, 58,245, 98,246,247,248,249,250,251, 91,252,253, # f0)win1251BulgarianCharToOrderMap = (255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70206,207,208,209,210,211,212,213,120,214,215,216,217,218,219,220, # 80221, 78, 64, 83,121, 98,117,105,222,223,224,225,226,227,228,229, # 9088,230,231,232,233,122, 89,106,234,235,236,237,238, 45,239,240, # a073, 80,118,114,241,242,243,244,245, 62, 58,246,247,248,249,250, # b031, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # c039, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,251, 67,252, 60, 56, # d01, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # e07, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,253, 42, 16, # f0)# Model Table:# total sequences: 100%# first 512 sequences: 96.9392%# first 1024 sequences:3.0618%# rest sequences: 0.2992%# negative sequences: 0.0020%BulgarianLangModel = 
(0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,2,2,1,2,2,3,1,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,0,1,0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,3,3,0,3,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,3,3,3,3,3,3,3,3,3,3,2,3,2,2,1,3,3,3,3,2,2,2,1,1,2,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,3,3,3,3,3,3,3,2,3,2,2,3,3,1,1,2,3,3,2,3,3,3,3,2,1,2,0,2,0,3,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,3,3,3,3,3,3,3,1,3,3,3,3,3,2,3,2,3,3,3,3,3,2,3,3,1,3,0,3,0,2,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,3,3,3,3,3,3,3,3,1,3,3,2,3,3,3,1,3,3,2,3,2,2,2,0,0,2,0,2,0,2,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,3,3,1,2,2,3,2,1,1,2,0,2,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,3,3,3,3,3,3,3,2,3,3,1,2,3,2,2,2,3,3,3,3,3,2,2,3,1,2,0,2,1,2,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,3,3,3,3,3,1,3,3,3,3,3,2,3,3,3,2,3,3,2,3,2,2,2,3,1,2,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,3,3,3,3,3,3,3,3,3,3,3,1,1,1,2,2,1,3,1,3,2,2,3,0,0,1,0,1,0,1,0,0,0,0,0,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,3,3,3,3,3,2,2,3,2,2,3,1,2,1,1,1,2,3,1,3,1,2,2,0,1,1,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,3,3,3,3,3,1,3,2,2,3,3,1,2,3,1,1,3,3,3,3,1,2,2,1,1,1,0,2,0,2,0,1,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,2,2,3,3,3,2,2,1,1,2,0,2,0,1,0,0,0,0,0,0,0,0,0,0
,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,3,0,1,2,1,3,3,2,3,3,3,3,3,2,3,2,1,0,3,1,2,1,2,1,2,3,2,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,2,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,1,3,3,2,3,3,2,2,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,3,3,3,0,3,3,3,3,3,2,1,1,2,1,3,3,0,3,1,1,1,1,3,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,3,3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,1,1,3,1,3,3,2,3,2,2,2,3,0,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,3,3,3,3,2,3,3,2,2,3,2,1,1,1,1,1,3,1,3,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,3,3,3,3,3,2,3,2,0,3,2,0,3,0,2,0,0,2,1,3,1,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,3,3,3,3,2,1,1,1,1,2,1,1,2,1,1,1,2,2,1,2,1,1,1,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,3,3,3,3,2,1,3,1,1,2,1,3,2,1,1,0,1,2,3,2,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,3,3,3,2,2,1,0,1,0,0,1,0,0,0,2,1,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,3,3,3,2,3,2,3,3,1,3,2,1,1,1,2,1,1,2,1,3,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,1,1,2,2,3,3,2,3,2,2,2,3,1,2,2,1,1,2,1,1,2,2,0,1,1,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,3,3,3,2,1,3,1,0,2,2,1,3,2,1,0,0,2,0,2,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,3,3,3,3,3,3,1,2,0,2,3,1,2,3,2,0,1,3,1,2,1,1,1,0,0,1,0,0,2,2,2,3,2,2,2,2,1,2,1,1,2,2,1,1,2,0,1,1,1,0,0,1,1,0,0,1,1,0,0,0,1,1,0,1,3,3,3,3,3,2,1,2,2,1,2,0,2,0,1,0,1,2,1,2,1,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,3,3,2,3,3,1,1,3,1,0,3,2,1,0,0,0,1,2,0,2,0,1,0,0,0,1,0,1,2,1,2,2,1,1,1,1,1,1,1,2,2,2,1,1,1,1,1,1,1,0,1,2,1,1,1,0,0,0,0,0,1,1,0,0,3,1,0,1,0,2,3,2,2,2,3,2,2,2,2,2
,1,0,2,1,2,1,1,1,0,1,2,1,2,2,2,1,1,1,2,2,2,2,1,2,1,1,0,1,2,1,2,2,2,1,1,1,0,1,1,1,1,2,0,1,0,0,0,0,2,3,2,3,3,0,0,2,1,0,2,1,0,0,0,0,2,3,0,2,0,0,0,0,0,1,0,0,2,0,1,2,2,1,2,1,2,2,1,1,1,2,1,1,1,0,1,2,2,1,1,1,1,1,0,1,1,1,0,0,1,2,0,0,3,3,2,2,3,0,2,3,1,1,2,0,0,0,1,0,0,2,0,2,0,0,0,1,0,1,0,1,2,0,2,2,1,1,1,1,2,1,0,1,2,2,2,1,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,1,0,0,2,3,2,3,3,0,0,3,0,1,1,0,1,0,0,0,2,2,1,2,0,0,0,0,0,0,0,0,2,0,1,2,2,2,1,1,1,1,1,2,2,2,1,0,2,0,1,0,1,0,0,1,0,1,0,0,1,0,0,0,0,1,0,0,3,3,3,3,2,2,2,2,2,0,2,1,1,1,1,2,1,2,1,1,0,2,0,1,0,1,0,0,2,0,1,2,1,1,1,1,1,1,1,2,2,1,1,0,2,0,1,0,2,0,0,1,1,1,0,0,2,0,0,0,1,1,0,0,2,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,0,0,0,1,2,0,1,2,2,2,2,1,1,2,1,1,2,2,2,1,2,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,1,1,0,0,2,3,3,3,3,0,2,2,0,2,1,0,0,0,1,1,1,2,0,2,0,0,0,3,0,0,0,0,2,0,2,2,1,1,1,2,1,2,1,1,2,2,2,1,2,0,1,1,1,0,1,1,1,1,0,2,1,0,0,0,1,1,0,0,2,3,3,3,3,0,2,1,0,0,2,0,0,0,0,0,1,2,0,2,0,0,0,0,0,0,0,0,2,0,1,2,1,1,1,2,1,1,1,1,2,2,2,0,1,0,1,1,1,0,0,1,1,1,0,0,1,0,0,0,0,1,0,0,3,3,2,2,3,0,1,0,1,0,0,0,0,0,0,0,1,1,0,3,0,0,0,0,0,0,0,0,1,0,2,2,1,1,1,1,1,2,1,1,2,2,1,2,2,1,0,1,1,1,1,1,0,1,0,0,1,0,0,0,1,1,0,0,3,1,0,1,0,2,2,2,2,3,2,1,1,1,2,3,0,0,1,0,2,1,1,0,1,1,1,1,2,1,1,1,1,2,2,1,2,1,2,2,1,1,0,1,2,1,2,2,1,1,1,0,0,1,1,1,2,1,0,1,0,0,0,0,2,1,0,1,0,3,1,2,2,2,2,1,2,2,1,1,1,0,2,1,2,2,1,1,2,1,1,0,2,1,1,1,1,2,2,2,2,2,2,2,1,2,0,1,1,0,2,1,1,1,1,1,0,0,1,1,1,1,0,1,0,0,0,0,2,1,1,1,1,2,2,2,2,1,2,2,2,1,2,2,1,1,2,1,2,3,2,2,1,1,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,3,2,0,1,2,0,1,2,1,1,0,1,0,1,2,1,2,0,0,0,1,1,0,0,0,1,0,0,2,1,1,0,0,1,1,0,1,1,1,1,0,2,0,1,1,1,0,0,1,1,0,0,0,0,1,0,0,0,1,0,0,2,0,0,0,0,1,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,2,1,1,1,1,2,2,2,2,1,1,2,1,2,1,1,1,0,2,1,2,1,1,1,0,2,1,1,1,1,0,1,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,3,2,0,0,0,0,1,0,0,0,0,0,0,1,1,0,2,0,0,0,0,0,0,0,0,1,0,1,2,1,1,1,1,1,1,0,0,2,2,2,2,2,0,1,1,0,1,1,1,1,1,0,0
,1,0,0,0,1,1,0,1,2,3,1,2,1,0,1,1,0,2,2,2,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,1,0,1,2,1,1,1,1,2,1,1,1,1,1,1,1,1,0,1,1,0,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,2,2,2,2,2,0,0,2,0,0,2,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,0,2,2,1,1,1,1,1,0,0,1,2,1,1,0,1,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,1,2,2,2,2,0,0,2,0,1,1,0,0,0,1,0,0,2,0,2,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,1,1,1,1,1,1,1,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1,2,2,3,2,0,0,1,0,0,1,0,0,0,0,0,0,1,0,2,0,0,0,1,0,0,0,0,0,0,0,2,1,1,0,0,1,0,0,0,1,1,0,0,1,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,2,1,2,2,2,1,2,1,2,2,1,1,2,1,1,1,0,1,1,1,1,2,0,1,0,1,1,1,1,0,1,1,1,1,2,1,1,1,1,1,1,0,0,1,2,1,1,1,1,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,1,0,0,1,3,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,2,1,0,0,1,0,2,0,0,0,0,0,1,1,1,0,1,0,0,0,0,0,0,0,0,2,0,0,1,0,2,0,1,0,0,1,1,2,0,1,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,2,2,2,2,0,1,1,0,2,1,0,1,1,1,0,0,1,0,2,0,1,0,0,0,0,0,0,0,0,0,1,0,1,0,0,1,0,0,0,1,1,0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,2,2,2,2,2,0,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,2,0,1,0,0,1,2,1,1,1,1,1,1,2,2,1,0,0,1,0,1,0,0,0,0,1,1,1,1,0,0,0,1,1,2,1,1,1,1,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,1,2,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,1,0,1,1,1,0,0,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,2,0,0,2,0,1,0,0,1,0,0,1,1,1,0,0,1,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,1,1,1,1,1,1,2,0,0,0,0,0,0,2,1,0,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,)Latin5BulgarianModel = {'charToOrderMap': Latin5_BulgarianCharToOrderMap,'precedenceMatrix': BulgarianLangModel,'mTypicalPositiveRatio': 0.969392,'keepEnglishLetter': False,'charsetName': "ISO-8859-5"}Win1251BulgarianModel = {'charToOrderMap': win1251BulgarianCharToOrderMap,'precedenceMatrix': BulgarianLangModel,'mTypicalPositiveRatio': 0.969392,'keepEnglishLetter': False,'charsetName': "windows-1251"}# flake8: noqa
######################## BEGIN LICENSE BLOCK ######################### The Original Code is Mozilla Communicator client code.## The Initial Developer of the Original Code is# Netscape Communications Corporation.# Portions created by the Initial Developer are Copyright (C) 1998# the Initial Developer. All Rights Reserved.## Contributor(s):# Mark Pilgrim - port to Python## This library is free software; you can redistribute it and/or# modify it under the terms of the GNU Lesser General Public# License as published by the Free Software Foundation; either# version 2.1 of the License, or (at your option) any later version.## This library is distributed in the hope that it will be useful,# but WITHOUT ANY WARRANTY; without even the implied warranty of# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU# Lesser General Public License for more details.## You should have received a copy of the GNU Lesser General Public# License along with this library; if not, write to the Free Software# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA# 02110-1301 USA######################### END LICENSE BLOCK #########################from .compat import wrap_ordNUM_OF_CATEGORY = 6DONT_KNOW = -1ENOUGH_REL_THRESHOLD = 100MAX_REL_THRESHOLD = 1000MINIMUM_DATA_THRESHOLD = 4# This is hiragana 2-char sequence table, the number in each cell represents its frequency categoryjp2CharContext = 
((0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1),(2,4,0,4,0,3,0,4,0,3,4,4,4,2,4,3,3,4,3,2,3,3,4,2,3,3,3,2,4,1,4,3,3,1,5,4,3,4,3,4,3,5,3,0,3,5,4,2,0,3,1,0,3,3,0,3,3,0,1,1,0,4,3,0,3,3,0,4,0,2,0,3,5,5,5,5,4,0,4,1,0,3,4),(0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2),(0,4,0,5,0,5,0,4,0,4,5,4,4,3,5,3,5,1,5,3,4,3,4,4,3,4,3,3,4,3,5,4,4,3,5,5,3,5,5,5,3,5,5,3,4,5,5,3,1,3,2,0,3,4,0,4,2,0,4,2,1,5,3,2,3,5,0,4,0,2,0,5,4,4,5,4,5,0,4,0,0,4,4),(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),(0,3,0,4,0,3,0,3,0,4,5,4,3,3,3,3,4,3,5,4,4,3,5,4,4,3,4,3,4,4,4,4,5,3,4,4,3,4,5,5,4,5,5,1,4,5,4,3,0,3,3,1,3,3,0,4,4,0,3,3,1,5,3,3,3,5,0,4,0,3,0,4,4,3,4,3,3,0,4,1,1,3,4),(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),(0,4,0,3,0,3,0,4,0,3,4,4,3,2,2,1,2,1,3,1,3,3,3,3,3,4,3,1,3,3,5,3,3,0,4,3,0,5,4,3,3,5,4,4,3,4,4,5,0,1,2,0,1,2,0,2,2,0,1,0,0,5,2,2,1,4,0,3,0,1,0,4,4,3,5,4,3,0,2,1,0,4,3),(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),(0,3,0,5,0,4,0,2,1,4,4,2,4,1,4,2,4,2,4,3,3,3,4,3,3,3,3,1,4,2,3,3,3,1,4,4,1,1,1,4,3,3,2,0,2,4,3,2,0,3,3,0,3,1,1,0,0,0,3,3,0,4,2,2,3,4,0,4,0,3,0,4,4,5,3,4,4,0,3,0,0,1,4),(1,4,0,4,0,4,0,4,0,3,5,4,4,3,4,3,5,4,3,3,4,3,5,4,4,4,4,3,4,2,4,3,3,1,5,4,3,2,4,5,4,5,5,4,4,5,4,4,0,3,2,2,3,3,0,4,3,1,3,2,1,4,3,3,4,5,0,3,0,2,0,4,5,5,4,5,4,0,4,0,0,5,4),(0,5,0,5,0,4,0,3,0,4,4,3,4,3,3,3,4,0,4,4,4,3,4,3,4,3,3,1,4,2,4,3,4,0,5,4,1,4,5,4,4,5,3,2,4,3,4,3,2,4,1,3,3,3,2,3,2,0,4,3,3,4,3,3,3,4,0,4,0,3,0,4,5,4,4,
4,3,0,4,1,0,1,3),(0,3,1,4,0,3,0,2,0,3,4,4,3,1,4,2,3,3,4,3,4,3,4,3,4,4,3,2,3,1,5,4,4,1,4,4,3,5,4,4,3,5,5,4,3,4,4,3,1,2,3,1,2,2,0,3,2,0,3,1,0,5,3,3,3,4,3,3,3,3,4,4,4,4,5,4,2,0,3,3,2,4,3),(0,2,0,3,0,1,0,1,0,0,3,2,0,0,2,0,1,0,2,1,3,3,3,1,2,3,1,0,1,0,4,2,1,1,3,3,0,4,3,3,1,4,3,3,0,3,3,2,0,0,0,0,1,0,0,2,0,0,0,0,0,4,1,0,2,3,2,2,2,1,3,3,3,4,4,3,2,0,3,1,0,3,3),(0,4,0,4,0,3,0,3,0,4,4,4,3,3,3,3,3,3,4,3,4,2,4,3,4,3,3,2,4,3,4,5,4,1,4,5,3,5,4,5,3,5,4,0,3,5,5,3,1,3,3,2,2,3,0,3,4,1,3,3,2,4,3,3,3,4,0,4,0,3,0,4,5,4,4,5,3,0,4,1,0,3,4),(0,2,0,3,0,3,0,0,0,2,2,2,1,0,1,0,0,0,3,0,3,0,3,0,1,3,1,0,3,1,3,3,3,1,3,3,3,0,1,3,1,3,4,0,0,3,1,1,0,3,2,0,0,0,0,1,3,0,1,0,0,3,3,2,0,3,0,0,0,0,0,3,4,3,4,3,3,0,3,0,0,2,3),(2,3,0,3,0,2,0,1,0,3,3,4,3,1,3,1,1,1,3,1,4,3,4,3,3,3,0,0,3,1,5,4,3,1,4,3,2,5,5,4,4,4,4,3,3,4,4,4,0,2,1,1,3,2,0,1,2,0,0,1,0,4,1,3,3,3,0,3,0,1,0,4,4,4,5,5,3,0,2,0,0,4,4),(0,2,0,1,0,3,1,3,0,2,3,3,3,0,3,1,0,0,3,0,3,2,3,1,3,2,1,1,0,0,4,2,1,0,2,3,1,4,3,2,0,4,4,3,1,3,1,3,0,1,0,0,1,0,0,0,1,0,0,0,0,4,1,1,1,2,0,3,0,0,0,3,4,2,4,3,2,0,1,0,0,3,3),(0,1,0,4,0,5,0,4,0,2,4,4,2,3,3,2,3,3,5,3,3,3,4,3,4,2,3,0,4,3,3,3,4,1,4,3,2,1,5,5,3,4,5,1,3,5,4,2,0,3,3,0,1,3,0,4,2,0,1,3,1,4,3,3,3,3,0,3,0,1,0,3,4,4,4,5,5,0,3,0,1,4,5),(0,2,0,3,0,3,0,0,0,2,3,1,3,0,4,0,1,1,3,0,3,4,3,2,3,1,0,3,3,2,3,1,3,0,2,3,0,2,1,4,1,2,2,0,0,3,3,0,0,2,0,0,0,1,0,0,0,0,2,2,0,3,2,1,3,3,0,2,0,2,0,0,3,3,1,2,4,0,3,0,2,2,3),(2,4,0,5,0,4,0,4,0,2,4,4,4,3,4,3,3,3,1,2,4,3,4,3,4,4,5,0,3,3,3,3,2,0,4,3,1,4,3,4,1,4,4,3,3,4,4,3,1,2,3,0,4,2,0,4,1,0,3,3,0,4,3,3,3,4,0,4,0,2,0,3,5,3,4,5,2,0,3,0,0,4,5),(0,3,0,4,0,1,0,1,0,1,3,2,2,1,3,0,3,0,2,0,2,0,3,0,2,0,0,0,1,0,1,1,0,0,3,1,0,0,0,4,0,3,1,0,2,1,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,4,2,2,3,1,0,3,0,0,0,1,4,4,4,3,0,0,4,0,0,1,4),(1,4,1,5,0,3,0,3,0,4,5,4,4,3,5,3,3,4,4,3,4,1,3,3,3,3,2,1,4,1,5,4,3,1,4,4,3,5,4,4,3,5,4,3,3,4,4,4,0,3,3,1,2,3,0,3,1,0,3,3,0,5,4,4,4,4,4,4,3,3,5,4,4,3,3,5,4,0,3,2,0,4,4),(0,2,0,3,0,1,0,0,0,1,3,3,3,2,4,1,3,0,3,1,3,0,2,2,1,1,0,0,2,0,4,3,1,0,4,3,0,4,4,4,1,4,3,1,1,3,3,1,0,2,0,0,1,3,0,0,0,0,2,0,0,4,3,2,4,3,5,
4,3,3,3,4,3,3,4,3,3,0,2,1,0,3,3),(0,2,0,4,0,3,0,2,0,2,5,5,3,4,4,4,4,1,4,3,3,0,4,3,4,3,1,3,3,2,4,3,0,3,4,3,0,3,4,4,2,4,4,0,4,5,3,3,2,2,1,1,1,2,0,1,5,0,3,3,2,4,3,3,3,4,0,3,0,2,0,4,4,3,5,5,0,0,3,0,2,3,3),(0,3,0,4,0,3,0,1,0,3,4,3,3,1,3,3,3,0,3,1,3,0,4,3,3,1,1,0,3,0,3,3,0,0,4,4,0,1,5,4,3,3,5,0,3,3,4,3,0,2,0,1,1,1,0,1,3,0,1,2,1,3,3,2,3,3,0,3,0,1,0,1,3,3,4,4,1,0,1,2,2,1,3),(0,1,0,4,0,4,0,3,0,1,3,3,3,2,3,1,1,0,3,0,3,3,4,3,2,4,2,0,1,0,4,3,2,0,4,3,0,5,3,3,2,4,4,4,3,3,3,4,0,1,3,0,0,1,0,0,1,0,0,0,0,4,2,3,3,3,0,3,0,0,0,4,4,4,5,3,2,0,3,3,0,3,5),(0,2,0,3,0,0,0,3,0,1,3,0,2,0,0,0,1,0,3,1,1,3,3,0,0,3,0,0,3,0,2,3,1,0,3,1,0,3,3,2,0,4,2,2,0,2,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,2,1,2,0,1,0,1,0,0,0,1,3,1,2,0,0,0,1,0,0,1,4),(0,3,0,3,0,5,0,1,0,2,4,3,1,3,3,2,1,1,5,2,1,0,5,1,2,0,0,0,3,3,2,2,3,2,4,3,0,0,3,3,1,3,3,0,2,5,3,4,0,3,3,0,1,2,0,2,2,0,3,2,0,2,2,3,3,3,0,2,0,1,0,3,4,4,2,5,4,0,3,0,0,3,5),(0,3,0,3,0,3,0,1,0,3,3,3,3,0,3,0,2,0,2,1,1,0,2,0,1,0,0,0,2,1,0,0,1,0,3,2,0,0,3,3,1,2,3,1,0,3,3,0,0,1,0,0,0,0,0,2,0,0,0,0,0,2,3,1,2,3,0,3,0,1,0,3,2,1,0,4,3,0,1,1,0,3,3),(0,4,0,5,0,3,0,3,0,4,5,5,4,3,5,3,4,3,5,3,3,2,5,3,4,4,4,3,4,3,4,5,5,3,4,4,3,4,4,5,4,4,4,3,4,5,5,4,2,3,4,2,3,4,0,3,3,1,4,3,2,4,3,3,5,5,0,3,0,3,0,5,5,5,5,4,4,0,4,0,1,4,4),(0,4,0,4,0,3,0,3,0,3,5,4,4,2,3,2,5,1,3,2,5,1,4,2,3,2,3,3,4,3,3,3,3,2,5,4,1,3,3,5,3,4,4,0,4,4,3,1,1,3,1,0,2,3,0,2,3,0,3,0,0,4,3,1,3,4,0,3,0,2,0,4,4,4,3,4,5,0,4,0,0,3,4),(0,3,0,3,0,3,1,2,0,3,4,4,3,3,3,0,2,2,4,3,3,1,3,3,3,1,1,0,3,1,4,3,2,3,4,4,2,4,4,4,3,4,4,3,2,4,4,3,1,3,3,1,3,3,0,4,1,0,2,2,1,4,3,2,3,3,5,4,3,3,5,4,4,3,3,0,4,0,3,2,2,4,4),(0,2,0,1,0,0,0,0,0,1,2,1,3,0,0,0,0,0,2,0,1,2,1,0,0,1,0,0,0,0,3,0,0,1,0,1,1,3,1,0,0,0,1,1,0,1,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,1,2,2,0,3,4,0,0,0,1,1,0,0,1,0,0,0,0,0,1,1),(0,1,0,0,0,1,0,0,0,0,4,0,4,1,4,0,3,0,4,0,3,0,4,0,3,0,3,0,4,1,5,1,4,0,0,3,0,5,0,5,2,0,1,0,0,0,2,1,4,0,1,3,0,0,3,0,0,3,1,1,4,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0),(1,4,0,5,0,3,0,2,0,3,5,4,4,3,4,3,5,3,4,3,3,0,4,3,3,3,3,3,3,2,4,4,3,1,3,4,4,5,4,4,3,4,4,1,3,5,4,3,3,3,1,2,2,3,3,1,3,1,3,
3,3,5,3,3,4,5,0,3,0,3,0,3,4,3,4,4,3,0,3,0,2,4,3),(0,1,0,4,0,0,0,0,0,1,4,0,4,1,4,2,4,0,3,0,1,0,1,0,0,0,0,0,2,0,3,1,1,1,0,3,0,0,0,1,2,1,0,0,1,1,1,1,0,1,0,0,0,1,0,0,3,0,0,0,0,3,2,0,2,2,0,1,0,0,0,2,3,2,3,3,0,0,0,0,2,1,0),(0,5,1,5,0,3,0,3,0,5,4,4,5,1,5,3,3,0,4,3,4,3,5,3,4,3,3,2,4,3,4,3,3,0,3,3,1,4,4,3,4,4,4,3,4,5,5,3,2,3,1,1,3,3,1,3,1,1,3,3,2,4,5,3,3,5,0,4,0,3,0,4,4,3,5,3,3,0,3,4,0,4,3),(0,5,0,5,0,3,0,2,0,4,4,3,5,2,4,3,3,3,4,4,4,3,5,3,5,3,3,1,4,0,4,3,3,0,3,3,0,4,4,4,4,5,4,3,3,5,5,3,2,3,1,2,3,2,0,1,0,0,3,2,2,4,4,3,1,5,0,4,0,3,0,4,3,1,3,2,1,0,3,3,0,3,3),(0,4,0,5,0,5,0,4,0,4,5,5,5,3,4,3,3,2,5,4,4,3,5,3,5,3,4,0,4,3,4,4,3,2,4,4,3,4,5,4,4,5,5,0,3,5,5,4,1,3,3,2,3,3,1,3,1,0,4,3,1,4,4,3,4,5,0,4,0,2,0,4,3,4,4,3,3,0,4,0,0,5,5),(0,4,0,4,0,5,0,1,1,3,3,4,4,3,4,1,3,0,5,1,3,0,3,1,3,1,1,0,3,0,3,3,4,0,4,3,0,4,4,4,3,4,4,0,3,5,4,1,0,3,0,0,2,3,0,3,1,0,3,1,0,3,2,1,3,5,0,3,0,1,0,3,2,3,3,4,4,0,2,2,0,4,4),(2,4,0,5,0,4,0,3,0,4,5,5,4,3,5,3,5,3,5,3,5,2,5,3,4,3,3,4,3,4,5,3,2,1,5,4,3,2,3,4,5,3,4,1,2,5,4,3,0,3,3,0,3,2,0,2,3,0,4,1,0,3,4,3,3,5,0,3,0,1,0,4,5,5,5,4,3,0,4,2,0,3,5),(0,5,0,4,0,4,0,2,0,5,4,3,4,3,4,3,3,3,4,3,4,2,5,3,5,3,4,1,4,3,4,4,4,0,3,5,0,4,4,4,4,5,3,1,3,4,5,3,3,3,3,3,3,3,0,2,2,0,3,3,2,4,3,3,3,5,3,4,1,3,3,5,3,2,0,0,0,0,4,3,1,3,3),(0,1,0,3,0,3,0,1,0,1,3,3,3,2,3,3,3,0,3,0,0,0,3,1,3,0,0,0,2,2,2,3,0,0,3,2,0,1,2,4,1,3,3,0,0,3,3,3,0,1,0,0,2,1,0,0,3,0,3,1,0,3,0,0,1,3,0,2,0,1,0,3,3,1,3,3,0,0,1,1,0,3,3),(0,2,0,3,0,2,1,4,0,2,2,3,1,1,3,1,1,0,2,0,3,1,2,3,1,3,0,0,1,0,4,3,2,3,3,3,1,4,2,3,3,3,3,1,0,3,1,4,0,1,1,0,1,2,0,1,1,0,1,1,0,3,1,3,2,2,0,1,0,0,0,2,3,3,3,1,0,0,0,0,0,2,3),(0,5,0,4,0,5,0,2,0,4,5,5,3,3,4,3,3,1,5,4,4,2,4,4,4,3,4,2,4,3,5,5,4,3,3,4,3,3,5,5,4,5,5,1,3,4,5,3,1,4,3,1,3,3,0,3,3,1,4,3,1,4,5,3,3,5,0,4,0,3,0,5,3,3,1,4,3,0,4,0,1,5,3),(0,5,0,5,0,4,0,2,0,4,4,3,4,3,3,3,3,3,5,4,4,4,4,4,4,5,3,3,5,2,4,4,4,3,4,4,3,3,4,4,5,5,3,3,4,3,4,3,3,4,3,3,3,3,1,2,2,1,4,3,3,5,4,4,3,4,0,4,0,3,0,4,4,4,4,4,1,0,4,2,0,2,4),(0,4,0,4,0,3,0,1,0,3,5,2,3,0,3,0,2,1,4,2,3,3,4,1,4,3,3,2,4,1,3,3,3,0,3,3,0,0,3,3,3,5,3,3,3,3,3,2,0,2,0,
0,2,0,0,2,0,0,1,0,0,3,1,2,2,3,0,3,0,2,0,4,4,3,3,4,1,0,3,0,0,2,4),(0,0,0,4,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,1,0,2,0,1,0,0,0,0,0,3,1,3,0,3,2,0,0,0,1,0,3,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,4,0,2,0,0,0,0,0,0,2),(0,2,1,3,0,2,0,2,0,3,3,3,3,1,3,1,3,3,3,3,3,3,4,2,2,1,2,1,4,0,4,3,1,3,3,3,2,4,3,5,4,3,3,3,3,3,3,3,0,1,3,0,2,0,0,1,0,0,1,0,0,4,2,0,2,3,0,3,3,0,3,3,4,2,3,1,4,0,1,2,0,2,3),(0,3,0,3,0,1,0,3,0,2,3,3,3,0,3,1,2,0,3,3,2,3,3,2,3,2,3,1,3,0,4,3,2,0,3,3,1,4,3,3,2,3,4,3,1,3,3,1,1,0,1,1,0,1,0,1,0,1,0,0,0,4,1,1,0,3,0,3,1,0,2,3,3,3,3,3,1,0,0,2,0,3,3),(0,0,0,0,0,0,0,0,0,0,3,0,2,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,3,0,3,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,2,0,2,3,0,0,0,0,0,0,0,0,3),(0,2,0,3,1,3,0,3,0,2,3,3,3,1,3,1,3,1,3,1,3,3,3,1,3,0,2,3,1,1,4,3,3,2,3,3,1,2,2,4,1,3,3,0,1,4,2,3,0,1,3,0,3,0,0,1,3,0,2,0,0,3,3,2,1,3,0,3,0,2,0,3,4,4,4,3,1,0,3,0,0,3,3),(0,2,0,1,0,2,0,0,0,1,3,2,2,1,3,0,1,1,3,0,3,2,3,1,2,0,2,0,1,1,3,3,3,0,3,3,1,1,2,3,2,3,3,1,2,3,2,0,0,1,0,0,0,0,0,0,3,0,1,0,0,2,1,2,1,3,0,3,0,0,0,3,4,4,4,3,2,0,2,0,0,2,4),(0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,3,1,0,0,0,0,0,0,0,3),(0,3,0,3,0,2,0,3,0,3,3,3,2,3,2,2,2,0,3,1,3,3,3,2,3,3,0,0,3,0,3,2,2,0,2,3,1,4,3,4,3,3,2,3,1,5,4,4,0,3,1,2,1,3,0,3,1,1,2,0,2,3,1,3,1,3,0,3,0,1,0,3,3,4,4,2,1,0,2,1,0,2,4),(0,1,0,3,0,1,0,2,0,1,4,2,5,1,4,0,2,0,2,1,3,1,4,0,2,1,0,0,2,1,4,1,1,0,3,3,0,5,1,3,2,3,3,1,0,3,2,3,0,1,0,0,0,0,0,0,1,0,0,0,0,4,0,1,0,3,0,2,0,1,0,3,3,3,4,3,3,0,0,0,0,2,3),(0,0,0,1,0,0,0,0,0,0,2,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,0,0,1,0,0,0,0,0,3),(0,1,0,3,0,4,0,3,0,2,4,3,1,0,3,2,2,1,3,1,2,2,3,1,1,1,2,1,3,0,1,2,0,1,3,2,1,3,0,5,5,1,0,0,1,3,2,1,0,3,0,0,1,0,0,0,0,0,3,4,0,1,1,1,3,2,0,2,0,1,0,2,3,3,1,2,3,0,1,0,1,0,4),(0,0,0,1,0,3,0,3,0,2,2,1,0,0,4,0,3,0,3,1,3,0,3,0,3,0,1,0,3,0,3,1,3,0,3,3,0,0,1,2,1,1,1,
0,1,2,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,2,2,1,2,0,0,2,0,0,0,0,2,3,3,3,3,0,0,0,0,1,4),(0,0,0,3,0,3,0,0,0,0,3,1,1,0,3,0,1,0,2,0,1,0,0,0,0,0,0,0,1,0,3,0,2,0,2,3,0,0,2,2,3,1,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,2,3),(2,4,0,5,0,5,0,4,0,3,4,3,3,3,4,3,3,3,4,3,4,4,5,4,5,5,5,2,3,0,5,5,4,1,5,4,3,1,5,4,3,4,4,3,3,4,3,3,0,3,2,0,2,3,0,3,0,0,3,3,0,5,3,2,3,3,0,3,0,3,0,3,4,5,4,5,3,0,4,3,0,3,4),(0,3,0,3,0,3,0,3,0,3,3,4,3,2,3,2,3,0,4,3,3,3,3,3,3,3,3,0,3,2,4,3,3,1,3,4,3,4,4,4,3,4,4,3,2,4,4,1,0,2,0,0,1,1,0,2,0,0,3,1,0,5,3,2,1,3,0,3,0,1,2,4,3,2,4,3,3,0,3,2,0,4,4),(0,3,0,3,0,1,0,0,0,1,4,3,3,2,3,1,3,1,4,2,3,2,4,2,3,4,3,0,2,2,3,3,3,0,3,3,3,0,3,4,1,3,3,0,3,4,3,3,0,1,1,0,1,0,0,0,4,0,3,0,0,3,1,2,1,3,0,4,0,1,0,4,3,3,4,3,3,0,2,0,0,3,3),(0,3,0,4,0,1,0,3,0,3,4,3,3,0,3,3,3,1,3,1,3,3,4,3,3,3,0,0,3,1,5,3,3,1,3,3,2,5,4,3,3,4,5,3,2,5,3,4,0,1,0,0,0,0,0,2,0,0,1,1,0,4,2,2,1,3,0,3,0,2,0,4,4,3,5,3,2,0,1,1,0,3,4),(0,5,0,4,0,5,0,2,0,4,4,3,3,2,3,3,3,1,4,3,4,1,5,3,4,3,4,0,4,2,4,3,4,1,5,4,0,4,4,4,4,5,4,1,3,5,4,2,1,4,1,1,3,2,0,3,1,0,3,2,1,4,3,3,3,4,0,4,0,3,0,4,4,4,3,3,3,0,4,2,0,3,4),(1,4,0,4,0,3,0,1,0,3,3,3,1,1,3,3,2,2,3,3,1,0,3,2,2,1,2,0,3,1,2,1,2,0,3,2,0,2,2,3,3,4,3,0,3,3,1,2,0,1,1,3,1,2,0,0,3,0,1,1,0,3,2,2,3,3,0,3,0,0,0,2,3,3,4,3,3,0,1,0,0,1,4),(0,4,0,4,0,4,0,0,0,3,4,4,3,1,4,2,3,2,3,3,3,1,4,3,4,0,3,0,4,2,3,3,2,2,5,4,2,1,3,4,3,4,3,1,3,3,4,2,0,2,1,0,3,3,0,0,2,0,3,1,0,4,4,3,4,3,0,4,0,1,0,2,4,4,4,4,4,0,3,2,0,3,3),(0,0,0,1,0,4,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,3,2,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2),(0,2,0,3,0,4,0,4,0,1,3,3,3,0,4,0,2,1,2,1,1,1,2,0,3,1,1,0,1,0,3,1,0,0,3,3,2,0,1,1,0,0,0,0,0,1,0,2,0,2,2,0,3,1,0,0,1,0,1,1,0,1,2,0,3,0,0,0,0,1,0,0,3,3,4,3,1,0,1,0,3,0,2),(0,0,0,3,0,5,0,0,0,0,1,0,2,0,3,1,0,1,3,0,0,0,2,0,0,0,1,0,0,0,1,1,0,0,4,0,0,0,2,3,0,1,4,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,1,0,0,0,0,0,0,0,2,0,0,3,0,0,0,0,0,3),(0,2,0,5,0,5,0,1,0,2,4,3,3,2,5,1,3,2,3,3,3,0,4,1,2,0,3,0,4,0,2,2,1,1,5,
3,0,0,1,4,2,3,2,0,3,3,3,2,0,2,4,1,1,2,0,1,1,0,3,1,0,1,3,1,2,3,0,2,0,0,0,1,3,5,4,4,4,0,3,0,0,1,3),(0,4,0,5,0,4,0,4,0,4,5,4,3,3,4,3,3,3,4,3,4,4,5,3,4,5,4,2,4,2,3,4,3,1,4,4,1,3,5,4,4,5,5,4,4,5,5,5,2,3,3,1,4,3,1,3,3,0,3,3,1,4,3,4,4,4,0,3,0,4,0,3,3,4,4,5,0,0,4,3,0,4,5),(0,4,0,4,0,3,0,3,0,3,4,4,4,3,3,2,4,3,4,3,4,3,5,3,4,3,2,1,4,2,4,4,3,1,3,4,2,4,5,5,3,4,5,4,1,5,4,3,0,3,2,2,3,2,1,3,1,0,3,3,3,5,3,3,3,5,4,4,2,3,3,4,3,3,3,2,1,0,3,2,1,4,3),(0,4,0,5,0,4,0,3,0,3,5,5,3,2,4,3,4,0,5,4,4,1,4,4,4,3,3,3,4,3,5,5,2,3,3,4,1,2,5,5,3,5,5,2,3,5,5,4,0,3,2,0,3,3,1,1,5,1,4,1,0,4,3,2,3,5,0,4,0,3,0,5,4,3,4,3,0,0,4,1,0,4,4),(1,3,0,4,0,2,0,2,0,2,5,5,3,3,3,3,3,0,4,2,3,4,4,4,3,4,0,0,3,4,5,4,3,3,3,3,2,5,5,4,5,5,5,4,3,5,5,5,1,3,1,0,1,0,0,3,2,0,4,2,0,5,2,3,2,4,1,3,0,3,0,4,5,4,5,4,3,0,4,2,0,5,4),(0,3,0,4,0,5,0,3,0,3,4,4,3,2,3,2,3,3,3,3,3,2,4,3,3,2,2,0,3,3,3,3,3,1,3,3,3,0,4,4,3,4,4,1,1,4,4,2,0,3,1,0,1,1,0,4,1,0,2,3,1,3,3,1,3,4,0,3,0,1,0,3,1,3,0,0,1,0,2,0,0,4,4),(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),(0,3,0,3,0,2,0,3,0,1,5,4,3,3,3,1,4,2,1,2,3,4,4,2,4,4,5,0,3,1,4,3,4,0,4,3,3,3,2,3,2,5,3,4,3,2,2,3,0,0,3,0,2,1,0,1,2,0,0,0,0,2,1,1,3,1,0,2,0,4,0,3,4,4,4,5,2,0,2,0,0,1,3),(0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,1,0,0,1,1,0,0,0,4,2,1,1,0,1,0,3,2,0,0,3,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,1,0,0,0,2,0,0,0,1,4,0,4,2,1,0,0,0,0,0,1),(0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,0,0,3,1,0,0,0,2,0,2,1,0,0,1,2,1,0,1,1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,1,3,1,0,0,0,0,0,1,0,0,2,1,0,0,0,0,0,0,0,0,2),(0,4,0,4,0,4,0,3,0,4,4,3,4,2,4,3,2,0,4,4,4,3,5,3,5,3,3,2,4,2,4,3,4,3,1,4,0,2,3,4,4,4,3,3,3,4,4,4,3,4,1,3,4,3,2,1,2,1,3,3,3,4,4,3,3,5,0,4,0,3,0,4,3,3,3,2,1,0,3,0,0,3,3),(0,4,0,3,0,3,0,3,0,3,5,5,3,3,3,3,4,3,4,3,3,3,4,4,4,3,3,3,3,4,3,5,3,3,1,3,2,4,5,5,5,5,4,3,4,5,5,3,2,2,3,3,3,3,2,3,3,1,2,3,2,4,3,3,3,4,0,4,0,2,0,4,3,2,2,1,2,0,3,0,0,4,1),)class JapaneseContextAnalysis:def 
class JapaneseContextAnalysis:
    """Accumulate two-character (hiragana) context statistics used to
    tell Japanese encodings apart.

    Subclasses implement get_order() to map the leading bytes of a
    character to a hiragana "order" plus the character's byte length for
    a specific encoding (Shift-JIS / EUC-JP).

    NOTE(review): relies on module-level names defined elsewhere in this
    file (NUM_OF_CATEGORY, MAX_REL_THRESHOLD, ENOUGH_REL_THRESHOLD,
    MINIMUM_DATA_THRESHOLD, DONT_KNOW, jp2CharContext, wrap_ord).
    """

    def __init__(self):
        self.reset()

    def reset(self):
        self._mTotalRel = 0  # total sequences received
        # Category counters; each integer counts sequences in its category.
        self._mRelSample = [0] * NUM_OF_CATEGORY
        # If the last byte in the current buffer is not the last byte of a
        # character, we need to know how many bytes to skip in the next
        # buffer.
        self._mNeedToSkipCharNum = 0
        self._mLastCharOrder = -1  # order of the previous character
        # If this flag is set to True, detection is done and a conclusion
        # has been made.
        self._mDone = False

    def feed(self, aBuf, aLen):
        """Feed the first aLen bytes of aBuf into the analyser."""
        if self._mDone:
            return

        # The buffer we got is byte oriented, and a character may span more
        # than one buffer.  In case the last one or two bytes of the previous
        # buffer did not complete a character, we recorded how many bytes are
        # needed to complete it and skip those bytes here.  We could instead
        # record those bytes and analyse the character once complete, but one
        # character makes little difference; simply skipping it keeps the
        # logic simple and improves performance.
        i = self._mNeedToSkipCharNum
        while i < aLen:
            order, charLen = self.get_order(aBuf[i:i + 2])
            i += charLen
            if i > aLen:
                # Character runs past this buffer: remember how much of it
                # to skip at the start of the next buffer.
                self._mNeedToSkipCharNum = i - aLen
                self._mLastCharOrder = -1
            else:
                if (order != -1) and (self._mLastCharOrder != -1):
                    self._mTotalRel += 1
                    if self._mTotalRel > MAX_REL_THRESHOLD:
                        self._mDone = True
                        break
                    self._mRelSample[
                        jp2CharContext[self._mLastCharOrder][order]] += 1
                self._mLastCharOrder = order

    def got_enough_data(self):
        return self._mTotalRel > ENOUGH_REL_THRESHOLD

    def get_confidence(self):
        # This is just one way to calculate confidence.  It works well for
        # me.  float() guarantees true division on Python 2 as well (bug
        # fix: the original expression was integer division there).
        if self._mTotalRel > MINIMUM_DATA_THRESHOLD:
            return ((self._mTotalRel - self._mRelSample[0])
                    / float(self._mTotalRel))
        else:
            return DONT_KNOW

    def get_order(self, aBuf):
        # Default implementation: unknown order, single-byte character.
        return -1, 1


class SJISContextAnalysis(JapaneseContextAnalysis):
    def __init__(self):
        # Bug fix: the original override skipped base-class initialisation,
        # leaving _mDone/_mTotalRel/... unset until an external reset().
        JapaneseContextAnalysis.__init__(self)
        self.charset_name = "SHIFT_JIS"

    def get_charset_name(self):
        return self.charset_name

    def get_order(self, aBuf):
        if not aBuf:
            return -1, 1
        # Find out the current character's byte length.
        first_char = wrap_ord(aBuf[0])
        if (0x81 <= first_char <= 0x9F) or (0xE0 <= first_char <= 0xFC):
            charLen = 2
            # These lead bytes only occur in the CP932 superset of
            # Shift-JIS, so upgrade the reported charset name.
            if (first_char == 0x87) or (0xFA <= first_char <= 0xFC):
                self.charset_name = "CP932"
        else:
            charLen = 1

        # Return its order if it is hiragana.
        # NOTE(review): 202 (0xCA) looks suspect as a Shift-JIS hiragana
        # lead byte (the standard row is 0x82); preserved as-is — confirm
        # against upstream before changing.
        if len(aBuf) > 1:
            second_char = wrap_ord(aBuf[1])
            if (first_char == 202) and (0x9F <= second_char <= 0xF1):
                return second_char - 0x9F, charLen

        return -1, charLen


class EUCJPContextAnalysis(JapaneseContextAnalysis):
    def get_order(self, aBuf):
        if not aBuf:
            return -1, 1
        # Find out the current character's byte length: 0x8E (SS2) and the
        # JIS X 0208 range 0xA1-0xFE introduce 2-byte characters, 0x8F
        # (SS3) introduces 3-byte characters.
        first_char = wrap_ord(aBuf[0])
        if (first_char == 0x8E) or (0xA1 <= first_char <= 0xFE):
            charLen = 2
        elif first_char == 0x8F:
            charLen = 3
        else:
            charLen = 1

        # Return its order if it is hiragana (row 0xA4 in EUC-JP).
        if len(aBuf) > 1:
            second_char = wrap_ord(aBuf[1])
            if (first_char == 0xA4) and (0xA1 <= second_char <= 0xF3):
                return second_char - 0xA1, charLen

        return -1, charLen
# flake8: noqa
######################## BEGIN LICENSE BLOCK ######################### The Original Code is Mozilla Communicator client code.## The Initial Developer of the Original Code is# Netscape Communications Corporation.# Portions created by the Initial Developer are Copyright (C) 1998# the Initial Developer. All Rights Reserved.## Contributor(s):# Mark Pilgrim - port to Python## This library is free software; you can redistribute it and/or# modify it under the terms of the GNU Lesser General Public# License as published by the Free Software Foundation; either# version 2.1 of the License, or (at your option) any later version.## This library is distributed in the hope that it will be useful,# but WITHOUT ANY WARRANTY; without even the implied warranty of# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU# Lesser General Public License for more details.## You should have received a copy of the GNU Lesser General Public# License along with this library; if not, write to the Free Software# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA# 02110-1301 USA######################### END LICENSE BLOCK ########################## Sampling from about 20M text materials include literature and computer technology## Japanese frequency table, applied to both S-JIS and EUC-JP# They are sorted in order.# 128 --> 0.77094# 256 --> 0.85710# 512 --> 0.92635# 1024 --> 0.97130# 2048 --> 0.99431## Ideal Distribution Ratio = 0.92635 / (1-0.92635) = 12.58# Random Distribution Ration = 512 / (2965+62+83+86-512) = 0.191## Typical Distribution Ratio, 25% of IDRJIS_TYPICAL_DISTRIBUTION_RATIO = 3.0# Char to FreqOrder table ,JIS_TABLE_SIZE = 4368JISCharToFreqOrder = (40, 1, 6, 182, 152, 180, 295,2127, 285, 381,3295,4304,3068,4606,3165,3510, # 163511,1822,2785,4607,1193,2226,5070,4608, 171,2996,1247, 18, 179,5071, 856,1661, # 321262,5072, 619, 127,3431,3512,3230,1899,1700, 232, 228,1294,1298, 284, 283,2041, # 482042,1061,1062, 48, 49, 44, 45, 433, 434,1040,1041, 996, 
787,2997,1255,4305, # 642108,4609,1684,1648,5073,5074,5075,5076,5077,5078,3687,5079,4610,5080,3927,3928, # 805081,3296,3432, 290,2285,1471,2187,5082,2580,2825,1303,2140,1739,1445,2691,3375, # 961691,3297,4306,4307,4611, 452,3376,1182,2713,3688,3069,4308,5083,5084,5085,5086, # 1125087,5088,5089,5090,5091,5092,5093,5094,5095,5096,5097,5098,5099,5100,5101,5102, # 1285103,5104,5105,5106,5107,5108,5109,5110,5111,5112,4097,5113,5114,5115,5116,5117, # 1445118,5119,5120,5121,5122,5123,5124,5125,5126,5127,5128,5129,5130,5131,5132,5133, # 1605134,5135,5136,5137,5138,5139,5140,5141,5142,5143,5144,5145,5146,5147,5148,5149, # 1765150,5151,5152,4612,5153,5154,5155,5156,5157,5158,5159,5160,5161,5162,5163,5164, # 1925165,5166,5167,5168,5169,5170,5171,5172,5173,5174,5175,1472, 598, 618, 820,1205, # 2081309,1412,1858,1307,1692,5176,5177,5178,5179,5180,5181,5182,1142,1452,1234,1172, # 2241875,2043,2149,1793,1382,2973, 925,2404,1067,1241, 960,1377,2935,1491, 919,1217, # 2401865,2030,1406,1499,2749,4098,5183,5184,5185,5186,5187,5188,2561,4099,3117,1804, # 2562049,3689,4309,3513,1663,5189,3166,3118,3298,1587,1561,3433,5190,3119,1625,2998, # 2723299,4613,1766,3690,2786,4614,5191,5192,5193,5194,2161, 26,3377, 2,3929, 20, # 2883691, 47,4100, 50, 17, 16, 35, 268, 27, 243, 42, 155, 24, 154, 29, 184, # 3044, 91, 14, 92, 53, 396, 33, 289, 9, 37, 64, 620, 21, 39, 321, 5, # 32012, 11, 52, 13, 3, 208, 138, 0, 7, 60, 526, 141, 151,1069, 181, 275, # 3361591, 83, 132,1475, 126, 331, 829, 15, 69, 160, 59, 22, 157, 55,1079, 312, # 352109, 38, 23, 25, 10, 19, 79,5195, 61, 382,1124, 8, 30,5196,5197,5198, # 3685199,5200,5201,5202,5203,5204,5205,5206, 89, 62, 74, 34,2416, 112, 139, 196, # 384271, 149, 84, 607, 131, 765, 46, 88, 153, 683, 76, 874, 101, 258, 57, 80, # 40032, 364, 121,1508, 169,1547, 68, 235, 145,2999, 41, 360,3027, 70, 63, 31, # 41643, 259, 262,1383, 99, 533, 194, 66, 93, 846, 217, 192, 56, 106, 58, 565, # 432280, 272, 311, 256, 146, 82, 308, 71, 100, 128, 214, 655, 110, 261, 104,1140, # 
44854, 51, 36, 87, 67,3070, 185,2618,2936,2020, 28,1066,2390,2059,5207,5208, # 4645209,5210,5211,5212,5213,5214,5215,5216,4615,5217,5218,5219,5220,5221,5222,5223, # 4805224,5225,5226,5227,5228,5229,5230,5231,5232,5233,5234,5235,5236,3514,5237,5238, # 4965239,5240,5241,5242,5243,5244,2297,2031,4616,4310,3692,5245,3071,5246,3598,5247, # 5124617,3231,3515,5248,4101,4311,4618,3808,4312,4102,5249,4103,4104,3599,5250,5251, # 5285252,5253,5254,5255,5256,5257,5258,5259,5260,5261,5262,5263,5264,5265,5266,5267, # 5445268,5269,5270,5271,5272,5273,5274,5275,5276,5277,5278,5279,5280,5281,5282,5283, # 5605284,5285,5286,5287,5288,5289,5290,5291,5292,5293,5294,5295,5296,5297,5298,5299, # 5765300,5301,5302,5303,5304,5305,5306,5307,5308,5309,5310,5311,5312,5313,5314,5315, # 5925316,5317,5318,5319,5320,5321,5322,5323,5324,5325,5326,5327,5328,5329,5330,5331, # 6085332,5333,5334,5335,5336,5337,5338,5339,5340,5341,5342,5343,5344,5345,5346,5347, # 6245348,5349,5350,5351,5352,5353,5354,5355,5356,5357,5358,5359,5360,5361,5362,5363, # 6405364,5365,5366,5367,5368,5369,5370,5371,5372,5373,5374,5375,5376,5377,5378,5379, # 6565380,5381, 363, 642,2787,2878,2788,2789,2316,3232,2317,3434,2011, 165,1942,3930, # 6723931,3932,3933,5382,4619,5383,4620,5384,5385,5386,5387,5388,5389,5390,5391,5392, # 6885393,5394,5395,5396,5397,5398,5399,5400,5401,5402,5403,5404,5405,5406,5407,5408, # 7045409,5410,5411,5412,5413,5414,5415,5416,5417,5418,5419,5420,5421,5422,5423,5424, # 7205425,5426,5427,5428,5429,5430,5431,5432,5433,5434,5435,5436,5437,5438,5439,5440, # 7365441,5442,5443,5444,5445,5446,5447,5448,5449,5450,5451,5452,5453,5454,5455,5456, # 7525457,5458,5459,5460,5461,5462,5463,5464,5465,5466,5467,5468,5469,5470,5471,5472, # 7685473,5474,5475,5476,5477,5478,5479,5480,5481,5482,5483,5484,5485,5486,5487,5488, # 7845489,5490,5491,5492,5493,5494,5495,5496,5497,5498,5499,5500,5501,5502,5503,5504, # 8005505,5506,5507,5508,5509,5510,5511,5512,5513,5514,5515,5516,5517,5518,5519,5520, # 
8165521,5522,5523,5524,5525,5526,5527,5528,5529,5530,5531,5532,5533,5534,5535,5536, # 8325537,5538,5539,5540,5541,5542,5543,5544,5545,5546,5547,5548,5549,5550,5551,5552, # 8485553,5554,5555,5556,5557,5558,5559,5560,5561,5562,5563,5564,5565,5566,5567,5568, # 8645569,5570,5571,5572,5573,5574,5575,5576,5577,5578,5579,5580,5581,5582,5583,5584, # 8805585,5586,5587,5588,5589,5590,5591,5592,5593,5594,5595,5596,5597,5598,5599,5600, # 8965601,5602,5603,5604,5605,5606,5607,5608,5609,5610,5611,5612,5613,5614,5615,5616, # 9125617,5618,5619,5620,5621,5622,5623,5624,5625,5626,5627,5628,5629,5630,5631,5632, # 9285633,5634,5635,5636,5637,5638,5639,5640,5641,5642,5643,5644,5645,5646,5647,5648, # 9445649,5650,5651,5652,5653,5654,5655,5656,5657,5658,5659,5660,5661,5662,5663,5664, # 9605665,5666,5667,5668,5669,5670,5671,5672,5673,5674,5675,5676,5677,5678,5679,5680, # 9765681,5682,5683,5684,5685,5686,5687,5688,5689,5690,5691,5692,5693,5694,5695,5696, # 9925697,5698,5699,5700,5701,5702,5703,5704,5705,5706,5707,5708,5709,5710,5711,5712, # 10085713,5714,5715,5716,5717,5718,5719,5720,5721,5722,5723,5724,5725,5726,5727,5728, # 10245729,5730,5731,5732,5733,5734,5735,5736,5737,5738,5739,5740,5741,5742,5743,5744, # 10405745,5746,5747,5748,5749,5750,5751,5752,5753,5754,5755,5756,5757,5758,5759,5760, # 10565761,5762,5763,5764,5765,5766,5767,5768,5769,5770,5771,5772,5773,5774,5775,5776, # 10725777,5778,5779,5780,5781,5782,5783,5784,5785,5786,5787,5788,5789,5790,5791,5792, # 10885793,5794,5795,5796,5797,5798,5799,5800,5801,5802,5803,5804,5805,5806,5807,5808, # 11045809,5810,5811,5812,5813,5814,5815,5816,5817,5818,5819,5820,5821,5822,5823,5824, # 11205825,5826,5827,5828,5829,5830,5831,5832,5833,5834,5835,5836,5837,5838,5839,5840, # 11365841,5842,5843,5844,5845,5846,5847,5848,5849,5850,5851,5852,5853,5854,5855,5856, # 11525857,5858,5859,5860,5861,5862,5863,5864,5865,5866,5867,5868,5869,5870,5871,5872, # 11685873,5874,5875,5876,5877,5878,5879,5880,5881,5882,5883,5884,5885,5886,5887,5888, # 
11845889,5890,5891,5892,5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904, # 12005905,5906,5907,5908,5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920, # 12165921,5922,5923,5924,5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,5936, # 12325937,5938,5939,5940,5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,5952, # 12485953,5954,5955,5956,5957,5958,5959,5960,5961,5962,5963,5964,5965,5966,5967,5968, # 12645969,5970,5971,5972,5973,5974,5975,5976,5977,5978,5979,5980,5981,5982,5983,5984, # 12805985,5986,5987,5988,5989,5990,5991,5992,5993,5994,5995,5996,5997,5998,5999,6000, # 12966001,6002,6003,6004,6005,6006,6007,6008,6009,6010,6011,6012,6013,6014,6015,6016, # 13126017,6018,6019,6020,6021,6022,6023,6024,6025,6026,6027,6028,6029,6030,6031,6032, # 13286033,6034,6035,6036,6037,6038,6039,6040,6041,6042,6043,6044,6045,6046,6047,6048, # 13446049,6050,6051,6052,6053,6054,6055,6056,6057,6058,6059,6060,6061,6062,6063,6064, # 13606065,6066,6067,6068,6069,6070,6071,6072,6073,6074,6075,6076,6077,6078,6079,6080, # 13766081,6082,6083,6084,6085,6086,6087,6088,6089,6090,6091,6092,6093,6094,6095,6096, # 13926097,6098,6099,6100,6101,6102,6103,6104,6105,6106,6107,6108,6109,6110,6111,6112, # 14086113,6114,2044,2060,4621, 997,1235, 473,1186,4622, 920,3378,6115,6116, 379,1108, # 14244313,2657,2735,3934,6117,3809, 636,3233, 573,1026,3693,3435,2974,3300,2298,4105, # 1440854,2937,2463, 393,2581,2417, 539, 752,1280,2750,2480, 140,1161, 440, 708,1569, # 1456665,2497,1746,1291,1523,3000, 164,1603, 847,1331, 537,1997, 486, 508,1693,2418, # 14721970,2227, 878,1220, 299,1030, 969, 652,2751, 624,1137,3301,2619, 65,3302,2045, # 14881761,1859,3120,1930,3694,3516, 663,1767, 852, 835,3695, 269, 767,2826,2339,1305, # 1504896,1150, 770,1616,6118, 506,1502,2075,1012,2519, 775,2520,2975,2340,2938,4314, # 15203028,2086,1224,1943,2286,6119,3072,4315,2240,1273,1987,3935,1557, 175, 597, 985, # 15363517,2419,2521,1416,3029, 585, 938,1931,1007,1052,1932,1685,6120,3379,4316,4623, # 
1552804, 599,3121,1333,2128,2539,1159,1554,2032,3810, 687,2033,2904, 952, 675,1467, # 15683436,6121,2241,1096,1786,2440,1543,1924, 980,1813,2228, 781,2692,1879, 728,1918, # 15843696,4624, 548,1950,4625,1809,1088,1356,3303,2522,1944, 502, 972, 373, 513,2827, # 1600586,2377,2391,1003,1976,1631,6122,2464,1084, 648,1776,4626,2141, 324, 962,2012, # 16162177,2076,1384, 742,2178,1448,1173,1810, 222, 102, 301, 445, 125,2420, 662,2498, # 1632277, 200,1476,1165,1068, 224,2562,1378,1446, 450,1880, 659, 791, 582,4627,2939, # 16483936,1516,1274, 555,2099,3697,1020,1389,1526,3380,1762,1723,1787,2229, 412,2114, # 16641900,2392,3518, 512,2597, 427,1925,2341,3122,1653,1686,2465,2499, 697, 330, 273, # 1680380,2162, 951, 832, 780, 991,1301,3073, 965,2270,3519, 668,2523,2636,1286, 535, # 16961407, 518, 671, 957,2658,2378, 267, 611,2197,3030,6123, 248,2299, 967,1799,2356, # 1712850,1418,3437,1876,1256,1480,2828,1718,6124,6125,1755,1664,2405,6126,4628,2879, # 17282829, 499,2179, 676,4629, 557,2329,2214,2090, 325,3234, 464, 811,3001, 992,2342, # 17442481,1232,1469, 303,2242, 466,1070,2163, 603,1777,2091,4630,2752,4631,2714, 322, # 17602659,1964,1768, 481,2188,1463,2330,2857,3600,2092,3031,2421,4632,2318,2070,1849, # 17762598,4633,1302,2254,1668,1701,2422,3811,2905,3032,3123,2046,4106,1763,1694,4634, # 17921604, 943,1724,1454, 917, 868,2215,1169,2940, 552,1145,1800,1228,1823,1955, 316, # 18081080,2510, 361,1807,2830,4107,2660,3381,1346,1423,1134,4108,6127, 541,1263,1229, # 18241148,2540, 545, 465,1833,2880,3438,1901,3074,2482, 816,3937, 713,1788,2500, 122, # 18401575, 195,1451,2501,1111,6128, 859, 374,1225,2243,2483,4317, 390,1033,3439,3075, # 18562524,1687, 266, 793,1440,2599, 946, 779, 802, 507, 897,1081, 528,2189,1292, 711, # 18721866,1725,1167,1640, 753, 398,2661,1053, 246, 348,4318, 137,1024,3440,1600,2077, # 18882129, 825,4319, 698, 238, 521, 187,2300,1157,2423,1641,1605,1464,1610,1097,2541, # 19041260,1436, 759,2255,1814,2150, 705,3235, 409,2563,3304, 561,3033,2005,2564, 726, # 
19201956,2343,3698,4109, 949,3812,3813,3520,1669, 653,1379,2525, 881,2198, 632,2256, # 19361027, 778,1074, 733,1957, 514,1481,2466, 554,2180, 702,3938,1606,1017,1398,6129, # 19521380,3521, 921, 993,1313, 594, 449,1489,1617,1166, 768,1426,1360, 495,1794,3601, # 19681177,3602,1170,4320,2344, 476, 425,3167,4635,3168,1424, 401,2662,1171,3382,1998, # 19841089,4110, 477,3169, 474,6130,1909, 596,2831,1842, 494, 693,1051,1028,1207,3076, # 2000606,2115, 727,2790,1473,1115, 743,3522, 630, 805,1532,4321,2021, 366,1057, 838, # 2016684,1114,2142,4322,2050,1492,1892,1808,2271,3814,2424,1971,1447,1373,3305,1090, # 20321536,3939,3523,3306,1455,2199, 336, 369,2331,1035, 584,2393, 902, 718,2600,6131, # 20482753, 463,2151,1149,1611,2467, 715,1308,3124,1268, 343,1413,3236,1517,1347,2663, # 20642093,3940,2022,1131,1553,2100,2941,1427,3441,2942,1323,2484,6132,1980, 872,2368, # 20802441,2943, 320,2369,2116,1082, 679,1933,3941,2791,3815, 625,1143,2023, 422,2200, # 20963816,6133, 730,1695, 356,2257,1626,2301,2858,2637,1627,1778, 937, 883,2906,2693, # 21123002,1769,1086, 400,1063,1325,3307,2792,4111,3077, 456,2345,1046, 747,6134,1524, # 2128884,1094,3383,1474,2164,1059, 974,1688,2181,2258,1047, 345,1665,1187, 358, 875, # 21443170, 305, 660,3524,2190,1334,1135,3171,1540,1649,2542,1527, 927, 968,2793, 885, # 21601972,1850, 482, 500,2638,1218,1109,1085,2543,1654,2034, 876, 78,2287,1482,1277, # 2176861,1675,1083,1779, 724,2754, 454, 397,1132,1612,2332, 893, 672,1237, 257,2259, # 21922370, 135,3384, 337,2244, 547, 352, 340, 709,2485,1400, 788,1138,2511, 540, 772, # 22081682,2260,2272,2544,2013,1843,1902,4636,1999,1562,2288,4637,2201,1403,1533, 407, # 2224576,3308,1254,2071, 978,3385, 170, 136,1201,3125,2664,3172,2394, 213, 912, 873, # 22403603,1713,2202, 699,3604,3699, 813,3442, 493, 531,1054, 468,2907,1483, 304, 281, # 22564112,1726,1252,2094, 339,2319,2130,2639, 756,1563,2944, 748, 571,2976,1588,2425, # 22722715,1851,1460,2426,1528,1392,1973,3237, 288,3309, 685,3386, 296, 892,2716,2216, # 
22881570,2245, 722,1747,2217, 905,3238,1103,6135,1893,1441,1965, 251,1805,2371,3700, # 23042601,1919,1078, 75,2182,1509,1592,1270,2640,4638,2152,6136,3310,3817, 524, 706, # 23201075, 292,3818,1756,2602, 317, 98,3173,3605,3525,1844,2218,3819,2502, 814, 567, # 2336385,2908,1534,6137, 534,1642,3239, 797,6138,1670,1529, 953,4323, 188,1071, 538, # 2352178, 729,3240,2109,1226,1374,2000,2357,2977, 731,2468,1116,2014,2051,6139,1261, # 23681593, 803,2859,2736,3443, 556, 682, 823,1541,6140,1369,2289,1706,2794, 845, 462, # 23842603,2665,1361, 387, 162,2358,1740, 739,1770,1720,1304,1401,3241,1049, 627,1571, # 24002427,3526,1877,3942,1852,1500, 431,1910,1503, 677, 297,2795, 286,1433,1038,1198, # 24162290,1133,1596,4113,4639,2469,1510,1484,3943,6141,2442, 108, 712,4640,2372, 866, # 24323701,2755,3242,1348, 834,1945,1408,3527,2395,3243,1811, 824, 994,1179,2110,1548, # 24481453, 790,3003, 690,4324,4325,2832,2909,3820,1860,3821, 225,1748, 310, 346,1780, # 24642470, 821,1993,2717,2796, 828, 877,3528,2860,2471,1702,2165,2910,2486,1789, 453, # 2480359,2291,1676, 73,1164,1461,1127,3311, 421, 604, 314,1037, 589, 116,2487, 737, # 2496837,1180, 111, 244, 735,6142,2261,1861,1362, 986, 523, 418, 581,2666,3822, 103, # 2512855, 503,1414,1867,2488,1091, 657,1597, 979, 605,1316,4641,1021,2443,2078,2001, # 25281209, 96, 587,2166,1032, 260,1072,2153, 173, 94, 226,3244, 819,2006,4642,4114, # 25442203, 231,1744, 782, 97,2667, 786,3387, 887, 391, 442,2219,4326,1425,6143,2694, # 2560633,1544,1202, 483,2015, 592,2052,1958,2472,1655, 419, 129,4327,3444,3312,1714, # 25761257,3078,4328,1518,1098, 865,1310,1019,1885,1512,1734, 469,2444, 148, 773, 436, # 25921815,1868,1128,1055,4329,1245,2756,3445,2154,1934,1039,4643, 579,1238, 932,2320, # 2608353, 205, 801, 115,2428, 944,2321,1881, 399,2565,1211, 678, 766,3944, 335,2101, # 26241459,1781,1402,3945,2737,2131,1010, 844, 981,1326,1013, 550,1816,1545,2620,1335, # 26401008, 371,2881, 936,1419,1613,3529,1456,1395,2273,1834,2604,1317,2738,2503, 416, # 
26561643,4330, 806,1126, 229, 591,3946,1314,1981,1576,1837,1666, 347,1790, 977,3313, # 2672764,2861,1853, 688,2429,1920,1462, 77, 595, 415,2002,3034, 798,1192,4115,6144, # 26882978,4331,3035,2695,2582,2072,2566, 430,2430,1727, 842,1396,3947,3702, 613, 377, # 2704278, 236,1417,3388,3314,3174, 757,1869, 107,3530,6145,1194, 623,2262, 207,1253, # 27202167,3446,3948, 492,1117,1935, 536,1838,2757,1246,4332, 696,2095,2406,1393,1572, # 27363175,1782, 583, 190, 253,1390,2230, 830,3126,3389, 934,3245,1703,1749,2979,1870, # 27522545,1656,2204, 869,2346,4116,3176,1817, 496,1764,4644, 942,1504, 404,1903,1122, # 27681580,3606,2945,1022, 515, 372,1735, 955,2431,3036,6146,2797,1110,2302,2798, 617, # 27846147, 441, 762,1771,3447,3607,3608,1904, 840,3037, 86, 939,1385, 572,1370,2445, # 28001336, 114,3703, 898, 294, 203,3315, 703,1583,2274, 429, 961,4333,1854,1951,3390, # 28162373,3704,4334,1318,1381, 966,1911,2322,1006,1155, 309, 989, 458,2718,1795,1372, # 28321203, 252,1689,1363,3177, 517,1936, 168,1490, 562, 193,3823,1042,4117,1835, 551, # 2848470,4645, 395, 489,3448,1871,1465,2583,2641, 417,1493, 279,1295, 511,1236,1119, # 286472,1231,1982,1812,3004, 871,1564, 984,3449,1667,2696,2096,4646,2347,2833,1673, # 28803609, 695,3246,2668, 807,1183,4647, 890, 388,2333,1801,1457,2911,1765,1477,1031, # 28963316,3317,1278,3391,2799,2292,2526, 163,3450,4335,2669,1404,1802,6148,2323,2407, # 29121584,1728,1494,1824,1269, 298, 909,3318,1034,1632, 375, 776,1683,2061, 291, 210, # 29281123, 809,1249,1002,2642,3038, 206,1011,2132, 144, 975, 882,1565, 342, 667, 754, # 29441442,2143,1299,2303,2062, 447, 626,2205,1221,2739,2912,1144,1214,2206,2584, 760, # 29601715, 614, 950,1281,2670,2621, 810, 577,1287,2546,4648, 242,2168, 250,2643, 691, # 2976123,2644, 647, 313,1029, 689,1357,2946,1650, 216, 771,1339,1306, 808,2063, 549, # 2992913,1371,2913,2914,6149,1466,1092,1174,1196,1311,2605,2396,1783,1796,3079, 406, # 30082671,2117,3949,4649, 487,1825,2220,6150,2915, 448,2348,1073,6151,2397,1707, 130, # 
3024900,1598, 329, 176,1959,2527,1620,6152,2275,4336,3319,1983,2191,3705,3610,2155, # 30403706,1912,1513,1614,6153,1988, 646, 392,2304,1589,3320,3039,1826,1239,1352,1340, # 30562916, 505,2567,1709,1437,2408,2547, 906,6154,2672, 384,1458,1594,1100,1329, 710, # 3072423,3531,2064,2231,2622,1989,2673,1087,1882, 333, 841,3005,1296,2882,2379, 580, # 30881937,1827,1293,2585, 601, 574, 249,1772,4118,2079,1120, 645, 901,1176,1690, 795, # 31042207, 478,1434, 516,1190,1530, 761,2080, 930,1264, 355, 435,1552, 644,1791, 987, # 3120220,1364,1163,1121,1538, 306,2169,1327,1222, 546,2645, 218, 241, 610,1704,3321, # 31361984,1839,1966,2528, 451,6155,2586,3707,2568, 907,3178, 254,2947, 186,1845,4650, # 3152745, 432,1757, 428,1633, 888,2246,2221,2489,3611,2118,1258,1265, 956,3127,1784, # 31684337,2490, 319, 510, 119, 457,3612, 274,2035,2007,4651,1409,3128, 970,2758, 590, # 31842800, 661,2247,4652,2008,3950,1420,1549,3080,3322,3951,1651,1375,2111, 485,2491, # 32001429,1156,6156,2548,2183,1495, 831,1840,2529,2446, 501,1657, 307,1894,3247,1341, # 3216666, 899,2156,1539,2549,1559, 886, 349,2208,3081,2305,1736,3824,2170,2759,1014, # 32321913,1386, 542,1397,2948, 490, 368, 716, 362, 159, 282,2569,1129,1658,1288,1750, # 32482674, 276, 649,2016, 751,1496, 658,1818,1284,1862,2209,2087,2512,3451, 622,2834, # 3264376, 117,1060,2053,1208,1721,1101,1443, 247,1250,3179,1792,3952,2760,2398,3953, # 32806157,2144,3708, 446,2432,1151,2570,3452,2447,2761,2835,1210,2448,3082, 424,2222, # 32961251,2449,2119,2836, 504,1581,4338, 602, 817, 857,3825,2349,2306, 357,3826,1470, # 33121883,2883, 255, 958, 929,2917,3248, 302,4653,1050,1271,1751,2307,1952,1430,2697, # 33282719,2359, 354,3180, 777, 158,2036,4339,1659,4340,4654,2308,2949,2248,1146,2232, # 33443532,2720,1696,2623,3827,6158,3129,1550,2698,1485,1297,1428, 637, 931,2721,2145, # 3360914,2550,2587, 81,2450, 612, 827,2646,1242,4655,1118,2884, 472,1855,3181,3533, # 33763534, 569,1353,2699,1244,1758,2588,4119,2009,2762,2171,3709,1312,1531,6159,1152, # 
33921938, 134,1830, 471,3710,2276,1112,1535,3323,3453,3535, 982,1337,2950, 488, 826, # 3408674,1058,1628,4120,2017, 522,2399, 211, 568,1367,3454, 350, 293,1872,1139,3249, # 34241399,1946,3006,1300,2360,3324, 588, 736,6160,2606, 744, 669,3536,3828,6161,1358, # 3440199, 723, 848, 933, 851,1939,1505,1514,1338,1618,1831,4656,1634,3613, 443,2740, # 34563829, 717,1947, 491,1914,6162,2551,1542,4121,1025,6163,1099,1223, 198,3040,2722, # 3472370, 410,1905,2589, 998,1248,3182,2380, 519,1449,4122,1710, 947, 928,1153,4341, # 34882277, 344,2624,1511, 615, 105, 161,1212,1076,1960,3130,2054,1926,1175,1906,2473, # 3504414,1873,2801,6164,2309, 315,1319,3325, 318,2018,2146,2157, 963, 631, 223,4342, # 35204343,2675, 479,3711,1197,2625,3712,2676,2361,6165,4344,4123,6166,2451,3183,1886, # 35362184,1674,1330,1711,1635,1506, 799, 219,3250,3083,3954,1677,3713,3326,2081,3614, # 35521652,2073,4657,1147,3041,1752, 643,1961, 147,1974,3955,6167,1716,2037, 918,3007, # 35681994, 120,1537, 118, 609,3184,4345, 740,3455,1219, 332,1615,3830,6168,1621,2980, # 35841582, 783, 212, 553,2350,3714,1349,2433,2082,4124, 889,6169,2310,1275,1410, 973, # 3600166,1320,3456,1797,1215,3185,2885,1846,2590,2763,4658, 629, 822,3008, 763, 940, # 36161990,2862, 439,2409,1566,1240,1622, 926,1282,1907,2764, 654,2210,1607, 327,1130, # 36323956,1678,1623,6170,2434,2192, 686, 608,3831,3715, 903,3957,3042,6171,2741,1522, # 36481915,1105,1555,2552,1359, 323,3251,4346,3457, 738,1354,2553,2311,2334,1828,2003, # 36643832,1753,2351,1227,6172,1887,4125,1478,6173,2410,1874,1712,1847, 520,1204,2607, # 3680264,4659, 836,2677,2102, 600,4660,3833,2278,3084,6174,4347,3615,1342, 640, 532, # 3696543,2608,1888,2400,2591,1009,4348,1497, 341,1737,3616,2723,1394, 529,3252,1321, # 3712983,4661,1515,2120, 971,2592, 924, 287,1662,3186,4349,2700,4350,1519, 908,1948, # 37282452, 156, 796,1629,1486,2223,2055, 694,4126,1259,1036,3392,1213,2249,2742,1889, # 37441230,3958,1015, 910, 408, 559,3617,4662, 746, 725, 935,4663,3959,3009,1289, 563, # 
3760867,4664,3960,1567,2981,2038,2626, 988,2263,2381,4351, 143,2374, 704,1895,6175, # 37761188,3716,2088, 673,3085,2362,4352, 484,1608,1921,2765,2918, 215, 904,3618,3537, # 3792894, 509, 976,3043,2701,3961,4353,2837,2982, 498,6176,6177,1102,3538,1332,3393, # 38081487,1636,1637, 233, 245,3962, 383, 650, 995,3044, 460,1520,1206,2352, 749,3327, # 3824530, 700, 389,1438,1560,1773,3963,2264, 719,2951,2724,3834, 870,1832,1644,1000, # 3840839,2474,3717, 197,1630,3394, 365,2886,3964,1285,2133, 734, 922, 818,1106, 732, # 3856480,2083,1774,3458, 923,2279,1350, 221,3086, 85,2233,2234,3835,1585,3010,2147, # 38721387,1705,2382,1619,2475, 133, 239,2802,1991,1016,2084,2383, 411,2838,1113, 651, # 38881985,1160,3328, 990,1863,3087,1048,1276,2647, 265,2627,1599,3253,2056, 150, 638, # 39042019, 656, 853, 326,1479, 680,1439,4354,1001,1759, 413,3459,3395,2492,1431, 459, # 39204355,1125,3329,2265,1953,1450,2065,2863, 849, 351,2678,3131,3254,3255,1104,1577, # 3936227,1351,1645,2453,2193,1421,2887, 812,2121, 634, 95,2435, 201,2312,4665,1646, # 39521671,2743,1601,2554,2702,2648,2280,1315,1366,2089,3132,1573,3718,3965,1729,1189, # 3968328,2679,1077,1940,1136, 558,1283, 964,1195, 621,2074,1199,1743,3460,3619,1896, # 39841916,1890,3836,2952,1154,2112,1064, 862, 378,3011,2066,2113,2803,1568,2839,6178, # 40003088,2919,1941,1660,2004,1992,2194, 142, 707,1590,1708,1624,1922,1023,1836,1233, # 40161004,2313, 789, 741,3620,6179,1609,2411,1200,4127,3719,3720,4666,2057,3721, 593, # 40322840, 367,2920,1878,6180,3461,1521, 628,1168, 692,2211,2649, 300, 720,2067,2571, # 40482953,3396, 959,2504,3966,3539,3462,1977, 701,6181, 954,1043, 800, 681, 183,3722, # 40641803,1730,3540,4128,2103, 815,2314, 174, 467, 230,2454,1093,2134, 755,3541,3397, # 40801141,1162,6182,1738,2039, 270,3256,2513,1005,1647,2185,3837, 858,1679,1897,1719, # 40962954,2324,1806, 402, 670, 167,4129,1498,2158,2104, 750,6183, 915, 189,1680,1551, # 4112455,4356,1501,2455, 405,1095,2955, 338,1586,1266,1819, 570, 641,1324, 237,1556, # 
41282650,1388,3723,6184,1368,2384,1343,1978,3089,2436, 879,3724, 792,1191, 758,3012, # 41441411,2135,1322,4357, 240,4667,1848,3725,1574,6185, 420,3045,1546,1391, 714,4358, # 41601967, 941,1864, 863, 664, 426, 560,1731,2680,1785,2864,1949,2363, 403,3330,1415, # 41761279,2136,1697,2335, 204, 721,2097,3838, 90,6186,2085,2505, 191,3967, 124,2148, # 41921376,1798,1178,1107,1898,1405, 860,4359,1243,1272,2375,2983,1558,2456,1638, 113, # 42083621, 578,1923,2609, 880, 386,4130, 784,2186,2266,1422,2956,2172,1722, 497, 263, # 42242514,1267,2412,2610, 177,2703,3542, 774,1927,1344, 616,1432,1595,1018, 172,4360, # 42402325, 911,4361, 438,1468,3622, 794,3968,2024,2173,1681,1829,2957, 945, 895,3090, # 4256575,2212,2476, 475,2401,2681, 785,2744,1745,2293,2555,1975,3133,2865, 394,4668, # 42723839, 635,4131, 639, 202,1507,2195,2766,1345,1435,2572,3726,1908,1184,1181,2457, # 42883727,3134,4362, 843,2611, 437, 916,4669, 234, 769,1884,3046,3047,3623, 833,6187, # 43041639,2250,2402,1355,1185,2010,2047, 999, 525,1732,1290,1488,2612, 948,1578,3728, # 43202413,2477,1216,2725,2159, 334,3840,1328,3624,2921,1525,4132, 564,1056, 891,4363, # 43361444,1698,2385,2251,3729,1365,2281,2235,1717,6188, 864,3841,2515, 444, 527,2767, # 43522922,3625, 544, 461,6189, 566, 209,2437,3398,2098,1065,2068,3331,3626,3257,2137, # 4368 #last 512#Everything below is of no interest for detection purpose2138,2122,3730,2888,1995,1820,1044,6190,6191,6192,6193,6194,6195,6196,6197,6198, # 43846199,6200,6201,6202,6203,6204,6205,4670,6206,6207,6208,6209,6210,6211,6212,6213, # 44006214,6215,6216,6217,6218,6219,6220,6221,6222,6223,6224,6225,6226,6227,6228,6229, # 44166230,6231,6232,6233,6234,6235,6236,6237,3187,6238,6239,3969,6240,6241,6242,6243, # 44326244,4671,6245,6246,4672,6247,6248,4133,6249,6250,4364,6251,2923,2556,2613,4673, # 44484365,3970,6252,6253,6254,6255,4674,6256,6257,6258,2768,2353,4366,4675,4676,3188, # 44644367,3463,6259,4134,4677,4678,6260,2267,6261,3842,3332,4368,3543,6262,6263,6264, # 
44803013,1954,1928,4135,4679,6265,6266,2478,3091,6267,4680,4369,6268,6269,1699,6270, # 44963544,4136,4681,6271,4137,6272,4370,2804,6273,6274,2593,3971,3972,4682,6275,2236, # 45124683,6276,6277,4684,6278,6279,4138,3973,4685,6280,6281,3258,6282,6283,6284,6285, # 45283974,4686,2841,3975,6286,6287,3545,6288,6289,4139,4687,4140,6290,4141,6291,4142, # 45446292,6293,3333,6294,6295,6296,4371,6297,3399,6298,6299,4372,3976,6300,6301,6302, # 45604373,6303,6304,3843,3731,6305,4688,4374,6306,6307,3259,2294,6308,3732,2530,4143, # 45766309,4689,6310,6311,6312,3048,6313,6314,4690,3733,2237,6315,6316,2282,3334,6317, # 45926318,3844,6319,6320,4691,6321,3400,4692,6322,4693,6323,3049,6324,4375,6325,3977, # 46086326,6327,6328,3546,6329,4694,3335,6330,4695,4696,6331,6332,6333,6334,4376,3978, # 46246335,4697,3979,4144,6336,3980,4698,6337,6338,6339,6340,6341,4699,4700,4701,6342, # 46406343,4702,6344,6345,4703,6346,6347,4704,6348,4705,4706,3135,6349,4707,6350,4708, # 46566351,4377,6352,4709,3734,4145,6353,2506,4710,3189,6354,3050,4711,3981,6355,3547, # 46723014,4146,4378,3735,2651,3845,3260,3136,2224,1986,6356,3401,6357,4712,2594,3627, # 46883137,2573,3736,3982,4713,3628,4714,4715,2682,3629,4716,6358,3630,4379,3631,6359, # 47046360,6361,3983,6362,6363,6364,6365,4147,3846,4717,6366,6367,3737,2842,6368,4718, # 47202628,6369,3261,6370,2386,6371,6372,3738,3984,4719,3464,4720,3402,6373,2924,3336, # 47364148,2866,6374,2805,3262,4380,2704,2069,2531,3138,2806,2984,6375,2769,6376,4721, # 47524722,3403,6377,6378,3548,6379,6380,2705,3092,1979,4149,2629,3337,2889,6381,3338, # 47684150,2557,3339,4381,6382,3190,3263,3739,6383,4151,4723,4152,2558,2574,3404,3191, # 47846384,6385,4153,6386,4724,4382,6387,6388,4383,6389,6390,4154,6391,4725,3985,6392, # 48003847,4155,6393,6394,6395,6396,6397,3465,6398,4384,6399,6400,6401,6402,6403,6404, # 48164156,6405,6406,6407,6408,2123,6409,6410,2326,3192,4726,6411,6412,6413,6414,4385, # 48324157,6415,6416,4158,6417,3093,3848,6418,3986,6419,6420,3849,6421,6422,6423,4159, 
# 48486424,6425,4160,6426,3740,6427,6428,6429,6430,3987,6431,4727,6432,2238,6433,6434, # 48644386,3988,6435,6436,3632,6437,6438,2843,6439,6440,6441,6442,3633,6443,2958,6444, # 48806445,3466,6446,2364,4387,3850,6447,4388,2959,3340,6448,3851,6449,4728,6450,6451, # 48963264,4729,6452,3193,6453,4389,4390,2706,3341,4730,6454,3139,6455,3194,6456,3051, # 49122124,3852,1602,4391,4161,3853,1158,3854,4162,3989,4392,3990,4731,4732,4393,2040, # 49284163,4394,3265,6457,2807,3467,3855,6458,6459,6460,3991,3468,4733,4734,6461,3140, # 49442960,6462,4735,6463,6464,6465,6466,4736,4737,4738,4739,6467,6468,4164,2403,3856, # 49606469,6470,2770,2844,6471,4740,6472,6473,6474,6475,6476,6477,6478,3195,6479,4741, # 49764395,6480,2867,6481,4742,2808,6482,2493,4165,6483,6484,6485,6486,2295,4743,6487, # 49926488,6489,3634,6490,6491,6492,6493,6494,6495,6496,2985,4744,6497,6498,4745,6499, # 50086500,2925,3141,4166,6501,6502,4746,6503,6504,4747,6505,6506,6507,2890,6508,6509, # 50246510,6511,6512,6513,6514,6515,6516,6517,6518,6519,3469,4167,6520,6521,6522,4748, # 50404396,3741,4397,4749,4398,3342,2125,4750,6523,4751,4752,4753,3052,6524,2961,4168, # 50566525,4754,6526,4755,4399,2926,4169,6527,3857,6528,4400,4170,6529,4171,6530,6531, # 50722595,6532,6533,6534,6535,3635,6536,6537,6538,6539,6540,6541,6542,4756,6543,6544, # 50886545,6546,6547,6548,4401,6549,6550,6551,6552,4402,3405,4757,4403,6553,6554,6555, # 51044172,3742,6556,6557,6558,3992,3636,6559,6560,3053,2726,6561,3549,4173,3054,4404, # 51206562,6563,3993,4405,3266,3550,2809,4406,6564,6565,6566,4758,4759,6567,3743,6568, # 51364760,3744,4761,3470,6569,6570,6571,4407,6572,3745,4174,6573,4175,2810,4176,3196, # 51524762,6574,4177,6575,6576,2494,2891,3551,6577,6578,3471,6579,4408,6580,3015,3197, # 51686581,3343,2532,3994,3858,6582,3094,3406,4409,6583,2892,4178,4763,4410,3016,4411, # 51846584,3995,3142,3017,2683,6585,4179,6586,6587,4764,4412,6588,6589,4413,6590,2986, # 
52006591,2962,3552,6592,2963,3472,6593,6594,4180,4765,6595,6596,2225,3267,4414,6597, # 52163407,3637,4766,6598,6599,3198,6600,4415,6601,3859,3199,6602,3473,4767,2811,4416, # 52321856,3268,3200,2575,3996,3997,3201,4417,6603,3095,2927,6604,3143,6605,2268,6606, # 52483998,3860,3096,2771,6607,6608,3638,2495,4768,6609,3861,6610,3269,2745,4769,4181, # 52643553,6611,2845,3270,6612,6613,6614,3862,6615,6616,4770,4771,6617,3474,3999,4418, # 52804419,6618,3639,3344,6619,4772,4182,6620,2126,6621,6622,6623,4420,4773,6624,3018, # 52966625,4774,3554,6626,4183,2025,3746,6627,4184,2707,6628,4421,4422,3097,1775,4185, # 53123555,6629,6630,2868,6631,6632,4423,6633,6634,4424,2414,2533,2928,6635,4186,2387, # 53286636,4775,6637,4187,6638,1891,4425,3202,3203,6639,6640,4776,6641,3345,6642,6643, # 53443640,6644,3475,3346,3641,4000,6645,3144,6646,3098,2812,4188,3642,3204,6647,3863, # 53603476,6648,3864,6649,4426,4001,6650,6651,6652,2576,6653,4189,4777,6654,6655,6656, # 53762846,6657,3477,3205,4002,6658,4003,6659,3347,2252,6660,6661,6662,4778,6663,6664, # 53926665,6666,6667,6668,6669,4779,4780,2048,6670,3478,3099,6671,3556,3747,4004,6672, # 54086673,6674,3145,4005,3748,6675,6676,6677,6678,6679,3408,6680,6681,6682,6683,3206, # 54243207,6684,6685,4781,4427,6686,4782,4783,4784,6687,6688,6689,4190,6690,6691,3479, # 54406692,2746,6693,4428,6694,6695,6696,6697,6698,6699,4785,6700,6701,3208,2727,6702, # 54563146,6703,6704,3409,2196,6705,4429,6706,6707,6708,2534,1996,6709,6710,6711,2747, # 54726712,6713,6714,4786,3643,6715,4430,4431,6716,3557,6717,4432,4433,6718,6719,6720, # 54886721,3749,6722,4006,4787,6723,6724,3644,4788,4434,6725,6726,4789,2772,6727,6728, # 55046729,6730,6731,2708,3865,2813,4435,6732,6733,4790,4791,3480,6734,6735,6736,6737, # 55204436,3348,6738,3410,4007,6739,6740,4008,6741,6742,4792,3411,4191,6743,6744,6745, # 55366746,6747,3866,6748,3750,6749,6750,6751,6752,6753,6754,6755,3867,6756,4009,6757, # 55524793,4794,6758,2814,2987,6759,6760,6761,4437,6762,6763,6764,6765,3645,6766,6767, 
# 55683481,4192,6768,3751,6769,6770,2174,6771,3868,3752,6772,6773,6774,4193,4795,4438, # 55843558,4796,4439,6775,4797,6776,6777,4798,6778,4799,3559,4800,6779,6780,6781,3482, # 56006782,2893,6783,6784,4194,4801,4010,6785,6786,4440,6787,4011,6788,6789,6790,6791, # 56166792,6793,4802,6794,6795,6796,4012,6797,6798,6799,6800,3349,4803,3483,6801,4804, # 56324195,6802,4013,6803,6804,4196,6805,4014,4015,6806,2847,3271,2848,6807,3484,6808, # 56486809,6810,4441,6811,4442,4197,4443,3272,4805,6812,3412,4016,1579,6813,6814,4017, # 56646815,3869,6816,2964,6817,4806,6818,6819,4018,3646,6820,6821,4807,4019,4020,6822, # 56806823,3560,6824,6825,4021,4444,6826,4198,6827,6828,4445,6829,6830,4199,4808,6831, # 56966832,6833,3870,3019,2458,6834,3753,3413,3350,6835,4809,3871,4810,3561,4446,6836, # 57126837,4447,4811,4812,6838,2459,4448,6839,4449,6840,6841,4022,3872,6842,4813,4814, # 57286843,6844,4815,4200,4201,4202,6845,4023,6846,6847,4450,3562,3873,6848,6849,4816, # 57444817,6850,4451,4818,2139,6851,3563,6852,6853,3351,6854,6855,3352,4024,2709,3414, # 57604203,4452,6856,4204,6857,6858,3874,3875,6859,6860,4819,6861,6862,6863,6864,4453, # 57763647,6865,6866,4820,6867,6868,6869,6870,4454,6871,2869,6872,6873,4821,6874,3754, # 57926875,4822,4205,6876,6877,6878,3648,4206,4455,6879,4823,6880,4824,3876,6881,3055, # 58084207,6882,3415,6883,6884,6885,4208,4209,6886,4210,3353,6887,3354,3564,3209,3485, # 58242652,6888,2728,6889,3210,3755,6890,4025,4456,6891,4825,6892,6893,6894,6895,4211, # 58406896,6897,6898,4826,6899,6900,4212,6901,4827,6902,2773,3565,6903,4828,6904,6905, # 58566906,6907,3649,3650,6908,2849,3566,6909,3567,3100,6910,6911,6912,6913,6914,6915, # 58724026,6916,3355,4829,3056,4457,3756,6917,3651,6918,4213,3652,2870,6919,4458,6920, # 58882438,6921,6922,3757,2774,4830,6923,3356,4831,4832,6924,4833,4459,3653,2507,6925, # 59044834,2535,6926,6927,3273,4027,3147,6928,3568,6929,6930,6931,4460,6932,3877,4461, # 
59202729,3654,6933,6934,6935,6936,2175,4835,2630,4214,4028,4462,4836,4215,6937,3148, # 59364216,4463,4837,4838,4217,6938,6939,2850,4839,6940,4464,6941,6942,6943,4840,6944, # 59524218,3274,4465,6945,6946,2710,6947,4841,4466,6948,6949,2894,6950,6951,4842,6952, # 59684219,3057,2871,6953,6954,6955,6956,4467,6957,2711,6958,6959,6960,3275,3101,4843, # 59846961,3357,3569,6962,4844,6963,6964,4468,4845,3570,6965,3102,4846,3758,6966,4847, # 60003878,4848,4849,4029,6967,2929,3879,4850,4851,6968,6969,1733,6970,4220,6971,6972, # 60166973,6974,6975,6976,4852,6977,6978,6979,6980,6981,6982,3759,6983,6984,6985,3486, # 60323487,6986,3488,3416,6987,6988,6989,6990,6991,6992,6993,6994,6995,6996,6997,4853, # 60486998,6999,4030,7000,7001,3211,7002,7003,4221,7004,7005,3571,4031,7006,3572,7007, # 60642614,4854,2577,7008,7009,2965,3655,3656,4855,2775,3489,3880,4222,4856,3881,4032, # 60803882,3657,2730,3490,4857,7010,3149,7011,4469,4858,2496,3491,4859,2283,7012,7013, # 60967014,2365,4860,4470,7015,7016,3760,7017,7018,4223,1917,7019,7020,7021,4471,7022, # 61122776,4472,7023,7024,7025,7026,4033,7027,3573,4224,4861,4034,4862,7028,7029,1929, # 61283883,4035,7030,4473,3058,7031,2536,3761,3884,7032,4036,7033,2966,2895,1968,4474, # 61443276,4225,3417,3492,4226,2105,7034,7035,1754,2596,3762,4227,4863,4475,3763,4864, # 61603764,2615,2777,3103,3765,3658,3418,4865,2296,3766,2815,7036,7037,7038,3574,2872, # 61763277,4476,7039,4037,4477,7040,7041,4038,7042,7043,7044,7045,7046,7047,2537,7048, # 61927049,7050,7051,7052,7053,7054,4478,7055,7056,3767,3659,4228,3575,7057,7058,4229, # 62087059,7060,7061,3660,7062,3212,7063,3885,4039,2460,7064,7065,7066,7067,7068,7069, # 62247070,7071,7072,7073,7074,4866,3768,4867,7075,7076,7077,7078,4868,3358,3278,2653, # 62407079,7080,4479,3886,7081,7082,4869,7083,7084,7085,7086,7087,7088,2538,7089,7090, # 62567091,4040,3150,3769,4870,4041,2896,3359,4230,2930,7092,3279,7093,2967,4480,3213, # 62724481,3661,7094,7095,7096,7097,7098,7099,7100,7101,7102,2461,3770,7103,7104,4231, 
# 62883151,7105,7106,7107,4042,3662,7108,7109,4871,3663,4872,4043,3059,7110,7111,7112, # 63043493,2988,7113,4873,7114,7115,7116,3771,4874,7117,7118,4232,4875,7119,3576,2336, # 63204876,7120,4233,3419,4044,4877,4878,4482,4483,4879,4484,4234,7121,3772,4880,1045, # 63363280,3664,4881,4882,7122,7123,7124,7125,4883,7126,2778,7127,4485,4486,7128,4884, # 63523214,3887,7129,7130,3215,7131,4885,4045,7132,7133,4046,7134,7135,7136,7137,7138, # 63687139,7140,7141,7142,7143,4235,7144,4886,7145,7146,7147,4887,7148,7149,7150,4487, # 63844047,4488,7151,7152,4888,4048,2989,3888,7153,3665,7154,4049,7155,7156,7157,7158, # 64007159,7160,2931,4889,4890,4489,7161,2631,3889,4236,2779,7162,7163,4891,7164,3060, # 64167165,1672,4892,7166,4893,4237,3281,4894,7167,7168,3666,7169,3494,7170,7171,4050, # 64327172,7173,3104,3360,3420,4490,4051,2684,4052,7174,4053,7175,7176,7177,2253,4054, # 64487178,7179,4895,7180,3152,3890,3153,4491,3216,7181,7182,7183,2968,4238,4492,4055, # 64647184,2990,7185,2479,7186,7187,4493,7188,7189,7190,7191,7192,4896,7193,4897,2969, # 64804494,4898,7194,3495,7195,7196,4899,4495,7197,3105,2731,7198,4900,7199,7200,7201, # 64964056,7202,3361,7203,7204,4496,4901,4902,7205,4497,7206,7207,2315,4903,7208,4904, # 65127209,4905,2851,7210,7211,3577,7212,3578,4906,7213,4057,3667,4907,7214,4058,2354, # 65283891,2376,3217,3773,7215,7216,7217,7218,7219,4498,7220,4908,3282,2685,7221,3496, # 65444909,2632,3154,4910,7222,2337,7223,4911,7224,7225,7226,4912,4913,3283,4239,4499, # 65607227,2816,7228,7229,7230,7231,7232,7233,7234,4914,4500,4501,7235,7236,7237,2686, # 65767238,4915,7239,2897,4502,7240,4503,7241,2516,7242,4504,3362,3218,7243,7244,7245, # 65924916,7246,7247,4505,3363,7248,7249,7250,7251,3774,4506,7252,7253,4917,7254,7255, # 66083284,2991,4918,4919,3219,3892,4920,3106,3497,4921,7256,7257,7258,4922,7259,4923, # 66243364,4507,4508,4059,7260,4240,3498,7261,7262,4924,7263,2992,3893,4060,3220,7264, # 
66407265,7266,7267,7268,7269,4509,3775,7270,2817,7271,4061,4925,4510,3776,7272,4241, # 66564511,3285,7273,7274,3499,7275,7276,7277,4062,4512,4926,7278,3107,3894,7279,7280, # 66724927,7281,4513,7282,7283,3668,7284,7285,4242,4514,4243,7286,2058,4515,4928,4929, # 66884516,7287,3286,4244,7288,4517,7289,7290,7291,3669,7292,7293,4930,4931,4932,2355, # 67044933,7294,2633,4518,7295,4245,7296,7297,4519,7298,7299,4520,4521,4934,7300,4246, # 67204522,7301,7302,7303,3579,7304,4247,4935,7305,4936,7306,7307,7308,7309,3777,7310, # 67364523,7311,7312,7313,4248,3580,7314,4524,3778,4249,7315,3581,7316,3287,7317,3221, # 67527318,4937,7319,7320,7321,7322,7323,7324,4938,4939,7325,4525,7326,7327,7328,4063, # 67687329,7330,4940,7331,7332,4941,7333,4526,7334,3500,2780,1741,4942,2026,1742,7335, # 67847336,3582,4527,2388,7337,7338,7339,4528,7340,4250,4943,7341,7342,7343,4944,7344, # 68007345,7346,3020,7347,4945,7348,7349,7350,7351,3895,7352,3896,4064,3897,7353,7354, # 68167355,4251,7356,7357,3898,7358,3779,7359,3780,3288,7360,7361,4529,7362,4946,4530, # 68322027,7363,3899,4531,4947,3222,3583,7364,4948,7365,7366,7367,7368,4949,3501,4950, # 68483781,4951,4532,7369,2517,4952,4252,4953,3155,7370,4954,4955,4253,2518,4533,7371, # 68647372,2712,4254,7373,7374,7375,3670,4956,3671,7376,2389,3502,4065,7377,2338,7378, # 68807379,7380,7381,3061,7382,4957,7383,7384,7385,7386,4958,4534,7387,7388,2993,7389, # 68963062,7390,4959,7391,7392,7393,4960,3108,4961,7394,4535,7395,4962,3421,4536,7396, # 69124963,7397,4964,1857,7398,4965,7399,7400,2176,3584,4966,7401,7402,3422,4537,3900, # 69283585,7403,3782,7404,2852,7405,7406,7407,4538,3783,2654,3423,4967,4539,7408,3784, # 69443586,2853,4540,4541,7409,3901,7410,3902,7411,7412,3785,3109,2327,3903,7413,7414, # 69602970,4066,2932,7415,7416,7417,3904,3672,3424,7418,4542,4543,4544,7419,4968,7420, # 69767421,4255,7422,7423,7424,7425,7426,4067,7427,3673,3365,4545,7428,3110,2559,3674, # 69927429,7430,3156,7431,7432,3503,7433,3425,4546,7434,3063,2873,7435,3223,4969,4547, 
# 70084548,2898,4256,4068,7436,4069,3587,3786,2933,3787,4257,4970,4971,3788,7437,4972, # 70243064,7438,4549,7439,7440,7441,7442,7443,4973,3905,7444,2874,7445,7446,7447,7448, # 70403021,7449,4550,3906,3588,4974,7450,7451,3789,3675,7452,2578,7453,4070,7454,7455, # 70567456,4258,3676,7457,4975,7458,4976,4259,3790,3504,2634,4977,3677,4551,4260,7459, # 70727460,7461,7462,3907,4261,4978,7463,7464,7465,7466,4979,4980,7467,7468,2213,4262, # 70887469,7470,7471,3678,4981,7472,2439,7473,4263,3224,3289,7474,3908,2415,4982,7475, # 71044264,7476,4983,2655,7477,7478,2732,4552,2854,2875,7479,7480,4265,7481,4553,4984, # 71207482,7483,4266,7484,3679,3366,3680,2818,2781,2782,3367,3589,4554,3065,7485,4071, # 71362899,7486,7487,3157,2462,4072,4555,4073,4985,4986,3111,4267,2687,3368,4556,4074, # 71523791,4268,7488,3909,2783,7489,2656,1962,3158,4557,4987,1963,3159,3160,7490,3112, # 71684988,4989,3022,4990,4991,3792,2855,7491,7492,2971,4558,7493,7494,4992,7495,7496, # 71847497,7498,4993,7499,3426,4559,4994,7500,3681,4560,4269,4270,3910,7501,4075,4995, # 72004271,7502,7503,4076,7504,4996,7505,3225,4997,4272,4077,2819,3023,7506,7507,2733, # 72164561,7508,4562,7509,3369,3793,7510,3590,2508,7511,7512,4273,3113,2994,2616,7513, # 72327514,7515,7516,7517,7518,2820,3911,4078,2748,7519,7520,4563,4998,7521,7522,7523, # 72487524,4999,4274,7525,4564,3682,2239,4079,4565,7526,7527,7528,7529,5000,7530,7531, # 72645001,4275,3794,7532,7533,7534,3066,5002,4566,3161,7535,7536,4080,7537,3162,7538, # 72807539,4567,7540,7541,7542,7543,7544,7545,5003,7546,4568,7547,7548,7549,7550,7551, # 72967552,7553,7554,7555,7556,5004,7557,7558,7559,5005,7560,3795,7561,4569,7562,7563, # 73127564,2821,3796,4276,4277,4081,7565,2876,7566,5006,7567,7568,2900,7569,3797,3912, # 73287570,7571,7572,4278,7573,7574,7575,5007,7576,7577,5008,7578,7579,4279,2934,7580, # 73447581,5009,7582,4570,7583,4280,7584,7585,7586,4571,4572,3913,7587,4573,3505,7588, # 
73605010,7589,7590,7591,7592,3798,4574,7593,7594,5011,7595,4281,7596,7597,7598,4282, # 73765012,7599,7600,5013,3163,7601,5014,7602,3914,7603,7604,2734,4575,4576,4577,7605, # 73927606,7607,7608,7609,3506,5015,4578,7610,4082,7611,2822,2901,2579,3683,3024,4579, # 74083507,7612,4580,7613,3226,3799,5016,7614,7615,7616,7617,7618,7619,7620,2995,3290, # 74247621,4083,7622,5017,7623,7624,7625,7626,7627,4581,3915,7628,3291,7629,5018,7630, # 74407631,7632,7633,4084,7634,7635,3427,3800,7636,7637,4582,7638,5019,4583,5020,7639, # 74563916,7640,3801,5021,4584,4283,7641,7642,3428,3591,2269,7643,2617,7644,4585,3592, # 74727645,4586,2902,7646,7647,3227,5022,7648,4587,7649,4284,7650,7651,7652,4588,2284, # 74887653,5023,7654,7655,7656,4589,5024,3802,7657,7658,5025,3508,4590,7659,7660,7661, # 75041969,5026,7662,7663,3684,1821,2688,7664,2028,2509,4285,7665,2823,1841,7666,2689, # 75203114,7667,3917,4085,2160,5027,5028,2972,7668,5029,7669,7670,7671,3593,4086,7672, # 75364591,4087,5030,3803,7673,7674,7675,7676,7677,7678,7679,4286,2366,4592,4593,3067, # 75522328,7680,7681,4594,3594,3918,2029,4287,7682,5031,3919,3370,4288,4595,2856,7683, # 75683509,7684,7685,5032,5033,7686,7687,3804,2784,7688,7689,7690,7691,3371,7692,7693, # 75842877,5034,7694,7695,3920,4289,4088,7696,7697,7698,5035,7699,5036,4290,5037,5038, # 76005039,7700,7701,7702,5040,5041,3228,7703,1760,7704,5042,3229,4596,2106,4089,7705, # 76164597,2824,5043,2107,3372,7706,4291,4090,5044,7707,4091,7708,5045,3025,3805,4598, # 76324292,4293,4294,3373,7709,4599,7710,5046,7711,7712,5047,5048,3806,7713,7714,7715, # 76485049,7716,7717,7718,7719,4600,5050,7720,7721,7722,5051,7723,4295,3429,7724,7725, # 76647726,7727,3921,7728,3292,5052,4092,7729,7730,7731,7732,7733,7734,7735,5053,5054, # 76807736,7737,7738,7739,3922,3685,7740,7741,7742,7743,2635,5055,7744,5056,4601,7745, # 76967746,2560,7747,7748,7749,7750,3923,7751,7752,7753,7754,7755,4296,2903,7756,7757, # 77127758,7759,7760,3924,7761,5057,4297,7762,7763,5058,4298,7764,4093,7765,7766,5059, 
# 77283925,7767,7768,7769,7770,7771,7772,7773,7774,7775,7776,3595,7777,4299,5060,4094, # 77447778,3293,5061,7779,7780,4300,7781,7782,4602,7783,3596,7784,7785,3430,2367,7786, # 77603164,5062,5063,4301,7787,7788,4095,5064,5065,7789,3374,3115,7790,7791,7792,7793, # 77767794,7795,7796,3597,4603,7797,7798,3686,3116,3807,5066,7799,7800,5067,7801,7802, # 77924604,4302,5068,4303,4096,7803,7804,3294,7805,7806,5069,4605,2690,7807,3026,7808, # 78087809,7810,7811,7812,7813,7814,7815,7816,7817,7818,7819,7820,7821,7822,7823,7824, # 78247825,7826,7827,7828,7829,7830,7831,7832,7833,7834,7835,7836,7837,7838,7839,7840, # 78407841,7842,7843,7844,7845,7846,7847,7848,7849,7850,7851,7852,7853,7854,7855,7856, # 78567857,7858,7859,7860,7861,7862,7863,7864,7865,7866,7867,7868,7869,7870,7871,7872, # 78727873,7874,7875,7876,7877,7878,7879,7880,7881,7882,7883,7884,7885,7886,7887,7888, # 78887889,7890,7891,7892,7893,7894,7895,7896,7897,7898,7899,7900,7901,7902,7903,7904, # 79047905,7906,7907,7908,7909,7910,7911,7912,7913,7914,7915,7916,7917,7918,7919,7920, # 79207921,7922,7923,7924,3926,7925,7926,7927,7928,7929,7930,7931,7932,7933,7934,7935, # 79367936,7937,7938,7939,7940,7941,7942,7943,7944,7945,7946,7947,7948,7949,7950,7951, # 79527952,7953,7954,7955,7956,7957,7958,7959,7960,7961,7962,7963,7964,7965,7966,7967, # 79687968,7969,7970,7971,7972,7973,7974,7975,7976,7977,7978,7979,7980,7981,7982,7983, # 79847984,7985,7986,7987,7988,7989,7990,7991,7992,7993,7994,7995,7996,7997,7998,7999, # 80008000,8001,8002,8003,8004,8005,8006,8007,8008,8009,8010,8011,8012,8013,8014,8015, # 80168016,8017,8018,8019,8020,8021,8022,8023,8024,8025,8026,8027,8028,8029,8030,8031, # 80328032,8033,8034,8035,8036,8037,8038,8039,8040,8041,8042,8043,8044,8045,8046,8047, # 80488048,8049,8050,8051,8052,8053,8054,8055,8056,8057,8058,8059,8060,8061,8062,8063, # 80648064,8065,8066,8067,8068,8069,8070,8071,8072,8073,8074,8075,8076,8077,8078,8079, # 
80808080,8081,8082,8083,8084,8085,8086,8087,8088,8089,8090,8091,8092,8093,8094,8095, # 80968096,8097,8098,8099,8100,8101,8102,8103,8104,8105,8106,8107,8108,8109,8110,8111, # 81128112,8113,8114,8115,8116,8117,8118,8119,8120,8121,8122,8123,8124,8125,8126,8127, # 81288128,8129,8130,8131,8132,8133,8134,8135,8136,8137,8138,8139,8140,8141,8142,8143, # 81448144,8145,8146,8147,8148,8149,8150,8151,8152,8153,8154,8155,8156,8157,8158,8159, # 81608160,8161,8162,8163,8164,8165,8166,8167,8168,8169,8170,8171,8172,8173,8174,8175, # 81768176,8177,8178,8179,8180,8181,8182,8183,8184,8185,8186,8187,8188,8189,8190,8191, # 81928192,8193,8194,8195,8196,8197,8198,8199,8200,8201,8202,8203,8204,8205,8206,8207, # 82088208,8209,8210,8211,8212,8213,8214,8215,8216,8217,8218,8219,8220,8221,8222,8223, # 82248224,8225,8226,8227,8228,8229,8230,8231,8232,8233,8234,8235,8236,8237,8238,8239, # 82408240,8241,8242,8243,8244,8245,8246,8247,8248,8249,8250,8251,8252,8253,8254,8255, # 82568256,8257,8258,8259,8260,8261,8262,8263,8264,8265,8266,8267,8268,8269,8270,8271) # 8272# flake8: noqa
######################## BEGIN LICENSE BLOCK ######################### The Original Code is Mozilla Universal charset detector code.## The Initial Developer of the Original Code is# Shy Shalom# Portions created by the Initial Developer are Copyright (C) 2005# the Initial Developer. All Rights Reserved.## Contributor(s):# Mark Pilgrim - port to Python## This library is free software; you can redistribute it and/or# modify it under the terms of the GNU Lesser General Public# License as published by the Free Software Foundation; either# version 2.1 of the License, or (at your option) any later version.## This library is distributed in the hope that it will be useful,# but WITHOUT ANY WARRANTY; without even the implied warranty of# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU# Lesser General Public License for more details.## You should have received a copy of the GNU Lesser General Public# License along with this library; if not, write to the Free Software# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA# 02110-1301 USA######################### END LICENSE BLOCK #########################from .charsetprober import CharSetProberfrom .constants import eNotMe, eDetectingfrom .compat import wrap_ord# This prober doesn't actually recognize a language or a charset.# It is a helper prober for the use of the Hebrew model probers### General ideas of the Hebrew charset recognition ##### Four main charsets exist in Hebrew:# "ISO-8859-8" - Visual Hebrew# "windows-1255" - Logical Hebrew# "ISO-8859-8-I" - Logical Hebrew# "x-mac-hebrew" - ?? Logical Hebrew ??## Both "ISO" charsets use a completely identical set of code points, whereas# "windows-1255" and "x-mac-hebrew" are two different proper supersets of# these code points. 
windows-1255 defines additional characters in the range# 0x80-0x9F as some misc punctuation marks as well as some Hebrew-specific# diacritics and additional 'Yiddish' ligature letters in the range 0xc0-0xd6.# x-mac-hebrew defines similar additional code points but with a different# mapping.## As far as an average Hebrew text with no diacritics is concerned, all four# charsets are identical with respect to code points. Meaning that for the# main Hebrew alphabet, all four map the same values to all 27 Hebrew letters# (including final letters).## The dominant difference between these charsets is their directionality.# "Visual" directionality means that the text is ordered as if the renderer is# not aware of a BIDI rendering algorithm. The renderer sees the text and# draws it from left to right. The text itself when ordered naturally is read# backwards. A buffer of Visual Hebrew generally looks like so:# "[last word of first line spelled backwards] [whole line ordered backwards# and spelled backwards] [first word of first line spelled backwards]# [end of line] [last word of second line] ... etc' "# adding punctuation marks, numbers and English text to visual text is# naturally also "visual" and from left to right.## "Logical" directionality means the text is ordered "naturally" according to# the order it is read. It is the responsibility of the renderer to display# the text from right to left. A BIDI algorithm is used to place general# punctuation marks, numbers and English text in the text.## Texts in x-mac-hebrew are almost impossible to find on the Internet. From# what little evidence I could find, it seems that its general directionality# is Logical.## To sum up all of the above, the Hebrew probing mechanism knows about two# charsets:# Visual Hebrew - "ISO-8859-8" - backwards text - Words and sentences are# backwards while line order is natural. 
For charset recognition purposes# the line order is unimportant (In fact, for this implementation, even# word order is unimportant).# Logical Hebrew - "windows-1255" - normal, naturally ordered text.## "ISO-8859-8-I" is a subset of windows-1255 and doesn't need to be# specifically identified.# "x-mac-hebrew" is also identified as windows-1255. A text in x-mac-hebrew# that contain special punctuation marks or diacritics is displayed with# some unconverted characters showing as question marks. This problem might# be corrected using another model prober for x-mac-hebrew. Due to the fact# that x-mac-hebrew texts are so rare, writing another model prober isn't# worth the effort and performance hit.##### The Prober ###### The prober is divided between two SBCharSetProbers and a HebrewProber,# all of which are managed, created, fed data, inquired and deleted by the# SBCSGroupProber. The two SBCharSetProbers identify that the text is in# fact some kind of Hebrew, Logical or Visual. The final decision about which# one is it is made by the HebrewProber by combining final-letter scores# with the scores of the two SBCharSetProbers to produce a final answer.## The SBCSGroupProber is responsible for stripping the original text of HTML# tags, English characters, numbers, low-ASCII punctuation characters, spaces# and new lines. It reduces any sequence of such characters to a single space.# The buffer fed to each prober in the SBCS group prober is pure text in# high-ASCII.# The two SBCharSetProbers (model probers) share the same language model:# Win1255Model.# The first SBCharSetProber uses the model normally as any other# SBCharSetProber does, to recognize windows-1255, upon which this model was# built. The second SBCharSetProber is told to make the pair-of-letter# lookup in the language model backwards. This in practice exactly simulates# a visual Hebrew model using the windows-1255 logical Hebrew model.## The HebrewProber is not using any language model. 
# The HebrewProber does not use any language model.  All it does is look for
# final-letter evidence suggesting the text is either logical Hebrew or visual
# Hebrew.  Disjointed from the model probers, the results of the HebrewProber
# alone are meaningless.  HebrewProber always returns 0.00 as confidence
# since it never identifies a charset by itself.  Instead, the pointer to the
# HebrewProber is passed to the model probers as a helper "Name Prober".
# When the Group prober receives a positive identification from any prober,
# it asks for the name of the charset identified.  If the prober queried is a
# Hebrew model prober, the model prober forwards the call to the
# HebrewProber to make the final decision.  In the HebrewProber, the
# decision is made according to the final-letters scores maintained and both
# model probers' scores.  The answer is returned in the form of the name of
# the charset identified, either "windows-1255" or "ISO-8859-8".

# windows-1255 / ISO-8859-8 code points of interest
FINAL_KAF = 0xea
NORMAL_KAF = 0xeb
FINAL_MEM = 0xed
NORMAL_MEM = 0xee
FINAL_NUN = 0xef
NORMAL_NUN = 0xf0
FINAL_PE = 0xf3
NORMAL_PE = 0xf4
FINAL_TSADI = 0xf5
NORMAL_TSADI = 0xf6

# Membership sets hoisted to module level so is_final()/is_non_final() do not
# rebuild a list for every single character fed through the hot loop in
# feed(); frozenset also gives O(1) lookup instead of a linear list scan.
_FINAL_LETTERS = frozenset([FINAL_KAF, FINAL_MEM, FINAL_NUN, FINAL_PE,
                            FINAL_TSADI])
_NON_FINAL_LETTERS = frozenset([NORMAL_KAF, NORMAL_MEM, NORMAL_NUN,
                                NORMAL_PE])

# Minimum Visual vs Logical final letter score difference.
# If the difference is below this, don't rely solely on the final letter score
# distance.
MIN_FINAL_CHAR_DISTANCE = 5

# Minimum Visual vs Logical model score difference.
# If the difference is below this, don't rely at all on the model score
# distance.
MIN_MODEL_DISTANCE = 0.01

VISUAL_HEBREW_NAME = "ISO-8859-8"
LOGICAL_HEBREW_NAME = "windows-1255"


class HebrewProber(CharSetProber):
    """Helper prober deciding between logical and visual Hebrew.

    This prober does not recognize a charset on its own; it accumulates
    final-letter evidence and combines it with the two SBCharSetProber
    model scores (set via :meth:`set_model_probers`) to choose between
    "windows-1255" (logical) and "ISO-8859-8" (visual).
    """

    def __init__(self):
        CharSetProber.__init__(self)
        # The two model probers are owned by the group prober and are
        # injected later through set_model_probers().
        self._mLogicalProber = None
        self._mVisualProber = None
        self.reset()

    def reset(self):
        # Running evidence counters for the logical vs. visual decision.
        self._mFinalCharLogicalScore = 0
        self._mFinalCharVisualScore = 0
        # The two last characters seen in the previous buffer.
        # mPrev and mBeforePrev are initialized to space in order to simulate
        # a word delimiter at the beginning of the data.
        self._mPrev = ' '
        self._mBeforePrev = ' '

    def set_model_probers(self, logicalProber, visualProber):
        # These probers are owned by the group prober; we only keep
        # references so get_charset_name()/get_state() can consult them.
        self._mLogicalProber = logicalProber
        self._mVisualProber = visualProber

    def is_final(self, c):
        """Return True if *c* is a Hebrew final-form letter."""
        return wrap_ord(c) in _FINAL_LETTERS

    def is_non_final(self, c):
        """Return True if *c* is a non-final form that also has a final form.

        The normal Tsadi is not a good Non-Final letter due to words like
        'lechotet' (to chat) containing an apostrophe after the tsadi. This
        apostrophe is converted to a space in FilterWithoutEnglishLetters
        causing the Non-Final tsadi to appear at an end of a word even
        though this is not the case in the original text.
        The letters Pe and Kaf rarely display a related behavior of not being
        a good Non-Final letter. Words like 'Pop', 'Winamp' and 'Mubarak'
        for example legally end with a Non-Final Pe or Kaf. However, the
        benefit of these letters as Non-Final letters outweighs the damage
        since these words are quite rare.
        """
        return wrap_ord(c) in _NON_FINAL_LETTERS

    def feed(self, aBuf):
        """Accumulate final-letter evidence from *aBuf*.

        Final letter analysis for logical-visual decision.  The following
        cases are checked:
        1) A word longer than 1 letter, ending with a final letter:
           +1 for the logical score (text laid out "naturally").
        2) A word longer than 1 letter, ending with a Non-Final letter:
           +1 for the visual score (exceptions noted in is_non_final()).
        3) A word longer than 1 letter, starting with a final letter:
           +1 for the visual score (final letters never start a word).

        The visual and logical scores are accumulated throughout the text
        and finally checked against each other in get_charset_name().
        Final letters in the middle of words are not counted since that
        case indicates neither Logical nor Visual text.

        All 7-bit characters are filtered out (replaced with spaces) so the
        word boundary detection works properly.
        """
        if self.get_state() == eNotMe:
            # Both model probers say it's not them.  No reason to continue.
            return eNotMe

        aBuf = self.filter_high_bit_only(aBuf)

        for cur in aBuf:
            if cur == ' ':
                # We stand on a space - a word just ended
                if self._mBeforePrev != ' ':
                    # next-to-last char was not a space so self._mPrev is
                    # not a 1-letter word
                    if self.is_final(self._mPrev):
                        # case (1) [-2:not space][-1:final letter][cur:space]
                        self._mFinalCharLogicalScore += 1
                    elif self.is_non_final(self._mPrev):
                        # case (2) [-2:not space][-1:Non-Final letter]
                        # [cur:space]
                        self._mFinalCharVisualScore += 1
            else:
                # Not standing on a space
                if ((self._mBeforePrev == ' ') and
                        (self.is_final(self._mPrev)) and (cur != ' ')):
                    # case (3) [-2:space][-1:final letter][cur:not space]
                    self._mFinalCharVisualScore += 1
            self._mBeforePrev = self._mPrev
            self._mPrev = cur

        # Forever detecting, till the end or until both model probers return
        # eNotMe (handled above)
        return eDetecting

    def get_charset_name(self):
        """Decide between logical and visual Hebrew and return its name."""
        # If the final letter score distance is dominant enough, rely on it.
        finalsub = self._mFinalCharLogicalScore - self._mFinalCharVisualScore
        if finalsub >= MIN_FINAL_CHAR_DISTANCE:
            return LOGICAL_HEBREW_NAME
        if finalsub <= -MIN_FINAL_CHAR_DISTANCE:
            return VISUAL_HEBREW_NAME

        # It's not dominant enough, try to rely on the model scores instead.
        modelsub = (self._mLogicalProber.get_confidence()
                    - self._mVisualProber.get_confidence())
        if modelsub > MIN_MODEL_DISTANCE:
            return LOGICAL_HEBREW_NAME
        if modelsub < -MIN_MODEL_DISTANCE:
            return VISUAL_HEBREW_NAME

        # Still no good, back to final letter distance, maybe it'll save the
        # day.
        if finalsub < 0.0:
            return VISUAL_HEBREW_NAME

        # (finalsub > 0 - Logical) or (don't know what to do) default to
        # Logical.
        return LOGICAL_HEBREW_NAME

    def get_state(self):
        # Remain active as long as any of the model probers are active.
        if (self._mLogicalProber.get_state() == eNotMe) and \
           (self._mVisualProber.get_state() == eNotMe):
            return eNotMe
        return eDetecting
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import GB2312DistributionAnalysis
from .mbcssm import GB2312SMModel


class GB2312Prober(MultiByteCharSetProber):
    """Multi-byte prober for the GB2312 simplified-Chinese encoding.

    Wires the generic MultiByteCharSetProber machinery to the GB2312
    coding state machine and character-distribution analyzer.
    """

    def __init__(self):
        MultiByteCharSetProber.__init__(self)
        # Build the GB2312-specific components before the initial reset()
        # so the base class can reset them along with its own state.
        coding_sm = CodingStateMachine(GB2312SMModel)
        dist_analyzer = GB2312DistributionAnalysis()
        self._mCodingSM = coding_sm
        self._mDistributionAnalyzer = dist_analyzer
        self.reset()

    def get_charset_name(self):
        """Return the canonical name of the charset this prober detects."""
        return "GB2312"
######################## BEGIN LICENSE BLOCK ######################### The Original Code is Mozilla Communicator client code.## The Initial Developer of the Original Code is# Netscape Communications Corporation.# Portions created by the Initial Developer are Copyright (C) 1998# the Initial Developer. All Rights Reserved.## Contributor(s):# Mark Pilgrim - port to Python## This library is free software; you can redistribute it and/or# modify it under the terms of the GNU Lesser General Public# License as published by the Free Software Foundation; either# version 2.1 of the License, or (at your option) any later version.## This library is distributed in the hope that it will be useful,# but WITHOUT ANY WARRANTY; without even the implied warranty of# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU# Lesser General Public License for more details.## You should have received a copy of the GNU Lesser General Public# License along with this library; if not, write to the Free Software# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA# 02110-1301 USA######################### END LICENSE BLOCK ########################## GB2312 most frequently used character table## Char to FreqOrder table , from hz6763# 512 --> 0.79 -- 0.79# 1024 --> 0.92 -- 0.13# 2048 --> 0.98 -- 0.06# 6768 --> 1.00 -- 0.02## Ideal Distribution Ratio = 0.79135/(1-0.79135) = 3.79# Random Distribution Ration = 512 / (3755 - 512) = 0.157## Typical Distribution Ratio about 25% of Ideal one, still much higher that RDRGB2312_TYPICAL_DISTRIBUTION_RATIO = 0.9GB2312_TABLE_SIZE = 3760GB2312CharToFreqOrder = (1671, 749,1443,2364,3924,3807,2330,3921,1704,3463,2691,1511,1515, 572,3191,2205,2361, 224,2558, 479,1711, 963,3162, 440,4060,1905,2966,2947,3580,2647,3961,3842,2204, 869,4207, 970,2678,5626,2944,2956,1479,4048, 514,3595, 588,1346,2820,3409,249,4088,1746,1873,2047,1774, 581,1813, 358,1174,3590,1014,1561,4844,2245, 670,1636,3112, 889,1286, 953, 556,2327,3060,1290,3141, 613, 185,3477,1367, 
850,3820,1715,2428,2642,2303,2732,3041,2562,2648,3566,3946,1349, 388,3098,2091,1360,3585,152,1687,1539, 738,1559, 59,1232,2925,2267,1388,1249,1741,1679,2960, 151,1566,1125,1352,4271, 924,4296, 385,3166,4459, 310,1245,2850, 70,3285,2729,3534,3575,2398,3298,3466,1960,2265, 217,3647, 864,1909,2084,4401,2773,1010,3269,5152, 853,3051,3121,1244,4251,1895, 364,1499,1540,2313,1180,3655,2268, 562, 715,2417,3061,544, 336,3768,2380,1752,4075, 950, 280,2425,4382, 183,2759,3272, 333,4297,2155,1688,2356,1444,1039,4540, 736,1177,3349,2443,2368,2144,2225, 565, 196,1482,3406,927,1335,4147, 692, 878,1311,1653,3911,3622,1378,4200,1840,2969,3149,2126,1816,2534,1546,2393,2760, 737,2494, 13, 447, 245,2747, 38,2765,2129,2589,1079, 606,360, 471,3755,2890, 404, 848, 699,1785,1236, 370,2221,1023,3746,2074,2026,2023,2388,1581,2119, 812,1141,3091,2536,1519, 804,2053, 406,1596,1090, 784, 548,4414,1806,2264,2936,1100, 343,4114,5096, 622,3358, 743,3668,1510,1626,5020,3567,2513,3195,4115,5627,2489,2991, 24,2065,2697,1087,2719, 48,1634, 315, 68, 985,2052,198,2239,1347,1107,1439, 597,2366,2172, 871,3307, 919,2487,2790,1867, 236,2570,1413,3794, 906,3365,3381,1701,1982,1818,1524,2924,1205, 616,2586,2072,2004, 575,253,3099, 32,1365,1182, 197,1714,2454,1201, 554,3388,3224,2748, 756,2587, 250,2567,1507,1517,3529,1922,2761,2337,3416,1961,1677,2452,2238,3153, 615, 911,1506,1474,2495,1265,1906,2749,3756,3280,2161, 898,2714,1759,3450,2243,2444, 563, 26,3286,2266,3769,3344,2707,3677, 611,1402, 531,1028,2871,4548,1375, 261,2948, 835,1190,4134, 353, 840,2684,1900,3082,1435,2109,1207,1674, 329,1872,2781,4055,2686,2104, 608,3318,2423,2957,2768,1108,3739,3512,3271,3985,2203,1771,3520,1418,2054,1681,1153, 225,1627,2929, 162,2050,2511,3687,1954, 124,1859,2431,1684,3032,2894,585,4805,3969,2869,2704,2088,2032,2095,3656,2635,4362,2209, 256, 518,2042,2105,3777,3657, 643,2298,1148,1779, 190, 989,3544, 414, 11,2135,2063,2979,1471, 403,3678, 126, 770,1563, 671,2499,3216,2877, 600,1179, 
307,2805,4937,1268,1297,2694,252,4032,1448,1494,1331,1394, 127,2256, 222,1647,1035,1481,3056,1915,1048, 873,3651, 210, 33,1608,2516, 200,1520, 415, 102, 0,3389,1287, 817, 91,3299,2940,836,1814, 549,2197,1396,1669,2987,3582,2297,2848,4528,1070, 687, 20,1819, 121,1552,1364,1461,1968,2617,3540,2824,2083, 177, 948,4938,2291, 110,4549,2066, 648,3359,1755,2110,2114,4642,4845,1693,3937,3308,1257,1869,2123, 208,1804,3159,2992,2531,2549,3361,2418,1350,2347,2800,2568,1291,2036,2680, 72, 842,1990, 212,1233,1154,1586, 75,2027,3410,4900,1823,1337,2710,2676, 728,2810,1522,3026,4995, 157,755,1050,4022, 710, 785,1936,2194,2085,1406,2777,2400, 150,1250,4049,1206, 807,1910, 534, 529,3309,1721,1660, 274, 39,2827, 661,2670,1578, 925,3248,3815,1094,4278,4901,4252, 41,1150,3747,2572,2227,4501,3658,4902,3813,3357,3617,2884,2258,887, 538,4187,3199,1294,2439,3042,2329,2343,2497,1255, 107, 543,1527, 521,3478,3568, 194,5062, 15, 961,3870,1241,1192,2664, 66,5215,3260,2111,1295,1127,2152,3805,4135, 901,1164,1976, 398,1278, 530,1460, 748, 904,1054,1966,1426, 53,2909,509, 523,2279,1534, 536,1019, 239,1685, 460,2353, 673,1065,2401,3600,4298,2272,1272,2363, 284,1753,3679,4064,1695, 81, 815,2677,2757,2731,1386, 859, 500,4221,2190,2566, 757,1006,2519,2068,1166,1455, 337,2654,3203,1863,1682,1914,3025,1252,1409,1366, 847, 714,2834,2038,3209, 964,2970,1901, 885,2553,1078,1756,3049, 301,1572,3326, 688,2130,1996,2429,1805,1648,2930,3421,2750,3652,3088, 262,1158,1254,389,1641,1812, 526,1719, 923,2073,1073,1902, 468, 489,4625,1140, 857,2375,3070,3319,2863, 380, 116,1328,2693,1161,2244, 273,1212,1884,2769,3011,1775,1142, 461,3066,1200,2147,2212, 790, 702,2695,4222,1601,1058, 434,2338,5153,3640, 67,2360,4099,2502, 618,3472,1329, 416,1132, 830,2782,1807,2653,3211,3510,1662, 192,2124,296,3979,1739,1611,3684, 23, 118, 324, 446,1239,1225, 293,2520,3814,3795,2535,3116, 17,1074, 467,2692,2201, 387,2922, 45,1326,3055,1645,3659,2817, 958, 243,1903,2320,1339,2825,1784,3289, 356, 576, 
865,2315,2381,3377,3916,1088,3122,1713,1655, 935, 628,4689,1034,1327, 441, 800, 720, 894,1979,2183,1528,5289,2702,1071,4046,3572,2399,1571,3281, 79, 761,1103, 327, 134, 758,1899,1371,1615, 879, 442,215,2605,2579, 173,2048,2485,1057,2975,3317,1097,2253,3801,4263,1403,1650,2946,814,4968,3487,1548,2644,1567,1285, 2, 295,2636, 97, 946,3576, 832, 141,4257,3273, 760,3821,3521,3156,2607, 949,1024,1733,1516,1803,1920,2125,2283,2665,3180,1501,2064,3560,2171,1592, 803,3518,1416, 732,3897,4258,1363,1362,2458, 119,1427,602,1525,2608,1605,1639,3175, 694,3064, 10, 465, 76,2000,4846,4208, 444,3781,1619,3353,2206,1273,3796, 740,2483, 320,1723,2377,3660,2619,1359,1137,1762,1724,2345,2842,1850,1862, 912, 821,1866, 612,2625,1735,2573,3369,1093, 844, 89, 937,930,1424,3564,2413,2972,1004,3046,3019,2011, 711,3171,1452,4178, 428, 801,1943,432, 445,2811, 206,4136,1472, 730, 349, 73, 397,2802,2547, 998,1637,1167, 789,396,3217, 154,1218, 716,1120,1780,2819,4826,1931,3334,3762,2139,1215,2627, 552,3664,3628,3232,1405,2383,3111,1356,2652,3577,3320,3101,1703, 640,1045,1370,1246,4996, 371,1575,2436,1621,2210, 984,4033,1734,2638, 16,4529, 663,2755,3255,1451,3917,2257,1253,1955,2234,1263,2951, 214,1229, 617, 485, 359,1831,1969, 473,2310,750,2058, 165, 80,2864,2419, 361,4344,2416,2479,1134, 796,3726,1266,2943, 860,2715, 938, 390,2734,1313,1384, 248, 202, 877,1064,2854, 522,3907, 279,1602, 297,2357, 395,3740, 137,2075, 944,4089,2584,1267,3802, 62,1533,2285, 178, 176, 780,2440, 201,3707, 590, 478,1560,4354,2117,1075, 30, 74,4643,4004,1635,1441,2745,776,2596, 238,1077,1692,1912,2844, 605, 499,1742,3947, 241,3053, 980,1749, 936,2640,4511,2582, 515,1543,2162,5322,2892,2993, 890,2148,1924, 665,1827,3581,1032,968,3163, 339,1044,1896, 270, 583,1791,1720,4367,1194,3488,3669, 43,2523,1657,163,2167, 290,1209,1622,3378, 550, 634,2508,2510, 695,2634,2384,2512,1476,1414,220,1469,2341,2138,2852,3183,2900,4939,2865,3502,1211,3680, 854,3227,1299,2976,3172, 186,2998,1459, 443,1067,3251,1495, 321,1932,3054, 909, 
753,1410,1828, 436,2441,1119,1587,3164,2186,1258, 227, 231,1425,1890,3200,3942, 247, 959, 725,5254,2741, 577,2158,2079, 929, 120, 174, 838,2813, 591,1115, 417,2024, 40,3240,1536,1037, 291,4151,2354, 632,1298,2406,2500,3535,1825,1846,3451, 205,1171, 345,4238,18,1163, 811, 685,2208,1217, 425,1312,1508,1175,4308,2552,1033, 587,1381,3059,2984,3482, 340,1316,4023,3972, 792,3176, 519, 777,4690, 918, 933,4130,2981,3741,90,3360,2911,2200,5184,4550, 609,3079,2030, 272,3379,2736, 363,3881,1130,1447,286, 779, 357,1169,3350,3137,1630,1220,2687,2391, 747,1277,3688,2618,2682,2601,1156,3196,5290,4034,3102,1689,3596,3128, 874, 219,2783, 798, 508,1843,2461, 269,1658,1776,1392,1913,2983,3287,2866,2159,2372, 829,4076, 46,4253,2873,1889,1894,915,1834,1631,2181,2318, 298, 664,2818,3555,2735, 954,3228,3117, 527,3511,2173,681,2712,3033,2247,2346,3467,1652, 155,2164,3382, 113,1994, 450, 899, 494, 994,1237,2958,1875,2336,1926,3727, 545,1577,1550, 633,3473, 204,1305,3072,2410,1956,2471, 707,2134, 841,2195,2196,2663,3843,1026,4940, 990,3252,4997, 368,1092, 437,3212,3258,1933,1829, 675,2977,2893, 412, 943,3723,4644,3294,3283,2230,2373,5154,2389,2241,2661,2323,1404,2524, 593, 787, 677,3008,1275,2059, 438,2709,2609,2240,2269,2246,1446, 36,1568,1373,3892,1574,2301,1456,3962, 693,2276,5216,2035,1143,2720,1919,1797,1811,2763,4137,2597,1830,1699,1488,1198,2090, 424,1694, 312,3634,3390,4179,3335,2252,1214, 561,1059,3243,2295,2561, 975,5155,2321,2751,3772, 472,1537,3282,3398,1047,2077,2348,2878,1323,3340,3076, 690,2906, 51, 369, 170,3541,1060,2187,2688,3670,2541,1083,1683, 928,3918, 459, 109,4427, 599,3744,4286, 143,2101,2730,2490, 82,1588,3036,2121, 281,1860, 477,4035,1238,2812,3020,2716,3312,1530,2188,2055,1317, 843, 636,1808,1173,3495, 649, 181,1002, 147,3641,1159,2414,3750,2289,2795, 813,3123,2610,1136,4368, 5,3391,4541,2174, 420, 429,1728, 754,1228,2115,2219, 347,2223,2733, 735,1518,3003,2355,3134,1764,3948,3329,1888,2424,1001,1234,1972,3321,3363,1672,1021,1450,1584, 226, 765, 
655,2526,3404,3244,2302,3665, 731, 594,2184, 319,1576, 621, 658,2656,4299,2099,3864,1279,2071,2598,2739,795,3086,3699,3908,1707,2352,2402,1382,3136,2475,1465,4847,3496,3865,1085,3004,2591,1084, 213,2287,1963,3565,2250, 822, 793,4574,3187,1772,1789,3050, 595,1484,1959,2770,1080,2650, 456, 422,2996, 940,3322,4328,4345,3092,2742, 965,2784, 739,4124, 952,1358,2498,2949,2565, 332,2698,2378, 660,2260,2473,4194,3856,2919, 535,1260,2651,1208,1428,1300,1949,1303,2942, 433,2455,2450,1251,1946, 614,1269, 641,1306,1810,2737,3078,2912, 564,2365,1419,1415,1497,4460,2367,2185,1379,3005,1307,3218,2175,1897,3063, 682,1157,4040,4005,1712,1160,1941,1399, 394, 402,2952,1573,1151,2986,2404, 862, 299,2033,1489,3006, 346, 171,2886,3401,1726,2932, 168,2533,47,2507,1030,3735,1145,3370,1395,1318,1579,3609,4560,2857,4116,1457,2529,1965,504,1036,2690,2988,2405, 745,5871, 849,2397,2056,3081, 863,2359,3857,2096, 99,1397,1769,2300,4428,1643,3455,1978,1757,3718,1440, 35,4879,3742,1296,4228,2280,160,5063,1599,2013, 166, 520,3479,1646,3345,3012, 490,1937,1545,1264,2182,2505,1096,1188,1369,1436,2421,1667,2792,2460,1270,2122, 727,3167,2143, 806,1706,1012,1800,3037, 960,2218,1882, 805, 139,2456,1139,1521, 851,1052,3093,3089, 342,2039,744,5097,1468,1502,1585,2087, 223, 939, 326,2140,2577, 892,2481,1623,4077, 982,3708, 135,2131, 87,2503,3114,2326,1106, 876,1616, 547,2997,2831,2093,3441,4530,4314, 9,3256,4229,4148, 659,1462,1986,1710,2046,2913,2231,4090,4880,5255,3392,3274,1368,3689,4645,1477, 705,3384,3635,1068,1529,2941,1458,3782,1509, 100,1656,2548, 718,2339, 408,1590,2780,3548,1838,4117,3719,1345,3530, 717,3442,2778,3220,2898,1892,4590,3614,3371,2043,1998,1224,3483, 891, 635, 584,2559,3355, 733,1766,1729,1172,3789,1891,2307, 781,2982,2271,1957,1580,5773,2633,2005,4195,3097,1535,3213,1189,1934,5693,3262, 586,3118,1324,1598, 517,1564,2217,1868,1893,4445,3728,2703,3139,1526,1787,1992,3882,2875,1549,1199,1056,2224,1904,2711,5098,4287, 338,1993,3129,3489,2689,1809,2815,1997, 
957,1855,3898,2550,3275,3057,1105,1319, 627,1505,1911,1883,3526, 698,3629,3456,1833,1431, 746, 77,1261,2017,2296,1977,1885,125,1334,1600, 525,1798,1109,2222,1470,1945, 559,2236,1186,3443,2476,1929,1411,2411,3135,1777,3372,2621,1841,1613,3229, 668,1430,1839,2643,2916, 195,1989,2671,2358,1387, 629,3205,2293,5256,4439, 123,1310, 888,1879,4300,3021,3605,1003,1162,3192,2910,2010, 140,2395,2859, 55,1082,2012,2901, 662, 419,2081,1438, 680,2774,4654,3912,1620,1731,1625,5035,4065,2328, 512,1344, 802,5443,2163,2311,2537, 524,3399, 98,1155,2103,1918,2606,3925,2816,1393,2465,1504,3773,2177,3963,1478,4346,180,1113,4655,3461,2028,1698, 833,2696,1235,1322,1594,4408,3623,3013,3225,2040,3022, 541,2881, 607,3632,2029,1665,1219, 639,1385,1686,1099,2803,3231,1938,3188,2858, 427, 676,2772,1168,2025, 454,3253,2486,3556, 230,1950, 580, 791,1991,1280,1086,1974,2034, 630, 257,3338,2788,4903,1017, 86,4790, 966,2789,1995,1696,1131,259,3095,4188,1308, 179,1463,5257, 289,4107,1248, 42,3413,1725,2288, 896,1947,774,4474,4254, 604,3430,4264, 392,2514,2588, 452, 237,1408,3018, 988,4531,1970,3034,3310, 540,2370,1562,1288,2990, 502,4765,1147, 4,1853,2708, 207, 294,2814,4078,2902,2509, 684, 34,3105,3532,2551, 644, 709,2801,2344, 573,1727,3573,3557,2021,1081,3100,4315,2100,3681, 199,2263,1837,2385, 146,3484,1195,2776,3949, 997,1939,3973,1008,1091,1202,1962,1847,1149,4209,5444,1076, 493, 117,5400,2521, 972,1490,2934,1796,4542,2374,1512,2933,2657, 413,2888,1135,2762,2314,2156,1355,2369,766,2007,2527,2170,3124,2491,2593,2632,4757,2437, 234,3125,3591,1898,1750,1376,1942,3468,3138, 570,2127,2145,3276,4131, 962, 132,1445,4196, 19, 941,3624,3480,3366,1973,1374,4461,3431,2629, 283,2415,2275, 808,2887,3620,2112,2563,1353,3610,955,1089,3103,1053, 96, 88,4097, 823,3808,1583, 399, 292,4091,3313, 421,1128,642,4006, 903,2539,1877,2082, 596, 29,4066,1790, 722,2157, 130, 995,1569, 769,1485, 464, 513,2213, 288,1923,1101,2453,4316, 133, 486,2445, 50, 625, 487,2207,57, 423, 481,2962, 159,3729,1558, 491, 303, 482, 501, 
240,2837, 112,3648,2392,1783, 362, 8,3433,3422, 610,2793,3277,1390,1284,1654, 21,3823, 734, 367, 623,193, 287, 374,1009,1483, 816, 476, 313,2255,2340,1262,2150,2899,1146,2581, 782,2116,1659,2018,1880, 255,3586,3314,1110,2867,2137,2564, 986,2767,5185,2006, 650,158, 926, 762, 881,3157,2717,2362,3587, 306,3690,3245,1542,3077,2427,1691,2478,2118,2985,3490,2438, 539,2305, 983, 129,1754, 355,4201,2386, 827,2923, 104,1773,2838,2771, 411,2905,3919, 376, 767, 122,1114, 828,2422,1817,3506, 266,3460,1007,1609,4998, 945,2612,4429,2274, 726,1247,1964,2914,2199,2070,4002,4108, 657,3323,1422, 579, 455,2764,4737,1222,2895,1670, 824,1223,1487,2525, 558, 861,3080, 598,2659,2515,1967, 752,2583,2376,2214,4180, 977, 704,2464,4999,2622,4109,1210,2961,819,1541, 142,2284, 44, 418, 457,1126,3730,4347,4626,1644,1876,3671,1864, 302,1063,5694, 624, 723,1984,3745,1314,1676,2488,1610,1449,3558,3569,2166,2098, 409,1011,2325,3704,2306, 818,1732,1383,1824,1844,3757, 999,2705,3497,1216,1423,2683,2426,2954,2501,2726,2229,1475,2554,5064,1971,1794,1666,2014,1343, 783, 724, 191,2434,1354,2220,5065,1763,2752,2472,4152, 131, 175,2885,3434, 92,1466,4920,2616,3871,3872,3866, 128,1551,1632, 669,1854,3682,4691,4125,1230, 188,2973,3290,1302,1213, 560,3266, 917, 763,3909,3249,1760, 868,1958, 764,1782,2097, 145,2277,3774,4462, 64,1491,3062, 971,2132,3606,2442, 221,1226,1617, 218, 323,1185,3207,3147,571, 619,1473,1005,1744,2281, 449,1887,2396,3685, 275, 375,3816,1743,3844,3731,845,1983,2350,4210,1377, 773, 967,3499,3052,3743,2725,4007,1697,1022,3943,1464,3264,2855,2722,1952,1029,2839,2467, 84,4383,2215, 820,1391,2015,2448,3672, 377,1948,2168, 797,2545,3536,2578,2645, 94,2874,1678, 405,1259,3071, 771, 546,1315,470,1243,3083, 895,2468, 981, 969,2037, 846,4181, 653,1276,2928, 14,2594, 557,3007,2474, 156, 902,1338,1740,2574, 537,2518, 973,2282,2216,2433,1928, 138,2903,1293,2631,1612, 646,3457, 839,2935, 111, 496,2191,2847, 589,3186, 149,3994,2060,4031,2641,4067,3145,1870, 
37,3597,2136,1025,2051,3009,3383,3549,1121,1016,3261,1301, 251,2446,2599,2153, 872,3246, 637, 334,3705, 831, 884, 921,3065,3140,4092,2198,1944, 246,2964, 108,2045,1152,1921,2308,1031, 203,3173,4170,1907,3890, 810,1401,2003,1690, 506, 647,1242,2828,1761,1649,3208,2249,1589,3709,2931,5156,1708,498, 666,2613, 834,3817,1231, 184,2851,1124, 883,3197,2261,3710,1765,1553,2658,1178,2639,2351, 93,1193, 942,2538,2141,4402, 235,1821, 870,1591,2192,1709,1871,3341,1618,4126,2595,2334, 603, 651, 69, 701, 268,2662,3411,2555,1380,1606, 503,448, 254,2371,2646, 574,1187,2309,1770, 322,2235,1292,1801, 305, 566,1133, 229,2067,2057, 706, 167, 483,2002,2672,3295,1820,3561,3067, 316, 378,2746,3452,1112,136,1981, 507,1651,2917,1117, 285,4591, 182,2580,3522,1304, 335,3303,1835,2504,1795,1792,2248, 674,1018,2106,2449,1857,2292,2845, 976,3047,1781,2600,2727,1389,1281, 52,3152, 153, 265,3950, 672,3485,3951,4463, 430,1183, 365, 278,2169, 27,1407,1336,2304, 209,1340,1730,2202,1852,2403,2883, 979,1737,1062, 631,2829,2542,3876,2592, 825,2086,2226,3048,3625, 352,1417,3724, 542, 991, 431,1351,3938,1861,2294, 826,1361,2927,3142,3503,1738, 463,2462,2723, 582,1916,1595,2808, 400,3845,3891,2868,3621,2254, 58,2492,1123, 910,2160,2614,1372,1603,1196,1072,3385,1700,3267,1980, 696, 480,2430, 920, 799,1570,2920,1951,2041,4047,2540,1321,4223,2469,3562,2228,1271,2602, 401,2833,3351,2575,5157, 907,2312,1256, 410, 263,3507,1582,996, 678,1849,2316,1480, 908,3545,2237, 703,2322, 667,1826,2849,1531,2604,2999,2407,3146,2151,2630,1786,3711, 469,3542, 497,3899,2409, 858, 837,4446,3393,1274,786, 620,1845,2001,3311, 484, 308,3367,1204,1815,3691,2332,1532,2557,1842,2020,2724,1927,2333,4440, 567, 22,1673,2728,4475,1987,1858,1144,1597, 101,1832,3601,12, 974,3783,4391, 951,1412, 1,3720, 453,4608,4041, 528,1041,1027,3230,2628,1129, 875,1051,3291,1203,2262,1069,2860,2799,2149,2615,3278, 144,1758,3040, 31,475,1680, 366,2685,3184, 311,1642,4008,2466,5036,1593,1493,2809, 216,1420,1668,233, 304,2128,3284, 
232,1429,1768,1040,2008,3407,2740,2967,2543, 242,2133, 778,1565,2022,2620, 505,2189,2756,1098,2273, 372,1614, 708, 553,2846,2094,2278, 169,3626,2835,4161, 228,2674,3165, 809,1454,1309, 466,1705,1095, 900,3423, 880,2667,3751,5258,2317,3109,2571,4317,2766,1503,1342, 866,4447,1118, 63,2076, 314,1881,1348,1061, 172, 978,3515,1747, 532, 511,3970, 6, 601, 905,2699,3300,1751, 276,1467,3725,2668, 65,4239,2544,2779,2556,1604, 578,2451,1802, 992,2331,2624,1320,3446, 713,1513,1013, 103,2786,2447,1661, 886,1702, 916, 654,3574,2031,1556, 751,2178,2821,2179,1498,1538,2176, 271, 914,2251,2080,1325, 638,1953,2937,3877,2432,2754, 95,3265,1716, 260,1227,4083, 775, 106,1357,3254, 426,1607, 555,2480, 772,1985, 244,2546, 474, 495,1046,2611,1851,2061, 71,2089,1675,2590, 742,3758,2843,3222,1433, 267,2180,2576,2826,2233,2092,3913,2435, 956,1745,3075, 856,2113,1116,451, 3,1988,2896,1398, 993,2463,1878,2049,1341,2718,2721,2870,2108, 712,2904,4363,2753,2324, 277,2872,2349,2649, 384, 987, 435, 691,3000, 922, 164,3939, 652,1500,1184,4153,2482,3373,2165,4848,2335,3775,3508,3154,2806,2830,1554,2102,1664,2530,1434,2408, 893,1547,2623,3447,2832,2242,2532,3169,2856,3223,2078, 49,3770,3469, 462, 318, 656,2259,3250,3069, 679,1629,2758, 344,1138,1104,3120,1836,1283,3115,2154,1437,4448, 934, 759,1999, 794,2862,1038, 533,2560,1722,2342, 855,2626,1197,1663,4476,3127, 85,4240,2528, 25,1111,1181,3673, 407,3470,4561,2679,2713,768,1925,2841,3986,1544,1165, 932, 373,1240,2146,1930,2673, 721,4766, 354,4333,391,2963, 187, 61,3364,1442,1102, 330,1940,1767, 341,3809,4118, 393,2496,2062,2211, 105, 331, 300, 439, 913,1332, 626, 379,3304,1557, 328, 689,3952, 309,1555,931, 317,2517,3027, 325, 569, 686,2107,3084, 60,1042,1333,2794, 264,3177,4014,1628, 258,3712, 7,4464,1176,1043,1778, 683, 114,1975, 78,1492, 383,1886, 510,386, 645,5291,2891,2069,3305,4138,3867,2939,2603,2493,1935,1066,1848,3588,1015,1282,1289,4609, 697,1453,3044,2666,3611,1856,2412, 54, 719,1330, 568,3778,2459,1748, 788, 492, 551,1191,1000, 
488,3394,3763, 282,1799, 348,2016,1523,3155,2390,1049, 382,2019,1788,1170, 729,2968,3523, 897,3926,2785,2938,3292, 350,2319,3238,1718,1717,2655,3453,3143,4465, 161,2889,2980,2009,1421, 56,1908,1640,2387,2232,1917,1874,2477,4921, 148, 83,3438, 592,4245,2882,1822,1055, 741, 115,1496,1624,381,1638,4592,1020, 516,3214, 458, 947,4575,1432, 211,1514,2926,1865,2142, 189,852,1221,1400,1486, 882,2299,4036, 351, 28,1122, 700,6479,6480,6481,6482,6483, # last 512#Everything below is of no interest for detection purpose5508,6484,3900,3414,3974,4441,4024,3537,4037,5628,5099,3633,6485,3148,6486,3636,5509,3257,5510,5973,5445,5872,4941,4403,3174,4627,5873,6276,2286,4230,5446,5874,5122,6102,6103,4162,5447,5123,5323,4849,6277,3980,3851,5066,4246,5774,5067,6278,3001,2807,5695,3346,5775,5974,5158,5448,6487,5975,5976,5776,3598,6279,5696,4806,4211,4154,6280,6488,6489,6490,6281,4212,5037,3374,4171,6491,4562,4807,4722,4827,5977,6104,4532,4079,5159,5324,5160,4404,3858,5359,5875,3975,4288,4610,3486,4512,5325,3893,5360,6282,6283,5560,2522,4231,5978,5186,5449,2569,3878,6284,5401,3578,4415,6285,4656,5124,5979,2506,4247,4449,3219,3417,4334,4969,4329,6492,4576,4828,4172,4416,4829,5402,6286,3927,3852,5361,4369,4830,4477,4867,5876,4173,6493,6105,4657,6287,6106,5877,5450,6494,4155,4868,5451,3700,5629,4384,6288,6289,5878,3189,4881,6107,6290,6495,4513,6496,4692,4515,4723,5100,3356,6497,6291,3810,4080,5561,3570,4430,5980,6498,4355,5697,6499,4724,6108,6109,3764,4050,5038,5879,4093,3226,6292,5068,5217,4693,3342,5630,3504,4831,4377,4466,4309,5698,4431,5777,6293,5778,4272,3706,6110,5326,3752,4676,5327,4273,5403,4767,5631,6500,5699,5880,3475,5039,6294,5562,5125,4348,4301,4482,4068,5126,4593,5700,3380,3462,5981,5563,3824,5404,4970,5511,3825,4738,6295,6501,5452,4516,6111,5881,5564,6502,6296,5982,6503,4213,4163,3454,6504,6112,4009,4450,6113,4658,6297,6114,3035,6505,6115,3995,4904,4739,4563,4942,4110,5040,3661,3928,5362,3674,6506,5292,3612,4791,5565,4149,5983,5328,5259,5021,4725,4577,4564,4517,4364,6298,5405,457
8,5260,4594,4156,4157,5453,3592,3491,6507,5127,5512,4709,4922,5984,5701,4726,4289,6508,4015,6116,5128,4628,3424,4241,5779,6299,4905,6509,6510,5454,5702,5780,6300,4365,4923,3971,6511,5161,3270,3158,5985,4100, 867,5129,5703,6117,5363,3695,3301,5513,4467,6118,6512,5455,4232,4242,4629,6513,3959,4478,6514,5514,5329,5986,4850,5162,5566,3846,4694,6119,5456,4869,5781,3779,6301,5704,5987,5515,4710,6302,5882,6120,4392,5364,5705,6515,6121,6516,6517,3736,5988,5457,5989,4695,2457,5883,4551,5782,6303,6304,6305,5130,4971,6122,5163,6123,4870,3263,5365,3150,4871,6518,6306,5783,5069,5706,3513,3498,4409,5330,5632,5366,5458,5459,3991,5990,4502,3324,5991,5784,3696,4518,5633,4119,6519,4630,5634,4417,5707,4832,5992,3418,6124,5993,5567,4768,5218,6520,4595,3458,5367,6125,5635,6126,4202,6521,4740,4924,6307,3981,4069,4385,6308,3883,2675,4051,3834,4302,4483,5568,5994,4972,4101,5368,6309,5164,5884,3922,6127,6522,6523,5261,5460,5187,4164,5219,3538,5516,4111,3524,5995,6310,6311,5369,3181,3386,2484,5188,3464,5569,3627,5708,6524,5406,5165,4677,4492,6312,4872,4851,5885,4468,5996,6313,5709,5710,6128,2470,5886,6314,5293,4882,5785,3325,5461,5101,6129,5711,5786,6525,4906,6526,6527,4418,5887,5712,4808,2907,3701,5713,5888,6528,3765,5636,5331,6529,6530,3593,5889,3637,4943,3692,5714,5787,4925,6315,6130,5462,4405,6131,6132,6316,5262,6531,6532,5715,3859,5716,5070,4696,5102,3929,5788,3987,4792,5997,6533,6534,3920,4809,5000,5998,6535,2974,5370,6317,5189,5263,5717,3826,6536,3953,5001,4883,3190,5463,5890,4973,5999,4741,6133,6134,3607,5570,6000,4711,3362,3630,4552,5041,6318,6001,2950,2953,5637,4646,5371,4944,6002,2044,4120,3429,6319,6537,5103,4833,6538,6539,4884,4647,3884,6003,6004,4758,3835,5220,5789,4565,5407,6540,6135,5294,4697,4852,6320,6321,3206,4907,6541,6322,4945,6542,6136,6543,6323,6005,4631,3519,6544,5891,6545,5464,3784,5221,6546,5571,4659,6547,6324,6137,5190,6548,3853,6549,4016,4834,3954,6138,5332,3827,4017,3210,3546,4469,5408,5718,3505,4648,5790,5131,5638,5791,5465,4727,4318,6325,6326,5792,4553,4010,469
8,3439,4974,3638,4335,3085,6006,5104,5042,5166,5892,5572,6327,4356,4519,5222,5573,5333,5793,5043,6550,5639,5071,4503,6328,6139,6551,6140,3914,3901,5372,6007,5640,4728,4793,3976,3836,4885,6552,4127,6553,4451,4102,5002,6554,3686,5105,6555,5191,5072,5295,4611,5794,5296,6556,5893,5264,5894,4975,5466,5265,4699,4976,4370,4056,3492,5044,4886,6557,5795,4432,4769,4357,5467,3940,4660,4290,6141,4484,4770,4661,3992,6329,4025,4662,5022,4632,4835,4070,5297,4663,4596,5574,5132,5409,5895,6142,4504,5192,4664,5796,5896,3885,5575,5797,5023,4810,5798,3732,5223,4712,5298,4084,5334,5468,6143,4052,4053,4336,4977,4794,6558,5335,4908,5576,5224,4233,5024,4128,5469,5225,4873,6008,5045,4729,4742,4633,3675,4597,6559,5897,5133,5577,5003,5641,5719,6330,6560,3017,2382,3854,4406,4811,6331,4393,3964,4946,6561,2420,3722,6562,4926,4378,3247,1736,4442,6332,5134,6333,5226,3996,2918,5470,4319,4003,4598,4743,4744,4485,3785,3902,5167,5004,5373,4394,5898,6144,4874,1793,3997,6334,4085,4214,5106,5642,4909,5799,6009,4419,4189,3330,5899,4165,4420,5299,5720,5227,3347,6145,4081,6335,2876,3930,6146,3293,3786,3910,3998,5900,5300,5578,2840,6563,5901,5579,6147,3531,5374,6564,6565,5580,4759,5375,6566,6148,3559,5643,6336,6010,5517,6337,6338,5721,5902,3873,6011,6339,6567,5518,3868,3649,5722,6568,4771,4947,6569,6149,4812,6570,2853,5471,6340,6341,5644,4795,6342,6012,5723,6343,5724,6013,4349,6344,3160,6150,5193,4599,4514,4493,5168,4320,6345,4927,3666,4745,5169,5903,5005,4928,6346,5725,6014,4730,4203,5046,4948,3395,5170,6015,4150,6016,5726,5519,6347,5047,3550,6151,6348,4197,4310,5904,6571,5581,2965,6152,4978,3960,4291,5135,6572,5301,5727,4129,4026,5905,4853,5728,5472,6153,6349,4533,2700,4505,5336,4678,3583,5073,2994,4486,3043,4554,5520,6350,6017,5800,4487,6351,3931,4103,5376,6352,4011,4321,4311,4190,5136,6018,3988,3233,4350,5906,5645,4198,6573,5107,3432,4191,3435,5582,6574,4139,5410,6353,5411,3944,5583,5074,3198,6575,6354,4358,6576,5302,4600,5584,5194,5412,6577,6578,5585,5413,5303,4248,5414,3879,4433,6579,4479,5025,4854,541
5,6355,4760,4772,3683,2978,4700,3797,4452,3965,3932,3721,4910,5801,6580,5195,3551,5907,3221,3471,3029,6019,3999,5908,5909,5266,5267,3444,3023,3828,3170,4796,5646,4979,4259,6356,5647,5337,3694,6357,5648,5338,4520,4322,5802,3031,3759,4071,6020,5586,4836,4386,5048,6581,3571,4679,4174,4949,6154,4813,3787,3402,3822,3958,3215,3552,5268,4387,3933,4950,4359,6021,5910,5075,3579,6358,4234,4566,5521,6359,3613,5049,6022,5911,3375,3702,3178,4911,5339,4521,6582,6583,4395,3087,3811,5377,6023,6360,6155,4027,5171,5649,4421,4249,2804,6584,2270,6585,4000,4235,3045,6156,5137,5729,4140,4312,3886,6361,4330,6157,4215,6158,3500,3676,4929,4331,3713,4930,5912,4265,3776,3368,5587,4470,4855,3038,4980,3631,6159,6160,4132,4680,6161,6362,3923,4379,5588,4255,6586,4121,6587,6363,4649,6364,3288,4773,4774,6162,6024,6365,3543,6588,4274,3107,3737,5050,5803,4797,4522,5589,5051,5730,3714,4887,5378,4001,4523,6163,5026,5522,4701,4175,2791,3760,6589,5473,4224,4133,3847,4814,4815,4775,3259,5416,6590,2738,6164,6025,5304,3733,5076,5650,4816,5590,6591,6165,6592,3934,5269,6593,3396,5340,6594,5804,3445,3602,4042,4488,5731,5732,3525,5591,4601,5196,6166,6026,5172,3642,4612,3202,4506,4798,6366,3818,5108,4303,5138,5139,4776,3332,4304,2915,3415,4434,5077,5109,4856,2879,5305,4817,6595,5913,3104,3144,3903,4634,5341,3133,5110,5651,5805,6167,4057,5592,2945,4371,5593,6596,3474,4182,6367,6597,6168,4507,4279,6598,2822,6599,4777,4713,5594,3829,6169,3887,5417,6170,3653,5474,6368,4216,2971,5228,3790,4579,6369,5733,6600,6601,4951,4746,4555,6602,5418,5475,6027,3400,4665,5806,6171,4799,6028,5052,6172,3343,4800,4747,5006,6370,4556,4217,5476,4396,5229,5379,5477,3839,5914,5652,5807,4714,3068,4635,5808,6173,5342,4192,5078,5419,5523,5734,6174,4557,6175,4602,6371,6176,6603,5809,6372,5735,4260,3869,5111,5230,6029,5112,6177,3126,4681,5524,5915,2706,3563,4748,3130,6178,4018,5525,6604,6605,5478,4012,4837,6606,4534,4193,5810,4857,3615,5479,6030,4082,3697,3539,4086,5270,3662,4508,4931,5916,4912,5811,5027,3888,6607,4397,3527,3302,3798,2775,292
1,2637,3966,4122,4388,4028,4054,1633,4858,5079,3024,5007,3982,3412,5736,6608,3426,3236,5595,3030,6179,3427,3336,3279,3110,6373,3874,3039,5080,5917,5140,4489,3119,6374,5812,3405,4494,6031,4666,4141,6180,4166,6032,5813,4981,6609,5081,4422,4982,4112,3915,5653,3296,3983,6375,4266,4410,5654,6610,6181,3436,5082,6611,5380,6033,3819,5596,4535,5231,5306,5113,6612,4952,5918,4275,3113,6613,6376,6182,6183,5814,3073,4731,4838,5008,3831,6614,4888,3090,3848,4280,5526,5232,3014,5655,5009,5737,5420,5527,6615,5815,5343,5173,5381,4818,6616,3151,4953,6617,5738,2796,3204,4360,2989,4281,5739,5174,5421,5197,3132,5141,3849,5142,5528,5083,3799,3904,4839,5480,2880,4495,3448,6377,6184,5271,5919,3771,3193,6034,6035,5920,5010,6036,5597,6037,6378,6038,3106,5422,6618,5423,5424,4142,6619,4889,5084,4890,4313,5740,6620,3437,5175,5307,5816,4199,5198,5529,5817,5199,5656,4913,5028,5344,3850,6185,2955,5272,5011,5818,4567,4580,5029,5921,3616,5233,6621,6622,6186,4176,6039,6379,6380,3352,5200,5273,2908,5598,5234,3837,5308,6623,6624,5819,4496,4323,5309,5201,6625,6626,4983,3194,3838,4167,5530,5922,5274,6381,6382,3860,3861,5599,3333,4292,4509,6383,3553,5481,5820,5531,4778,6187,3955,3956,4324,4389,4218,3945,4325,3397,2681,5923,4779,5085,4019,5482,4891,5382,5383,6040,4682,3425,5275,4094,6627,5310,3015,5483,5657,4398,5924,3168,4819,6628,5925,6629,5532,4932,4613,6041,6630,4636,6384,4780,4204,5658,4423,5821,3989,4683,5822,6385,4954,6631,5345,6188,5425,5012,5384,3894,6386,4490,4104,6632,5741,5053,6633,5823,5926,5659,5660,5927,6634,5235,5742,5824,4840,4933,4820,6387,4859,5928,4955,6388,4143,3584,5825,5346,5013,6635,5661,6389,5014,5484,5743,4337,5176,5662,6390,2836,6391,3268,6392,6636,6042,5236,6637,4158,6638,5744,5663,4471,5347,3663,4123,5143,4293,3895,6639,6640,5311,5929,5826,3800,6189,6393,6190,5664,5348,3554,3594,4749,4603,6641,5385,4801,6043,5827,4183,6642,5312,5426,4761,6394,5665,6191,4715,2669,6643,6644,5533,3185,5427,5086,5930,5931,5386,6192,6044,6645,4781,4013,5745,4282,4435,5534,4390,4267,6045,5746,4984,604
6,2743,6193,3501,4087,5485,5932,5428,4184,4095,5747,4061,5054,3058,3862,5933,5600,6646,5144,3618,6395,3131,5055,5313,6396,4650,4956,3855,6194,3896,5202,4985,4029,4225,6195,6647,5828,5486,5829,3589,3002,6648,6397,4782,5276,6649,6196,6650,4105,3803,4043,5237,5830,6398,4096,3643,6399,3528,6651,4453,3315,4637,6652,3984,6197,5535,3182,3339,6653,3096,2660,6400,6654,3449,5934,4250,4236,6047,6401,5831,6655,5487,3753,4062,5832,6198,6199,6656,3766,6657,3403,4667,6048,6658,4338,2897,5833,3880,2797,3780,4326,6659,5748,5015,6660,5387,4351,5601,4411,6661,3654,4424,5935,4339,4072,5277,4568,5536,6402,6662,5238,6663,5349,5203,6200,5204,6201,5145,4536,5016,5056,4762,5834,4399,4957,6202,6403,5666,5749,6664,4340,6665,5936,5177,5667,6666,6667,3459,4668,6404,6668,6669,4543,6203,6670,4276,6405,4480,5537,6671,4614,5205,5668,6672,3348,2193,4763,6406,6204,5937,5602,4177,5669,3419,6673,4020,6205,4443,4569,5388,3715,3639,6407,6049,4058,6206,6674,5938,4544,6050,4185,4294,4841,4651,4615,5488,6207,6408,6051,5178,3241,3509,5835,6208,4958,5836,4341,5489,5278,6209,2823,5538,5350,5206,5429,6675,4638,4875,4073,3516,4684,4914,4860,5939,5603,5389,6052,5057,3237,5490,3791,6676,6409,6677,4821,4915,4106,5351,5058,4243,5539,4244,5604,4842,4916,5239,3028,3716,5837,5114,5605,5390,5940,5430,6210,4332,6678,5540,4732,3667,3840,6053,4305,3408,5670,5541,6410,2744,5240,5750,6679,3234,5606,6680,5607,5671,3608,4283,4159,4400,5352,4783,6681,6411,6682,4491,4802,6211,6412,5941,6413,6414,5542,5751,6683,4669,3734,5942,6684,6415,5943,5059,3328,4670,4144,4268,6685,6686,6687,6688,4372,3603,6689,5944,5491,4373,3440,6416,5543,4784,4822,5608,3792,4616,5838,5672,3514,5391,6417,4892,6690,4639,6691,6054,5673,5839,6055,6692,6056,5392,6212,4038,5544,5674,4497,6057,6693,5840,4284,5675,4021,4545,5609,6418,4454,6419,6213,4113,4472,5314,3738,5087,5279,4074,5610,4959,4063,3179,4750,6058,6420,6214,3476,4498,4716,5431,4960,4685,6215,5241,6694,6421,6216,6695,5841,5945,6422,3748,5946,5179,3905,5752,5545,5947,4374,6217,4455,6423,4412,6218,480
3,5353,6696,3832,5280,6219,4327,4702,6220,6221,6059,4652,5432,6424,3749,4751,6425,5753,4986,5393,4917,5948,5030,5754,4861,4733,6426,4703,6697,6222,4671,5949,4546,4961,5180,6223,5031,3316,5281,6698,4862,4295,4934,5207,3644,6427,5842,5950,6428,6429,4570,5843,5282,6430,6224,5088,3239,6060,6699,5844,5755,6061,6431,2701,5546,6432,5115,5676,4039,3993,3327,4752,4425,5315,6433,3941,6434,5677,4617,4604,3074,4581,6225,5433,6435,6226,6062,4823,5756,5116,6227,3717,5678,4717,5845,6436,5679,5846,6063,5847,6064,3977,3354,6437,3863,5117,6228,5547,5394,4499,4524,6229,4605,6230,4306,4500,6700,5951,6065,3693,5952,5089,4366,4918,6701,6231,5548,6232,6702,6438,4704,5434,6703,6704,5953,4168,6705,5680,3420,6706,5242,4407,6066,3812,5757,5090,5954,4672,4525,3481,5681,4618,5395,5354,5316,5955,6439,4962,6707,4526,6440,3465,4673,6067,6441,5682,6708,5435,5492,5758,5683,4619,4571,4674,4804,4893,4686,5493,4753,6233,6068,4269,6442,6234,5032,4705,5146,5243,5208,5848,6235,6443,4963,5033,4640,4226,6236,5849,3387,6444,6445,4436,4437,5850,4843,5494,4785,4894,6709,4361,6710,5091,5956,3331,6237,4987,5549,6069,6711,4342,3517,4473,5317,6070,6712,6071,4706,6446,5017,5355,6713,6714,4988,5436,6447,4734,5759,6715,4735,4547,4456,4754,6448,5851,6449,6450,3547,5852,5318,6451,6452,5092,4205,6716,6238,4620,4219,5611,6239,6072,4481,5760,5957,5958,4059,6240,6453,4227,4537,6241,5761,4030,4186,5244,5209,3761,4457,4876,3337,5495,5181,6242,5959,5319,5612,5684,5853,3493,5854,6073,4169,5613,5147,4895,6074,5210,6717,5182,6718,3830,6243,2798,3841,6075,6244,5855,5614,3604,4606,5496,5685,5118,5356,6719,6454,5960,5357,5961,6720,4145,3935,4621,5119,5962,4261,6721,6455,4786,5963,4375,4582,6245,6246,6247,6076,5437,4877,5856,3376,4380,6248,4160,6722,5148,6456,5211,6457,6723,4718,6458,6724,6249,5358,4044,3297,6459,6250,5857,5615,5497,5245,6460,5498,6725,6251,6252,5550,3793,5499,2959,5396,6461,6462,4572,5093,5500,5964,3806,4146,6463,4426,5762,5858,6077,6253,4755,3967,4220,5965,6254,4989,5501,6464,4352,6726,6078,4764,2290,5246,3906,543
8,5283,3767,4964,2861,5763,5094,6255,6256,4622,5616,5859,5860,4707,6727,4285,4708,4824,5617,6257,5551,4787,5212,4965,4935,4687,6465,6728,6466,5686,6079,3494,4413,2995,5247,5966,5618,6729,5967,5764,5765,5687,5502,6730,6731,6080,5397,6467,4990,6258,6732,4538,5060,5619,6733,4719,5688,5439,5018,5149,5284,5503,6734,6081,4607,6259,5120,3645,5861,4583,6260,4584,4675,5620,4098,5440,6261,4863,2379,3306,4585,5552,5689,4586,5285,6735,4864,6736,5286,6082,6737,4623,3010,4788,4381,4558,5621,4587,4896,3698,3161,5248,4353,4045,6262,3754,5183,4588,6738,6263,6739,6740,5622,3936,6741,6468,6742,6264,5095,6469,4991,5968,6743,4992,6744,6083,4897,6745,4256,5766,4307,3108,3968,4444,5287,3889,4343,6084,4510,6085,4559,6086,4898,5969,6746,5623,5061,4919,5249,5250,5504,5441,6265,5320,4878,3242,5862,5251,3428,6087,6747,4237,5624,5442,6266,5553,4539,6748,2585,3533,5398,4262,6088,5150,4736,4438,6089,6267,5505,4966,6749,6268,6750,6269,5288,5554,3650,6090,6091,4624,6092,5690,6751,5863,4270,5691,4277,5555,5864,6752,5692,4720,4865,6470,5151,4688,4825,6753,3094,6754,6471,3235,4653,6755,5213,5399,6756,3201,4589,5865,4967,6472,5866,6473,5019,3016,6757,5321,4756,3957,4573,6093,4993,5767,4721,6474,6758,5625,6759,4458,6475,6270,6760,5556,4994,5214,5252,6271,3875,5768,6094,5034,5506,4376,5769,6761,2120,6476,5253,5770,6762,5771,5970,3990,5971,5557,5558,5772,6477,6095,2787,4641,5972,5121,6096,6097,6272,6763,3703,5867,5507,6273,4206,6274,4789,6098,6764,3619,3646,3833,3804,2394,3788,4936,3978,4866,4899,6099,6100,5559,6478,6765,3599,5868,6101,5869,5870,6275,6766,4527,6767)# flake8: noqa
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################
from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import EUCTWDistributionAnalysis
from .mbcssm import EUCTWSMModel


class EUCTWProber(MultiByteCharSetProber):
    """Charset prober for the EUC-TW (Taiwan) multi-byte encoding.

    Pairs a coding state machine driven by the EUC-TW model with an
    EUC-TW character-distribution analyser; all feeding/scoring logic
    lives in the MultiByteCharSetProber base class.
    """

    def __init__(self):
        # Initialise the shared multi-byte prober machinery first.
        MultiByteCharSetProber.__init__(self)
        # Distribution analyser scores how typical the byte stream's
        # character frequencies are for EUC-TW text.
        self._mDistributionAnalyzer = EUCTWDistributionAnalysis()
        # State machine validates byte sequences against the EUC-TW model.
        self._mCodingSM = CodingStateMachine(EUCTWSMModel)
        # Reset puts both components into their initial state.
        self.reset()

    def get_charset_name(self):
        """Return the canonical name of the charset this prober detects."""
        return "EUC-TW"
######################## BEGIN LICENSE BLOCK ######################### The Original Code is Mozilla Communicator client code.## The Initial Developer of the Original Code is# Netscape Communications Corporation.# Portions created by the Initial Developer are Copyright (C) 1998# the Initial Developer. All Rights Reserved.## Contributor(s):# Mark Pilgrim - port to Python## This library is free software; you can redistribute it and/or# modify it under the terms of the GNU Lesser General Public# License as published by the Free Software Foundation; either# version 2.1 of the License, or (at your option) any later version.## This library is distributed in the hope that it will be useful,# but WITHOUT ANY WARRANTY; without even the implied warranty of# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU# Lesser General Public License for more details.## You should have received a copy of the GNU Lesser General Public# License along with this library; if not, write to the Free Software# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA# 02110-1301 USA######################### END LICENSE BLOCK ########################## EUCTW frequency table# Converted from big5 work# by Taiwan's Mandarin Promotion Council# <http:#www.edu.tw:81/mandr/># 128 --> 0.42261# 256 --> 0.57851# 512 --> 0.74851# 1024 --> 0.89384# 2048 --> 0.97583## Idea Distribution Ratio = 0.74851/(1-0.74851) =2.98# Random Distribution Ration = 512/(5401-512)=0.105## Typical Distribution Ratio about 25% of Ideal one, still much higher than RDREUCTW_TYPICAL_DISTRIBUTION_RATIO = 0.75# Char to FreqOrder table ,EUCTW_TABLE_SIZE = 8102EUCTWCharToFreqOrder = (1,1800,1506, 255,1431, 198, 9, 82, 6,7310, 177, 202,3615,1256,2808, 110, # 27423735, 33,3241, 261, 76, 44,2113, 16,2931,2184,1176, 659,3868, 26,3404,2643, # 27581198,3869,3313,4060, 410,2211, 302, 590, 361,1963, 8, 204, 58,4296,7311,1931, # 277463,7312,7313, 317,1614, 75, 222, 159,4061,2412,1480,7314,3500,3068, 224,2809, # 27903616, 3, 
10,3870,1471, 29,2774,1135,2852,1939, 873, 130,3242,1123, 312,7315, # 28064297,2051, 507, 252, 682,7316, 142,1914, 124, 206,2932, 34,3501,3173, 64, 604, # 28227317,2494,1976,1977, 155,1990, 645, 641,1606,7318,3405, 337, 72, 406,7319, 80, # 2838630, 238,3174,1509, 263, 939,1092,2644, 756,1440,1094,3406, 449, 69,2969, 591, # 2854179,2095, 471, 115,2034,1843, 60, 50,2970, 134, 806,1868, 734,2035,3407, 180, # 2870995,1607, 156, 537,2893, 688,7320, 319,1305, 779,2144, 514,2374, 298,4298, 359, # 28862495, 90,2707,1338, 663, 11, 906,1099,2545, 20,2436, 182, 532,1716,7321, 732, # 29021376,4062,1311,1420,3175, 25,2312,1056, 113, 399, 382,1949, 242,3408,2467, 529, # 29183243, 475,1447,3617,7322, 117, 21, 656, 810,1297,2295,2329,3502,7323, 126,4063, # 2934706, 456, 150, 613,4299, 71,1118,2036,4064, 145,3069, 85, 835, 486,2114,1246, # 29501426, 428, 727,1285,1015, 800, 106, 623, 303,1281,7324,2127,2354, 347,3736, 221, # 29663503,3110,7325,1955,1153,4065, 83, 296,1199,3070, 192, 624, 93,7326, 822,1897, # 29822810,3111, 795,2064, 991,1554,1542,1592, 27, 43,2853, 859, 139,1456, 860,4300, # 2998437, 712,3871, 164,2392,3112, 695, 211,3017,2096, 195,3872,1608,3504,3505,3618, # 30143873, 234, 811,2971,2097,3874,2229,1441,3506,1615,2375, 668,2076,1638, 305, 228, # 30301664,4301, 467, 415,7327, 262,2098,1593, 239, 108, 300, 200,1033, 512,1247,2077, # 30467328,7329,2173,3176,3619,2673, 593, 845,1062,3244, 88,1723,2037,3875,1950, 212, # 3062266, 152, 149, 468,1898,4066,4302, 77, 187,7330,3018, 37, 5,2972,7331,3876, # 30787332,7333, 39,2517,4303,2894,3177,2078, 55, 148, 74,4304, 545, 483,1474,1029, # 30941665, 217,1869,1531,3113,1104,2645,4067, 24, 172,3507, 900,3877,3508,3509,4305, # 311032,1408,2811,1312, 329, 487,2355,2247,2708, 784,2674, 4,3019,3314,1427,1788, # 3126188, 109, 499,7334,3620,1717,1789, 888,1217,3020,4306,7335,3510,7336,3315,1520, # 31423621,3878, 196,1034, 775,7337,7338, 929,1815, 249, 439, 38,7339,1063,7340, 794, # 31583879,1435,2296, 46, 178,3245,2065,7341,2376,7342, 
214,1709,4307, 804, 35, 707, # 3174324,3622,1601,2546, 140, 459,4068,7343,7344,1365, 839, 272, 978,2257,2572,3409, # 31902128,1363,3623,1423, 697, 100,3071, 48, 70,1231, 495,3114,2193,7345,1294,7346, # 32062079, 462, 586,1042,3246, 853, 256, 988, 185,2377,3410,1698, 434,1084,7347,3411, # 3222314,2615,2775,4308,2330,2331, 569,2280, 637,1816,2518, 757,1162,1878,1616,3412, # 3238287,1577,2115, 768,4309,1671,2854,3511,2519,1321,3737, 909,2413,7348,4069, 933, # 32543738,7349,2052,2356,1222,4310, 765,2414,1322, 786,4311,7350,1919,1462,1677,2895, # 32701699,7351,4312,1424,2437,3115,3624,2590,3316,1774,1940,3413,3880,4070, 309,1369, # 32861130,2812, 364,2230,1653,1299,3881,3512,3882,3883,2646, 525,1085,3021, 902,2000, # 33021475, 964,4313, 421,1844,1415,1057,2281, 940,1364,3116, 376,4314,4315,1381, 7, # 33182520, 983,2378, 336,1710,2675,1845, 321,3414, 559,1131,3022,2742,1808,1132,1313, # 3334265,1481,1857,7352, 352,1203,2813,3247, 167,1089, 420,2814, 776, 792,1724,3513, # 33504071,2438,3248,7353,4072,7354, 446, 229, 333,2743, 901,3739,1200,1557,4316,2647, # 33661920, 395,2744,2676,3740,4073,1835, 125, 916,3178,2616,4317,7355,7356,3741,7357, # 33827358,7359,4318,3117,3625,1133,2547,1757,3415,1510,2313,1409,3514,7360,2145, 438, # 33982591,2896,2379,3317,1068, 958,3023, 461, 311,2855,2677,4074,1915,3179,4075,1978, # 3414383, 750,2745,2617,4076, 274, 539, 385,1278,1442,7361,1154,1964, 384, 561, 210, # 343098,1295,2548,3515,7362,1711,2415,1482,3416,3884,2897,1257, 129,7363,3742, 642, # 3446523,2776,2777,2648,7364, 141,2231,1333, 68, 176, 441, 876, 907,4077, 603,2592, # 3462710, 171,3417, 404, 549, 18,3118,2393,1410,3626,1666,7365,3516,4319,2898,4320, # 34787366,2973, 368,7367, 146, 366, 99, 871,3627,1543, 748, 807,1586,1185, 22,2258, # 3494379,3743,3180,7368,3181, 505,1941,2618,1991,1382,2314,7369, 380,2357, 218, 702, # 35101817,1248,3418,3024,3517,3318,3249,7370,2974,3628, 930,3250,3744,7371, 59,7372, # 3526585, 601,4078, 497,3419,1112,1314,4321,1801,7373,1223,1472,2174,7374, 
749,1836, # 3542690,1899,3745,1772,3885,1476, 429,1043,1790,2232,2116, 917,4079, 447,1086,1629, # 35587375, 556,7376,7377,2020,1654, 844,1090, 105, 550, 966,1758,2815,1008,1782, 686, # 35741095,7378,2282, 793,1602,7379,3518,2593,4322,4080,2933,2297,4323,3746, 980,2496, # 3590544, 353, 527,4324, 908,2678,2899,7380, 381,2619,1942,1348,7381,1341,1252, 560, # 36063072,7382,3420,2856,7383,2053, 973, 886,2080, 143,4325,7384,7385, 157,3886, 496, # 36224081, 57, 840, 540,2038,4326,4327,3421,2117,1445, 970,2259,1748,1965,2081,4082, # 36383119,1234,1775,3251,2816,3629, 773,1206,2129,1066,2039,1326,3887,1738,1725,4083, # 3654279,3120, 51,1544,2594, 423,1578,2130,2066, 173,4328,1879,7386,7387,1583, 264, # 3670610,3630,4329,2439, 280, 154,7388,7389,7390,1739, 338,1282,3073, 693,2857,1411, # 36861074,3747,2440,7391,4330,7392,7393,1240, 952,2394,7394,2900,1538,2679, 685,1483, # 37024084,2468,1436, 953,4085,2054,4331, 671,2395, 79,4086,2441,3252, 608, 567,2680, # 37183422,4087,4088,1691, 393,1261,1791,2396,7395,4332,7396,7397,7398,7399,1383,1672, # 37343748,3182,1464, 522,1119, 661,1150, 216, 675,4333,3888,1432,3519, 609,4334,2681, # 37502397,7400,7401,7402,4089,3025, 0,7403,2469, 315, 231,2442, 301,3319,4335,2380, # 37667404, 233,4090,3631,1818,4336,4337,7405, 96,1776,1315,2082,7406, 257,7407,1809, # 37823632,2709,1139,1819,4091,2021,1124,2163,2778,1777,2649,7408,3074, 363,1655,3183, # 37987409,2975,7410,7411,7412,3889,1567,3890, 718, 103,3184, 849,1443, 341,3320,2934, # 38141484,7413,1712, 127, 67, 339,4092,2398, 679,1412, 821,7414,7415, 834, 738, 351, # 38302976,2146, 846, 235,1497,1880, 418,1992,3749,2710, 186,1100,2147,2746,3520,1545, # 38461355,2935,2858,1377, 583,3891,4093,2573,2977,7416,1298,3633,1078,2549,3634,2358, # 386278,3750,3751, 267,1289,2099,2001,1594,4094, 348, 369,1274,2194,2175,1837,4338, # 38781820,2817,3635,2747,2283,2002,4339,2936,2748, 144,3321, 882,4340,3892,2749,3423, # 38944341,2901,7417,4095,1726, 320,7418,3893,3026, 788,2978,7419,2818,1773,1327,2859, # 
39103894,2819,7420,1306,4342,2003,1700,3752,3521,2359,2650, 787,2022, 506, 824,3636, # 3926534, 323,4343,1044,3322,2023,1900, 946,3424,7421,1778,1500,1678,7422,1881,4344, # 3942165, 243,4345,3637,2521, 123, 683,4096, 764,4346, 36,3895,1792, 589,2902, 816, # 3958626,1667,3027,2233,1639,1555,1622,3753,3896,7423,3897,2860,1370,1228,1932, 891, # 39742083,2903, 304,4097,7424, 292,2979,2711,3522, 691,2100,4098,1115,4347, 118, 662, # 39907425, 611,1156, 854,2381,1316,2861, 2, 386, 515,2904,7426,7427,3253, 868,2234, # 40061486, 855,2651, 785,2212,3028,7428,1040,3185,3523,7429,3121, 448,7430,1525,7431, # 40222164,4348,7432,3754,7433,4099,2820,3524,3122, 503, 818,3898,3123,1568, 814, 676, # 40381444, 306,1749,7434,3755,1416,1030, 197,1428, 805,2821,1501,4349,7435,7436,7437, # 40541993,7438,4350,7439,7440,2195, 13,2779,3638,2980,3124,1229,1916,7441,3756,2131, # 40707442,4100,4351,2399,3525,7443,2213,1511,1727,1120,7444,7445, 646,3757,2443, 307, # 40867446,7447,1595,3186,7448,7449,7450,3639,1113,1356,3899,1465,2522,2523,7451, 519, # 41027452, 128,2132, 92,2284,1979,7453,3900,1512, 342,3125,2196,7454,2780,2214,1980, # 41183323,7455, 290,1656,1317, 789, 827,2360,7456,3758,4352, 562, 581,3901,7457, 401, # 41344353,2248, 94,4354,1399,2781,7458,1463,2024,4355,3187,1943,7459, 828,1105,4101, # 41501262,1394,7460,4102, 605,4356,7461,1783,2862,7462,2822, 819,2101, 578,2197,2937, # 41667463,1502, 436,3254,4103,3255,2823,3902,2905,3425,3426,7464,2712,2315,7465,7466, # 41822332,2067, 23,4357, 193, 826,3759,2102, 699,1630,4104,3075, 390,1793,1064,3526, # 41987467,1579,3076,3077,1400,7468,4105,1838,1640,2863,7469,4358,4359, 137,4106, 598, # 42143078,1966, 780, 104, 974,2938,7470, 278, 899, 253, 402, 572, 504, 493,1339,7471, # 42303903,1275,4360,2574,2550,7472,3640,3029,3079,2249, 565,1334,2713, 863, 41,7473, # 42467474,4361,7475,1657,2333, 19, 463,2750,4107, 606,7476,2981,3256,1087,2084,1323, # 42622652,2982,7477,1631,1623,1750,4108,2682,7478,2864, 791,2714,2653,2334, 232,2416, # 
42787479,2983,1498,7480,2654,2620, 755,1366,3641,3257,3126,2025,1609, 119,1917,3427, # 4294862,1026,4109,7481,3904,3760,4362,3905,4363,2260,1951,2470,7482,1125, 817,4110, # 43104111,3906,1513,1766,2040,1487,4112,3030,3258,2824,3761,3127,7483,7484,1507,7485, # 43262683, 733, 40,1632,1106,2865, 345,4113, 841,2524, 230,4364,2984,1846,3259,3428, # 43427486,1263, 986,3429,7487, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562,3907, # 43583908,2939, 967,2751,2655,1349, 592,2133,1692,3324,2985,1994,4114,1679,3909,1901, # 43742185,7488, 739,3642,2715,1296,1290,7489,4115,2198,2199,1921,1563,2595,2551,1870, # 43902752,2986,7490, 435,7491, 343,1108, 596, 17,1751,4365,2235,3430,3643,7492,4366, # 4406294,3527,2940,1693, 477, 979, 281,2041,3528, 643,2042,3644,2621,2782,2261,1031, # 44222335,2134,2298,3529,4367, 367,1249,2552,7493,3530,7494,4368,1283,3325,2004, 240, # 44381762,3326,4369,4370, 836,1069,3128, 474,7495,2148,2525, 268,3531,7496,3188,1521, # 44541284,7497,1658,1546,4116,7498,3532,3533,7499,4117,3327,2684,1685,4118, 961,1673, # 44702622, 190,2005,2200,3762,4371,4372,7500, 570,2497,3645,1490,7501,4373,2623,3260, # 44861956,4374, 584,1514, 396,1045,1944,7502,4375,1967,2444,7503,7504,4376,3910, 619, # 45027505,3129,3261, 215,2006,2783,2553,3189,4377,3190,4378, 763,4119,3763,4379,7506, # 45187507,1957,1767,2941,3328,3646,1174, 452,1477,4380,3329,3130,7508,2825,1253,2382, # 45342186,1091,2285,4120, 492,7509, 638,1169,1824,2135,1752,3911, 648, 926,1021,1324, # 45504381, 520,4382, 997, 847,1007, 892,4383,3764,2262,1871,3647,7510,2400,1784,4384, # 45661952,2942,3080,3191,1728,4121,2043,3648,4385,2007,1701,3131,1551, 30,2263,4122, # 45827511,2026,4386,3534,7512, 501,7513,4123, 594,3431,2165,1821,3535,3432,3536,3192, # 4598829,2826,4124,7514,1680,3132,1225,4125,7515,3262,4387,4126,3133,2336,7516,4388, # 46144127,7517,3912,3913,7518,1847,2383,2596,3330,7519,4389, 374,3914, 652,4128,4129, # 4630375,1140, 798,7520,7521,7522,2361,4390,2264, 546,1659, 138,3031,2445,4391,7523, # 
46462250, 612,1848, 910, 796,3765,1740,1371, 825,3766,3767,7524,2906,2554,7525, 692, # 4662444,3032,2624, 801,4392,4130,7526,1491, 244,1053,3033,4131,4132, 340,7527,3915, # 46781041,2987, 293,1168, 87,1357,7528,1539, 959,7529,2236, 721, 694,4133,3768, 219, # 46941478, 644,1417,3331,2656,1413,1401,1335,1389,3916,7530,7531,2988,2362,3134,1825, # 4710730,1515, 184,2827, 66,4393,7532,1660,2943, 246,3332, 378,1457, 226,3433, 975, # 47263917,2944,1264,3537, 674, 696,7533, 163,7534,1141,2417,2166, 713,3538,3333,4394, # 47423918,7535,7536,1186, 15,7537,1079,1070,7538,1522,3193,3539, 276,1050,2716, 758, # 47581126, 653,2945,3263,7539,2337, 889,3540,3919,3081,2989, 903,1250,4395,3920,3434, # 47743541,1342,1681,1718, 766,3264, 286, 89,2946,3649,7540,1713,7541,2597,3334,2990, # 47907542,2947,2215,3194,2866,7543,4396,2498,2526, 181, 387,1075,3921, 731,2187,3335, # 48067544,3265, 310, 313,3435,2299, 770,4134, 54,3034, 189,4397,3082,3769,3922,7545, # 48221230,1617,1849, 355,3542,4135,4398,3336, 111,4136,3650,1350,3135,3436,3035,4137, # 48382149,3266,3543,7546,2784,3923,3924,2991, 722,2008,7547,1071, 247,1207,2338,2471, # 48541378,4399,2009, 864,1437,1214,4400, 373,3770,1142,2216, 667,4401, 442,2753,2555, # 48703771,3925,1968,4138,3267,1839, 837, 170,1107, 934,1336,1882,7548,7549,2118,4139, # 48862828, 743,1569,7550,4402,4140, 582,2384,1418,3437,7551,1802,7552, 357,1395,1729, # 49023651,3268,2418,1564,2237,7553,3083,3772,1633,4403,1114,2085,4141,1532,7554, 482, # 49182446,4404,7555,7556,1492, 833,1466,7557,2717,3544,1641,2829,7558,1526,1272,3652, # 49344142,1686,1794, 416,2556,1902,1953,1803,7559,3773,2785,3774,1159,2316,7560,2867, # 49504405,1610,1584,3036,2419,2754, 443,3269,1163,3136,7561,7562,3926,7563,4143,2499, # 49663037,4406,3927,3137,2103,1647,3545,2010,1872,4144,7564,4145, 431,3438,7565, 250, # 498297, 81,4146,7566,1648,1850,1558, 160, 848,7567, 866, 740,1694,7568,2201,2830, # 49983195,4147,4407,3653,1687, 950,2472, 426, 469,3196,3654,3655,3928,7569,7570,1188, # 
5014424,1995, 861,3546,4148,3775,2202,2685, 168,1235,3547,4149,7571,2086,1674,4408, # 50303337,3270, 220,2557,1009,7572,3776, 670,2992, 332,1208, 717,7573,7574,3548,2447, # 50463929,3338,7575, 513,7576,1209,2868,3339,3138,4409,1080,7577,7578,7579,7580,2527, # 50623656,3549, 815,1587,3930,3931,7581,3550,3439,3777,1254,4410,1328,3038,1390,3932, # 50781741,3933,3778,3934,7582, 236,3779,2448,3271,7583,7584,3657,3780,1273,3781,4411, # 50947585, 308,7586,4412, 245,4413,1851,2473,1307,2575, 430, 715,2136,2449,7587, 270, # 5110199,2869,3935,7588,3551,2718,1753, 761,1754, 725,1661,1840,4414,3440,3658,7589, # 51267590, 587, 14,3272, 227,2598, 326, 480,2265, 943,2755,3552, 291, 650,1883,7591, # 51421702,1226, 102,1547, 62,3441, 904,4415,3442,1164,4150,7592,7593,1224,1548,2756, # 5158391, 498,1493,7594,1386,1419,7595,2055,1177,4416, 813, 880,1081,2363, 566,1145, # 51744417,2286,1001,1035,2558,2599,2238, 394,1286,7596,7597,2068,7598, 86,1494,1730, # 51903936, 491,1588, 745, 897,2948, 843,3340,3937,2757,2870,3273,1768, 998,2217,2069, # 5206397,1826,1195,1969,3659,2993,3341, 284,7599,3782,2500,2137,2119,1903,7600,3938, # 52222150,3939,4151,1036,3443,1904, 114,2559,4152, 209,1527,7601,7602,2949,2831,2625, # 52382385,2719,3139, 812,2560,7603,3274,7604,1559, 737,1884,3660,1210, 885, 28,2686, # 52543553,3783,7605,4153,1004,1779,4418,7606, 346,1981,2218,2687,4419,3784,1742, 797, # 52701642,3940,1933,1072,1384,2151, 896,3941,3275,3661,3197,2871,3554,7607,2561,1958, # 52864420,2450,1785,7608,7609,7610,3942,4154,1005,1308,3662,4155,2720,4421,4422,1528, # 53022600, 161,1178,4156,1982, 987,4423,1101,4157, 631,3943,1157,3198,2420,1343,1241, # 53181016,2239,2562, 372, 877,2339,2501,1160, 555,1934, 911,3944,7611, 466,1170, 169, # 53341051,2907,2688,3663,2474,2994,1182,2011,2563,1251,2626,7612, 992,2340,3444,1540, # 53502721,1201,2070,2401,1996,2475,7613,4424, 528,1922,2188,1503,1873,1570,2364,3342, # 53663276,7614, 557,1073,7615,1827,3445,2087,2266,3140,3039,3084, 767,3085,2786,4425, # 
53821006,4158,4426,2341,1267,2176,3664,3199, 778,3945,3200,2722,1597,2657,7616,4427, # 53987617,3446,7618,7619,7620,3277,2689,1433,3278, 131, 95,1504,3946, 723,4159,3141, # 54141841,3555,2758,2189,3947,2027,2104,3665,7621,2995,3948,1218,7622,3343,3201,3949, # 54304160,2576, 248,1634,3785, 912,7623,2832,3666,3040,3786, 654, 53,7624,2996,7625, # 54461688,4428, 777,3447,1032,3950,1425,7626, 191, 820,2120,2833, 971,4429, 931,3202, # 5462135, 664, 783,3787,1997, 772,2908,1935,3951,3788,4430,2909,3203, 282,2723, 640, # 54781372,3448,1127, 922, 325,3344,7627,7628, 711,2044,7629,7630,3952,2219,2787,1936, # 54943953,3345,2220,2251,3789,2300,7631,4431,3790,1258,3279,3954,3204,2138,2950,3955, # 55103956,7632,2221, 258,3205,4432, 101,1227,7633,3280,1755,7634,1391,3281,7635,2910, # 55262056, 893,7636,7637,7638,1402,4161,2342,7639,7640,3206,3556,7641,7642, 878,1325, # 55421780,2788,4433, 259,1385,2577, 744,1183,2267,4434,7643,3957,2502,7644, 684,1024, # 55584162,7645, 472,3557,3449,1165,3282,3958,3959, 322,2152, 881, 455,1695,1152,1340, # 5574660, 554,2153,4435,1058,4436,4163, 830,1065,3346,3960,4437,1923,7646,1703,1918, # 55907647, 932,2268, 122,7648,4438, 947, 677,7649,3791,2627, 297,1905,1924,2269,4439, # 56062317,3283,7650,7651,4164,7652,4165, 84,4166, 112, 989,7653, 547,1059,3961, 701, # 56223558,1019,7654,4167,7655,3450, 942, 639, 457,2301,2451, 993,2951, 407, 851, 494, # 56384440,3347, 927,7656,1237,7657,2421,3348, 573,4168, 680, 921,2911,1279,1874, 285, # 5654790,1448,1983, 719,2167,7658,7659,4441,3962,3963,1649,7660,1541, 563,7661,1077, # 56707662,3349,3041,3451, 511,2997,3964,3965,3667,3966,1268,2564,3350,3207,4442,4443, # 56867663, 535,1048,1276,1189,2912,2028,3142,1438,1373,2834,2952,1134,2012,7664,4169, # 57021238,2578,3086,1259,7665, 700,7666,2953,3143,3668,4170,7667,4171,1146,1875,1906, # 57184444,2601,3967, 781,2422, 132,1589, 203, 147, 273,2789,2402, 898,1786,2154,3968, # 57343969,7668,3792,2790,7669,7670,4445,4446,7671,3208,7672,1635,3793, 965,7673,1804, # 
57502690,1516,3559,1121,1082,1329,3284,3970,1449,3794, 65,1128,2835,2913,2759,1590, # 57663795,7674,7675, 12,2658, 45, 976,2579,3144,4447, 517,2528,1013,1037,3209,7676, # 57823796,2836,7677,3797,7678,3452,7679,2602, 614,1998,2318,3798,3087,2724,2628,7680, # 57982580,4172, 599,1269,7681,1810,3669,7682,2691,3088, 759,1060, 489,1805,3351,3285, # 58141358,7683,7684,2386,1387,1215,2629,2252, 490,7685,7686,4173,1759,2387,2343,7687, # 58304448,3799,1907,3971,2630,1806,3210,4449,3453,3286,2760,2344, 874,7688,7689,3454, # 58463670,1858, 91,2914,3671,3042,3800,4450,7690,3145,3972,2659,7691,3455,1202,1403, # 58623801,2954,2529,1517,2503,4451,3456,2504,7692,4452,7693,2692,1885,1495,1731,3973, # 58782365,4453,7694,2029,7695,7696,3974,2693,1216, 237,2581,4174,2319,3975,3802,4454, # 58944455,2694,3560,3457, 445,4456,7697,7698,7699,7700,2761, 61,3976,3672,1822,3977, # 59107701, 687,2045, 935, 925, 405,2660, 703,1096,1859,2725,4457,3978,1876,1367,2695, # 59263352, 918,2105,1781,2476, 334,3287,1611,1093,4458, 564,3146,3458,3673,3353, 945, # 59422631,2057,4459,7702,1925, 872,4175,7703,3459,2696,3089, 349,4176,3674,3979,4460, # 59583803,4177,3675,2155,3980,4461,4462,4178,4463,2403,2046, 782,3981, 400, 251,4179, # 59741624,7704,7705, 277,3676, 299,1265, 476,1191,3804,2121,4180,4181,1109, 205,7706, # 59902582,1000,2156,3561,1860,7707,7708,7709,4464,7710,4465,2565, 107,2477,2157,3982, # 60063460,3147,7711,1533, 541,1301, 158, 753,4182,2872,3562,7712,1696, 370,1088,4183, # 60224466,3563, 579, 327, 440, 162,2240, 269,1937,1374,3461, 968,3043, 56,1396,3090, # 60382106,3288,3354,7713,1926,2158,4467,2998,7714,3564,7715,7716,3677,4468,2478,7717, # 60542791,7718,1650,4469,7719,2603,7720,7721,3983,2661,3355,1149,3356,3984,3805,3985, # 60707722,1076, 49,7723, 951,3211,3289,3290, 450,2837, 920,7724,1811,2792,2366,4184, # 60861908,1138,2367,3806,3462,7725,3212,4470,1909,1147,1518,2423,4471,3807,7726,4472, # 61022388,2604, 260,1795,3213,7727,7728,3808,3291, 708,7729,3565,1704,7730,3566,1351, # 
61181618,3357,2999,1886, 944,4185,3358,4186,3044,3359,4187,7731,3678, 422, 413,1714, # 61343292, 500,2058,2345,4188,2479,7732,1344,1910, 954,7733,1668,7734,7735,3986,2404, # 61504189,3567,3809,4190,7736,2302,1318,2505,3091, 133,3092,2873,4473, 629, 31,2838, # 61662697,3810,4474, 850, 949,4475,3987,2955,1732,2088,4191,1496,1852,7737,3988, 620, # 61823214, 981,1242,3679,3360,1619,3680,1643,3293,2139,2452,1970,1719,3463,2168,7738, # 61983215,7739,7740,3361,1828,7741,1277,4476,1565,2047,7742,1636,3568,3093,7743, 869, # 62142839, 655,3811,3812,3094,3989,3000,3813,1310,3569,4477,7744,7745,7746,1733, 558, # 62304478,3681, 335,1549,3045,1756,4192,3682,1945,3464,1829,1291,1192, 470,2726,2107, # 62462793, 913,1054,3990,7747,1027,7748,3046,3991,4479, 982,2662,3362,3148,3465,3216, # 62623217,1946,2794,7749, 571,4480,7750,1830,7751,3570,2583,1523,2424,7752,2089, 984, # 62784481,3683,1959,7753,3684, 852, 923,2795,3466,3685, 969,1519, 999,2048,2320,1705, # 62947754,3095, 615,1662, 151, 597,3992,2405,2321,1049, 275,4482,3686,4193, 568,3687, # 63103571,2480,4194,3688,7755,2425,2270, 409,3218,7756,1566,2874,3467,1002, 769,2840, # 6326194,2090,3149,3689,2222,3294,4195, 628,1505,7757,7758,1763,2177,3001,3993, 521, # 63421161,2584,1787,2203,2406,4483,3994,1625,4196,4197, 412, 42,3096, 464,7759,2632, # 63584484,3363,1760,1571,2875,3468,2530,1219,2204,3814,2633,2140,2368,4485,4486,3295, # 63741651,3364,3572,7760,7761,3573,2481,3469,7762,3690,7763,7764,2271,2091, 460,7765, # 63904487,7766,3002, 962, 588,3574, 289,3219,2634,1116, 52,7767,3047,1796,7768,7769, # 64067770,1467,7771,1598,1143,3691,4198,1984,1734,1067,4488,1280,3365, 465,4489,1572, # 6422510,7772,1927,2241,1812,1644,3575,7773,4490,3692,7774,7775,2663,1573,1534,7776, # 64387777,4199, 536,1807,1761,3470,3815,3150,2635,7778,7779,7780,4491,3471,2915,1911, # 64542796,7781,3296,1122, 377,3220,7782, 360,7783,7784,4200,1529, 551,7785,2059,3693, # 64701769,2426,7786,2916,4201,3297,3097,2322,2108,2030,4492,1404, 136,1468,1479, 672, # 
64861171,3221,2303, 271,3151,7787,2762,7788,2049, 678,2727, 865,1947,4493,7789,2013, # 65023995,2956,7790,2728,2223,1397,3048,3694,4494,4495,1735,2917,3366,3576,7791,3816, # 6518509,2841,2453,2876,3817,7792,7793,3152,3153,4496,4202,2531,4497,2304,1166,1010, # 6534552, 681,1887,7794,7795,2957,2958,3996,1287,1596,1861,3154, 358, 453, 736, 175, # 6550478,1117, 905,1167,1097,7796,1853,1530,7797,1706,7798,2178,3472,2287,3695,3473, # 65663577,4203,2092,4204,7799,3367,1193,2482,4205,1458,2190,2205,1862,1888,1421,3298, # 65822918,3049,2179,3474, 595,2122,7800,3997,7801,7802,4206,1707,2636, 223,3696,1359, # 6598751,3098, 183,3475,7803,2797,3003, 419,2369, 633, 704,3818,2389, 241,7804,7805, # 66147806, 838,3004,3697,2272,2763,2454,3819,1938,2050,3998,1309,3099,2242,1181,7807, # 66301136,2206,3820,2370,1446,4207,2305,4498,7808,7809,4208,1055,2605, 484,3698,7810, # 66463999, 625,4209,2273,3368,1499,4210,4000,7811,4001,4211,3222,2274,2275,3476,7812, # 66627813,2764, 808,2606,3699,3369,4002,4212,3100,2532, 526,3370,3821,4213, 955,7814, # 66781620,4214,2637,2427,7815,1429,3700,1669,1831, 994, 928,7816,3578,1260,7817,7818, # 66947819,1948,2288, 741,2919,1626,4215,2729,2455, 867,1184, 362,3371,1392,7820,7821, # 67104003,4216,1770,1736,3223,2920,4499,4500,1928,2698,1459,1158,7822,3050,3372,2877, # 67261292,1929,2506,2842,3701,1985,1187,2071,2014,2607,4217,7823,2566,2507,2169,3702, # 67422483,3299,7824,3703,4501,7825,7826, 666,1003,3005,1022,3579,4218,7827,4502,1813, # 67582253, 574,3822,1603, 295,1535, 705,3823,4219, 283, 858, 417,7828,7829,3224,4503, # 67744504,3051,1220,1889,1046,2276,2456,4004,1393,1599, 689,2567, 388,4220,7830,2484, # 6790802,7831,2798,3824,2060,1405,2254,7832,4505,3825,2109,1052,1345,3225,1585,7833, # 6806809,7834,7835,7836, 575,2730,3477, 956,1552,1469,1144,2323,7837,2324,1560,2457, # 68223580,3226,4005, 616,2207,3155,2180,2289,7838,1832,7839,3478,4506,7840,1319,3704, # 68383705,1211,3581,1023,3227,1293,2799,7841,7842,7843,3826, 607,2306,3827, 762,2878, # 
68541439,4221,1360,7844,1485,3052,7845,4507,1038,4222,1450,2061,2638,4223,1379,4508, # 68702585,7846,7847,4224,1352,1414,2325,2921,1172,7848,7849,3828,3829,7850,1797,1451, # 68867851,7852,7853,7854,2922,4006,4007,2485,2346, 411,4008,4009,3582,3300,3101,4509, # 69021561,2664,1452,4010,1375,7855,7856, 47,2959, 316,7857,1406,1591,2923,3156,7858, # 69181025,2141,3102,3157, 354,2731, 884,2224,4225,2407, 508,3706, 726,3583, 996,2428, # 69343584, 729,7859, 392,2191,1453,4011,4510,3707,7860,7861,2458,3585,2608,1675,2800, # 6950919,2347,2960,2348,1270,4511,4012, 73,7862,7863, 647,7864,3228,2843,2255,1550, # 69661346,3006,7865,1332, 883,3479,7866,7867,7868,7869,3301,2765,7870,1212, 831,1347, # 69824226,4512,2326,3830,1863,3053, 720,3831,4513,4514,3832,7871,4227,7872,7873,4515, # 69987874,7875,1798,4516,3708,2609,4517,3586,1645,2371,7876,7877,2924, 669,2208,2665, # 70142429,7878,2879,7879,7880,1028,3229,7881,4228,2408,7882,2256,1353,7883,7884,4518, # 70303158, 518,7885,4013,7886,4229,1960,7887,2142,4230,7888,7889,3007,2349,2350,3833, # 7046516,1833,1454,4014,2699,4231,4519,2225,2610,1971,1129,3587,7890,2766,7891,2961, # 70621422, 577,1470,3008,1524,3373,7892,7893, 432,4232,3054,3480,7894,2586,1455,2508, # 70782226,1972,1175,7895,1020,2732,4015,3481,4520,7896,2733,7897,1743,1361,3055,3482, # 70942639,4016,4233,4521,2290, 895, 924,4234,2170, 331,2243,3056, 166,1627,3057,1098, # 71107898,1232,2880,2227,3374,4522, 657, 403,1196,2372, 542,3709,3375,1600,4235,3483, # 71267899,4523,2767,3230, 576, 530,1362,7900,4524,2533,2666,3710,4017,7901, 842,3834, # 71427902,2801,2031,1014,4018, 213,2700,3376, 665, 621,4236,7903,3711,2925,2430,7904, # 71582431,3302,3588,3377,7905,4237,2534,4238,4525,3589,1682,4239,3484,1380,7906, 724, # 71742277, 600,1670,7907,1337,1233,4526,3103,2244,7908,1621,4527,7909, 651,4240,7910, # 71901612,4241,2611,7911,2844,7912,2734,2307,3058,7913, 716,2459,3059, 174,1255,2701, # 72064019,3590, 548,1320,1398, 728,4020,1574,7914,1890,1197,3060,4021,7915,3061,3062, # 
72223712,3591,3713, 747,7916, 635,4242,4528,7917,7918,7919,4243,7920,7921,4529,7922, # 72383378,4530,2432, 451,7923,3714,2535,2072,4244,2735,4245,4022,7924,1764,4531,7925, # 72544246, 350,7926,2278,2390,2486,7927,4247,4023,2245,1434,4024, 488,4532, 458,4248, # 72704025,3715, 771,1330,2391,3835,2568,3159,2159,2409,1553,2667,3160,4249,7928,2487, # 72862881,2612,1720,2702,4250,3379,4533,7929,2536,4251,7930,3231,4252,2768,7931,2015, # 73022736,7932,1155,1017,3716,3836,7933,3303,2308, 201,1864,4253,1430,7934,4026,7935, # 73187936,7937,7938,7939,4254,1604,7940, 414,1865, 371,2587,4534,4535,3485,2016,3104, # 73344536,1708, 960,4255, 887, 389,2171,1536,1663,1721,7941,2228,4027,2351,2926,1580, # 73507942,7943,7944,1744,7945,2537,4537,4538,7946,4539,7947,2073,7948,7949,3592,3380, # 73662882,4256,7950,4257,2640,3381,2802, 673,2703,2460, 709,3486,4028,3593,4258,7951, # 73821148, 502, 634,7952,7953,1204,4540,3594,1575,4541,2613,3717,7954,3718,3105, 948, # 73983232, 121,1745,3837,1110,7955,4259,3063,2509,3009,4029,3719,1151,1771,3838,1488, # 74144030,1986,7956,2433,3487,7957,7958,2093,7959,4260,3839,1213,1407,2803, 531,2737, # 74302538,3233,1011,1537,7960,2769,4261,3106,1061,7961,3720,3721,1866,2883,7962,2017, # 7446120,4262,4263,2062,3595,3234,2309,3840,2668,3382,1954,4542,7963,7964,3488,1047, # 74622704,1266,7965,1368,4543,2845, 649,3383,3841,2539,2738,1102,2846,2669,7966,7967, # 74781999,7968,1111,3596,2962,7969,2488,3842,3597,2804,1854,3384,3722,7970,7971,3385, # 74942410,2884,3304,3235,3598,7972,2569,7973,3599,2805,4031,1460, 856,7974,3600,7975, # 75102885,2963,7976,2886,3843,7977,4264, 632,2510, 875,3844,1697,3845,2291,7978,7979, # 75264544,3010,1239, 580,4545,4265,7980, 914, 936,2074,1190,4032,1039,2123,7981,7982, # 75427983,3386,1473,7984,1354,4266,3846,7985,2172,3064,4033, 915,3305,4267,4268,3306, # 75581605,1834,7986,2739, 398,3601,4269,3847,4034, 328,1912,2847,4035,3848,1331,4270, # 75743011, 937,4271,7987,3602,4036,4037,3387,2160,4546,3388, 524, 742, 538,3065,1012, # 
75907988,7989,3849,2461,7990, 658,1103, 225,3850,7991,7992,4547,7993,4548,7994,3236, # 76061243,7995,4038, 963,2246,4549,7996,2705,3603,3161,7997,7998,2588,2327,7999,4550, # 76228000,8001,8002,3489,3307, 957,3389,2540,2032,1930,2927,2462, 870,2018,3604,1746, # 76382770,2771,2434,2463,8003,3851,8004,3723,3107,3724,3490,3390,3725,8005,1179,3066, # 76548006,3162,2373,4272,3726,2541,3163,3108,2740,4039,8007,3391,1556,2542,2292, 977, # 76702887,2033,4040,1205,3392,8008,1765,3393,3164,2124,1271,1689, 714,4551,3491,8009, # 76862328,3852, 533,4273,3605,2181, 617,8010,2464,3308,3492,2310,8011,8012,3165,8013, # 77028014,3853,1987, 618, 427,2641,3493,3394,8015,8016,1244,1690,8017,2806,4274,4552, # 77188018,3494,8019,8020,2279,1576, 473,3606,4275,3395, 972,8021,3607,8022,3067,8023, # 77348024,4553,4554,8025,3727,4041,4042,8026, 153,4555, 356,8027,1891,2888,4276,2143, # 7750408, 803,2352,8028,3854,8029,4277,1646,2570,2511,4556,4557,3855,8030,3856,4278, # 77668031,2411,3396, 752,8032,8033,1961,2964,8034, 746,3012,2465,8035,4279,3728, 698, # 77824558,1892,4280,3608,2543,4559,3609,3857,8036,3166,3397,8037,1823,1302,4043,2706, # 77983858,1973,4281,8038,4282,3167, 823,1303,1288,1236,2848,3495,4044,3398, 774,3859, # 78148039,1581,4560,1304,2849,3860,4561,8040,2435,2161,1083,3237,4283,4045,4284, 344, # 78301173, 288,2311, 454,1683,8041,8042,1461,4562,4046,2589,8043,8044,4563, 985, 894, # 78468045,3399,3168,8046,1913,2928,3729,1988,8047,2110,1974,8048,4047,8049,2571,1194, # 7862425,8050,4564,3169,1245,3730,4285,8051,8052,2850,8053, 636,4565,1855,3861, 760, # 78781799,8054,4286,2209,1508,4566,4048,1893,1684,2293,8055,8056,8057,4287,4288,2210, # 7894479,8058,8059, 832,8060,4049,2489,8061,2965,2490,3731, 990,3109, 627,1814,2642, # 79104289,1582,4290,2125,2111,3496,4567,8062, 799,4291,3170,8063,4568,2112,1737,3013, # 79261018, 543, 754,4292,3309,1676,4569,4570,4050,8064,1489,8065,3497,8066,2614,2889, # 79424051,8067,8068,2966,8069,8070,8071,8072,3171,4571,4572,2182,1722,8073,3238,3239, # 
79581842,3610,1715, 481, 365,1975,1856,8074,8075,1962,2491,4573,8076,2126,3611,3240, # 7974433,1894,2063,2075,8077, 602,2741,8078,8079,8080,8081,8082,3014,1628,3400,8083, # 79903172,4574,4052,2890,4575,2512,8084,2544,2772,8085,8086,8087,3310,4576,2891,8088, # 80064577,8089,2851,4578,4579,1221,2967,4053,2513,8090,8091,8092,1867,1989,8093,8094, # 80228095,1895,8096,8097,4580,1896,4054, 318,8098,2094,4055,4293,8099,8100, 485,8101, # 8038938,3862, 553,2670, 116,8102,3863,3612,8103,3498,2671,2773,3401,3311,2807,8104, # 80543613,2929,4056,1747,2930,2968,8105,8106, 207,8107,8108,2672,4581,2514,8109,3015, # 8070890,3614,3864,8110,1877,3732,3402,8111,2183,2353,3403,1652,8112,8113,8114, 941, # 80862294, 208,3499,4057,2019, 330,4294,3865,2892,2492,3733,4295,8115,8116,8117,8118, # 8102#Everything below is of no interest for detection purpose2515,1613,4582,8119,3312,3866,2516,8120,4058,8121,1637,4059,2466,4583,3867,8122, # 81182493,3016,3734,8123,8124,2192,8125,8126,2162,8127,8128,8129,8130,8131,8132,8133, # 81348134,8135,8136,8137,8138,8139,8140,8141,8142,8143,8144,8145,8146,8147,8148,8149, # 81508150,8151,8152,8153,8154,8155,8156,8157,8158,8159,8160,8161,8162,8163,8164,8165, # 81668166,8167,8168,8169,8170,8171,8172,8173,8174,8175,8176,8177,8178,8179,8180,8181, # 81828182,8183,8184,8185,8186,8187,8188,8189,8190,8191,8192,8193,8194,8195,8196,8197, # 81988198,8199,8200,8201,8202,8203,8204,8205,8206,8207,8208,8209,8210,8211,8212,8213, # 82148214,8215,8216,8217,8218,8219,8220,8221,8222,8223,8224,8225,8226,8227,8228,8229, # 82308230,8231,8232,8233,8234,8235,8236,8237,8238,8239,8240,8241,8242,8243,8244,8245, # 82468246,8247,8248,8249,8250,8251,8252,8253,8254,8255,8256,8257,8258,8259,8260,8261, # 82628262,8263,8264,8265,8266,8267,8268,8269,8270,8271,8272,8273,8274,8275,8276,8277, # 82788278,8279,8280,8281,8282,8283,8284,8285,8286,8287,8288,8289,8290,8291,8292,8293, # 82948294,8295,8296,8297,8298,8299,8300,8301,8302,8303,8304,8305,8306,8307,8308,8309, # 
83108310,8311,8312,8313,8314,8315,8316,8317,8318,8319,8320,8321,8322,8323,8324,8325, # 83268326,8327,8328,8329,8330,8331,8332,8333,8334,8335,8336,8337,8338,8339,8340,8341, # 83428342,8343,8344,8345,8346,8347,8348,8349,8350,8351,8352,8353,8354,8355,8356,8357, # 83588358,8359,8360,8361,8362,8363,8364,8365,8366,8367,8368,8369,8370,8371,8372,8373, # 83748374,8375,8376,8377,8378,8379,8380,8381,8382,8383,8384,8385,8386,8387,8388,8389, # 83908390,8391,8392,8393,8394,8395,8396,8397,8398,8399,8400,8401,8402,8403,8404,8405, # 84068406,8407,8408,8409,8410,8411,8412,8413,8414,8415,8416,8417,8418,8419,8420,8421, # 84228422,8423,8424,8425,8426,8427,8428,8429,8430,8431,8432,8433,8434,8435,8436,8437, # 84388438,8439,8440,8441,8442,8443,8444,8445,8446,8447,8448,8449,8450,8451,8452,8453, # 84548454,8455,8456,8457,8458,8459,8460,8461,8462,8463,8464,8465,8466,8467,8468,8469, # 84708470,8471,8472,8473,8474,8475,8476,8477,8478,8479,8480,8481,8482,8483,8484,8485, # 84868486,8487,8488,8489,8490,8491,8492,8493,8494,8495,8496,8497,8498,8499,8500,8501, # 85028502,8503,8504,8505,8506,8507,8508,8509,8510,8511,8512,8513,8514,8515,8516,8517, # 85188518,8519,8520,8521,8522,8523,8524,8525,8526,8527,8528,8529,8530,8531,8532,8533, # 85348534,8535,8536,8537,8538,8539,8540,8541,8542,8543,8544,8545,8546,8547,8548,8549, # 85508550,8551,8552,8553,8554,8555,8556,8557,8558,8559,8560,8561,8562,8563,8564,8565, # 85668566,8567,8568,8569,8570,8571,8572,8573,8574,8575,8576,8577,8578,8579,8580,8581, # 85828582,8583,8584,8585,8586,8587,8588,8589,8590,8591,8592,8593,8594,8595,8596,8597, # 85988598,8599,8600,8601,8602,8603,8604,8605,8606,8607,8608,8609,8610,8611,8612,8613, # 86148614,8615,8616,8617,8618,8619,8620,8621,8622,8623,8624,8625,8626,8627,8628,8629, # 86308630,8631,8632,8633,8634,8635,8636,8637,8638,8639,8640,8641,8642,8643,8644,8645, # 86468646,8647,8648,8649,8650,8651,8652,8653,8654,8655,8656,8657,8658,8659,8660,8661, # 86628662,8663,8664,8665,8666,8667,8668,8669,8670,8671,8672,8673,8674,8675,8676,8677, 
# 86788678,8679,8680,8681,8682,8683,8684,8685,8686,8687,8688,8689,8690,8691,8692,8693, # 86948694,8695,8696,8697,8698,8699,8700,8701,8702,8703,8704,8705,8706,8707,8708,8709, # 87108710,8711,8712,8713,8714,8715,8716,8717,8718,8719,8720,8721,8722,8723,8724,8725, # 87268726,8727,8728,8729,8730,8731,8732,8733,8734,8735,8736,8737,8738,8739,8740,8741) # 8742# flake8: noqa
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import EUCKRDistributionAnalysis
from .mbcssm import EUCKRSMModel


class EUCKRProber(MultiByteCharSetProber):
    """Charset prober for the EUC-KR (Korean) encoding.

    Wires the generic multi-byte probing machinery to the EUC-KR
    state-machine model and the EUC-KR character-distribution analyser,
    then resets itself so it is ready to receive data.
    """

    def __init__(self):
        # Initialise the shared multi-byte prober state first.
        MultiByteCharSetProber.__init__(self)
        # EUC-KR specific analysers: distribution statistics and the
        # byte-sequence validity state machine.
        self._mDistributionAnalyzer = EUCKRDistributionAnalysis()
        self._mCodingSM = CodingStateMachine(EUCKRSMModel)
        self.reset()

    def get_charset_name(self):
        """Return the canonical name of the charset this prober detects."""
        return "EUC-KR"
######################## BEGIN LICENSE BLOCK ######################### The Original Code is Mozilla Communicator client code.## The Initial Developer of the Original Code is# Netscape Communications Corporation.# Portions created by the Initial Developer are Copyright (C) 1998# the Initial Developer. All Rights Reserved.## Contributor(s):# Mark Pilgrim - port to Python## This library is free software; you can redistribute it and/or# modify it under the terms of the GNU Lesser General Public# License as published by the Free Software Foundation; either# version 2.1 of the License, or (at your option) any later version.## This library is distributed in the hope that it will be useful,# but WITHOUT ANY WARRANTY; without even the implied warranty of# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU# Lesser General Public License for more details.## You should have received a copy of the GNU Lesser General Public# License along with this library; if not, write to the Free Software# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA# 02110-1301 USA######################### END LICENSE BLOCK ########################## Sampling from about 20M text materials include literature and computer technology# 128 --> 0.79# 256 --> 0.92# 512 --> 0.986# 1024 --> 0.99944# 2048 --> 0.99999## Idea Distribution Ratio = 0.98653 / (1-0.98653) = 73.24# Random Distribution Ration = 512 / (2350-512) = 0.279.## Typical Distribution RatioEUCKR_TYPICAL_DISTRIBUTION_RATIO = 6.0EUCKR_TABLE_SIZE = 2352# Char to FreqOrder table ,EUCKRCharToFreqOrder = ( \13, 130, 120,1396, 481,1719,1720, 328, 609, 212,1721, 707, 400, 299,1722, 87,1397,1723, 104, 536,1117,1203,1724,1267, 685,1268, 508,1725,1726,1727,1728,1398,1399,1729,1730,1731, 141, 621, 326,1057, 368,1732, 267, 488, 20,1733,1269,1734,945,1400,1735, 47, 904,1270,1736,1737, 773, 248,1738, 409, 313, 786, 429,1739,116, 987, 813,1401, 683, 75,1204, 145,1740,1741,1742,1743, 16, 847, 667, 622,708,1744,1745,1746, 966, 787, 304, 
129,1747, 60, 820, 123, 676,1748,1749,1750,1751, 617,1752, 626,1753,1754,1755,1756, 653,1757,1758,1759,1760,1761,1762, 856,344,1763,1764,1765,1766, 89, 401, 418, 806, 905, 848,1767,1768,1769, 946,1205,709,1770,1118,1771, 241,1772,1773,1774,1271,1775, 569,1776, 999,1777,1778,1779,1780, 337, 751,1058, 28, 628, 254,1781, 177, 906, 270, 349, 891,1079,1782, 19,1783, 379,1784, 315,1785, 629, 754,1402, 559,1786, 636, 203,1206,1787, 710, 567,1788, 935, 814,1789,1790,1207, 766, 528,1791,1792,1208,1793,1794,1795,1796,1797,1403,1798,1799, 533,1059,1404,1405,1156,1406, 936, 884,1080,1800, 351,1801,1802,1803,1804,1805, 801,1806,1807,1808,1119,1809,1157, 714, 474,1407,1810, 298, 899,885,1811,1120, 802,1158,1812, 892,1813,1814,1408, 659,1815,1816,1121,1817,1818,1819,1820,1821,1822, 319,1823, 594, 545,1824, 815, 937,1209,1825,1826, 573,1409,1022,1827,1210,1828,1829,1830,1831,1832,1833, 556, 722, 807,1122,1060,1834, 697,1835, 900, 557, 715,1836,1410, 540,1411, 752,1159, 294, 597,1211, 976, 803, 770,1412,1837,1838, 39, 794,1413, 358,1839, 371, 925,1840, 453, 661, 788, 531, 723,544,1023,1081, 869, 91,1841, 392, 430, 790, 602,1414, 677,1082, 457,1415,1416,1842,1843, 475, 327,1024,1417, 795, 121,1844, 733, 403,1418,1845,1846,1847, 300,119, 711,1212, 627,1848,1272, 207,1849,1850, 796,1213, 382,1851, 519,1852,1083,893,1853,1854,1855, 367, 809, 487, 671,1856, 663,1857,1858, 956, 471, 306, 857,1859,1860,1160,1084,1861,1862,1863,1864,1865,1061,1866,1867,1868,1869,1870,1871,282, 96, 574,1872, 502,1085,1873,1214,1874, 907,1875,1876, 827, 977,1419,1420,1421, 268,1877,1422,1878,1879,1880, 308,1881, 2, 537,1882,1883,1215,1884,1885,127, 791,1886,1273,1423,1887, 34, 336, 404, 643,1888, 571, 654, 894, 840,1889,0, 886,1274, 122, 575, 260, 908, 938,1890,1275, 410, 316,1891,1892, 100,1893,1894,1123, 48,1161,1124,1025,1895, 633, 901,1276,1896,1897, 115, 816,1898, 317,1899, 694,1900, 909, 734,1424, 572, 866,1425, 691, 85, 524,1010, 543, 394, 841,1901,1902,1903,1026,1904,1905,1906,1907,1908,1909, 30, 
451, 651, 988, 310,1910,1911,1426, 810,1216, 93,1912,1913,1277,1217,1914, 858, 759, 45, 58, 181, 610,269,1915,1916, 131,1062, 551, 443,1000, 821,1427, 957, 895,1086,1917,1918, 375,1919, 359,1920, 687,1921, 822,1922, 293,1923,1924, 40, 662, 118, 692, 29, 939,887, 640, 482, 174,1925, 69,1162, 728,1428, 910,1926,1278,1218,1279, 386, 870,217, 854,1163, 823,1927,1928,1929,1930, 834,1931, 78,1932, 859,1933,1063,1934,1935,1936,1937, 438,1164, 208, 595,1938,1939,1940,1941,1219,1125,1942, 280, 888,1429,1430,1220,1431,1943,1944,1945,1946,1947,1280, 150, 510,1432,1948,1949,1950,1951,1952,1953,1954,1011,1087,1955,1433,1043,1956, 881,1957, 614, 958,1064,1065,1221,1958, 638,1001, 860, 967, 896,1434, 989, 492, 553,1281,1165,1959,1282,1002,1283,1222,1960,1961,1962,1963, 36, 383, 228, 753, 247, 454,1964, 876, 678,1965,1966,1284, 126, 464, 490, 835, 136, 672, 529, 940,1088,1435, 473,1967,1968, 467,50, 390, 227, 587, 279, 378, 598, 792, 968, 240, 151, 160, 849, 882,1126,1285,639,1044, 133, 140, 288, 360, 811, 563,1027, 561, 142, 523,1969,1970,1971, 7,103, 296, 439, 407, 506, 634, 990,1972,1973,1974,1975, 645,1976,1977,1978,1979,1980,1981, 236,1982,1436,1983,1984,1089, 192, 828, 618, 518,1166, 333,1127,1985,818,1223,1986,1987,1988,1989,1990,1991,1992,1993, 342,1128,1286, 746, 842,1994,1995, 560, 223,1287, 98, 8, 189, 650, 978,1288,1996,1437,1997, 17, 345, 250,423, 277, 234, 512, 226, 97, 289, 42, 167,1998, 201,1999,2000, 843, 836, 824,532, 338, 783,1090, 182, 576, 436,1438,1439, 527, 500,2001, 947, 889,2002,2003,2004,2005, 262, 600, 314, 447,2006, 547,2007, 693, 738,1129,2008, 71,1440, 745,619, 688,2009, 829,2010,2011, 147,2012, 33, 948,2013,2014, 74, 224,2015, 61,191, 918, 399, 637,2016,1028,1130, 257, 902,2017,2018,2019,2020,2021,2022,2023,2024,2025,2026, 837,2027,2028,2029,2030, 179, 874, 591, 52, 724, 246,2031,2032,2033,2034,1167, 969,2035,1289, 630, 605, 911,1091,1168,2036,2037,2038,1441, 912,2039, 623,2040,2041, 253,1169,1290,2042,1442, 146, 620, 611, 577, 
433,2043,1224,719,1170, 959, 440, 437, 534, 84, 388, 480,1131, 159, 220, 198, 679,2044,1012,819,1066,1443, 113,1225, 194, 318,1003,1029,2045,2046,2047,2048,1067,2049,2050,2051,2052,2053, 59, 913, 112,2054, 632,2055, 455, 144, 739,1291,2056, 273, 681,499,2057, 448,2058,2059, 760,2060,2061, 970, 384, 169, 245,1132,2062,2063, 414,1444,2064,2065, 41, 235,2066, 157, 252, 877, 568, 919, 789, 580,2067, 725,2068,2069,1292,2070,2071,1445,2072,1446,2073,2074, 55, 588, 66,1447, 271,1092,2075,1226,2076, 960,1013, 372,2077,2078,2079,2080,2081,1293,2082,2083,2084,2085, 850,2086,2087,2088,2089,2090, 186,2091,1068, 180,2092,2093,2094, 109,1227, 522, 606,2095, 867,1448,1093, 991,1171, 926, 353,1133,2096, 581,2097,2098,2099,1294,1449,1450,2100, 596,1172,1014,1228,2101,1451,1295,1173,1229,2102,2103,1296,1134,1452,949,1135,2104,2105,1094,1453,1454,1455,2106,1095,2107,2108,2109,2110,2111,2112,2113,2114,2115,2116,2117, 804,2118,2119,1230,1231, 805,1456, 405,1136,2120,2121,2122,2123,2124, 720, 701,1297, 992,1457, 927,1004,2125,2126,2127,2128,2129,2130,22, 417,2131, 303,2132, 385,2133, 971, 520, 513,2134,1174, 73,1096, 231, 274,962,1458, 673,2135,1459,2136, 152,1137,2137,2138,2139,2140,1005,1138,1460,1139,2141,2142,2143,2144, 11, 374, 844,2145, 154,1232, 46,1461,2146, 838, 830, 721,1233, 106,2147, 90, 428, 462, 578, 566,1175, 352,2148,2149, 538,1234, 124,1298,2150,1462, 761, 565,2151, 686,2152, 649,2153, 72, 173,2154, 460, 415,2155,1463,2156,1235, 305,2157,2158,2159,2160,2161,2162, 579,2163,2164,2165,2166,2167, 747,2168,2169,2170,2171,1464, 669,2172,2173,2174,2175,2176,1465,2177, 23, 530, 285,2178, 335, 729,2179, 397,2180,2181,2182,1030,2183,2184, 698,2185,2186, 325,2187,2188, 369,2189, 799,1097,1015, 348,2190,1069, 680,2191, 851,1466,2192,2193, 10,2194, 613, 424,2195, 979, 108, 449, 589, 27, 172, 81,1031, 80, 774, 281, 350,1032, 525, 301, 582,1176,2196, 674,1045,2197,2198,1467, 730, 762,2199,2200,2201,2202,1468,2203, 993,2204,2205, 266,1070, 963,1140,2206,2207,2208, 664,1098, 
972,2209,2210,2211,1177,1469,1470, 871,2212,2213,2214,2215,2216,1471,2217,2218,2219,2220,2221,2222,2223,2224,2225,2226,2227,1472,1236,2228,2229,2230,2231,2232,2233,2234,2235,1299,2236,2237, 200,2238, 477, 373,2239,2240, 731, 825, 777,2241,2242,2243, 521, 486, 548,2244,2245,2246,1473,1300, 53, 549, 137, 875, 76, 158,2247,1301,1474, 469, 396,1016, 278, 712,2248, 321, 442, 503, 767, 744, 941,1237,1178,1475,2249, 82, 178,1141,1179, 973,2250,1302,2251, 297,2252,2253, 570,2254,2255,2256, 18, 450, 206,2257, 290, 292,1142,2258, 511, 162, 99, 346, 164, 735,2259,1476,1477, 4, 554, 343, 798,1099,2260,1100,2261, 43, 171,1303, 139, 215,2262,2263, 717, 775,2264,1033, 322, 216,2265, 831,2266, 149,2267,1304,2268,2269, 702,1238, 135, 845, 347, 309,2270, 484,2271, 878, 655, 238,1006,1478,2272, 67,2273,295,2274,2275, 461,2276, 478, 942, 412,2277,1034,2278,2279,2280, 265,2281, 541,2282,2283,2284,2285,2286, 70, 852,1071,2287,2288,2289,2290, 21, 56, 509, 117,432,2291,2292, 331, 980, 552,1101, 148, 284, 105, 393,1180,1239, 755,2293, 187,2294,1046,1479,2295, 340,2296, 63,1047, 230,2297,2298,1305, 763,1306, 101, 800,808, 494,2299,2300,2301, 903,2302, 37,1072, 14, 5,2303, 79, 675,2304, 312,2305,2306,2307,2308,2309,1480, 6,1307,2310,2311,2312, 1, 470, 35, 24, 229,2313, 695, 210, 86, 778, 15, 784, 592, 779, 32, 77, 855, 964,2314, 259,2315,501, 380,2316,2317, 83, 981, 153, 689,1308,1481,1482,1483,2318,2319, 716,1484,2320,2321,2322,2323,2324,2325,1485,2326,2327, 128, 57, 68, 261,1048, 211, 170,1240, 31,2328, 51, 435, 742,2329,2330,2331, 635,2332, 264, 456,2333,2334,2335,425,2336,1486, 143, 507, 263, 943,2337, 363, 920,1487, 256,1488,1102, 243, 601,1489,2338,2339,2340,2341,2342,2343,2344, 861,2345,2346,2347,2348,2349,2350, 395,2351,1490,1491, 62, 535, 166, 225,2352,2353, 668, 419,1241, 138, 604, 928,2354,1181,2355,1492,1493,2356,2357,2358,1143,2359, 696,2360, 387, 307,1309, 682, 476,2361,2362, 332, 12, 222, 156,2363, 232,2364, 641, 276, 656, 517,1494,1495,1035,416, 736,1496,2365,1017, 
586,2366,2367,2368,1497,2369, 242,2370,2371,2372,1498,2373, 965, 713,2374,2375,2376,2377, 740, 982,1499, 944,1500,1007,2378,2379,1310,1501,2380,2381,2382, 785, 329,2383,2384,1502,2385,2386,2387, 932,2388,1503,2389,2390,2391,2392,1242,2393,2394,2395,2396,2397, 994, 950,2398,2399,2400,2401,1504,1311,2402,2403,2404,2405,1049, 749,2406,2407, 853, 718,1144,1312,2408,1182,1505,2409,2410, 255, 516, 479, 564, 550, 214,1506,1507,1313, 413, 239, 444, 339,1145,1036,1508,1509,1314,1037,1510,1315,2411,1511,2412,2413,2414, 176, 703, 497, 624,593, 921, 302,2415, 341, 165,1103,1512,2416,1513,2417,2418,2419, 376,2420, 700,2421,2422,2423, 258, 768,1316,2424,1183,2425, 995, 608,2426,2427,2428,2429, 221,2430,2431,2432,2433,2434,2435,2436,2437, 195, 323, 726, 188, 897, 983,1317, 377,644,1050, 879,2438, 452,2439,2440,2441,2442,2443,2444, 914,2445,2446,2447,2448,915, 489,2449,1514,1184,2450,2451, 515, 64, 427, 495,2452, 583,2453, 483, 485,1038, 562, 213,1515, 748, 666,2454,2455,2456,2457, 334,2458, 780, 996,1008, 705,1243,2459,2460,2461,2462,2463, 114,2464, 493,1146, 366, 163,1516, 961,1104,2465,291,2466,1318,1105,2467,1517, 365,2468, 355, 951,1244,2469,1319,2470, 631,2471,2472, 218,1320, 364, 320, 756,1518,1519,1321,1520,1322,2473,2474,2475,2476, 997,2477,2478,2479,2480, 665,1185,2481, 916,1521,2482,2483,2484, 584, 684,2485,2486,797,2487,1051,1186,2488,2489,2490,1522,2491,2492, 370,2493,1039,1187, 65,2494,434, 205, 463,1188,2495, 125, 812, 391, 402, 826, 699, 286, 398, 155, 781, 771,585,2496, 590, 505,1073,2497, 599, 244, 219, 917,1018, 952, 646,1523,2498,1323,2499,2500, 49, 984, 354, 741,2501, 625,2502,1324,2503,1019, 190, 357, 757, 491,95, 782, 868,2504,2505,2506,2507,2508,2509, 134,1524,1074, 422,1525, 898,2510,161,2511,2512,2513,2514, 769,2515,1526,2516,2517, 411,1325,2518, 472,1527,2519,2520,2521,2522,2523,2524, 985,2525,2526,2527,2528,2529,2530, 764,2531,1245,2532,2533, 25, 204, 311,2534, 496,2535,1052,2536,2537,2538,2539,2540,2541,2542, 199,704, 504, 468, 758, 657,1528, 196, 44, 
839,1246, 272, 750,2543, 765, 862,2544,2545,1326,2546, 132, 615, 933,2547, 732,2548,2549,2550,1189,1529,2551, 283,1247,1053, 607, 929,2552,2553,2554, 930, 183, 872, 616,1040,1147,2555,1148,1020, 441,249,1075,2556,2557,2558, 466, 743,2559,2560,2561, 92, 514, 426, 420, 526,2562,2563,2564,2565,2566,2567,2568, 185,2569,2570,2571,2572, 776,1530, 658,2573, 362,2574, 361, 922,1076, 793,2575,2576,2577,2578,2579,2580,1531, 251,2581,2582,2583,2584,1532, 54, 612, 237,1327,2585,2586, 275, 408, 647, 111,2587,1533,1106, 465,3, 458, 9, 38,2588, 107, 110, 890, 209, 26, 737, 498,2589,1534,2590, 431,202, 88,1535, 356, 287,1107, 660,1149,2591, 381,1536, 986,1150, 445,1248,1151,974,2592,2593, 846,2594, 446, 953, 184,1249,1250, 727,2595, 923, 193, 883,2596,2597,2598, 102, 324, 539, 817,2599, 421,1041,2600, 832,2601, 94, 175, 197, 406,2602, 459,2603,2604,2605,2606,2607, 330, 555,2608,2609,2610, 706,1108, 389,2611,2612,2613,2614, 233,2615, 833, 558, 931, 954,1251,2616,2617,1537, 546,2618,2619,1009,2620,2621,2622,1538, 690,1328,2623, 955,2624,1539,2625,2626, 772,2627,2628,2629,2630,2631, 924, 648, 863, 603,2632,2633, 934,1540, 864, 865,2634, 642,1042,670,1190,2635,2636,2637,2638, 168,2639, 652, 873, 542,1054,1541,2640,2641,2642, # 512, 256#Everything below is of no interest for detection purpose2643,2644,2645,2646,2647,2648,2649,2650,2651,2652,2653,2654,2655,2656,2657,2658,2659,2660,2661,2662,2663,2664,2665,2666,2667,2668,2669,2670,2671,2672,2673,2674,2675,2676,2677,2678,2679,2680,2681,2682,2683,2684,2685,2686,2687,2688,2689,2690,2691,2692,2693,2694,2695,2696,2697,2698,2699,1542, 
880,2700,2701,2702,2703,2704,2705,2706,2707,2708,2709,2710,2711,2712,2713,2714,2715,2716,2717,2718,2719,2720,2721,2722,2723,2724,2725,1543,2726,2727,2728,2729,2730,2731,2732,1544,2733,2734,2735,2736,2737,2738,2739,2740,2741,2742,2743,2744,2745,2746,2747,2748,2749,2750,2751,2752,2753,2754,1545,2755,2756,2757,2758,2759,2760,2761,2762,2763,2764,2765,2766,1546,2767,1547,2768,2769,2770,2771,2772,2773,2774,2775,2776,2777,2778,2779,2780,2781,2782,2783,2784,2785,2786,1548,2787,2788,2789,1109,2790,2791,2792,2793,2794,2795,2796,2797,2798,2799,2800,2801,2802,2803,2804,2805,2806,2807,2808,2809,2810,2811,2812,1329,2813,2814,2815,2816,2817,2818,2819,2820,2821,2822,2823,2824,2825,2826,2827,2828,2829,2830,2831,2832,2833,2834,2835,2836,2837,2838,2839,2840,2841,2842,2843,2844,2845,2846,2847,2848,2849,2850,2851,2852,2853,2854,2855,2856,1549,2857,2858,2859,2860,1550,2861,2862,1551,2863,2864,2865,2866,2867,2868,2869,2870,2871,2872,2873,2874,1110,1330,2875,2876,2877,2878,2879,2880,2881,2882,2883,2884,2885,2886,2887,2888,2889,2890,2891,2892,2893,2894,2895,2896,2897,2898,2899,2900,2901,2902,2903,2904,2905,2906,2907,2908,2909,2910,2911,2912,2913,2914,2915,2916,2917,2918,2919,2920,2921,2922,2923,2924,2925,2926,2927,2928,2929,2930,1331,2931,2932,2933,2934,2935,2936,2937,2938,2939,2940,2941,2942,2943,1552,2944,2945,2946,2947,2948,2949,2950,2951,2952,2953,2954,2955,2956,2957,2958,2959,2960,2961,2962,2963,2964,1252,2965,2966,2967,2968,2969,2970,2971,2972,2973,2974,2975,2976,2977,2978,2979,2980,2981,2982,2983,2984,2985,2986,2987,2988,2989,2990,2991,2992,2993,2994,2995,2996,2997,2998,2999,3000,3001,3002,3003,3004,3005,3006,3007,3008,3009,3010,3011,3012,1553,3013,3014,3015,3016,3017,1554,3018,1332,3019,3020,3021,3022,3023,3024,3025,3026,3027,3028,3029,3030,3031,3032,3033,3034,3035,3036,3037,3038,3039,3040,3041,3042,3043,3044,3045,3046,3047,3048,3049,3050,1555,3051,3052,3053,1556,1557,3054,3055,3056,3057,3058,3059,3060,3061,3062,3063,3064,3065,3066,3067,1558,3068,3069,3070,3071,3072,3073,3074,3075,3
076,1559,3077,3078,3079,3080,3081,3082,3083,1253,3084,3085,3086,3087,3088,3089,3090,3091,3092,3093,3094,3095,3096,3097,3098,3099,3100,3101,3102,3103,3104,3105,3106,3107,3108,1152,3109,3110,3111,3112,3113,1560,3114,3115,3116,3117,1111,3118,3119,3120,3121,3122,3123,3124,3125,3126,3127,3128,3129,3130,3131,3132,3133,3134,3135,3136,3137,3138,3139,3140,3141,3142,3143,3144,3145,3146,3147,3148,3149,3150,3151,3152,3153,3154,3155,3156,3157,3158,3159,3160,3161,3162,3163,3164,3165,3166,3167,3168,3169,3170,3171,3172,3173,3174,3175,3176,1333,3177,3178,3179,3180,3181,3182,3183,3184,3185,3186,3187,3188,3189,1561,3190,3191,1334,3192,3193,3194,3195,3196,3197,3198,3199,3200,3201,3202,3203,3204,3205,3206,3207,3208,3209,3210,3211,3212,3213,3214,3215,3216,3217,3218,3219,3220,3221,3222,3223,3224,3225,3226,3227,3228,3229,3230,3231,3232,3233,3234,1562,3235,3236,3237,3238,3239,3240,3241,3242,3243,3244,3245,3246,3247,3248,3249,3250,3251,3252,3253,3254,3255,3256,3257,3258,3259,3260,3261,3262,3263,3264,3265,3266,3267,3268,3269,3270,3271,3272,3273,3274,3275,3276,3277,1563,3278,3279,3280,3281,3282,3283,3284,3285,3286,3287,3288,3289,3290,3291,3292,3293,3294,3295,3296,3297,3298,3299,3300,3301,3302,3303,3304,3305,3306,3307,3308,3309,3310,3311,3312,3313,3314,3315,3316,3317,3318,3319,3320,3321,3322,3323,3324,3325,3326,3327,3328,3329,3330,3331,3332,3333,3334,3335,3336,3337,3338,3339,3340,3341,3342,3343,3344,3345,3346,3347,3348,3349,3350,3351,3352,3353,3354,3355,3356,3357,3358,3359,3360,3361,3362,3363,3364,1335,3365,3366,3367,3368,3369,3370,3371,3372,3373,3374,3375,3376,3377,3378,3379,3380,3381,3382,3383,3384,3385,3386,3387,1336,3388,3389,3390,3391,3392,3393,3394,3395,3396,3397,3398,3399,3400,3401,3402,3403,3404,3405,3406,3407,3408,3409,3410,3411,3412,3413,3414,1337,3415,3416,3417,3418,3419,1338,3420,3421,3422,1564,1565,3423,3424,3425,3426,3427,3428,3429,3430,3431,1254,3432,3433,3434,1339,3435,3436,3437,3438,3439,1566,3440,3441,3442,3443,3444,3445,3446,3447,3448,3449,3450,3451,3452,3453,3454,1255,3455,3
456,3457,3458,3459,1567,1191,3460,1568,1569,3461,3462,3463,1570,3464,3465,3466,3467,3468,1571,3469,3470,3471,3472,3473,1572,3474,3475,3476,3477,3478,3479,3480,3481,3482,3483,3484,3485,3486,1340,3487,3488,3489,3490,3491,3492,1021,3493,3494,3495,3496,3497,3498,1573,3499,1341,3500,3501,3502,3503,3504,3505,3506,3507,3508,3509,3510,3511,1342,3512,3513,3514,3515,3516,1574,1343,3517,3518,3519,1575,3520,1576,3521,3522,3523,3524,3525,3526,3527,3528,3529,3530,3531,3532,3533,3534,3535,3536,3537,3538,3539,3540,3541,3542,3543,3544,3545,3546,3547,3548,3549,3550,3551,3552,3553,3554,3555,3556,3557,3558,3559,3560,3561,3562,3563,3564,3565,3566,3567,3568,3569,3570,3571,3572,3573,3574,3575,3576,3577,3578,3579,3580,1577,3581,3582,1578,3583,3584,3585,3586,3587,3588,3589,3590,3591,3592,3593,3594,3595,3596,3597,3598,3599,3600,3601,3602,3603,3604,1579,3605,3606,3607,3608,3609,3610,3611,3612,3613,3614,3615,3616,3617,3618,3619,3620,3621,3622,3623,3624,3625,3626,3627,3628,3629,1580,3630,3631,1581,3632,3633,3634,3635,3636,3637,3638,3639,3640,3641,3642,3643,3644,3645,3646,3647,3648,3649,3650,3651,3652,3653,3654,3655,3656,1582,3657,3658,3659,3660,3661,3662,3663,3664,3665,3666,3667,3668,3669,3670,3671,3672,3673,3674,3675,3676,3677,3678,3679,3680,3681,3682,3683,3684,3685,3686,3687,3688,3689,3690,3691,3692,3693,3694,3695,3696,3697,3698,3699,3700,1192,3701,3702,3703,3704,1256,3705,3706,3707,3708,1583,1257,3709,3710,3711,3712,3713,3714,3715,3716,1584,3717,3718,3719,3720,3721,3722,3723,3724,3725,3726,3727,3728,3729,3730,3731,3732,3733,3734,3735,3736,3737,3738,3739,3740,3741,3742,3743,3744,3745,1344,3746,3747,3748,3749,3750,3751,3752,3753,3754,3755,3756,1585,3757,3758,3759,3760,3761,3762,3763,3764,3765,3766,1586,3767,3768,3769,3770,3771,3772,3773,3774,3775,3776,3777,3778,1345,3779,3780,3781,3782,3783,3784,3785,3786,3787,3788,3789,3790,3791,3792,3793,3794,3795,1346,1587,3796,3797,1588,3798,3799,3800,3801,3802,3803,3804,3805,3806,1347,3807,3808,3809,3810,3811,1589,3812,3813,3814,3815,3816,3817,3818,3819,3
820,3821,1590,3822,3823,1591,1348,3824,3825,3826,3827,3828,3829,3830,1592,3831,3832,1593,3833,3834,3835,3836,3837,3838,3839,3840,3841,3842,3843,3844,1349,3845,3846,3847,3848,3849,3850,3851,3852,3853,3854,3855,3856,3857,3858,1594,3859,3860,3861,3862,3863,3864,3865,3866,3867,3868,3869,1595,3870,3871,3872,3873,1596,3874,3875,3876,3877,3878,3879,3880,3881,3882,3883,3884,3885,3886,1597,3887,3888,3889,3890,3891,3892,3893,3894,3895,1598,3896,3897,3898,1599,1600,3899,1350,3900,1351,3901,3902,1352,3903,3904,3905,3906,3907,3908,3909,3910,3911,3912,3913,3914,3915,3916,3917,3918,3919,3920,3921,3922,3923,3924,1258,3925,3926,3927,3928,3929,3930,3931,1193,3932,1601,3933,3934,3935,3936,3937,3938,3939,3940,3941,3942,3943,1602,3944,3945,3946,3947,3948,1603,3949,3950,3951,3952,3953,3954,3955,3956,3957,3958,3959,3960,3961,3962,3963,3964,3965,1604,3966,3967,3968,3969,3970,3971,3972,3973,3974,3975,3976,3977,1353,3978,3979,3980,3981,3982,3983,3984,3985,3986,3987,3988,3989,3990,3991,1354,3992,3993,3994,3995,3996,3997,3998,3999,4000,4001,4002,4003,4004,4005,4006,4007,4008,4009,4010,4011,4012,4013,4014,4015,4016,4017,4018,4019,4020,4021,4022,4023,1355,4024,4025,4026,4027,4028,4029,4030,4031,4032,4033,4034,4035,4036,4037,4038,4039,4040,1605,4041,4042,4043,4044,4045,4046,4047,4048,4049,4050,4051,4052,4053,4054,4055,4056,4057,4058,4059,4060,1606,4061,4062,4063,4064,1607,4065,4066,4067,4068,4069,4070,4071,4072,4073,4074,4075,4076,1194,4077,4078,1608,4079,4080,4081,4082,4083,4084,4085,4086,4087,1609,4088,4089,4090,4091,4092,4093,4094,4095,4096,4097,4098,4099,4100,4101,4102,4103,4104,4105,4106,4107,4108,1259,4109,4110,4111,4112,4113,4114,4115,4116,4117,4118,4119,4120,4121,4122,4123,4124,1195,4125,4126,4127,1610,4128,4129,4130,4131,4132,4133,4134,4135,4136,4137,1356,4138,4139,4140,4141,4142,4143,4144,1611,4145,4146,4147,4148,4149,4150,4151,4152,4153,4154,4155,4156,4157,4158,4159,4160,4161,4162,4163,4164,4165,4166,4167,4168,4169,4170,4171,4172,4173,4174,4175,4176,4177,4178,4179,4180,4181,4182,4183,4
184,4185,4186,4187,4188,4189,4190,4191,4192,4193,4194,4195,4196,4197,4198,4199,4200,4201,4202,4203,4204,4205,4206,4207,4208,4209,4210,4211,4212,4213,4214,4215,4216,4217,4218,4219,1612,4220,4221,4222,4223,4224,4225,4226,4227,1357,4228,1613,4229,4230,4231,4232,4233,4234,4235,4236,4237,4238,4239,4240,4241,4242,4243,1614,4244,4245,4246,4247,4248,4249,4250,4251,4252,4253,4254,4255,4256,4257,4258,4259,4260,4261,4262,4263,4264,4265,4266,4267,4268,4269,4270,1196,1358,4271,4272,4273,4274,4275,4276,4277,4278,4279,4280,4281,4282,4283,4284,4285,4286,4287,1615,4288,4289,4290,4291,4292,4293,4294,4295,4296,4297,4298,4299,4300,4301,4302,4303,4304,4305,4306,4307,4308,4309,4310,4311,4312,4313,4314,4315,4316,4317,4318,4319,4320,4321,4322,4323,4324,4325,4326,4327,4328,4329,4330,4331,4332,4333,4334,1616,4335,4336,4337,4338,4339,4340,4341,4342,4343,4344,4345,4346,4347,4348,4349,4350,4351,4352,4353,4354,4355,4356,4357,4358,4359,4360,1617,4361,4362,4363,4364,4365,1618,4366,4367,4368,4369,4370,4371,4372,4373,4374,4375,4376,4377,4378,4379,4380,4381,4382,4383,4384,4385,4386,4387,4388,4389,4390,4391,4392,4393,4394,4395,4396,4397,4398,4399,4400,4401,4402,4403,4404,4405,4406,4407,4408,4409,4410,4411,4412,4413,4414,4415,4416,1619,4417,4418,4419,4420,4421,4422,4423,4424,4425,1112,4426,4427,4428,4429,4430,1620,4431,4432,4433,4434,4435,4436,4437,4438,4439,4440,4441,4442,1260,1261,4443,4444,4445,4446,4447,4448,4449,4450,4451,4452,4453,4454,4455,1359,4456,4457,4458,4459,4460,4461,4462,4463,4464,4465,1621,4466,4467,4468,4469,4470,4471,4472,4473,4474,4475,4476,4477,4478,4479,4480,4481,4482,4483,4484,4485,4486,4487,4488,4489,1055,4490,4491,4492,4493,4494,4495,4496,4497,4498,4499,4500,4501,4502,4503,4504,4505,4506,4507,4508,4509,4510,4511,4512,4513,4514,4515,4516,4517,4518,1622,4519,4520,4521,1623,4522,4523,4524,4525,4526,4527,4528,4529,4530,4531,4532,4533,4534,4535,1360,4536,4537,4538,4539,4540,4541,4542,4543, 
975,4544,4545,4546,4547,4548,4549,4550,4551,4552,4553,4554,4555,4556,4557,4558,4559,4560,4561,4562,4563,4564,4565,4566,4567,4568,4569,4570,4571,1624,4572,4573,4574,4575,4576,1625,4577,4578,4579,4580,4581,4582,4583,4584,1626,4585,4586,4587,4588,4589,4590,4591,4592,4593,4594,4595,1627,4596,4597,4598,4599,4600,4601,4602,4603,4604,4605,4606,4607,4608,4609,4610,4611,4612,4613,4614,4615,1628,4616,4617,4618,4619,4620,4621,4622,4623,4624,4625,4626,4627,4628,4629,4630,4631,4632,4633,4634,4635,4636,4637,4638,4639,4640,4641,4642,4643,4644,4645,4646,4647,4648,4649,1361,4650,4651,4652,4653,4654,4655,4656,4657,4658,4659,4660,4661,1362,4662,4663,4664,4665,4666,4667,4668,4669,4670,4671,4672,4673,4674,4675,4676,4677,4678,4679,4680,4681,4682,1629,4683,4684,4685,4686,4687,1630,4688,4689,4690,4691,1153,4692,4693,4694,1113,4695,4696,4697,4698,4699,4700,4701,4702,4703,4704,4705,4706,4707,4708,4709,4710,4711,1197,4712,4713,4714,4715,4716,4717,4718,4719,4720,4721,4722,4723,4724,4725,4726,4727,4728,4729,4730,4731,4732,4733,4734,4735,1631,4736,1632,4737,4738,4739,4740,4741,4742,4743,4744,1633,4745,4746,4747,4748,4749,1262,4750,4751,4752,4753,4754,1363,4755,4756,4757,4758,4759,4760,4761,4762,4763,4764,4765,4766,4767,4768,1634,4769,4770,4771,4772,4773,4774,4775,4776,4777,4778,1635,4779,4780,4781,4782,4783,4784,4785,4786,4787,4788,4789,1636,4790,4791,4792,4793,4794,4795,4796,4797,4798,4799,4800,4801,4802,4803,4804,4805,4806,1637,4807,4808,4809,1638,4810,4811,4812,4813,4814,4815,4816,4817,4818,1639,4819,4820,4821,4822,4823,4824,4825,4826,4827,4828,4829,4830,4831,4832,4833,1077,4834,4835,4836,4837,4838,4839,4840,4841,4842,4843,4844,4845,4846,4847,4848,4849,4850,4851,4852,4853,4854,4855,4856,4857,4858,4859,4860,4861,4862,4863,4864,4865,4866,4867,4868,4869,4870,4871,4872,4873,4874,4875,4876,4877,4878,4879,4880,4881,4882,4883,1640,4884,4885,1641,4886,4887,4888,4889,4890,4891,4892,4893,4894,4895,4896,4897,4898,4899,4900,4901,4902,4903,4904,4905,4906,4907,4908,4909,4910,4911,1642,4912,4913,4914,1364,4
915,4916,4917,4918,4919,4920,4921,4922,4923,4924,4925,4926,4927,4928,4929,4930,4931,1643,4932,4933,4934,4935,4936,4937,4938,4939,4940,4941,4942,4943,4944,4945,4946,4947,4948,4949,4950,4951,4952,4953,4954,4955,4956,4957,4958,4959,4960,4961,4962,4963,4964,4965,4966,4967,4968,4969,4970,4971,4972,4973,4974,4975,4976,4977,4978,4979,4980,1644,4981,4982,4983,4984,1645,4985,4986,1646,4987,4988,4989,4990,4991,4992,4993,4994,4995,4996,4997,4998,4999,5000,5001,5002,5003,5004,5005,1647,5006,1648,5007,5008,5009,5010,5011,5012,1078,5013,5014,5015,5016,5017,5018,5019,5020,5021,5022,5023,5024,5025,5026,5027,5028,1365,5029,5030,5031,5032,5033,5034,5035,5036,5037,5038,5039,1649,5040,5041,5042,5043,5044,5045,1366,5046,5047,5048,5049,5050,5051,5052,5053,5054,5055,1650,5056,5057,5058,5059,5060,5061,5062,5063,5064,5065,5066,5067,5068,5069,5070,5071,5072,5073,5074,5075,5076,5077,1651,5078,5079,5080,5081,5082,5083,5084,5085,5086,5087,5088,5089,5090,5091,5092,5093,5094,5095,5096,5097,5098,5099,5100,5101,5102,5103,5104,5105,5106,5107,5108,5109,5110,1652,5111,5112,5113,5114,5115,5116,5117,5118,1367,5119,5120,5121,5122,5123,5124,5125,5126,5127,5128,5129,1653,5130,5131,5132,5133,5134,5135,5136,5137,5138,5139,5140,5141,5142,5143,5144,5145,5146,5147,5148,5149,1368,5150,1654,5151,1369,5152,5153,5154,5155,5156,5157,5158,5159,5160,5161,5162,5163,5164,5165,5166,5167,5168,5169,5170,5171,5172,5173,5174,5175,5176,5177,5178,1370,5179,5180,5181,5182,5183,5184,5185,5186,5187,5188,5189,5190,5191,5192,5193,5194,5195,5196,5197,5198,1655,5199,5200,5201,5202,1656,5203,5204,5205,5206,1371,5207,1372,5208,5209,5210,5211,1373,5212,5213,1374,5214,5215,5216,5217,5218,5219,5220,5221,5222,5223,5224,5225,5226,5227,5228,5229,5230,5231,5232,5233,5234,5235,5236,5237,5238,5239,5240,5241,5242,5243,5244,5245,5246,5247,1657,5248,5249,5250,5251,1658,1263,5252,5253,5254,5255,5256,1375,5257,5258,5259,5260,5261,5262,5263,5264,5265,5266,5267,5268,5269,5270,5271,5272,5273,5274,5275,5276,5277,5278,5279,5280,5281,5282,5283,1659,5284,5
285,5286,5287,5288,5289,5290,5291,5292,5293,5294,5295,5296,5297,5298,5299,5300,1660,5301,5302,5303,5304,5305,5306,5307,5308,5309,5310,5311,5312,5313,5314,5315,5316,5317,5318,5319,5320,5321,1376,5322,5323,5324,5325,5326,5327,5328,5329,5330,5331,5332,5333,1198,5334,5335,5336,5337,5338,5339,5340,5341,5342,5343,1661,5344,5345,5346,5347,5348,5349,5350,5351,5352,5353,5354,5355,5356,5357,5358,5359,5360,5361,5362,5363,5364,5365,5366,5367,5368,5369,5370,5371,5372,5373,5374,5375,5376,5377,5378,5379,5380,5381,5382,5383,5384,5385,5386,5387,5388,5389,5390,5391,5392,5393,5394,5395,5396,5397,5398,1264,5399,5400,5401,5402,5403,5404,5405,5406,5407,5408,5409,5410,5411,5412,1662,5413,5414,5415,5416,1663,5417,5418,5419,5420,5421,5422,5423,5424,5425,5426,5427,5428,5429,5430,5431,5432,5433,5434,5435,5436,5437,5438,1664,5439,5440,5441,5442,5443,5444,5445,5446,5447,5448,5449,5450,5451,5452,5453,5454,5455,5456,5457,5458,5459,5460,5461,5462,5463,5464,5465,5466,5467,5468,5469,5470,5471,5472,5473,5474,5475,5476,5477,5478,1154,5479,5480,5481,5482,5483,5484,5485,1665,5486,5487,5488,5489,5490,5491,5492,5493,5494,5495,5496,5497,5498,5499,5500,5501,5502,5503,5504,5505,5506,5507,5508,5509,5510,5511,5512,5513,5514,5515,5516,5517,5518,5519,5520,5521,5522,5523,5524,5525,5526,5527,5528,5529,5530,5531,5532,5533,5534,5535,5536,5537,5538,5539,5540,5541,5542,5543,5544,5545,5546,5547,5548,1377,5549,5550,5551,5552,5553,5554,5555,5556,5557,5558,5559,5560,5561,5562,5563,5564,5565,5566,5567,5568,5569,5570,1114,5571,5572,5573,5574,5575,5576,5577,5578,5579,5580,5581,5582,5583,5584,5585,5586,5587,5588,5589,5590,5591,5592,1378,5593,5594,5595,5596,5597,5598,5599,5600,5601,5602,5603,5604,5605,5606,5607,5608,5609,5610,5611,5612,5613,5614,1379,5615,5616,5617,5618,5619,5620,5621,5622,5623,5624,5625,5626,5627,5628,5629,5630,5631,5632,5633,5634,1380,5635,5636,5637,5638,5639,5640,5641,5642,5643,5644,5645,5646,5647,5648,5649,1381,1056,5650,5651,5652,5653,5654,5655,5656,5657,5658,5659,5660,1666,5661,5662,5663,5664,5665,5666,5
667,5668,1667,5669,1668,5670,5671,5672,5673,5674,5675,5676,5677,5678,1155,5679,5680,5681,5682,5683,5684,5685,5686,5687,5688,5689,5690,5691,5692,5693,5694,5695,5696,5697,5698,1669,5699,5700,5701,5702,5703,5704,5705,1670,5706,5707,5708,5709,5710,1671,5711,5712,5713,5714,1382,5715,5716,5717,5718,5719,5720,5721,5722,5723,5724,5725,1672,5726,5727,1673,1674,5728,5729,5730,5731,5732,5733,5734,5735,5736,1675,5737,5738,5739,5740,5741,5742,5743,5744,1676,5745,5746,5747,5748,5749,5750,5751,1383,5752,5753,5754,5755,5756,5757,5758,5759,5760,5761,5762,5763,5764,5765,5766,5767,5768,1677,5769,5770,5771,5772,5773,1678,5774,5775,5776, 998,5777,5778,5779,5780,5781,5782,5783,5784,5785,1384,5786,5787,5788,5789,5790,5791,5792,5793,5794,5795,5796,5797,5798,5799,5800,1679,5801,5802,5803,1115,1116,5804,5805,5806,5807,5808,5809,5810,5811,5812,5813,5814,5815,5816,5817,5818,5819,5820,5821,5822,5823,5824,5825,5826,5827,5828,5829,5830,5831,5832,5833,5834,5835,5836,5837,5838,5839,5840,5841,5842,5843,5844,5845,5846,5847,5848,5849,5850,5851,5852,5853,5854,5855,1680,5856,5857,5858,5859,5860,5861,5862,5863,5864,1681,5865,5866,5867,1682,5868,5869,5870,5871,5872,5873,5874,5875,5876,5877,5878,5879,1683,5880,1684,5881,5882,5883,5884,1685,5885,5886,5887,5888,5889,5890,5891,5892,5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904,5905,5906,5907,1686,5908,5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920,5921,5922,5923,5924,5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,1687,5936,5937,5938,5939,5940,5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,5952,1688,1689,5953,1199,5954,5955,5956,5957,5958,5959,5960,5961,1690,5962,5963,5964,5965,5966,5967,5968,5969,5970,5971,5972,5973,5974,5975,5976,5977,5978,5979,5980,5981,1385,5982,1386,5983,5984,5985,5986,5987,5988,5989,5990,5991,5992,5993,5994,5995,5996,5997,5998,5999,6000,6001,6002,6003,6004,6005,6006,6007,6008,6009,6010,6011,6012,6013,6014,6015,6016,6017,6018,6019,6020,6021,6022,6023,6024,6025,6026,6027,1265,6028,6029,1691,6030,6
031,6032,6033,6034,6035,6036,6037,6038,6039,6040,6041,6042,6043,6044,6045,6046,6047,6048,6049,6050,6051,6052,6053,6054,6055,6056,6057,6058,6059,6060,6061,6062,6063,6064,6065,6066,6067,6068,6069,6070,6071,6072,6073,6074,6075,6076,6077,6078,6079,6080,6081,6082,6083,6084,1692,6085,6086,6087,6088,6089,6090,6091,6092,6093,6094,6095,6096,6097,6098,6099,6100,6101,6102,6103,6104,6105,6106,6107,6108,6109,6110,6111,6112,6113,6114,6115,6116,6117,6118,6119,6120,6121,6122,6123,6124,6125,6126,6127,6128,6129,6130,6131,1693,6132,6133,6134,6135,6136,1694,6137,6138,6139,6140,6141,1695,6142,6143,6144,6145,6146,6147,6148,6149,6150,6151,6152,6153,6154,6155,6156,6157,6158,6159,6160,6161,6162,6163,6164,6165,6166,6167,6168,6169,6170,6171,6172,6173,6174,6175,6176,6177,6178,6179,6180,6181,6182,6183,6184,6185,1696,6186,6187,6188,6189,6190,6191,6192,6193,6194,6195,6196,6197,6198,6199,6200,6201,6202,6203,6204,6205,6206,6207,6208,6209,6210,6211,6212,6213,6214,6215,6216,6217,6218,6219,1697,6220,6221,6222,6223,6224,6225,6226,6227,6228,6229,6230,6231,6232,6233,6234,6235,6236,6237,6238,6239,6240,6241,6242,6243,6244,6245,6246,6247,6248,6249,6250,6251,6252,6253,1698,6254,6255,6256,6257,6258,6259,6260,6261,6262,6263,1200,6264,6265,6266,6267,6268,6269,6270,6271, 
#10246272,6273,6274,6275,6276,6277,6278,6279,6280,6281,6282,6283,6284,6285,6286,6287,6288,6289,6290,6291,6292,6293,6294,6295,6296,6297,6298,6299,6300,6301,6302,1699,6303,6304,1700,6305,6306,6307,6308,6309,6310,6311,6312,6313,6314,6315,6316,6317,6318,6319,6320,6321,6322,6323,6324,6325,6326,6327,6328,6329,6330,6331,6332,6333,6334,6335,6336,6337,6338,6339,1701,6340,6341,6342,6343,6344,1387,6345,6346,6347,6348,6349,6350,6351,6352,6353,6354,6355,6356,6357,6358,6359,6360,6361,6362,6363,6364,6365,6366,6367,6368,6369,6370,6371,6372,6373,6374,6375,6376,6377,6378,6379,6380,6381,6382,6383,6384,6385,6386,6387,6388,6389,6390,6391,6392,6393,6394,6395,6396,6397,6398,6399,6400,6401,6402,6403,6404,6405,6406,6407,6408,6409,6410,6411,6412,6413,1702,6414,6415,6416,6417,6418,6419,6420,6421,6422,1703,6423,6424,6425,6426,6427,6428,6429,6430,6431,6432,6433,6434,6435,6436,6437,6438,1704,6439,6440,6441,6442,6443,6444,6445,6446,6447,6448,6449,6450,6451,6452,6453,6454,6455,6456,6457,6458,6459,6460,6461,6462,6463,6464,6465,6466,6467,6468,6469,6470,6471,6472,6473,6474,6475,6476,6477,6478,6479,6480,6481,6482,6483,6484,6485,6486,6487,6488,6489,6490,6491,6492,6493,6494,6495,6496,6497,6498,6499,6500,6501,6502,6503,1266,6504,6505,6506,6507,6508,6509,6510,6511,6512,6513,6514,6515,6516,6517,6518,6519,6520,6521,6522,6523,6524,6525,6526,6527,6528,6529,6530,6531,6532,6533,6534,6535,6536,6537,6538,6539,6540,6541,6542,6543,6544,6545,6546,6547,6548,6549,6550,6551,1705,1706,6552,6553,6554,6555,6556,6557,6558,6559,6560,6561,6562,6563,6564,6565,6566,6567,6568,6569,6570,6571,6572,6573,6574,6575,6576,6577,6578,6579,6580,6581,6582,6583,6584,6585,6586,6587,6588,6589,6590,6591,6592,6593,6594,6595,6596,6597,6598,6599,6600,6601,6602,6603,6604,6605,6606,6607,6608,6609,6610,6611,6612,6613,6614,6615,6616,6617,6618,6619,6620,6621,6622,6623,6624,6625,6626,6627,6628,6629,6630,6631,6632,6633,6634,6635,6636,6637,1388,6638,6639,6640,6641,6642,6643,6644,1707,6645,6646,6647,6648,6649,6650,6651,6652,6653,6654,6655,6656,6657,6658,
6659,6660,6661,6662,6663,1708,6664,6665,6666,6667,6668,6669,6670,6671,6672,6673,6674,1201,6675,6676,6677,6678,6679,6680,6681,6682,6683,6684,6685,6686,6687,6688,6689,6690,6691,6692,6693,6694,6695,6696,6697,6698,6699,6700,6701,6702,6703,6704,6705,6706,6707,6708,6709,6710,6711,6712,6713,6714,6715,6716,6717,6718,6719,6720,6721,6722,6723,6724,6725,1389,6726,6727,6728,6729,6730,6731,6732,6733,6734,6735,6736,1390,1709,6737,6738,6739,6740,6741,6742,1710,6743,6744,6745,6746,1391,6747,6748,6749,6750,6751,6752,6753,6754,6755,6756,6757,1392,6758,6759,6760,6761,6762,6763,6764,6765,6766,6767,6768,6769,6770,6771,6772,6773,6774,6775,6776,6777,6778,6779,6780,1202,6781,6782,6783,6784,6785,6786,6787,6788,6789,6790,6791,6792,6793,6794,6795,6796,6797,6798,6799,6800,6801,6802,6803,6804,6805,6806,6807,6808,6809,1711,6810,6811,6812,6813,6814,6815,6816,6817,6818,6819,6820,6821,6822,6823,6824,6825,6826,6827,6828,6829,6830,6831,6832,6833,6834,6835,6836,1393,6837,6838,6839,6840,6841,6842,6843,6844,6845,6846,6847,6848,6849,6850,6851,6852,6853,6854,6855,6856,6857,6858,6859,6860,6861,6862,6863,6864,6865,6866,6867,6868,6869,6870,6871,6872,6873,6874,6875,6876,6877,6878,6879,6880,6881,6882,6883,6884,6885,6886,6887,6888,6889,6890,6891,6892,6893,6894,6895,6896,6897,6898,6899,6900,6901,6902,1712,6903,6904,6905,6906,6907,6908,6909,6910,1713,6911,6912,6913,6914,6915,6916,6917,6918,6919,6920,6921,6922,6923,6924,6925,6926,6927,6928,6929,6930,6931,6932,6933,6934,6935,6936,6937,6938,6939,6940,6941,6942,6943,6944,6945,6946,6947,6948,6949,6950,6951,6952,6953,6954,6955,6956,6957,6958,6959,6960,6961,6962,6963,6964,6965,6966,6967,6968,6969,6970,6971,6972,6973,6974,1714,6975,6976,6977,6978,6979,6980,6981,6982,6983,6984,6985,6986,6987,6988,1394,6989,6990,6991,6992,6993,6994,6995,6996,6997,6998,6999,7000,1715,7001,7002,7003,7004,7005,7006,7007,7008,7009,7010,7011,7012,7013,7014,7015,7016,7017,7018,7019,7020,7021,7022,7023,7024,7025,7026,7027,7028,1716,7029,7030,7031,7032,7033,7034,7035,7036,7037,7038,7039,7040,7041,
7042,7043,7044,7045,7046,7047,7048,7049,7050,7051,7052,7053,7054,7055,7056,7057,7058,7059,7060,7061,7062,7063,7064,7065,7066,7067,7068,7069,7070,7071,7072,7073,7074,7075,7076,7077,7078,7079,7080,7081,7082,7083,7084,7085,7086,7087,7088,7089,7090,7091,7092,7093,7094,7095,7096,7097,7098,7099,7100,7101,7102,7103,7104,7105,7106,7107,7108,7109,7110,7111,7112,7113,7114,7115,7116,7117,7118,7119,7120,7121,7122,7123,7124,7125,7126,7127,7128,7129,7130,7131,7132,7133,7134,7135,7136,7137,7138,7139,7140,7141,7142,7143,7144,7145,7146,7147,7148,7149,7150,7151,7152,7153,7154,7155,7156,7157,7158,7159,7160,7161,7162,7163,7164,7165,7166,7167,7168,7169,7170,7171,7172,7173,7174,7175,7176,7177,7178,7179,7180,7181,7182,7183,7184,7185,7186,7187,7188,7189,7190,7191,7192,7193,7194,7195,7196,7197,7198,7199,7200,7201,7202,7203,7204,7205,7206,7207,1395,7208,7209,7210,7211,7212,7213,1717,7214,7215,7216,7217,7218,7219,7220,7221,7222,7223,7224,7225,7226,7227,7228,7229,7230,7231,7232,7233,7234,7235,7236,7237,7238,7239,7240,7241,7242,7243,7244,7245,7246,7247,7248,7249,7250,7251,7252,7253,7254,7255,7256,7257,7258,7259,7260,7261,7262,7263,7264,7265,7266,7267,7268,7269,7270,7271,7272,7273,7274,7275,7276,7277,7278,7279,7280,7281,7282,7283,7284,7285,7286,7287,7288,7289,7290,7291,7292,7293,7294,7295,7296,7297,7298,7299,7300,7301,7302,7303,7304,7305,7306,7307,7308,7309,7310,7311,7312,7313,1718,7314,7315,7316,7317,7318,7319,7320,7321,7322,7323,7324,7325,7326,7327,7328,7329,7330,7331,7332,7333,7334,7335,7336,7337,7338,7339,7340,7341,7342,7343,7344,7345,7346,7347,7348,7349,7350,7351,7352,7353,7354,7355,7356,7357,7358,7359,7360,7361,7362,7363,7364,7365,7366,7367,7368,7369,7370,7371,7372,7373,7374,7375,7376,7377,7378,7379,7380,7381,7382,7383,7384,7385,7386,7387,7388,7389,7390,7391,7392,7393,7394,7395,7396,7397,7398,7399,7400,7401,7402,7403,7404,7405,7406,7407,7408,7409,7410,7411,7412,7413,7414,7415,7416,7417,7418,7419,7420,7421,7422,7423,7424,7425,7426,7427,7428,7429,7430,7431,7432,7433,7434,7435,7436,7437,7438,
7439,7440,7441,7442,7443,7444,7445,7446,7447,7448,7449,7450,7451,7452,7453,7454,7455,7456,7457,7458,7459,7460,7461,7462,7463,7464,7465,7466,7467,7468,7469,7470,7471,7472,7473,7474,7475,7476,7477,7478,7479,7480,7481,7482,7483,7484,7485,7486,7487,7488,7489,7490,7491,7492,7493,7494,7495,7496,7497,7498,7499,7500,7501,7502,7503,7504,7505,7506,7507,7508,7509,7510,7511,7512,7513,7514,7515,7516,7517,7518,7519,7520,7521,7522,7523,7524,7525,7526,7527,7528,7529,7530,7531,7532,7533,7534,7535,7536,7537,7538,7539,7540,7541,7542,7543,7544,7545,7546,7547,7548,7549,7550,7551,7552,7553,7554,7555,7556,7557,7558,7559,7560,7561,7562,7563,7564,7565,7566,7567,7568,7569,7570,7571,7572,7573,7574,7575,7576,7577,7578,7579,7580,7581,7582,7583,7584,7585,7586,7587,7588,7589,7590,7591,7592,7593,7594,7595,7596,7597,7598,7599,7600,7601,7602,7603,7604,7605,7606,7607,7608,7609,7610,7611,7612,7613,7614,7615,7616,7617,7618,7619,7620,7621,7622,7623,7624,7625,7626,7627,7628,7629,7630,7631,7632,7633,7634,7635,7636,7637,7638,7639,7640,7641,7642,7643,7644,7645,7646,7647,7648,7649,7650,7651,7652,7653,7654,7655,7656,7657,7658,7659,7660,7661,7662,7663,7664,7665,7666,7667,7668,7669,7670,7671,7672,7673,7674,7675,7676,7677,7678,7679,7680,7681,7682,7683,7684,7685,7686,7687,7688,7689,7690,7691,7692,7693,7694,7695,7696,7697,7698,7699,7700,7701,7702,7703,7704,7705,7706,7707,7708,7709,7710,7711,7712,7713,7714,7715,7716,7717,7718,7719,7720,7721,7722,7723,7724,7725,7726,7727,7728,7729,7730,7731,7732,7733,7734,7735,7736,7737,7738,7739,7740,7741,7742,7743,7744,7745,7746,7747,7748,7749,7750,7751,7752,7753,7754,7755,7756,7757,7758,7759,7760,7761,7762,7763,7764,7765,7766,7767,7768,7769,7770,7771,7772,7773,7774,7775,7776,7777,7778,7779,7780,7781,7782,7783,7784,7785,7786,7787,7788,7789,7790,7791,7792,7793,7794,7795,7796,7797,7798,7799,7800,7801,7802,7803,7804,7805,7806,7807,7808,7809,7810,7811,7812,7813,7814,7815,7816,7817,7818,7819,7820,7821,7822,7823,7824,7825,7826,7827,7828,7829,7830,7831,7832,7833,7834,7835,7836,7837,7838,
7839,7840,7841,7842,7843,7844,7845,7846,7847,7848,7849,7850,7851,7852,7853,7854,7855,7856,7857,7858,7859,7860,7861,7862,7863,7864,7865,7866,7867,7868,7869,7870,7871,7872,7873,7874,7875,7876,7877,7878,7879,7880,7881,7882,7883,7884,7885,7886,7887,7888,7889,7890,7891,7892,7893,7894,7895,7896,7897,7898,7899,7900,7901,7902,7903,7904,7905,7906,7907,7908,7909,7910,7911,7912,7913,7914,7915,7916,7917,7918,7919,7920,7921,7922,7923,7924,7925,7926,7927,7928,7929,7930,7931,7932,7933,7934,7935,7936,7937,7938,7939,7940,7941,7942,7943,7944,7945,7946,7947,7948,7949,7950,7951,7952,7953,7954,7955,7956,7957,7958,7959,7960,7961,7962,7963,7964,7965,7966,7967,7968,7969,7970,7971,7972,7973,7974,7975,7976,7977,7978,7979,7980,7981,7982,7983,7984,7985,7986,7987,7988,7989,7990,7991,7992,7993,7994,7995,7996,7997,7998,7999,8000,8001,8002,8003,8004,8005,8006,8007,8008,8009,8010,8011,8012,8013,8014,8015,8016,8017,8018,8019,8020,8021,8022,8023,8024,8025,8026,8027,8028,8029,8030,8031,8032,8033,8034,8035,8036,8037,8038,8039,8040,8041,8042,8043,8044,8045,8046,8047,8048,8049,8050,8051,8052,8053,8054,8055,8056,8057,8058,8059,8060,8061,8062,8063,8064,8065,8066,8067,8068,8069,8070,8071,8072,8073,8074,8075,8076,8077,8078,8079,8080,8081,8082,8083,8084,8085,8086,8087,8088,8089,8090,8091,8092,8093,8094,8095,8096,8097,8098,8099,8100,8101,8102,8103,8104,8105,8106,8107,8108,8109,8110,8111,8112,8113,8114,8115,8116,8117,8118,8119,8120,8121,8122,8123,8124,8125,8126,8127,8128,8129,8130,8131,8132,8133,8134,8135,8136,8137,8138,8139,8140,8141,8142,8143,8144,8145,8146,8147,8148,8149,8150,8151,8152,8153,8154,8155,8156,8157,8158,8159,8160,8161,8162,8163,8164,8165,8166,8167,8168,8169,8170,8171,8172,8173,8174,8175,8176,8177,8178,8179,8180,8181,8182,8183,8184,8185,8186,8187,8188,8189,8190,8191,8192,8193,8194,8195,8196,8197,8198,8199,8200,8201,8202,8203,8204,8205,8206,8207,8208,8209,8210,8211,8212,8213,8214,8215,8216,8217,8218,8219,8220,8221,8222,8223,8224,8225,8226,8227,8228,8229,8230,8231,8232,8233,8234,8235,8236,8237,8238,
8239,8240,8241,8242,8243,8244,8245,8246,8247,8248,8249,8250,8251,8252,8253,8254,8255,8256,8257,8258,8259,8260,8261,8262,8263,8264,8265,8266,8267,8268,8269,8270,8271,8272,8273,8274,8275,8276,8277,8278,8279,8280,8281,8282,8283,8284,8285,8286,8287,8288,8289,8290,8291,8292,8293,8294,8295,8296,8297,8298,8299,8300,8301,8302,8303,8304,8305,8306,8307,8308,8309,8310,8311,8312,8313,8314,8315,8316,8317,8318,8319,8320,8321,8322,8323,8324,8325,8326,8327,8328,8329,8330,8331,8332,8333,8334,8335,8336,8337,8338,8339,8340,8341,8342,8343,8344,8345,8346,8347,8348,8349,8350,8351,8352,8353,8354,8355,8356,8357,8358,8359,8360,8361,8362,8363,8364,8365,8366,8367,8368,8369,8370,8371,8372,8373,8374,8375,8376,8377,8378,8379,8380,8381,8382,8383,8384,8385,8386,8387,8388,8389,8390,8391,8392,8393,8394,8395,8396,8397,8398,8399,8400,8401,8402,8403,8404,8405,8406,8407,8408,8409,8410,8411,8412,8413,8414,8415,8416,8417,8418,8419,8420,8421,8422,8423,8424,8425,8426,8427,8428,8429,8430,8431,8432,8433,8434,8435,8436,8437,8438,8439,8440,8441,8442,8443,8444,8445,8446,8447,8448,8449,8450,8451,8452,8453,8454,8455,8456,8457,8458,8459,8460,8461,8462,8463,8464,8465,8466,8467,8468,8469,8470,8471,8472,8473,8474,8475,8476,8477,8478,8479,8480,8481,8482,8483,8484,8485,8486,8487,8488,8489,8490,8491,8492,8493,8494,8495,8496,8497,8498,8499,8500,8501,8502,8503,8504,8505,8506,8507,8508,8509,8510,8511,8512,8513,8514,8515,8516,8517,8518,8519,8520,8521,8522,8523,8524,8525,8526,8527,8528,8529,8530,8531,8532,8533,8534,8535,8536,8537,8538,8539,8540,8541,8542,8543,8544,8545,8546,8547,8548,8549,8550,8551,8552,8553,8554,8555,8556,8557,8558,8559,8560,8561,8562,8563,8564,8565,8566,8567,8568,8569,8570,8571,8572,8573,8574,8575,8576,8577,8578,8579,8580,8581,8582,8583,8584,8585,8586,8587,8588,8589,8590,8591,8592,8593,8594,8595,8596,8597,8598,8599,8600,8601,8602,8603,8604,8605,8606,8607,8608,8609,8610,8611,8612,8613,8614,8615,8616,8617,8618,8619,8620,8621,8622,8623,8624,8625,8626,8627,8628,8629,8630,8631,8632,8633,8634,8635,8636,8637,8638,
8639,8640,8641,8642,8643,8644,8645,8646,8647,8648,8649,8650,8651,8652,8653,8654,8655,8656,8657,8658,8659,8660,8661,8662,8663,8664,8665,8666,8667,8668,8669,8670,8671,8672,8673,8674,8675,8676,8677,8678,8679,8680,8681,8682,8683,8684,8685,8686,8687,8688,8689,8690,8691,8692,8693,8694,8695,8696,8697,8698,8699,8700,8701,8702,8703,8704,8705,8706,8707,8708,8709,8710,8711,8712,8713,8714,8715,8716,8717,8718,8719,8720,8721,8722,8723,8724,8725,8726,8727,8728,8729,8730,8731,8732,8733,8734,8735,8736,8737,8738,8739,8740,8741)# flake8: noqa
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

import sys

from . import constants
from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import EUCJPDistributionAnalysis
from .jpcntx import EUCJPContextAnalysis
from .mbcssm import EUCJPSMModel


class EUCJPProber(MultiByteCharSetProber):
    """Multi-byte prober for the EUC-JP encoding.

    Drives a coding state machine over the input and feeds complete
    two-byte characters to both a context analyser and a distribution
    analyser; confidence is the higher of the two analysers' scores.
    """

    def __init__(self):
        MultiByteCharSetProber.__init__(self)
        self._mCodingSM = CodingStateMachine(EUCJPSMModel)
        self._mDistributionAnalyzer = EUCJPDistributionAnalysis()
        self._mContextAnalyzer = EUCJPContextAnalysis()
        self.reset()

    def reset(self):
        MultiByteCharSetProber.reset(self)
        self._mContextAnalyzer.reset()

    def get_charset_name(self):
        return "EUC-JP"

    def feed(self, aBuf):
        """Feed a chunk of bytes; return the prober's updated state."""
        buf_len = len(aBuf)
        for pos in range(buf_len):
            # PY3K: aBuf is a byte array, so aBuf[pos] is an int, not a byte
            coding_state = self._mCodingSM.next_state(aBuf[pos])
            if coding_state == constants.eError:
                if constants._debug:
                    sys.stderr.write(self.get_charset_name()
                                     + ' prober hit error at byte ' + str(pos)
                                     + '\n')
                self._mState = constants.eNotMe
                break
            if coding_state == constants.eItsMe:
                self._mState = constants.eFoundIt
                break
            if coding_state == constants.eStart:
                char_len = self._mCodingSM.get_current_charlen()
                if pos == 0:
                    # First byte of this chunk completes the character
                    # started at the end of the previous chunk, which is
                    # stashed in self._mLastChar.
                    self._mLastChar[1] = aBuf[0]
                    self._mContextAnalyzer.feed(self._mLastChar, char_len)
                    self._mDistributionAnalyzer.feed(self._mLastChar,
                                                     char_len)
                else:
                    self._mContextAnalyzer.feed(aBuf[pos - 1:pos + 1],
                                                char_len)
                    self._mDistributionAnalyzer.feed(aBuf[pos - 1:pos + 1],
                                                     char_len)

        # Remember the last byte so a character split across chunks survives.
        self._mLastChar[0] = aBuf[buf_len - 1]

        if self.get_state() == constants.eDetecting:
            if (self._mContextAnalyzer.got_enough_data() and
                    (self.get_confidence() > constants.SHORTCUT_THRESHOLD)):
                self._mState = constants.eFoundIt

        return self.get_state()

    def get_confidence(self):
        context_conf = self._mContextAnalyzer.get_confidence()
        distrib_conf = self._mDistributionAnalyzer.get_confidence()
        return max(context_conf, distrib_conf)
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

from .constants import eStart, eError, eItsMe

# State machines for the escape-sequence encodings (HZ-GB-2312 and the
# ISO-2022 family).  For each model:
#   *_cls  maps a byte value 0-255 to its character class,
#   *_st   is the flattened transition table indexed by
#          current_state * classFactor + byte_class.

HZ_cls = (
    1, 0, 0, 0, 0, 0, 0, 0,  # 00 - 07
    0, 0, 0, 0, 0, 0, 0, 0,  # 08 - 0f
    0, 0, 0, 0, 0, 0, 0, 0,  # 10 - 17
    0, 0, 0, 1, 0, 0, 0, 0,  # 18 - 1f
    0, 0, 0, 0, 0, 0, 0, 0,  # 20 - 27
    0, 0, 0, 0, 0, 0, 0, 0,  # 28 - 2f
    0, 0, 0, 0, 0, 0, 0, 0,  # 30 - 37
    0, 0, 0, 0, 0, 0, 0, 0,  # 38 - 3f
    0, 0, 0, 0, 0, 0, 0, 0,  # 40 - 47
    0, 0, 0, 0, 0, 0, 0, 0,  # 48 - 4f
    0, 0, 0, 0, 0, 0, 0, 0,  # 50 - 57
    0, 0, 0, 0, 0, 0, 0, 0,  # 58 - 5f
    0, 0, 0, 0, 0, 0, 0, 0,  # 60 - 67
    0, 0, 0, 0, 0, 0, 0, 0,  # 68 - 6f
    0, 0, 0, 0, 0, 0, 0, 0,  # 70 - 77
    0, 0, 0, 4, 0, 5, 2, 0,  # 78 - 7f
    1, 1, 1, 1, 1, 1, 1, 1,  # 80 - 87
    1, 1, 1, 1, 1, 1, 1, 1,  # 88 - 8f
    1, 1, 1, 1, 1, 1, 1, 1,  # 90 - 97
    1, 1, 1, 1, 1, 1, 1, 1,  # 98 - 9f
    1, 1, 1, 1, 1, 1, 1, 1,  # a0 - a7
    1, 1, 1, 1, 1, 1, 1, 1,  # a8 - af
    1, 1, 1, 1, 1, 1, 1, 1,  # b0 - b7
    1, 1, 1, 1, 1, 1, 1, 1,  # b8 - bf
    1, 1, 1, 1, 1, 1, 1, 1,  # c0 - c7
    1, 1, 1, 1, 1, 1, 1, 1,  # c8 - cf
    1, 1, 1, 1, 1, 1, 1, 1,  # d0 - d7
    1, 1, 1, 1, 1, 1, 1, 1,  # d8 - df
    1, 1, 1, 1, 1, 1, 1, 1,  # e0 - e7
    1, 1, 1, 1, 1, 1, 1, 1,  # e8 - ef
    1, 1, 1, 1, 1, 1, 1, 1,  # f0 - f7
    1, 1, 1, 1, 1, 1, 1, 1,  # f8 - ff
)

HZ_st = (
    eStart, eError,      3, eStart, eStart, eStart, eError, eError,  # 00-07
    eError, eError, eError, eError, eItsMe, eItsMe, eItsMe, eItsMe,  # 08-0f
    eItsMe, eItsMe, eError, eError, eStart, eStart,      4, eError,  # 10-17
         5, eError,      6, eError,      5,      5,      4, eError,  # 18-1f
         4, eError,      4,      4,      4, eError,      4, eError,  # 20-27
         4, eItsMe, eStart, eStart, eStart, eStart, eStart, eStart,  # 28-2f
)

HZCharLenTable = (0, 0, 0, 0, 0, 0)

HZSMModel = {'classTable': HZ_cls,
             'classFactor': 6,
             'stateTable': HZ_st,
             'charLenTable': HZCharLenTable,
             'name': "HZ-GB-2312"}

ISO2022CN_cls = (
    2, 0, 0, 0, 0, 0, 0, 0,  # 00 - 07
    0, 0, 0, 0, 0, 0, 0, 0,  # 08 - 0f
    0, 0, 0, 0, 0, 0, 0, 0,  # 10 - 17
    0, 0, 0, 1, 0, 0, 0, 0,  # 18 - 1f
    0, 0, 0, 0, 0, 0, 0, 0,  # 20 - 27
    0, 3, 0, 0, 0, 0, 0, 0,  # 28 - 2f
    0, 0, 0, 0, 0, 0, 0, 0,  # 30 - 37
    0, 0, 0, 0, 0, 0, 0, 0,  # 38 - 3f
    0, 0, 0, 4, 0, 0, 0, 0,  # 40 - 47
    0, 0, 0, 0, 0, 0, 0, 0,  # 48 - 4f
    0, 0, 0, 0, 0, 0, 0, 0,  # 50 - 57
    0, 0, 0, 0, 0, 0, 0, 0,  # 58 - 5f
    0, 0, 0, 0, 0, 0, 0, 0,  # 60 - 67
    0, 0, 0, 0, 0, 0, 0, 0,  # 68 - 6f
    0, 0, 0, 0, 0, 0, 0, 0,  # 70 - 77
    0, 0, 0, 0, 0, 0, 0, 0,  # 78 - 7f
    2, 2, 2, 2, 2, 2, 2, 2,  # 80 - 87
    2, 2, 2, 2, 2, 2, 2, 2,  # 88 - 8f
    2, 2, 2, 2, 2, 2, 2, 2,  # 90 - 97
    2, 2, 2, 2, 2, 2, 2, 2,  # 98 - 9f
    2, 2, 2, 2, 2, 2, 2, 2,  # a0 - a7
    2, 2, 2, 2, 2, 2, 2, 2,  # a8 - af
    2, 2, 2, 2, 2, 2, 2, 2,  # b0 - b7
    2, 2, 2, 2, 2, 2, 2, 2,  # b8 - bf
    2, 2, 2, 2, 2, 2, 2, 2,  # c0 - c7
    2, 2, 2, 2, 2, 2, 2, 2,  # c8 - cf
    2, 2, 2, 2, 2, 2, 2, 2,  # d0 - d7
    2, 2, 2, 2, 2, 2, 2, 2,  # d8 - df
    2, 2, 2, 2, 2, 2, 2, 2,  # e0 - e7
    2, 2, 2, 2, 2, 2, 2, 2,  # e8 - ef
    2, 2, 2, 2, 2, 2, 2, 2,  # f0 - f7
    2, 2, 2, 2, 2, 2, 2, 2,  # f8 - ff
)

ISO2022CN_st = (
    eStart,      3, eError, eStart, eStart, eStart, eStart, eStart,  # 00-07
    eStart, eError, eError, eError, eError, eError, eError, eError,  # 08-0f
    eError, eError, eItsMe, eItsMe, eItsMe, eItsMe, eItsMe, eItsMe,  # 10-17
    eItsMe, eItsMe, eItsMe, eError, eError, eError,      4, eError,  # 18-1f
    eError, eError, eError, eItsMe, eError, eError, eError, eError,  # 20-27
         5,      6, eError, eError, eError, eError, eError, eError,  # 28-2f
    eError, eError, eError, eItsMe, eError, eError, eError, eError,  # 30-37
    eError, eError, eError, eError, eError, eItsMe, eError, eStart,  # 38-3f
)

ISO2022CNCharLenTable = (0, 0, 0, 0, 0, 0, 0, 0, 0)

ISO2022CNSMModel = {'classTable': ISO2022CN_cls,
                    'classFactor': 9,
                    'stateTable': ISO2022CN_st,
                    'charLenTable': ISO2022CNCharLenTable,
                    'name': "ISO-2022-CN"}

ISO2022JP_cls = (
    2, 0, 0, 0, 0, 0, 0, 0,  # 00 - 07
    0, 0, 0, 0, 0, 0, 2, 2,  # 08 - 0f
    0, 0, 0, 0, 0, 0, 0, 0,  # 10 - 17
    0, 0, 0, 1, 0, 0, 0, 0,  # 18 - 1f
    0, 0, 0, 0, 7, 0, 0, 0,  # 20 - 27
    3, 0, 0, 0, 0, 0, 0, 0,  # 28 - 2f
    0, 0, 0, 0, 0, 0, 0, 0,  # 30 - 37
    0, 0, 0, 0, 0, 0, 0, 0,  # 38 - 3f
    6, 0, 4, 0, 8, 0, 0, 0,  # 40 - 47
    0, 9, 5, 0, 0, 0, 0, 0,  # 48 - 4f
    0, 0, 0, 0, 0, 0, 0, 0,  # 50 - 57
    0, 0, 0, 0, 0, 0, 0, 0,  # 58 - 5f
    0, 0, 0, 0, 0, 0, 0, 0,  # 60 - 67
    0, 0, 0, 0, 0, 0, 0, 0,  # 68 - 6f
    0, 0, 0, 0, 0, 0, 0, 0,  # 70 - 77
    0, 0, 0, 0, 0, 0, 0, 0,  # 78 - 7f
    2, 2, 2, 2, 2, 2, 2, 2,  # 80 - 87
    2, 2, 2, 2, 2, 2, 2, 2,  # 88 - 8f
    2, 2, 2, 2, 2, 2, 2, 2,  # 90 - 97
    2, 2, 2, 2, 2, 2, 2, 2,  # 98 - 9f
    2, 2, 2, 2, 2, 2, 2, 2,  # a0 - a7
    2, 2, 2, 2, 2, 2, 2, 2,  # a8 - af
    2, 2, 2, 2, 2, 2, 2, 2,  # b0 - b7
    2, 2, 2, 2, 2, 2, 2, 2,  # b8 - bf
    2, 2, 2, 2, 2, 2, 2, 2,  # c0 - c7
    2, 2, 2, 2, 2, 2, 2, 2,  # c8 - cf
    2, 2, 2, 2, 2, 2, 2, 2,  # d0 - d7
    2, 2, 2, 2, 2, 2, 2, 2,  # d8 - df
    2, 2, 2, 2, 2, 2, 2, 2,  # e0 - e7
    2, 2, 2, 2, 2, 2, 2, 2,  # e8 - ef
    2, 2, 2, 2, 2, 2, 2, 2,  # f0 - f7
    2, 2, 2, 2, 2, 2, 2, 2,  # f8 - ff
)

ISO2022JP_st = (
    eStart,      3, eError, eStart, eStart, eStart, eStart, eStart,  # 00-07
    eStart, eStart, eError, eError, eError, eError, eError, eError,  # 08-0f
    eError, eError, eError, eError, eItsMe, eItsMe, eItsMe, eItsMe,  # 10-17
    eItsMe, eItsMe, eItsMe, eItsMe, eItsMe, eItsMe, eError, eError,  # 18-1f
    eError,      5, eError, eError, eError,      4, eError, eError,  # 20-27
    eError, eError, eError,      6, eItsMe, eError, eItsMe, eError,  # 28-2f
    eError, eError, eError, eError, eError, eError, eItsMe, eItsMe,  # 30-37
    eError, eError, eError, eItsMe, eError, eError, eError, eError,  # 38-3f
    eError, eError, eError, eError, eItsMe, eError, eStart, eStart,  # 40-47
)

ISO2022JPCharLenTable = (0, 0, 0, 0, 0, 0, 0, 0, 0, 0)

ISO2022JPSMModel = {'classTable': ISO2022JP_cls,
                    'classFactor': 10,
                    'stateTable': ISO2022JP_st,
                    'charLenTable': ISO2022JPCharLenTable,
                    'name': "ISO-2022-JP"}

ISO2022KR_cls = (
    2, 0, 0, 0, 0, 0, 0, 0,  # 00 - 07
    0, 0, 0, 0, 0, 0, 0, 0,  # 08 - 0f
    0, 0, 0, 0, 0, 0, 0, 0,  # 10 - 17
    0, 0, 0, 1, 0, 0, 0, 0,  # 18 - 1f
    0, 0, 0, 0, 3, 0, 0, 0,  # 20 - 27
    0, 4, 0, 0, 0, 0, 0, 0,  # 28 - 2f
    0, 0, 0, 0, 0, 0, 0, 0,  # 30 - 37
    0, 0, 0, 0, 0, 0, 0, 0,  # 38 - 3f
    0, 0, 0, 5, 0, 0, 0, 0,  # 40 - 47
    0, 0, 0, 0, 0, 0, 0, 0,  # 48 - 4f
    0, 0, 0, 0, 0, 0, 0, 0,  # 50 - 57
    0, 0, 0, 0, 0, 0, 0, 0,  # 58 - 5f
    0, 0, 0, 0, 0, 0, 0, 0,  # 60 - 67
    0, 0, 0, 0, 0, 0, 0, 0,  # 68 - 6f
    0, 0, 0, 0, 0, 0, 0, 0,  # 70 - 77
    0, 0, 0, 0, 0, 0, 0, 0,  # 78 - 7f
    2, 2, 2, 2, 2, 2, 2, 2,  # 80 - 87
    2, 2, 2, 2, 2, 2, 2, 2,  # 88 - 8f
    2, 2, 2, 2, 2, 2, 2, 2,  # 90 - 97
    2, 2, 2, 2, 2, 2, 2, 2,  # 98 - 9f
    2, 2, 2, 2, 2, 2, 2, 2,  # a0 - a7
    2, 2, 2, 2, 2, 2, 2, 2,  # a8 - af
    2, 2, 2, 2, 2, 2, 2, 2,  # b0 - b7
    2, 2, 2, 2, 2, 2, 2, 2,  # b8 - bf
    2, 2, 2, 2, 2, 2, 2, 2,  # c0 - c7
    2, 2, 2, 2, 2, 2, 2, 2,  # c8 - cf
    2, 2, 2, 2, 2, 2, 2, 2,  # d0 - d7
    2, 2, 2, 2, 2, 2, 2, 2,  # d8 - df
    2, 2, 2, 2, 2, 2, 2, 2,  # e0 - e7
    2, 2, 2, 2, 2, 2, 2, 2,  # e8 - ef
    2, 2, 2, 2, 2, 2, 2, 2,  # f0 - f7
    2, 2, 2, 2, 2, 2, 2, 2,  # f8 - ff
)

ISO2022KR_st = (
    eStart,      3, eError, eStart, eStart, eStart, eError, eError,  # 00-07
    eError, eError, eError, eError, eItsMe, eItsMe, eItsMe, eItsMe,  # 08-0f
    eItsMe, eItsMe, eError, eError, eError,      4, eError, eError,  # 10-17
    eError, eError, eError, eError,      5, eError, eError, eError,  # 18-1f
    eError, eError, eError, eItsMe, eStart, eStart, eStart, eStart,  # 20-27
)

ISO2022KRCharLenTable = (0, 0, 0, 0, 0, 0)

ISO2022KRSMModel = {'classTable': ISO2022KR_cls,
                    'classFactor': 6,
                    'stateTable': ISO2022KR_st,
                    'charLenTable': ISO2022KRCharLenTable,
                    'name': "ISO-2022-KR"}

# flake8: noqa
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

from . import constants
from .escsm import (HZSMModel, ISO2022CNSMModel, ISO2022JPSMModel,
                    ISO2022KRSMModel)
from .charsetprober import CharSetProber
from .codingstatemachine import CodingStateMachine
from .compat import wrap_ord


class EscCharSetProber(CharSetProber):
    """Prober for escape-sequence encodings (HZ and the ISO-2022 family).

    Runs one coding state machine per candidate encoding in parallel;
    the first machine to reach eItsMe decides the charset.
    """

    def __init__(self):
        CharSetProber.__init__(self)
        self._mCodingSM = [
            CodingStateMachine(HZSMModel),
            CodingStateMachine(ISO2022CNSMModel),
            CodingStateMachine(ISO2022JPSMModel),
            CodingStateMachine(ISO2022KRSMModel),
        ]
        self.reset()

    def reset(self):
        CharSetProber.reset(self)
        for coding_sm in self._mCodingSM:
            if not coding_sm:
                continue
            coding_sm.active = True
            coding_sm.reset()
        self._mActiveSM = len(self._mCodingSM)
        self._mDetectedCharset = None

    def get_charset_name(self):
        return self._mDetectedCharset

    def get_confidence(self):
        # Escape sequences are unambiguous: near-certain once matched.
        if self._mDetectedCharset:
            return 0.99
        return 0.00

    def feed(self, aBuf):
        """Feed a chunk of bytes; return the prober's updated state."""
        for byte in aBuf:
            # PY3K: aBuf is a byte array, so byte is an int under Python 3
            for coding_sm in self._mCodingSM:
                if not coding_sm or not coding_sm.active:
                    continue
                coding_state = coding_sm.next_state(wrap_ord(byte))
                if coding_state == constants.eError:
                    # This machine can no longer match; deactivate it.
                    coding_sm.active = False
                    self._mActiveSM -= 1
                    if self._mActiveSM <= 0:
                        self._mState = constants.eNotMe
                        return self.get_state()
                elif coding_state == constants.eItsMe:
                    self._mState = constants.eFoundIt
                    self._mDetectedCharset = coding_sm.get_coding_state_machine()  # nopep8
                    return self.get_state()
        return self.get_state()
######################## BEGIN LICENSE BLOCK ######################### The Original Code is mozilla.org code.## The Initial Developer of the Original Code is# Netscape Communications Corporation.# Portions created by the Initial Developer are Copyright (C) 1998# the Initial Developer. All Rights Reserved.## Contributor(s):# Mark Pilgrim - port to Python## This library is free software; you can redistribute it and/or# modify it under the terms of the GNU Lesser General Public# License as published by the Free Software Foundation; either# version 2.1 of the License, or (at your option) any later version.## This library is distributed in the hope that it will be useful,# but WITHOUT ANY WARRANTY; without even the implied warranty of# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU# Lesser General Public License for more details.## You should have received a copy of the GNU Lesser General Public# License along with this library; if not, write to the Free Software# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA# 02110-1301 USA######################### END LICENSE BLOCK #########################from .mbcharsetprober import MultiByteCharSetProberfrom .codingstatemachine import CodingStateMachinefrom .chardistribution import EUCKRDistributionAnalysisfrom .mbcssm import CP949SMModelclass CP949Prober(MultiByteCharSetProber):def __init__(self):MultiByteCharSetProber.__init__(self)self._mCodingSM = CodingStateMachine(CP949SMModel)# NOTE: CP949 is a superset of EUC-KR, so the distribution should be# not different.self._mDistributionAnalyzer = EUCKRDistributionAnalysis()self.reset()def get_charset_name(self):return "CP949"
######################## BEGIN LICENSE BLOCK ######################### The Original Code is Mozilla Universal charset detector code.## The Initial Developer of the Original Code is# Netscape Communications Corporation.# Portions created by the Initial Developer are Copyright (C) 2001# the Initial Developer. All Rights Reserved.## Contributor(s):# Mark Pilgrim - port to Python# Shy Shalom - original C code## This library is free software; you can redistribute it and/or# modify it under the terms of the GNU Lesser General Public# License as published by the Free Software Foundation; either# version 2.1 of the License, or (at your option) any later version.## This library is distributed in the hope that it will be useful,# but WITHOUT ANY WARRANTY; without even the implied warranty of# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU# Lesser General Public License for more details.## You should have received a copy of the GNU Lesser General Public# License along with this library; if not, write to the Free Software# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA# 02110-1301 USA######################### END LICENSE BLOCK #########################_debug = 0eDetecting = 0eFoundIt = 1eNotMe = 2eStart = 0eError = 1eItsMe = 2SHORTCUT_THRESHOLD = 0.95
######################## BEGIN LICENSE BLOCK ######################### Contributor(s):# Ian Cordasco - port to Python## This library is free software; you can redistribute it and/or# modify it under the terms of the GNU Lesser General Public# License as published by the Free Software Foundation; either# version 2.1 of the License, or (at your option) any later version.## This library is distributed in the hope that it will be useful,# but WITHOUT ANY WARRANTY; without even the implied warranty of# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU# Lesser General Public License for more details.## You should have received a copy of the GNU Lesser General Public# License along with this library; if not, write to the Free Software# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA# 02110-1301 USA######################### END LICENSE BLOCK #########################import sysif sys.version_info < (3, 0):base_str = (str, unicode)else:base_str = (bytes, str)def wrap_ord(a):if sys.version_info < (3, 0) and isinstance(a, base_str):return ord(a)else:return a
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

from .constants import eStart
from .compat import wrap_ord


class CodingStateMachine:
    """Table-driven state machine for validating a byte stream.

    The model dict supplies 'classTable' (byte -> class),
    'stateTable' (flattened transition matrix), 'classFactor'
    (row width of that matrix), 'charLenTable' and 'name'.
    """

    def __init__(self, sm):
        self._mModel = sm
        self._mCurrentBytePos = 0
        self._mCurrentCharLen = 0
        self.reset()

    def reset(self):
        self._mCurrentState = eStart

    def next_state(self, c):
        """Advance the machine by one byte and return the new state."""
        # For each byte we get its class; if it is the first byte of a
        # character we also latch the character length.
        # PY3K: the buffer is a byte stream, so c is an int, not a byte.
        byte_class = self._mModel['classTable'][wrap_ord(c)]
        if self._mCurrentState == eStart:
            self._mCurrentBytePos = 0
            self._mCurrentCharLen = self._mModel['charLenTable'][byte_class]
        # The byte's class and the current state index the transition table.
        row_index = (self._mCurrentState * self._mModel['classFactor']
                     + byte_class)
        self._mCurrentState = self._mModel['stateTable'][row_index]
        self._mCurrentBytePos += 1
        return self._mCurrentState

    def get_current_charlen(self):
        return self._mCurrentCharLen

    def get_coding_state_machine(self):
        return self._mModel['name']
######################## BEGIN LICENSE BLOCK ######################### The Original Code is Mozilla Universal charset detector code.## The Initial Developer of the Original Code is# Netscape Communications Corporation.# Portions created by the Initial Developer are Copyright (C) 2001# the Initial Developer. All Rights Reserved.## Contributor(s):# Mark Pilgrim - port to Python# Shy Shalom - original C code## This library is free software; you can redistribute it and/or# modify it under the terms of the GNU Lesser General Public# License as published by the Free Software Foundation; either# version 2.1 of the License, or (at your option) any later version.## This library is distributed in the hope that it will be useful,# but WITHOUT ANY WARRANTY; without even the implied warranty of# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU# Lesser General Public License for more details.## You should have received a copy of the GNU Lesser General Public# License along with this library; if not, write to the Free Software# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA# 02110-1301 USA######################### END LICENSE BLOCK #########################from . import constantsimport reclass CharSetProber:def __init__(self):passdef reset(self):self._mState = constants.eDetectingdef get_charset_name(self):return Nonedef feed(self, aBuf):passdef get_state(self):return self._mStatedef get_confidence(self):return 0.0def filter_high_bit_only(self, aBuf):aBuf = re.sub(b'([\x00-\x7F])+', b' ', aBuf)return aBufdef filter_without_english_letters(self, aBuf):aBuf = re.sub(b'([A-Za-z])+', b' ', aBuf)return aBufdef filter_with_english_letters(self, aBuf):# TODOreturn aBuf
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

import sys

from . import constants
from .charsetprober import CharSetProber


class CharSetGroupProber(CharSetProber):
    """Runs a list of child probers in parallel and reports the best one."""

    def __init__(self):
        CharSetProber.__init__(self)
        self._mActiveNum = 0
        self._mProbers = []
        self._mBestGuessProber = None

    def reset(self):
        CharSetProber.reset(self)
        self._mActiveNum = 0
        for prober in self._mProbers:
            if prober:
                prober.reset()
                prober.active = True
                self._mActiveNum += 1
        self._mBestGuessProber = None

    def get_charset_name(self):
        # get_confidence() picks the best child as a side effect.
        if not self._mBestGuessProber:
            self.get_confidence()
            if not self._mBestGuessProber:
                return None
        return self._mBestGuessProber.get_charset_name()

    def feed(self, aBuf):
        """Feed a chunk of bytes to every active child prober."""
        for prober in self._mProbers:
            if not prober or not prober.active:
                continue
            child_state = prober.feed(aBuf)
            if not child_state:
                continue
            if child_state == constants.eFoundIt:
                self._mBestGuessProber = prober
                return self.get_state()
            if child_state == constants.eNotMe:
                prober.active = False
                self._mActiveNum -= 1
                if self._mActiveNum <= 0:
                    self._mState = constants.eNotMe
                    return self.get_state()
        return self.get_state()

    def get_confidence(self):
        """Return the best child confidence; remember that child."""
        group_state = self.get_state()
        if group_state == constants.eFoundIt:
            return 0.99
        if group_state == constants.eNotMe:
            return 0.01
        best_conf = 0.0
        self._mBestGuessProber = None
        for prober in self._mProbers:
            if not prober:
                continue
            if not prober.active:
                if constants._debug:
                    sys.stderr.write(prober.get_charset_name()
                                     + ' not active\n')
                continue
            conf = prober.get_confidence()
            if constants._debug:
                sys.stderr.write('%s confidence = %s\n' %
                                 (prober.get_charset_name(), conf))
            if best_conf < conf:
                best_conf = conf
                self._mBestGuessProber = prober
        if not self._mBestGuessProber:
            return 0.0
        return best_conf
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

from .euctwfreq import (EUCTWCharToFreqOrder, EUCTW_TABLE_SIZE,
                        EUCTW_TYPICAL_DISTRIBUTION_RATIO)
from .euckrfreq import (EUCKRCharToFreqOrder, EUCKR_TABLE_SIZE,
                        EUCKR_TYPICAL_DISTRIBUTION_RATIO)
from .gb2312freq import (GB2312CharToFreqOrder, GB2312_TABLE_SIZE,
                         GB2312_TYPICAL_DISTRIBUTION_RATIO)
from .big5freq import (Big5CharToFreqOrder, BIG5_TABLE_SIZE,
                       BIG5_TYPICAL_DISTRIBUTION_RATIO)
from .jisfreq import (JISCharToFreqOrder, JIS_TABLE_SIZE,
                      JIS_TYPICAL_DISTRIBUTION_RATIO)
from .compat import wrap_ord

ENOUGH_DATA_THRESHOLD = 1024
SURE_YES = 0.99
SURE_NO = 0.01
MINIMUM_DATA_THRESHOLD = 3


class CharDistributionAnalysis:
    """Scores how well 2-byte characters match a language's frequency table.

    Subclasses supply the table, its size and the typical distribution
    ratio, plus get_order() to map a raw 2-byte sequence to a table index.
    """

    def __init__(self):
        # Mapping table to get frequency order from char order (as
        # produced by get_order()).
        self._mCharToFreqOrder = None
        self._mTableSize = None  # Size of the table above
        # This is a constant value which varies from language to language,
        # used in calculating confidence.  See
        # http://www.mozilla.org/projects/intl/UniversalCharsetDetection.html
        # for further detail.
        self._mTypicalDistributionRatio = None
        self.reset()

    def reset(self):
        """reset analyser, clear any state"""
        # If this flag is set to True, detection is done and a conclusion
        # has been made.
        self._mDone = False
        self._mTotalChars = 0  # Total characters encountered
        # The number of characters whose frequency order is less than 512.
        self._mFreqChars = 0

    def feed(self, aBuf, aCharLen):
        """feed a character with known length"""
        # Only 2-byte characters participate in the distribution analysis.
        order = self.get_order(aBuf) if aCharLen == 2 else -1
        if order >= 0:
            self._mTotalChars += 1
            # order is valid
            if order < self._mTableSize:
                if 512 > self._mCharToFreqOrder[order]:
                    self._mFreqChars += 1

    def get_confidence(self):
        """return confidence based on existing data"""
        # If we didn't receive any character in our consideration range,
        # return a negative answer.
        if (self._mTotalChars <= 0
                or self._mFreqChars <= MINIMUM_DATA_THRESHOLD):
            return SURE_NO
        if self._mTotalChars != self._mFreqChars:
            ratio = (self._mFreqChars /
                     ((self._mTotalChars - self._mFreqChars)
                      * self._mTypicalDistributionRatio))
            if ratio < SURE_YES:
                return ratio
        # Normalize confidence (we don't want to be 100% sure).
        return SURE_YES

    def got_enough_data(self):
        # It is not necessary to receive all data to draw a conclusion;
        # a certain amount is enough for charset detection.
        return self._mTotalChars > ENOUGH_DATA_THRESHOLD

    def get_order(self, aBuf):
        # We do not handle characters based on the original encoding
        # string, but convert this encoding string to a number, here
        # called order.  This allows multiple encodings of a language to
        # share one frequency table.
        return -1


class EUCTWDistributionAnalysis(CharDistributionAnalysis):
    def __init__(self):
        CharDistributionAnalysis.__init__(self)
        self._mCharToFreqOrder = EUCTWCharToFreqOrder
        self._mTableSize = EUCTW_TABLE_SIZE
        self._mTypicalDistributionRatio = EUCTW_TYPICAL_DISTRIBUTION_RATIO

    def get_order(self, aBuf):
        # For euc-TW encoding we are interested in
        #   first byte range:  0xc4 -- 0xfe
        #   second byte range: 0xa1 -- 0xfe
        # No validation needed here; the state machine has done that.
        first_byte = wrap_ord(aBuf[0])
        if first_byte >= 0xC4:
            return 94 * (first_byte - 0xC4) + wrap_ord(aBuf[1]) - 0xA1
        return -1


class EUCKRDistributionAnalysis(CharDistributionAnalysis):
    def __init__(self):
        CharDistributionAnalysis.__init__(self)
        self._mCharToFreqOrder = EUCKRCharToFreqOrder
        self._mTableSize = EUCKR_TABLE_SIZE
        self._mTypicalDistributionRatio = EUCKR_TYPICAL_DISTRIBUTION_RATIO

    def get_order(self, aBuf):
        # For euc-KR encoding we are interested in
        #   first byte range:  0xb0 -- 0xfe
        #   second byte range: 0xa1 -- 0xfe
        # No validation needed here; the state machine has done that.
        first_byte = wrap_ord(aBuf[0])
        if first_byte >= 0xB0:
            return 94 * (first_byte - 0xB0) + wrap_ord(aBuf[1]) - 0xA1
        return -1


class GB2312DistributionAnalysis(CharDistributionAnalysis):
    def __init__(self):
        CharDistributionAnalysis.__init__(self)
        self._mCharToFreqOrder = GB2312CharToFreqOrder
        self._mTableSize = GB2312_TABLE_SIZE
        self._mTypicalDistributionRatio = GB2312_TYPICAL_DISTRIBUTION_RATIO

    def get_order(self, aBuf):
        # For GB2312 encoding we are interested in
        #   first byte range:  0xb0 -- 0xfe
        #   second byte range: 0xa1 -- 0xfe
        # No validation needed here; the state machine has done that.
        first_byte, second_byte = wrap_ord(aBuf[0]), wrap_ord(aBuf[1])
        if (first_byte >= 0xB0) and (second_byte >= 0xA1):
            return 94 * (first_byte - 0xB0) + second_byte - 0xA1
        return -1


class Big5DistributionAnalysis(CharDistributionAnalysis):
    def __init__(self):
        CharDistributionAnalysis.__init__(self)
        self._mCharToFreqOrder = Big5CharToFreqOrder
        self._mTableSize = BIG5_TABLE_SIZE
        self._mTypicalDistributionRatio = BIG5_TYPICAL_DISTRIBUTION_RATIO

    def get_order(self, aBuf):
        # For big5 encoding we are interested in
        #   first byte range:  0xa4 -- 0xfe
        #   second byte range: 0x40 -- 0x7e , 0xa1 -- 0xfe
        # No validation needed here; the state machine has done that.
        first_byte, second_byte = wrap_ord(aBuf[0]), wrap_ord(aBuf[1])
        if first_byte >= 0xA4:
            if second_byte >= 0xA1:
                return 157 * (first_byte - 0xA4) + second_byte - 0xA1 + 63
            return 157 * (first_byte - 0xA4) + second_byte - 0x40
        return -1


class SJISDistributionAnalysis(CharDistributionAnalysis):
    def __init__(self):
        CharDistributionAnalysis.__init__(self)
        self._mCharToFreqOrder = JISCharToFreqOrder
        self._mTableSize = JIS_TABLE_SIZE
        self._mTypicalDistributionRatio = JIS_TYPICAL_DISTRIBUTION_RATIO

    def get_order(self, aBuf):
        # For sjis encoding we are interested in
        #   first byte range:  0x81 -- 0x9f , 0xe0 -- 0xfe
        #   second byte range: 0x40 -- 0x7e , 0x81 -- 0xfe
        # No validation needed here; the state machine has done that.
        first_byte, second_byte = wrap_ord(aBuf[0]), wrap_ord(aBuf[1])
        if 0x81 <= first_byte <= 0x9F:
            order = 188 * (first_byte - 0x81)
        elif 0xE0 <= first_byte <= 0xEF:
            order = 188 * (first_byte - 0xE0 + 31)
        else:
            return -1
        order = order + second_byte - 0x40
        if second_byte > 0x7F:
            order = -1
        return order


class EUCJPDistributionAnalysis(CharDistributionAnalysis):
    def __init__(self):
        CharDistributionAnalysis.__init__(self)
        self._mCharToFreqOrder = JISCharToFreqOrder
        self._mTableSize = JIS_TABLE_SIZE
        self._mTypicalDistributionRatio = JIS_TYPICAL_DISTRIBUTION_RATIO

    def get_order(self, aBuf):
        # For euc-JP encoding we are interested in
        #   first byte range:  0xa0 -- 0xfe
        #   second byte range: 0xa1 -- 0xfe
        # No validation needed here; the state machine has done that.
        first_byte = wrap_ord(aBuf[0])
        if first_byte >= 0xA0:
            return 94 * (first_byte - 0xA1) + wrap_ord(aBuf[1]) - 0xA1
        return -1
#!/usr/bin/env python"""Script which takes one or more file paths and reports on their detectedencodingsExample::% chardetect somefile someotherfilesomefile: windows-1252 with confidence 0.5someotherfile: ascii with confidence 1.0If no paths are provided, it takes its input from stdin."""from __future__ import absolute_import, print_function, unicode_literalsimport argparseimport sysfrom io import openfrom chardet import __version__from chardet.universaldetector import UniversalDetectordef description_of(lines, name='stdin'):"""Return a string describing the probable encoding of a file orlist of strings.:param lines: The lines to get the encoding of.:type lines: Iterable of bytes:param name: Name of file or collection of lines:type name: str"""u = UniversalDetector()for line in lines:u.feed(line)u.close()result = u.resultif result['encoding']:return '{0}: {1} with confidence {2}'.format(name, result['encoding'],result['confidence'])else:return '{0}: no result'.format(name)def main(argv=None):'''Handles command line arguments and gets things started.:param argv: List of arguments, as if specified on the command-line.If None, ``sys.argv[1:]`` is used instead.:type argv: list of str'''# Get command line argumentsparser = argparse.ArgumentParser(description="Takes one or more file paths and reports their detected \encodings",formatter_class=argparse.ArgumentDefaultsHelpFormatter,conflict_handler='resolve')parser.add_argument('input',help='File whose encoding we would like to determine.',type=argparse.FileType('rb'), nargs='*',default=[sys.stdin])parser.add_argument('--version', action='version',version='%(prog)s {0}'.format(__version__))args = parser.parse_args(argv)for f in args.input:if f.isatty():print("You are running chardetect interactively. Press " +"CTRL-D twice at the start of a blank line to signal the " +"end of your input. If you want help, run chardetect " +"--help\n", file=sys.stderr)print(description_of(f, f.name))if __name__ == '__main__':main()
######################## BEGIN LICENSE BLOCK ######################### The Original Code is Mozilla Communicator client code.## The Initial Developer of the Original Code is# Netscape Communications Corporation.# Portions created by the Initial Developer are Copyright (C) 1998# the Initial Developer. All Rights Reserved.## Contributor(s):# Mark Pilgrim - port to Python## This library is free software; you can redistribute it and/or# modify it under the terms of the GNU Lesser General Public# License as published by the Free Software Foundation; either# version 2.1 of the License, or (at your option) any later version.## This library is distributed in the hope that it will be useful,# but WITHOUT ANY WARRANTY; without even the implied warranty of# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU# Lesser General Public License for more details.## You should have received a copy of the GNU Lesser General Public# License along with this library; if not, write to the Free Software# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA# 02110-1301 USA######################### END LICENSE BLOCK #########################from .mbcharsetprober import MultiByteCharSetProberfrom .codingstatemachine import CodingStateMachinefrom .chardistribution import Big5DistributionAnalysisfrom .mbcssm import Big5SMModelclass Big5Prober(MultiByteCharSetProber):def __init__(self):MultiByteCharSetProber.__init__(self)self._mCodingSM = CodingStateMachine(Big5SMModel)self._mDistributionAnalyzer = Big5DistributionAnalysis()self.reset()def get_charset_name(self):return "Big5"
######################## BEGIN LICENSE BLOCK ######################### The Original Code is Mozilla Communicator client code.## The Initial Developer of the Original Code is# Netscape Communications Corporation.# Portions created by the Initial Developer are Copyright (C) 1998# the Initial Developer. All Rights Reserved.## Contributor(s):# Mark Pilgrim - port to Python## This library is free software; you can redistribute it and/or# modify it under the terms of the GNU Lesser General Public# License as published by the Free Software Foundation; either# version 2.1 of the License, or (at your option) any later version.## This library is distributed in the hope that it will be useful,# but WITHOUT ANY WARRANTY; without even the implied warranty of# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU# Lesser General Public License for more details.## You should have received a copy of the GNU Lesser General Public# License along with this library; if not, write to the Free Software# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA# 02110-1301 USA######################### END LICENSE BLOCK ########################## Big5 frequency table# by Taiwan's Mandarin Promotion Council# <http://www.edu.tw:81/mandr/>## 128 --> 0.42261# 256 --> 0.57851# 512 --> 0.74851# 1024 --> 0.89384# 2048 --> 0.97583## Ideal Distribution Ratio = 0.74851/(1-0.74851) =2.98# Random Distribution Ration = 512/(5401-512)=0.105## Typical Distribution Ratio about 25% of Ideal one, still much higher than RDRBIG5_TYPICAL_DISTRIBUTION_RATIO = 0.75#Char to FreqOrder tableBIG5_TABLE_SIZE = 5376Big5CharToFreqOrder = (1,1801,1506, 255,1431, 198, 9, 82, 6,5008, 177, 202,3681,1256,2821, 110, # 163814, 33,3274, 261, 76, 44,2114, 16,2946,2187,1176, 659,3971, 26,3451,2653, # 321198,3972,3350,4202, 410,2215, 302, 590, 361,1964, 8, 204, 58,4510,5009,1932, # 4863,5010,5011, 317,1614, 75, 222, 159,4203,2417,1480,5012,3555,3091, 224,2822, # 643682, 3, 10,3973,1471, 29,2787,1135,2866,1940, 873, 
130,3275,1123, 312,5013, # 804511,2052, 507, 252, 682,5014, 142,1915, 124, 206,2947, 34,3556,3204, 64, 604, # 965015,2501,1977,1978, 155,1991, 645, 641,1606,5016,3452, 337, 72, 406,5017, 80, # 112630, 238,3205,1509, 263, 939,1092,2654, 756,1440,1094,3453, 449, 69,2987, 591, # 128179,2096, 471, 115,2035,1844, 60, 50,2988, 134, 806,1869, 734,2036,3454, 180, # 144995,1607, 156, 537,2907, 688,5018, 319,1305, 779,2145, 514,2379, 298,4512, 359, # 1602502, 90,2716,1338, 663, 11, 906,1099,2553, 20,2441, 182, 532,1716,5019, 732, # 1761376,4204,1311,1420,3206, 25,2317,1056, 113, 399, 382,1950, 242,3455,2474, 529, # 1923276, 475,1447,3683,5020, 117, 21, 656, 810,1297,2300,2334,3557,5021, 126,4205, # 208706, 456, 150, 613,4513, 71,1118,2037,4206, 145,3092, 85, 835, 486,2115,1246, # 2241426, 428, 727,1285,1015, 800, 106, 623, 303,1281,5022,2128,2359, 347,3815, 221, # 2403558,3135,5023,1956,1153,4207, 83, 296,1199,3093, 192, 624, 93,5024, 822,1898, # 2562823,3136, 795,2065, 991,1554,1542,1592, 27, 43,2867, 859, 139,1456, 860,4514, # 272437, 712,3974, 164,2397,3137, 695, 211,3037,2097, 195,3975,1608,3559,3560,3684, # 2883976, 234, 811,2989,2098,3977,2233,1441,3561,1615,2380, 668,2077,1638, 305, 228, # 3041664,4515, 467, 415,5025, 262,2099,1593, 239, 108, 300, 200,1033, 512,1247,2078, # 3205026,5027,2176,3207,3685,2682, 593, 845,1062,3277, 88,1723,2038,3978,1951, 212, # 336266, 152, 149, 468,1899,4208,4516, 77, 187,5028,3038, 37, 5,2990,5029,3979, # 3525030,5031, 39,2524,4517,2908,3208,2079, 55, 148, 74,4518, 545, 483,1474,1029, # 3681665, 217,1870,1531,3138,1104,2655,4209, 24, 172,3562, 900,3980,3563,3564,4519, # 38432,1408,2824,1312, 329, 487,2360,2251,2717, 784,2683, 4,3039,3351,1427,1789, # 400188, 109, 499,5032,3686,1717,1790, 888,1217,3040,4520,5033,3565,5034,3352,1520, # 4163687,3981, 196,1034, 775,5035,5036, 929,1816, 249, 439, 38,5037,1063,5038, 794, # 4323982,1435,2301, 46, 178,3278,2066,5039,2381,5040, 214,1709,4521, 804, 35, 707, # 448324,3688,1601,2554, 140, 
459,4210,5041,5042,1365, 839, 272, 978,2262,2580,3456, # 4642129,1363,3689,1423, 697, 100,3094, 48, 70,1231, 495,3139,2196,5043,1294,5044, # 4802080, 462, 586,1042,3279, 853, 256, 988, 185,2382,3457,1698, 434,1084,5045,3458, # 496314,2625,2788,4522,2335,2336, 569,2285, 637,1817,2525, 757,1162,1879,1616,3459, # 512287,1577,2116, 768,4523,1671,2868,3566,2526,1321,3816, 909,2418,5046,4211, 933, # 5283817,4212,2053,2361,1222,4524, 765,2419,1322, 786,4525,5047,1920,1462,1677,2909, # 5441699,5048,4526,1424,2442,3140,3690,2600,3353,1775,1941,3460,3983,4213, 309,1369, # 5601130,2825, 364,2234,1653,1299,3984,3567,3985,3986,2656, 525,1085,3041, 902,2001, # 5761475, 964,4527, 421,1845,1415,1057,2286, 940,1364,3141, 376,4528,4529,1381, 7, # 5922527, 983,2383, 336,1710,2684,1846, 321,3461, 559,1131,3042,2752,1809,1132,1313, # 608265,1481,1858,5049, 352,1203,2826,3280, 167,1089, 420,2827, 776, 792,1724,3568, # 6244214,2443,3281,5050,4215,5051, 446, 229, 333,2753, 901,3818,1200,1557,4530,2657, # 6401921, 395,2754,2685,3819,4216,1836, 125, 916,3209,2626,4531,5052,5053,3820,5054, # 6565055,5056,4532,3142,3691,1133,2555,1757,3462,1510,2318,1409,3569,5057,2146, 438, # 6722601,2910,2384,3354,1068, 958,3043, 461, 311,2869,2686,4217,1916,3210,4218,1979, # 688383, 750,2755,2627,4219, 274, 539, 385,1278,1442,5058,1154,1965, 384, 561, 210, # 70498,1295,2556,3570,5059,1711,2420,1482,3463,3987,2911,1257, 129,5060,3821, 642, # 720523,2789,2790,2658,5061, 141,2235,1333, 68, 176, 441, 876, 907,4220, 603,2602, # 736710, 171,3464, 404, 549, 18,3143,2398,1410,3692,1666,5062,3571,4533,2912,4534, # 7525063,2991, 368,5064, 146, 366, 99, 871,3693,1543, 748, 807,1586,1185, 22,2263, # 768379,3822,3211,5065,3212, 505,1942,2628,1992,1382,2319,5066, 380,2362, 218, 702, # 7841818,1248,3465,3044,3572,3355,3282,5067,2992,3694, 930,3283,3823,5068, 59,5069, # 800585, 601,4221, 497,3466,1112,1314,4535,1802,5070,1223,1472,2177,5071, 749,1837, # 816690,1900,3824,1773,3988,1476, 429,1043,1791,2236,2117, 917,4222, 
447,1086,1629, # 8325072, 556,5073,5074,2021,1654, 844,1090, 105, 550, 966,1758,2828,1008,1783, 686, # 8481095,5075,2287, 793,1602,5076,3573,2603,4536,4223,2948,2302,4537,3825, 980,2503, # 864544, 353, 527,4538, 908,2687,2913,5077, 381,2629,1943,1348,5078,1341,1252, 560, # 8803095,5079,3467,2870,5080,2054, 973, 886,2081, 143,4539,5081,5082, 157,3989, 496, # 8964224, 57, 840, 540,2039,4540,4541,3468,2118,1445, 970,2264,1748,1966,2082,4225, # 9123144,1234,1776,3284,2829,3695, 773,1206,2130,1066,2040,1326,3990,1738,1725,4226, # 928279,3145, 51,1544,2604, 423,1578,2131,2067, 173,4542,1880,5083,5084,1583, 264, # 944610,3696,4543,2444, 280, 154,5085,5086,5087,1739, 338,1282,3096, 693,2871,1411, # 9601074,3826,2445,5088,4544,5089,5090,1240, 952,2399,5091,2914,1538,2688, 685,1483, # 9764227,2475,1436, 953,4228,2055,4545, 671,2400, 79,4229,2446,3285, 608, 567,2689, # 9923469,4230,4231,1691, 393,1261,1792,2401,5092,4546,5093,5094,5095,5096,1383,1672, # 10083827,3213,1464, 522,1119, 661,1150, 216, 675,4547,3991,1432,3574, 609,4548,2690, # 10242402,5097,5098,5099,4232,3045, 0,5100,2476, 315, 231,2447, 301,3356,4549,2385, # 10405101, 233,4233,3697,1819,4550,4551,5102, 96,1777,1315,2083,5103, 257,5104,1810, # 10563698,2718,1139,1820,4234,2022,1124,2164,2791,1778,2659,5105,3097, 363,1655,3214, # 10725106,2993,5107,5108,5109,3992,1567,3993, 718, 103,3215, 849,1443, 341,3357,2949, # 10881484,5110,1712, 127, 67, 339,4235,2403, 679,1412, 821,5111,5112, 834, 738, 351, # 11042994,2147, 846, 235,1497,1881, 418,1993,3828,2719, 186,1100,2148,2756,3575,1545, # 11201355,2950,2872,1377, 583,3994,4236,2581,2995,5113,1298,3699,1078,2557,3700,2363, # 113678,3829,3830, 267,1289,2100,2002,1594,4237, 348, 369,1274,2197,2178,1838,4552, # 11521821,2830,3701,2757,2288,2003,4553,2951,2758, 144,3358, 882,4554,3995,2759,3470, # 11684555,2915,5114,4238,1726, 320,5115,3996,3046, 788,2996,5116,2831,1774,1327,2873, # 11843997,2832,5117,1306,4556,2004,1700,3831,3576,2364,2660, 787,2023, 506, 824,3702, # 
1200534, 323,4557,1044,3359,2024,1901, 946,3471,5118,1779,1500,1678,5119,1882,4558, # 1216165, 243,4559,3703,2528, 123, 683,4239, 764,4560, 36,3998,1793, 589,2916, 816, # 1232626,1667,3047,2237,1639,1555,1622,3832,3999,5120,4000,2874,1370,1228,1933, 891, # 12482084,2917, 304,4240,5121, 292,2997,2720,3577, 691,2101,4241,1115,4561, 118, 662, # 12645122, 611,1156, 854,2386,1316,2875, 2, 386, 515,2918,5123,5124,3286, 868,2238, # 12801486, 855,2661, 785,2216,3048,5125,1040,3216,3578,5126,3146, 448,5127,1525,5128, # 12962165,4562,5129,3833,5130,4242,2833,3579,3147, 503, 818,4001,3148,1568, 814, 676, # 13121444, 306,1749,5131,3834,1416,1030, 197,1428, 805,2834,1501,4563,5132,5133,5134, # 13281994,5135,4564,5136,5137,2198, 13,2792,3704,2998,3149,1229,1917,5138,3835,2132, # 13445139,4243,4565,2404,3580,5140,2217,1511,1727,1120,5141,5142, 646,3836,2448, 307, # 13605143,5144,1595,3217,5145,5146,5147,3705,1113,1356,4002,1465,2529,2530,5148, 519, # 13765149, 128,2133, 92,2289,1980,5150,4003,1512, 342,3150,2199,5151,2793,2218,1981, # 13923360,4244, 290,1656,1317, 789, 827,2365,5152,3837,4566, 562, 581,4004,5153, 401, # 14084567,2252, 94,4568,5154,1399,2794,5155,1463,2025,4569,3218,1944,5156, 828,1105, # 14244245,1262,1394,5157,4246, 605,4570,5158,1784,2876,5159,2835, 819,2102, 578,2200, # 14402952,5160,1502, 436,3287,4247,3288,2836,4005,2919,3472,3473,5161,2721,2320,5162, # 14565163,2337,2068, 23,4571, 193, 826,3838,2103, 699,1630,4248,3098, 390,1794,1064, # 14723581,5164,1579,3099,3100,1400,5165,4249,1839,1640,2877,5166,4572,4573, 137,4250, # 1488598,3101,1967, 780, 104, 974,2953,5167, 278, 899, 253, 402, 572, 504, 493,1339, # 15045168,4006,1275,4574,2582,2558,5169,3706,3049,3102,2253, 565,1334,2722, 863, 41, # 15205170,5171,4575,5172,1657,2338, 19, 463,2760,4251, 606,5173,2999,3289,1087,2085, # 15361323,2662,3000,5174,1631,1623,1750,4252,2691,5175,2878, 791,2723,2663,2339, 232, # 15522421,5176,3001,1498,5177,2664,2630, 755,1366,3707,3290,3151,2026,1609, 119,1918, # 15683474, 
862,1026,4253,5178,4007,3839,4576,4008,4577,2265,1952,2477,5179,1125, 817, # 15844254,4255,4009,1513,1766,2041,1487,4256,3050,3291,2837,3840,3152,5180,5181,1507, # 16005182,2692, 733, 40,1632,1106,2879, 345,4257, 841,2531, 230,4578,3002,1847,3292, # 16163475,5183,1263, 986,3476,5184, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562, # 16324010,4011,2954, 967,2761,2665,1349, 592,2134,1692,3361,3003,1995,4258,1679,4012, # 16481902,2188,5185, 739,3708,2724,1296,1290,5186,4259,2201,2202,1922,1563,2605,2559, # 16641871,2762,3004,5187, 435,5188, 343,1108, 596, 17,1751,4579,2239,3477,3709,5189, # 16804580, 294,3582,2955,1693, 477, 979, 281,2042,3583, 643,2043,3710,2631,2795,2266, # 16961031,2340,2135,2303,3584,4581, 367,1249,2560,5190,3585,5191,4582,1283,3362,2005, # 1712240,1762,3363,4583,4584, 836,1069,3153, 474,5192,2149,2532, 268,3586,5193,3219, # 17281521,1284,5194,1658,1546,4260,5195,3587,3588,5196,4261,3364,2693,1685,4262, 961, # 17441673,2632, 190,2006,2203,3841,4585,4586,5197, 570,2504,3711,1490,5198,4587,2633, # 17603293,1957,4588, 584,1514, 396,1045,1945,5199,4589,1968,2449,5200,5201,4590,4013, # 1776619,5202,3154,3294, 215,2007,2796,2561,3220,4591,3221,4592, 763,4263,3842,4593, # 17925203,5204,1958,1767,2956,3365,3712,1174, 452,1477,4594,3366,3155,5205,2838,1253, # 18082387,2189,1091,2290,4264, 492,5206, 638,1169,1825,2136,1752,4014, 648, 926,1021, # 18241324,4595, 520,4596, 997, 847,1007, 892,4597,3843,2267,1872,3713,2405,1785,4598, # 18401953,2957,3103,3222,1728,4265,2044,3714,4599,2008,1701,3156,1551, 30,2268,4266, # 18565207,2027,4600,3589,5208, 501,5209,4267, 594,3478,2166,1822,3590,3479,3591,3223, # 1872829,2839,4268,5210,1680,3157,1225,4269,5211,3295,4601,4270,3158,2341,5212,4602, # 18884271,5213,4015,4016,5214,1848,2388,2606,3367,5215,4603, 374,4017, 652,4272,4273, # 1904375,1140, 798,5216,5217,5218,2366,4604,2269, 546,1659, 138,3051,2450,4605,5219, # 19202254, 612,1849, 910, 796,3844,1740,1371, 825,3845,3846,5220,2920,2562,5221, 692, # 
1936444,3052,2634, 801,4606,4274,5222,1491, 244,1053,3053,4275,4276, 340,5223,4018, # 19521041,3005, 293,1168, 87,1357,5224,1539, 959,5225,2240, 721, 694,4277,3847, 219, # 19681478, 644,1417,3368,2666,1413,1401,1335,1389,4019,5226,5227,3006,2367,3159,1826, # 1984730,1515, 184,2840, 66,4607,5228,1660,2958, 246,3369, 378,1457, 226,3480, 975, # 20004020,2959,1264,3592, 674, 696,5229, 163,5230,1141,2422,2167, 713,3593,3370,4608, # 20164021,5231,5232,1186, 15,5233,1079,1070,5234,1522,3224,3594, 276,1050,2725, 758, # 20321126, 653,2960,3296,5235,2342, 889,3595,4022,3104,3007, 903,1250,4609,4023,3481, # 20483596,1342,1681,1718, 766,3297, 286, 89,2961,3715,5236,1713,5237,2607,3371,3008, # 20645238,2962,2219,3225,2880,5239,4610,2505,2533, 181, 387,1075,4024, 731,2190,3372, # 20805240,3298, 310, 313,3482,2304, 770,4278, 54,3054, 189,4611,3105,3848,4025,5241, # 20961230,1617,1850, 355,3597,4279,4612,3373, 111,4280,3716,1350,3160,3483,3055,4281, # 21122150,3299,3598,5242,2797,4026,4027,3009, 722,2009,5243,1071, 247,1207,2343,2478, # 21281378,4613,2010, 864,1437,1214,4614, 373,3849,1142,2220, 667,4615, 442,2763,2563, # 21443850,4028,1969,4282,3300,1840, 837, 170,1107, 934,1336,1883,5244,5245,2119,4283, # 21602841, 743,1569,5246,4616,4284, 582,2389,1418,3484,5247,1803,5248, 357,1395,1729, # 21763717,3301,2423,1564,2241,5249,3106,3851,1633,4617,1114,2086,4285,1532,5250, 482, # 21922451,4618,5251,5252,1492, 833,1466,5253,2726,3599,1641,2842,5254,1526,1272,3718, # 22084286,1686,1795, 416,2564,1903,1954,1804,5255,3852,2798,3853,1159,2321,5256,2881, # 22244619,1610,1584,3056,2424,2764, 443,3302,1163,3161,5257,5258,4029,5259,4287,2506, # 22403057,4620,4030,3162,2104,1647,3600,2011,1873,4288,5260,4289, 431,3485,5261, 250, # 225697, 81,4290,5262,1648,1851,1558, 160, 848,5263, 866, 740,1694,5264,2204,2843, # 22723226,4291,4621,3719,1687, 950,2479, 426, 469,3227,3720,3721,4031,5265,5266,1188, # 2288424,1996, 861,3601,4292,3854,2205,2694, 168,1235,3602,4293,5267,2087,1674,4622, # 
23043374,3303, 220,2565,1009,5268,3855, 670,3010, 332,1208, 717,5269,5270,3603,2452, # 23204032,3375,5271, 513,5272,1209,2882,3376,3163,4623,1080,5273,5274,5275,5276,2534, # 23363722,3604, 815,1587,4033,4034,5277,3605,3486,3856,1254,4624,1328,3058,1390,4035, # 23521741,4036,3857,4037,5278, 236,3858,2453,3304,5279,5280,3723,3859,1273,3860,4625, # 23685281, 308,5282,4626, 245,4627,1852,2480,1307,2583, 430, 715,2137,2454,5283, 270, # 2384199,2883,4038,5284,3606,2727,1753, 761,1754, 725,1661,1841,4628,3487,3724,5285, # 24005286, 587, 14,3305, 227,2608, 326, 480,2270, 943,2765,3607, 291, 650,1884,5287, # 24161702,1226, 102,1547, 62,3488, 904,4629,3489,1164,4294,5288,5289,1224,1548,2766, # 2432391, 498,1493,5290,1386,1419,5291,2056,1177,4630, 813, 880,1081,2368, 566,1145, # 24484631,2291,1001,1035,2566,2609,2242, 394,1286,5292,5293,2069,5294, 86,1494,1730, # 24644039, 491,1588, 745, 897,2963, 843,3377,4040,2767,2884,3306,1768, 998,2221,2070, # 2480397,1827,1195,1970,3725,3011,3378, 284,5295,3861,2507,2138,2120,1904,5296,4041, # 24962151,4042,4295,1036,3490,1905, 114,2567,4296, 209,1527,5297,5298,2964,2844,2635, # 25122390,2728,3164, 812,2568,5299,3307,5300,1559, 737,1885,3726,1210, 885, 28,2695, # 25283608,3862,5301,4297,1004,1780,4632,5302, 346,1982,2222,2696,4633,3863,1742, 797, # 25441642,4043,1934,1072,1384,2152, 896,4044,3308,3727,3228,2885,3609,5303,2569,1959, # 25604634,2455,1786,5304,5305,5306,4045,4298,1005,1308,3728,4299,2729,4635,4636,1528, # 25762610, 161,1178,4300,1983, 987,4637,1101,4301, 631,4046,1157,3229,2425,1343,1241, # 25921016,2243,2570, 372, 877,2344,2508,1160, 555,1935, 911,4047,5307, 466,1170, 169, # 26081051,2921,2697,3729,2481,3012,1182,2012,2571,1251,2636,5308, 992,2345,3491,1540, # 26242730,1201,2071,2406,1997,2482,5309,4638, 528,1923,2191,1503,1874,1570,2369,3379, # 26403309,5310, 557,1073,5311,1828,3492,2088,2271,3165,3059,3107, 767,3108,2799,4639, # 26561006,4302,4640,2346,1267,2179,3730,3230, 778,4048,3231,2731,1597,2667,5312,4641, # 
26725313,3493,5314,5315,5316,3310,2698,1433,3311, 131, 95,1504,4049, 723,4303,3166, # 26881842,3610,2768,2192,4050,2028,2105,3731,5317,3013,4051,1218,5318,3380,3232,4052, # 27044304,2584, 248,1634,3864, 912,5319,2845,3732,3060,3865, 654, 53,5320,3014,5321, # 27201688,4642, 777,3494,1032,4053,1425,5322, 191, 820,2121,2846, 971,4643, 931,3233, # 2736135, 664, 783,3866,1998, 772,2922,1936,4054,3867,4644,2923,3234, 282,2732, 640, # 27521372,3495,1127, 922, 325,3381,5323,5324, 711,2045,5325,5326,4055,2223,2800,1937, # 27684056,3382,2224,2255,3868,2305,5327,4645,3869,1258,3312,4057,3235,2139,2965,4058, # 27844059,5328,2225, 258,3236,4646, 101,1227,5329,3313,1755,5330,1391,3314,5331,2924, # 28002057, 893,5332,5333,5334,1402,4305,2347,5335,5336,3237,3611,5337,5338, 878,1325, # 28161781,2801,4647, 259,1385,2585, 744,1183,2272,4648,5339,4060,2509,5340, 684,1024, # 28324306,5341, 472,3612,3496,1165,3315,4061,4062, 322,2153, 881, 455,1695,1152,1340, # 2848660, 554,2154,4649,1058,4650,4307, 830,1065,3383,4063,4651,1924,5342,1703,1919, # 28645343, 932,2273, 122,5344,4652, 947, 677,5345,3870,2637, 297,1906,1925,2274,4653, # 28802322,3316,5346,5347,4308,5348,4309, 84,4310, 112, 989,5349, 547,1059,4064, 701, # 28963613,1019,5350,4311,5351,3497, 942, 639, 457,2306,2456, 993,2966, 407, 851, 494, # 29124654,3384, 927,5352,1237,5353,2426,3385, 573,4312, 680, 921,2925,1279,1875, 285, # 2928790,1448,1984, 719,2168,5354,5355,4655,4065,4066,1649,5356,1541, 563,5357,1077, # 29445358,3386,3061,3498, 511,3015,4067,4068,3733,4069,1268,2572,3387,3238,4656,4657, # 29605359, 535,1048,1276,1189,2926,2029,3167,1438,1373,2847,2967,1134,2013,5360,4313, # 29761238,2586,3109,1259,5361, 700,5362,2968,3168,3734,4314,5363,4315,1146,1876,1907, # 29924658,2611,4070, 781,2427, 132,1589, 203, 147, 273,2802,2407, 898,1787,2155,4071, # 30084072,5364,3871,2803,5365,5366,4659,4660,5367,3239,5368,1635,3872, 965,5369,1805, # 30242699,1516,3614,1121,1082,1329,3317,4073,1449,3873, 65,1128,2848,2927,2769,1590, # 
30403874,5370,5371, 12,2668, 45, 976,2587,3169,4661, 517,2535,1013,1037,3240,5372, # 30563875,2849,5373,3876,5374,3499,5375,2612, 614,1999,2323,3877,3110,2733,2638,5376, # 30722588,4316, 599,1269,5377,1811,3735,5378,2700,3111, 759,1060, 489,1806,3388,3318, # 30881358,5379,5380,2391,1387,1215,2639,2256, 490,5381,5382,4317,1759,2392,2348,5383, # 31044662,3878,1908,4074,2640,1807,3241,4663,3500,3319,2770,2349, 874,5384,5385,3501, # 31203736,1859, 91,2928,3737,3062,3879,4664,5386,3170,4075,2669,5387,3502,1202,1403, # 31363880,2969,2536,1517,2510,4665,3503,2511,5388,4666,5389,2701,1886,1495,1731,4076, # 31522370,4667,5390,2030,5391,5392,4077,2702,1216, 237,2589,4318,2324,4078,3881,4668, # 31684669,2703,3615,3504, 445,4670,5393,5394,5395,5396,2771, 61,4079,3738,1823,4080, # 31845397, 687,2046, 935, 925, 405,2670, 703,1096,1860,2734,4671,4081,1877,1367,2704, # 32003389, 918,2106,1782,2483, 334,3320,1611,1093,4672, 564,3171,3505,3739,3390, 945, # 32162641,2058,4673,5398,1926, 872,4319,5399,3506,2705,3112, 349,4320,3740,4082,4674, # 32323882,4321,3741,2156,4083,4675,4676,4322,4677,2408,2047, 782,4084, 400, 251,4323, # 32481624,5400,5401, 277,3742, 299,1265, 476,1191,3883,2122,4324,4325,1109, 205,5402, # 32642590,1000,2157,3616,1861,5403,5404,5405,4678,5406,4679,2573, 107,2484,2158,4085, # 32803507,3172,5407,1533, 541,1301, 158, 753,4326,2886,3617,5408,1696, 370,1088,4327, # 32964680,3618, 579, 327, 440, 162,2244, 269,1938,1374,3508, 968,3063, 56,1396,3113, # 33122107,3321,3391,5409,1927,2159,4681,3016,5410,3619,5411,5412,3743,4682,2485,5413, # 33282804,5414,1650,4683,5415,2613,5416,5417,4086,2671,3392,1149,3393,4087,3884,4088, # 33445418,1076, 49,5419, 951,3242,3322,3323, 450,2850, 920,5420,1812,2805,2371,4328, # 33601909,1138,2372,3885,3509,5421,3243,4684,1910,1147,1518,2428,4685,3886,5422,4686, # 33762393,2614, 260,1796,3244,5423,5424,3887,3324, 708,5425,3620,1704,5426,3621,1351, # 33921618,3394,3017,1887, 944,4329,3395,4330,3064,3396,4331,5427,3744, 422, 413,1714, # 
34083325, 500,2059,2350,4332,2486,5428,1344,1911, 954,5429,1668,5430,5431,4089,2409, # 34244333,3622,3888,4334,5432,2307,1318,2512,3114, 133,3115,2887,4687, 629, 31,2851, # 34402706,3889,4688, 850, 949,4689,4090,2970,1732,2089,4335,1496,1853,5433,4091, 620, # 34563245, 981,1242,3745,3397,1619,3746,1643,3326,2140,2457,1971,1719,3510,2169,5434, # 34723246,5435,5436,3398,1829,5437,1277,4690,1565,2048,5438,1636,3623,3116,5439, 869, # 34882852, 655,3890,3891,3117,4092,3018,3892,1310,3624,4691,5440,5441,5442,1733, 558, # 35044692,3747, 335,1549,3065,1756,4336,3748,1946,3511,1830,1291,1192, 470,2735,2108, # 35202806, 913,1054,4093,5443,1027,5444,3066,4094,4693, 982,2672,3399,3173,3512,3247, # 35363248,1947,2807,5445, 571,4694,5446,1831,5447,3625,2591,1523,2429,5448,2090, 984, # 35524695,3749,1960,5449,3750, 852, 923,2808,3513,3751, 969,1519, 999,2049,2325,1705, # 35685450,3118, 615,1662, 151, 597,4095,2410,2326,1049, 275,4696,3752,4337, 568,3753, # 35843626,2487,4338,3754,5451,2430,2275, 409,3249,5452,1566,2888,3514,1002, 769,2853, # 3600194,2091,3174,3755,2226,3327,4339, 628,1505,5453,5454,1763,2180,3019,4096, 521, # 36161161,2592,1788,2206,2411,4697,4097,1625,4340,4341, 412, 42,3119, 464,5455,2642, # 36324698,3400,1760,1571,2889,3515,2537,1219,2207,3893,2643,2141,2373,4699,4700,3328, # 36481651,3401,3627,5456,5457,3628,2488,3516,5458,3756,5459,5460,2276,2092, 460,5461, # 36644701,5462,3020, 962, 588,3629, 289,3250,2644,1116, 52,5463,3067,1797,5464,5465, # 36805466,1467,5467,1598,1143,3757,4342,1985,1734,1067,4702,1280,3402, 465,4703,1572, # 3696510,5468,1928,2245,1813,1644,3630,5469,4704,3758,5470,5471,2673,1573,1534,5472, # 37125473, 536,1808,1761,3517,3894,3175,2645,5474,5475,5476,4705,3518,2929,1912,2809, # 37285477,3329,1122, 377,3251,5478, 360,5479,5480,4343,1529, 551,5481,2060,3759,1769, # 37442431,5482,2930,4344,3330,3120,2327,2109,2031,4706,1404, 136,1468,1479, 672,1171, # 37603252,2308, 271,3176,5483,2772,5484,2050, 678,2736, 865,1948,4707,5485,2014,4098, # 
37762971,5486,2737,2227,1397,3068,3760,4708,4709,1735,2931,3403,3631,5487,3895, 509, # 37922854,2458,2890,3896,5488,5489,3177,3178,4710,4345,2538,4711,2309,1166,1010, 552, # 3808681,1888,5490,5491,2972,2973,4099,1287,1596,1862,3179, 358, 453, 736, 175, 478, # 38241117, 905,1167,1097,5492,1854,1530,5493,1706,5494,2181,3519,2292,3761,3520,3632, # 38404346,2093,4347,5495,3404,1193,2489,4348,1458,2193,2208,1863,1889,1421,3331,2932, # 38563069,2182,3521, 595,2123,5496,4100,5497,5498,4349,1707,2646, 223,3762,1359, 751, # 38723121, 183,3522,5499,2810,3021, 419,2374, 633, 704,3897,2394, 241,5500,5501,5502, # 3888838,3022,3763,2277,2773,2459,3898,1939,2051,4101,1309,3122,2246,1181,5503,1136, # 39042209,3899,2375,1446,4350,2310,4712,5504,5505,4351,1055,2615, 484,3764,5506,4102, # 3920625,4352,2278,3405,1499,4353,4103,5507,4104,4354,3253,2279,2280,3523,5508,5509, # 39362774, 808,2616,3765,3406,4105,4355,3123,2539, 526,3407,3900,4356, 955,5510,1620, # 39524357,2647,2432,5511,1429,3766,1669,1832, 994, 928,5512,3633,1260,5513,5514,5515, # 39681949,2293, 741,2933,1626,4358,2738,2460, 867,1184, 362,3408,1392,5516,5517,4106, # 39844359,1770,1736,3254,2934,4713,4714,1929,2707,1459,1158,5518,3070,3409,2891,1292, # 40001930,2513,2855,3767,1986,1187,2072,2015,2617,4360,5519,2574,2514,2170,3768,2490, # 40163332,5520,3769,4715,5521,5522, 666,1003,3023,1022,3634,4361,5523,4716,1814,2257, # 4032574,3901,1603, 295,1535, 705,3902,4362, 283, 858, 417,5524,5525,3255,4717,4718, # 40483071,1220,1890,1046,2281,2461,4107,1393,1599, 689,2575, 388,4363,5526,2491, 802, # 40645527,2811,3903,2061,1405,2258,5528,4719,3904,2110,1052,1345,3256,1585,5529, 809, # 40805530,5531,5532, 575,2739,3524, 956,1552,1469,1144,2328,5533,2329,1560,2462,3635, # 40963257,4108, 616,2210,4364,3180,2183,2294,5534,1833,5535,3525,4720,5536,1319,3770, # 41123771,1211,3636,1023,3258,1293,2812,5537,5538,5539,3905, 607,2311,3906, 762,2892, # 41281439,4365,1360,4721,1485,3072,5540,4722,1038,4366,1450,2062,2648,4367,1379,4723, # 
41442593,5541,5542,4368,1352,1414,2330,2935,1172,5543,5544,3907,3908,4724,1798,1451, # 41605545,5546,5547,5548,2936,4109,4110,2492,2351, 411,4111,4112,3637,3333,3124,4725, # 41761561,2674,1452,4113,1375,5549,5550, 47,2974, 316,5551,1406,1591,2937,3181,5552, # 41921025,2142,3125,3182, 354,2740, 884,2228,4369,2412, 508,3772, 726,3638, 996,2433, # 42083639, 729,5553, 392,2194,1453,4114,4726,3773,5554,5555,2463,3640,2618,1675,2813, # 4224919,2352,2975,2353,1270,4727,4115, 73,5556,5557, 647,5558,3259,2856,2259,1550, # 42401346,3024,5559,1332, 883,3526,5560,5561,5562,5563,3334,2775,5564,1212, 831,1347, # 42564370,4728,2331,3909,1864,3073, 720,3910,4729,4730,3911,5565,4371,5566,5567,4731, # 42725568,5569,1799,4732,3774,2619,4733,3641,1645,2376,4734,5570,2938, 669,2211,2675, # 42882434,5571,2893,5572,5573,1028,3260,5574,4372,2413,5575,2260,1353,5576,5577,4735, # 43043183, 518,5578,4116,5579,4373,1961,5580,2143,4374,5581,5582,3025,2354,2355,3912, # 4320516,1834,1454,4117,2708,4375,4736,2229,2620,1972,1129,3642,5583,2776,5584,2976, # 43361422, 577,1470,3026,1524,3410,5585,5586, 432,4376,3074,3527,5587,2594,1455,2515, # 43522230,1973,1175,5588,1020,2741,4118,3528,4737,5589,2742,5590,1743,1361,3075,3529, # 43682649,4119,4377,4738,2295, 895, 924,4378,2171, 331,2247,3076, 166,1627,3077,1098, # 43845591,1232,2894,2231,3411,4739, 657, 403,1196,2377, 542,3775,3412,1600,4379,3530, # 44005592,4740,2777,3261, 576, 530,1362,4741,4742,2540,2676,3776,4120,5593, 842,3913, # 44165594,2814,2032,1014,4121, 213,2709,3413, 665, 621,4380,5595,3777,2939,2435,5596, # 44322436,3335,3643,3414,4743,4381,2541,4382,4744,3644,1682,4383,3531,1380,5597, 724, # 44482282, 600,1670,5598,1337,1233,4745,3126,2248,5599,1621,4746,5600, 651,4384,5601, # 44641612,4385,2621,5602,2857,5603,2743,2312,3078,5604, 716,2464,3079, 174,1255,2710, # 44804122,3645, 548,1320,1398, 728,4123,1574,5605,1891,1197,3080,4124,5606,3081,3082, # 44963778,3646,3779, 747,5607, 635,4386,4747,5608,5609,5610,4387,5611,5612,4748,5613, # 
45123415,4749,2437, 451,5614,3780,2542,2073,4388,2744,4389,4125,5615,1764,4750,5616, # 45284390, 350,4751,2283,2395,2493,5617,4391,4126,2249,1434,4127, 488,4752, 458,4392, # 45444128,3781, 771,1330,2396,3914,2576,3184,2160,2414,1553,2677,3185,4393,5618,2494, # 45602895,2622,1720,2711,4394,3416,4753,5619,2543,4395,5620,3262,4396,2778,5621,2016, # 45762745,5622,1155,1017,3782,3915,5623,3336,2313, 201,1865,4397,1430,5624,4129,5625, # 45925626,5627,5628,5629,4398,1604,5630, 414,1866, 371,2595,4754,4755,3532,2017,3127, # 46084756,1708, 960,4399, 887, 389,2172,1536,1663,1721,5631,2232,4130,2356,2940,1580, # 46245632,5633,1744,4757,2544,4758,4759,5634,4760,5635,2074,5636,4761,3647,3417,2896, # 46404400,5637,4401,2650,3418,2815, 673,2712,2465, 709,3533,4131,3648,4402,5638,1148, # 4656502, 634,5639,5640,1204,4762,3649,1575,4763,2623,3783,5641,3784,3128, 948,3263, # 4672121,1745,3916,1110,5642,4403,3083,2516,3027,4132,3785,1151,1771,3917,1488,4133, # 46881987,5643,2438,3534,5644,5645,2094,5646,4404,3918,1213,1407,2816, 531,2746,2545, # 47043264,1011,1537,4764,2779,4405,3129,1061,5647,3786,3787,1867,2897,5648,2018, 120, # 47204406,4407,2063,3650,3265,2314,3919,2678,3419,1955,4765,4134,5649,3535,1047,2713, # 47361266,5650,1368,4766,2858, 649,3420,3920,2546,2747,1102,2859,2679,5651,5652,2000, # 47525653,1111,3651,2977,5654,2495,3921,3652,2817,1855,3421,3788,5655,5656,3422,2415, # 47682898,3337,3266,3653,5657,2577,5658,3654,2818,4135,1460, 856,5659,3655,5660,2899, # 47842978,5661,2900,3922,5662,4408, 632,2517, 875,3923,1697,3924,2296,5663,5664,4767, # 48003028,1239, 580,4768,4409,5665, 914, 936,2075,1190,4136,1039,2124,5666,5667,5668, # 48165669,3423,1473,5670,1354,4410,3925,4769,2173,3084,4137, 915,3338,4411,4412,3339, # 48321605,1835,5671,2748, 398,3656,4413,3926,4138, 328,1913,2860,4139,3927,1331,4414, # 48483029, 937,4415,5672,3657,4140,4141,3424,2161,4770,3425, 524, 742, 538,3085,1012, # 48645673,5674,3928,2466,5675, 658,1103, 225,3929,5676,5677,4771,5678,4772,5679,3267, # 
48801243,5680,4142, 963,2250,4773,5681,2714,3658,3186,5682,5683,2596,2332,5684,4774, # 48965685,5686,5687,3536, 957,3426,2547,2033,1931,2941,2467, 870,2019,3659,1746,2780, # 49122781,2439,2468,5688,3930,5689,3789,3130,3790,3537,3427,3791,5690,1179,3086,5691, # 49283187,2378,4416,3792,2548,3188,3131,2749,4143,5692,3428,1556,2549,2297, 977,2901, # 49442034,4144,1205,3429,5693,1765,3430,3189,2125,1271, 714,1689,4775,3538,5694,2333, # 49603931, 533,4417,3660,2184, 617,5695,2469,3340,3539,2315,5696,5697,3190,5698,5699, # 49763932,1988, 618, 427,2651,3540,3431,5700,5701,1244,1690,5702,2819,4418,4776,5703, # 49923541,4777,5704,2284,1576, 473,3661,4419,3432, 972,5705,3662,5706,3087,5707,5708, # 50084778,4779,5709,3793,4145,4146,5710, 153,4780, 356,5711,1892,2902,4420,2144, 408, # 5024803,2357,5712,3933,5713,4421,1646,2578,2518,4781,4782,3934,5714,3935,4422,5715, # 50402416,3433, 752,5716,5717,1962,3341,2979,5718, 746,3030,2470,4783,4423,3794, 698, # 50564784,1893,4424,3663,2550,4785,3664,3936,5719,3191,3434,5720,1824,1302,4147,2715, # 50723937,1974,4425,5721,4426,3192, 823,1303,1288,1236,2861,3542,4148,3435, 774,3938, # 50885722,1581,4786,1304,2862,3939,4787,5723,2440,2162,1083,3268,4427,4149,4428, 344, # 51041173, 288,2316, 454,1683,5724,5725,1461,4788,4150,2597,5726,5727,4789, 985, 894, # 51205728,3436,3193,5729,1914,2942,3795,1989,5730,2111,1975,5731,4151,5732,2579,1194, # 5136425,5733,4790,3194,1245,3796,4429,5734,5735,2863,5736, 636,4791,1856,3940, 760, # 51521800,5737,4430,2212,1508,4792,4152,1894,1684,2298,5738,5739,4793,4431,4432,2213, # 5168479,5740,5741, 832,5742,4153,2496,5743,2980,2497,3797, 990,3132, 627,1815,2652, # 51844433,1582,4434,2126,2112,3543,4794,5744, 799,4435,3195,5745,4795,2113,1737,3031, # 52001018, 543, 754,4436,3342,1676,4796,4797,4154,4798,1489,5746,3544,5747,2624,2903, # 52164155,5748,5749,2981,5750,5751,5752,5753,3196,4799,4800,2185,1722,5754,3269,3270, # 52321843,3665,1715, 481, 365,1976,1857,5755,5756,1963,2498,4801,5757,2127,3666,3271, # 
5248433,1895,2064,2076,5758, 602,2750,5759,5760,5761,5762,5763,3032,1628,3437,5764, # 52643197,4802,4156,2904,4803,2519,5765,2551,2782,5766,5767,5768,3343,4804,2905,5769, # 52804805,5770,2864,4806,4807,1221,2982,4157,2520,5771,5772,5773,1868,1990,5774,5775, # 52965776,1896,5777,5778,4808,1897,4158, 318,5779,2095,4159,4437,5780,5781, 485,5782, # 5312938,3941, 553,2680, 116,5783,3942,3667,5784,3545,2681,2783,3438,3344,2820,5785, # 53283668,2943,4160,1747,2944,2983,5786,5787, 207,5788,4809,5789,4810,2521,5790,3033, # 5344890,3669,3943,5791,1878,3798,3439,5792,2186,2358,3440,1652,5793,5794,5795, 941, # 53602299, 208,3546,4161,2020, 330,4438,3944,2906,2499,3799,4439,4811,5796,5797,5798, # 5376 #last 512#Everything below is of no interest for detection purpose2522,1613,4812,5799,3345,3945,2523,5800,4162,5801,1637,4163,2471,4813,3946,5802, # 53922500,3034,3800,5803,5804,2195,4814,5805,2163,5806,5807,5808,5809,5810,5811,5812, # 54085813,5814,5815,5816,5817,5818,5819,5820,5821,5822,5823,5824,5825,5826,5827,5828, # 54245829,5830,5831,5832,5833,5834,5835,5836,5837,5838,5839,5840,5841,5842,5843,5844, # 54405845,5846,5847,5848,5849,5850,5851,5852,5853,5854,5855,5856,5857,5858,5859,5860, # 54565861,5862,5863,5864,5865,5866,5867,5868,5869,5870,5871,5872,5873,5874,5875,5876, # 54725877,5878,5879,5880,5881,5882,5883,5884,5885,5886,5887,5888,5889,5890,5891,5892, # 54885893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904,5905,5906,5907,5908, # 55045909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920,5921,5922,5923,5924, # 55205925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,5936,5937,5938,5939,5940, # 55365941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,5952,5953,5954,5955,5956, # 55525957,5958,5959,5960,5961,5962,5963,5964,5965,5966,5967,5968,5969,5970,5971,5972, # 55685973,5974,5975,5976,5977,5978,5979,5980,5981,5982,5983,5984,5985,5986,5987,5988, # 55845989,5990,5991,5992,5993,5994,5995,5996,5997,5998,5999,6000,6001,6002,6003,6004, # 
56006005,6006,6007,6008,6009,6010,6011,6012,6013,6014,6015,6016,6017,6018,6019,6020, # 56166021,6022,6023,6024,6025,6026,6027,6028,6029,6030,6031,6032,6033,6034,6035,6036, # 56326037,6038,6039,6040,6041,6042,6043,6044,6045,6046,6047,6048,6049,6050,6051,6052, # 56486053,6054,6055,6056,6057,6058,6059,6060,6061,6062,6063,6064,6065,6066,6067,6068, # 56646069,6070,6071,6072,6073,6074,6075,6076,6077,6078,6079,6080,6081,6082,6083,6084, # 56806085,6086,6087,6088,6089,6090,6091,6092,6093,6094,6095,6096,6097,6098,6099,6100, # 56966101,6102,6103,6104,6105,6106,6107,6108,6109,6110,6111,6112,6113,6114,6115,6116, # 57126117,6118,6119,6120,6121,6122,6123,6124,6125,6126,6127,6128,6129,6130,6131,6132, # 57286133,6134,6135,6136,6137,6138,6139,6140,6141,6142,6143,6144,6145,6146,6147,6148, # 57446149,6150,6151,6152,6153,6154,6155,6156,6157,6158,6159,6160,6161,6162,6163,6164, # 57606165,6166,6167,6168,6169,6170,6171,6172,6173,6174,6175,6176,6177,6178,6179,6180, # 57766181,6182,6183,6184,6185,6186,6187,6188,6189,6190,6191,6192,6193,6194,6195,6196, # 57926197,6198,6199,6200,6201,6202,6203,6204,6205,6206,6207,6208,6209,6210,6211,6212, # 58086213,6214,6215,6216,6217,6218,6219,6220,6221,6222,6223,3670,6224,6225,6226,6227, # 58246228,6229,6230,6231,6232,6233,6234,6235,6236,6237,6238,6239,6240,6241,6242,6243, # 58406244,6245,6246,6247,6248,6249,6250,6251,6252,6253,6254,6255,6256,6257,6258,6259, # 58566260,6261,6262,6263,6264,6265,6266,6267,6268,6269,6270,6271,6272,6273,6274,6275, # 58726276,6277,6278,6279,6280,6281,6282,6283,6284,6285,4815,6286,6287,6288,6289,6290, # 58886291,6292,4816,6293,6294,6295,6296,6297,6298,6299,6300,6301,6302,6303,6304,6305, # 59046306,6307,6308,6309,6310,6311,4817,4818,6312,6313,6314,6315,6316,6317,6318,4819, # 59206319,6320,6321,6322,6323,6324,6325,6326,6327,6328,6329,6330,6331,6332,6333,6334, # 59366335,6336,6337,4820,6338,6339,6340,6341,6342,6343,6344,6345,6346,6347,6348,6349, # 59526350,6351,6352,6353,6354,6355,6356,6357,6358,6359,6360,6361,6362,6363,6364,6365, 
# 59686366,6367,6368,6369,6370,6371,6372,6373,6374,6375,6376,6377,6378,6379,6380,6381, # 59846382,6383,6384,6385,6386,6387,6388,6389,6390,6391,6392,6393,6394,6395,6396,6397, # 60006398,6399,6400,6401,6402,6403,6404,6405,6406,6407,6408,6409,6410,3441,6411,6412, # 60166413,6414,6415,6416,6417,6418,6419,6420,6421,6422,6423,6424,6425,4440,6426,6427, # 60326428,6429,6430,6431,6432,6433,6434,6435,6436,6437,6438,6439,6440,6441,6442,6443, # 60486444,6445,6446,6447,6448,6449,6450,6451,6452,6453,6454,4821,6455,6456,6457,6458, # 60646459,6460,6461,6462,6463,6464,6465,6466,6467,6468,6469,6470,6471,6472,6473,6474, # 60806475,6476,6477,3947,3948,6478,6479,6480,6481,3272,4441,6482,6483,6484,6485,4442, # 60966486,6487,6488,6489,6490,6491,6492,6493,6494,6495,6496,4822,6497,6498,6499,6500, # 61126501,6502,6503,6504,6505,6506,6507,6508,6509,6510,6511,6512,6513,6514,6515,6516, # 61286517,6518,6519,6520,6521,6522,6523,6524,6525,6526,6527,6528,6529,6530,6531,6532, # 61446533,6534,6535,6536,6537,6538,6539,6540,6541,6542,6543,6544,6545,6546,6547,6548, # 61606549,6550,6551,6552,6553,6554,6555,6556,2784,6557,4823,6558,6559,6560,6561,6562, # 61766563,6564,6565,6566,6567,6568,6569,3949,6570,6571,6572,4824,6573,6574,6575,6576, # 61926577,6578,6579,6580,6581,6582,6583,4825,6584,6585,6586,3950,2785,6587,6588,6589, # 62086590,6591,6592,6593,6594,6595,6596,6597,6598,6599,6600,6601,6602,6603,6604,6605, # 62246606,6607,6608,6609,6610,6611,6612,4826,6613,6614,6615,4827,6616,6617,6618,6619, # 62406620,6621,6622,6623,6624,6625,4164,6626,6627,6628,6629,6630,6631,6632,6633,6634, # 62563547,6635,4828,6636,6637,6638,6639,6640,6641,6642,3951,2984,6643,6644,6645,6646, # 62726647,6648,6649,4165,6650,4829,6651,6652,4830,6653,6654,6655,6656,6657,6658,6659, # 62886660,6661,6662,4831,6663,6664,6665,6666,6667,6668,6669,6670,6671,4166,6672,4832, # 63043952,6673,6674,6675,6676,4833,6677,6678,6679,4167,6680,6681,6682,3198,6683,6684, # 
63206685,6686,6687,6688,6689,6690,6691,6692,6693,6694,6695,6696,6697,4834,6698,6699, # 63366700,6701,6702,6703,6704,6705,6706,6707,6708,6709,6710,6711,6712,6713,6714,6715, # 63526716,6717,6718,6719,6720,6721,6722,6723,6724,6725,6726,6727,6728,6729,6730,6731, # 63686732,6733,6734,4443,6735,6736,6737,6738,6739,6740,6741,6742,6743,6744,6745,4444, # 63846746,6747,6748,6749,6750,6751,6752,6753,6754,6755,6756,6757,6758,6759,6760,6761, # 64006762,6763,6764,6765,6766,6767,6768,6769,6770,6771,6772,6773,6774,6775,6776,6777, # 64166778,6779,6780,6781,4168,6782,6783,3442,6784,6785,6786,6787,6788,6789,6790,6791, # 64324169,6792,6793,6794,6795,6796,6797,6798,6799,6800,6801,6802,6803,6804,6805,6806, # 64486807,6808,6809,6810,6811,4835,6812,6813,6814,4445,6815,6816,4446,6817,6818,6819, # 64646820,6821,6822,6823,6824,6825,6826,6827,6828,6829,6830,6831,6832,6833,6834,6835, # 64803548,6836,6837,6838,6839,6840,6841,6842,6843,6844,6845,6846,4836,6847,6848,6849, # 64966850,6851,6852,6853,6854,3953,6855,6856,6857,6858,6859,6860,6861,6862,6863,6864, # 65126865,6866,6867,6868,6869,6870,6871,6872,6873,6874,6875,6876,6877,3199,6878,6879, # 65286880,6881,6882,4447,6883,6884,6885,6886,6887,6888,6889,6890,6891,6892,6893,6894, # 65446895,6896,6897,6898,6899,6900,6901,6902,6903,6904,4170,6905,6906,6907,6908,6909, # 65606910,6911,6912,6913,6914,6915,6916,6917,6918,6919,6920,6921,6922,6923,6924,6925, # 65766926,6927,4837,6928,6929,6930,6931,6932,6933,6934,6935,6936,3346,6937,6938,4838, # 65926939,6940,6941,4448,6942,6943,6944,6945,6946,4449,6947,6948,6949,6950,6951,6952, # 66086953,6954,6955,6956,6957,6958,6959,6960,6961,6962,6963,6964,6965,6966,6967,6968, # 66246969,6970,6971,6972,6973,6974,6975,6976,6977,6978,6979,6980,6981,6982,6983,6984, # 66406985,6986,6987,6988,6989,6990,6991,6992,6993,6994,3671,6995,6996,6997,6998,4839, # 66566999,7000,7001,7002,3549,7003,7004,7005,7006,7007,7008,7009,7010,7011,7012,7013, # 66727014,7015,7016,7017,7018,7019,7020,7021,7022,7023,7024,7025,7026,7027,7028,7029, 
# 66887030,4840,7031,7032,7033,7034,7035,7036,7037,7038,4841,7039,7040,7041,7042,7043, # 67047044,7045,7046,7047,7048,7049,7050,7051,7052,7053,7054,7055,7056,7057,7058,7059, # 67207060,7061,7062,7063,7064,7065,7066,7067,7068,7069,7070,2985,7071,7072,7073,7074, # 67367075,7076,7077,7078,7079,7080,4842,7081,7082,7083,7084,7085,7086,7087,7088,7089, # 67527090,7091,7092,7093,7094,7095,7096,7097,7098,7099,7100,7101,7102,7103,7104,7105, # 67687106,7107,7108,7109,7110,7111,7112,7113,7114,7115,7116,7117,7118,4450,7119,7120, # 67847121,7122,7123,7124,7125,7126,7127,7128,7129,7130,7131,7132,7133,7134,7135,7136, # 68007137,7138,7139,7140,7141,7142,7143,4843,7144,7145,7146,7147,7148,7149,7150,7151, # 68167152,7153,7154,7155,7156,7157,7158,7159,7160,7161,7162,7163,7164,7165,7166,7167, # 68327168,7169,7170,7171,7172,7173,7174,7175,7176,7177,7178,7179,7180,7181,7182,7183, # 68487184,7185,7186,7187,7188,4171,4172,7189,7190,7191,7192,7193,7194,7195,7196,7197, # 68647198,7199,7200,7201,7202,7203,7204,7205,7206,7207,7208,7209,7210,7211,7212,7213, # 68807214,7215,7216,7217,7218,7219,7220,7221,7222,7223,7224,7225,7226,7227,7228,7229, # 68967230,7231,7232,7233,7234,7235,7236,7237,7238,7239,7240,7241,7242,7243,7244,7245, # 69127246,7247,7248,7249,7250,7251,7252,7253,7254,7255,7256,7257,7258,7259,7260,7261, # 69287262,7263,7264,7265,7266,7267,7268,7269,7270,7271,7272,7273,7274,7275,7276,7277, # 69447278,7279,7280,7281,7282,7283,7284,7285,7286,7287,7288,7289,7290,7291,7292,7293, # 69607294,7295,7296,4844,7297,7298,7299,7300,7301,7302,7303,7304,7305,7306,7307,7308, # 69767309,7310,7311,7312,7313,7314,7315,7316,4451,7317,7318,7319,7320,7321,7322,7323, # 69927324,7325,7326,7327,7328,7329,7330,7331,7332,7333,7334,7335,7336,7337,7338,7339, # 70087340,7341,7342,7343,7344,7345,7346,7347,7348,7349,7350,7351,7352,7353,4173,7354, # 70247355,4845,7356,7357,7358,7359,7360,7361,7362,7363,7364,7365,7366,7367,7368,7369, # 
70407370,7371,7372,7373,7374,7375,7376,7377,7378,7379,7380,7381,7382,7383,7384,7385, # 70567386,7387,7388,4846,7389,7390,7391,7392,7393,7394,7395,7396,7397,7398,7399,7400, # 70727401,7402,7403,7404,7405,3672,7406,7407,7408,7409,7410,7411,7412,7413,7414,7415, # 70887416,7417,7418,7419,7420,7421,7422,7423,7424,7425,7426,7427,7428,7429,7430,7431, # 71047432,7433,7434,7435,7436,7437,7438,7439,7440,7441,7442,7443,7444,7445,7446,7447, # 71207448,7449,7450,7451,7452,7453,4452,7454,3200,7455,7456,7457,7458,7459,7460,7461, # 71367462,7463,7464,7465,7466,7467,7468,7469,7470,7471,7472,7473,7474,4847,7475,7476, # 71527477,3133,7478,7479,7480,7481,7482,7483,7484,7485,7486,7487,7488,7489,7490,7491, # 71687492,7493,7494,7495,7496,7497,7498,7499,7500,7501,7502,3347,7503,7504,7505,7506, # 71847507,7508,7509,7510,7511,7512,7513,7514,7515,7516,7517,7518,7519,7520,7521,4848, # 72007522,7523,7524,7525,7526,7527,7528,7529,7530,7531,7532,7533,7534,7535,7536,7537, # 72167538,7539,7540,7541,7542,7543,7544,7545,7546,7547,7548,7549,3801,4849,7550,7551, # 72327552,7553,7554,7555,7556,7557,7558,7559,7560,7561,7562,7563,7564,7565,7566,7567, # 72487568,7569,3035,7570,7571,7572,7573,7574,7575,7576,7577,7578,7579,7580,7581,7582, # 72647583,7584,7585,7586,7587,7588,7589,7590,7591,7592,7593,7594,7595,7596,7597,7598, # 72807599,7600,7601,7602,7603,7604,7605,7606,7607,7608,7609,7610,7611,7612,7613,7614, # 72967615,7616,4850,7617,7618,3802,7619,7620,7621,7622,7623,7624,7625,7626,7627,7628, # 73127629,7630,7631,7632,4851,7633,7634,7635,7636,7637,7638,7639,7640,7641,7642,7643, # 73287644,7645,7646,7647,7648,7649,7650,7651,7652,7653,7654,7655,7656,7657,7658,7659, # 73447660,7661,7662,7663,7664,7665,7666,7667,7668,7669,7670,4453,7671,7672,7673,7674, # 73607675,7676,7677,7678,7679,7680,7681,7682,7683,7684,7685,7686,7687,7688,7689,7690, # 73767691,7692,7693,7694,7695,7696,7697,3443,7698,7699,7700,7701,7702,4454,7703,7704, # 73927705,7706,7707,7708,7709,7710,7711,7712,7713,2472,7714,7715,7716,7717,7718,7719, 
# 74087720,7721,7722,7723,7724,7725,7726,7727,7728,7729,7730,7731,3954,7732,7733,7734, # 74247735,7736,7737,7738,7739,7740,7741,7742,7743,7744,7745,7746,7747,7748,7749,7750, # 74403134,7751,7752,4852,7753,7754,7755,4853,7756,7757,7758,7759,7760,4174,7761,7762, # 74567763,7764,7765,7766,7767,7768,7769,7770,7771,7772,7773,7774,7775,7776,7777,7778, # 74727779,7780,7781,7782,7783,7784,7785,7786,7787,7788,7789,7790,7791,7792,7793,7794, # 74887795,7796,7797,7798,7799,7800,7801,7802,7803,7804,7805,4854,7806,7807,7808,7809, # 75047810,7811,7812,7813,7814,7815,7816,7817,7818,7819,7820,7821,7822,7823,7824,7825, # 75204855,7826,7827,7828,7829,7830,7831,7832,7833,7834,7835,7836,7837,7838,7839,7840, # 75367841,7842,7843,7844,7845,7846,7847,3955,7848,7849,7850,7851,7852,7853,7854,7855, # 75527856,7857,7858,7859,7860,3444,7861,7862,7863,7864,7865,7866,7867,7868,7869,7870, # 75687871,7872,7873,7874,7875,7876,7877,7878,7879,7880,7881,7882,7883,7884,7885,7886, # 75847887,7888,7889,7890,7891,4175,7892,7893,7894,7895,7896,4856,4857,7897,7898,7899, # 76007900,2598,7901,7902,7903,7904,7905,7906,7907,7908,4455,7909,7910,7911,7912,7913, # 76167914,3201,7915,7916,7917,7918,7919,7920,7921,4858,7922,7923,7924,7925,7926,7927, # 76327928,7929,7930,7931,7932,7933,7934,7935,7936,7937,7938,7939,7940,7941,7942,7943, # 76487944,7945,7946,7947,7948,7949,7950,7951,7952,7953,7954,7955,7956,7957,7958,7959, # 76647960,7961,7962,7963,7964,7965,7966,7967,7968,7969,7970,7971,7972,7973,7974,7975, # 76807976,7977,7978,7979,7980,7981,4859,7982,7983,7984,7985,7986,7987,7988,7989,7990, # 76967991,7992,7993,7994,7995,7996,4860,7997,7998,7999,8000,8001,8002,8003,8004,8005, # 77128006,8007,8008,8009,8010,8011,8012,8013,8014,8015,8016,4176,8017,8018,8019,8020, # 77288021,8022,8023,4861,8024,8025,8026,8027,8028,8029,8030,8031,8032,8033,8034,8035, # 77448036,4862,4456,8037,8038,8039,8040,4863,8041,8042,8043,8044,8045,8046,8047,8048, # 
77608049,8050,8051,8052,8053,8054,8055,8056,8057,8058,8059,8060,8061,8062,8063,8064, # 77768065,8066,8067,8068,8069,8070,8071,8072,8073,8074,8075,8076,8077,8078,8079,8080, # 77928081,8082,8083,8084,8085,8086,8087,8088,8089,8090,8091,8092,8093,8094,8095,8096, # 78088097,8098,8099,4864,4177,8100,8101,8102,8103,8104,8105,8106,8107,8108,8109,8110, # 78248111,8112,8113,8114,8115,8116,8117,8118,8119,8120,4178,8121,8122,8123,8124,8125, # 78408126,8127,8128,8129,8130,8131,8132,8133,8134,8135,8136,8137,8138,8139,8140,8141, # 78568142,8143,8144,8145,4865,4866,8146,8147,8148,8149,8150,8151,8152,8153,8154,8155, # 78728156,8157,8158,8159,8160,8161,8162,8163,8164,8165,4179,8166,8167,8168,8169,8170, # 78888171,8172,8173,8174,8175,8176,8177,8178,8179,8180,8181,4457,8182,8183,8184,8185, # 79048186,8187,8188,8189,8190,8191,8192,8193,8194,8195,8196,8197,8198,8199,8200,8201, # 79208202,8203,8204,8205,8206,8207,8208,8209,8210,8211,8212,8213,8214,8215,8216,8217, # 79368218,8219,8220,8221,8222,8223,8224,8225,8226,8227,8228,8229,8230,8231,8232,8233, # 79528234,8235,8236,8237,8238,8239,8240,8241,8242,8243,8244,8245,8246,8247,8248,8249, # 79688250,8251,8252,8253,8254,8255,8256,3445,8257,8258,8259,8260,8261,8262,4458,8263, # 79848264,8265,8266,8267,8268,8269,8270,8271,8272,4459,8273,8274,8275,8276,3550,8277, # 80008278,8279,8280,8281,8282,8283,8284,8285,8286,8287,8288,8289,4460,8290,8291,8292, # 80168293,8294,8295,8296,8297,8298,8299,8300,8301,8302,8303,8304,8305,8306,8307,4867, # 80328308,8309,8310,8311,8312,3551,8313,8314,8315,8316,8317,8318,8319,8320,8321,8322, # 80488323,8324,8325,8326,4868,8327,8328,8329,8330,8331,8332,8333,8334,8335,8336,8337, # 80648338,8339,8340,8341,8342,8343,8344,8345,8346,8347,8348,8349,8350,8351,8352,8353, # 80808354,8355,8356,8357,8358,8359,8360,8361,8362,8363,4869,4461,8364,8365,8366,8367, # 80968368,8369,8370,4870,8371,8372,8373,8374,8375,8376,8377,8378,8379,8380,8381,8382, # 81128383,8384,8385,8386,8387,8388,8389,8390,8391,8392,8393,8394,8395,8396,8397,8398, 
# 81288399,8400,8401,8402,8403,8404,8405,8406,8407,8408,8409,8410,4871,8411,8412,8413, # 81448414,8415,8416,8417,8418,8419,8420,8421,8422,4462,8423,8424,8425,8426,8427,8428, # 81608429,8430,8431,8432,8433,2986,8434,8435,8436,8437,8438,8439,8440,8441,8442,8443, # 81768444,8445,8446,8447,8448,8449,8450,8451,8452,8453,8454,8455,8456,8457,8458,8459, # 81928460,8461,8462,8463,8464,8465,8466,8467,8468,8469,8470,8471,8472,8473,8474,8475, # 82088476,8477,8478,4180,8479,8480,8481,8482,8483,8484,8485,8486,8487,8488,8489,8490, # 82248491,8492,8493,8494,8495,8496,8497,8498,8499,8500,8501,8502,8503,8504,8505,8506, # 82408507,8508,8509,8510,8511,8512,8513,8514,8515,8516,8517,8518,8519,8520,8521,8522, # 82568523,8524,8525,8526,8527,8528,8529,8530,8531,8532,8533,8534,8535,8536,8537,8538, # 82728539,8540,8541,8542,8543,8544,8545,8546,8547,8548,8549,8550,8551,8552,8553,8554, # 82888555,8556,8557,8558,8559,8560,8561,8562,8563,8564,4872,8565,8566,8567,8568,8569, # 83048570,8571,8572,8573,4873,8574,8575,8576,8577,8578,8579,8580,8581,8582,8583,8584, # 83208585,8586,8587,8588,8589,8590,8591,8592,8593,8594,8595,8596,8597,8598,8599,8600, # 83368601,8602,8603,8604,8605,3803,8606,8607,8608,8609,8610,8611,8612,8613,4874,3804, # 83528614,8615,8616,8617,8618,8619,8620,8621,3956,8622,8623,8624,8625,8626,8627,8628, # 83688629,8630,8631,8632,8633,8634,8635,8636,8637,8638,2865,8639,8640,8641,8642,8643, # 83848644,8645,8646,8647,8648,8649,8650,8651,8652,8653,8654,8655,8656,4463,8657,8658, # 84008659,4875,4876,8660,8661,8662,8663,8664,8665,8666,8667,8668,8669,8670,8671,8672, # 84168673,8674,8675,8676,8677,8678,8679,8680,8681,4464,8682,8683,8684,8685,8686,8687, # 84328688,8689,8690,8691,8692,8693,8694,8695,8696,8697,8698,8699,8700,8701,8702,8703, # 84488704,8705,8706,8707,8708,8709,2261,8710,8711,8712,8713,8714,8715,8716,8717,8718, # 84648719,8720,8721,8722,8723,8724,8725,8726,8727,8728,8729,8730,8731,8732,8733,4181, # 
84808734,8735,8736,8737,8738,8739,8740,8741,8742,8743,8744,8745,8746,8747,8748,8749, # 84968750,8751,8752,8753,8754,8755,8756,8757,8758,8759,8760,8761,8762,8763,4877,8764, # 85128765,8766,8767,8768,8769,8770,8771,8772,8773,8774,8775,8776,8777,8778,8779,8780, # 85288781,8782,8783,8784,8785,8786,8787,8788,4878,8789,4879,8790,8791,8792,4880,8793, # 85448794,8795,8796,8797,8798,8799,8800,8801,4881,8802,8803,8804,8805,8806,8807,8808, # 85608809,8810,8811,8812,8813,8814,8815,3957,8816,8817,8818,8819,8820,8821,8822,8823, # 85768824,8825,8826,8827,8828,8829,8830,8831,8832,8833,8834,8835,8836,8837,8838,8839, # 85928840,8841,8842,8843,8844,8845,8846,8847,4882,8848,8849,8850,8851,8852,8853,8854, # 86088855,8856,8857,8858,8859,8860,8861,8862,8863,8864,8865,8866,8867,8868,8869,8870, # 86248871,8872,8873,8874,8875,8876,8877,8878,8879,8880,8881,8882,8883,8884,3202,8885, # 86408886,8887,8888,8889,8890,8891,8892,8893,8894,8895,8896,8897,8898,8899,8900,8901, # 86568902,8903,8904,8905,8906,8907,8908,8909,8910,8911,8912,8913,8914,8915,8916,8917, # 86728918,8919,8920,8921,8922,8923,8924,4465,8925,8926,8927,8928,8929,8930,8931,8932, # 86884883,8933,8934,8935,8936,8937,8938,8939,8940,8941,8942,8943,2214,8944,8945,8946, # 87048947,8948,8949,8950,8951,8952,8953,8954,8955,8956,8957,8958,8959,8960,8961,8962, # 87208963,8964,8965,4884,8966,8967,8968,8969,8970,8971,8972,8973,8974,8975,8976,8977, # 87368978,8979,8980,8981,8982,8983,8984,8985,8986,8987,8988,8989,8990,8991,8992,4885, # 87528993,8994,8995,8996,8997,8998,8999,9000,9001,9002,9003,9004,9005,9006,9007,9008, # 87689009,9010,9011,9012,9013,9014,9015,9016,9017,9018,9019,9020,9021,4182,9022,9023, # 87849024,9025,9026,9027,9028,9029,9030,9031,9032,9033,9034,9035,9036,9037,9038,9039, # 88009040,9041,9042,9043,9044,9045,9046,9047,9048,9049,9050,9051,9052,9053,9054,9055, # 88169056,9057,9058,9059,9060,9061,9062,9063,4886,9064,9065,9066,9067,9068,9069,4887, # 88329070,9071,9072,9073,9074,9075,9076,9077,9078,9079,9080,9081,9082,9083,9084,9085, 
# 88489086,9087,9088,9089,9090,9091,9092,9093,9094,9095,9096,9097,9098,9099,9100,9101, # 88649102,9103,9104,9105,9106,9107,9108,9109,9110,9111,9112,9113,9114,9115,9116,9117, # 88809118,9119,9120,9121,9122,9123,9124,9125,9126,9127,9128,9129,9130,9131,9132,9133, # 88969134,9135,9136,9137,9138,9139,9140,9141,3958,9142,9143,9144,9145,9146,9147,9148, # 89129149,9150,9151,4888,9152,9153,9154,9155,9156,9157,9158,9159,9160,9161,9162,9163, # 89289164,9165,9166,9167,9168,9169,9170,9171,9172,9173,9174,9175,4889,9176,9177,9178, # 89449179,9180,9181,9182,9183,9184,9185,9186,9187,9188,9189,9190,9191,9192,9193,9194, # 89609195,9196,9197,9198,9199,9200,9201,9202,9203,4890,9204,9205,9206,9207,9208,9209, # 89769210,9211,9212,9213,9214,9215,9216,9217,9218,9219,9220,9221,9222,4466,9223,9224, # 89929225,9226,9227,9228,9229,9230,9231,9232,9233,9234,9235,9236,9237,9238,9239,9240, # 90089241,9242,9243,9244,9245,4891,9246,9247,9248,9249,9250,9251,9252,9253,9254,9255, # 90249256,9257,4892,9258,9259,9260,9261,4893,4894,9262,9263,9264,9265,9266,9267,9268, # 90409269,9270,9271,9272,9273,4467,9274,9275,9276,9277,9278,9279,9280,9281,9282,9283, # 90569284,9285,3673,9286,9287,9288,9289,9290,9291,9292,9293,9294,9295,9296,9297,9298, # 90729299,9300,9301,9302,9303,9304,9305,9306,9307,9308,9309,9310,9311,9312,9313,9314, # 90889315,9316,9317,9318,9319,9320,9321,9322,4895,9323,9324,9325,9326,9327,9328,9329, # 91049330,9331,9332,9333,9334,9335,9336,9337,9338,9339,9340,9341,9342,9343,9344,9345, # 91209346,9347,4468,9348,9349,9350,9351,9352,9353,9354,9355,9356,9357,9358,9359,9360, # 91369361,9362,9363,9364,9365,9366,9367,9368,9369,9370,9371,9372,9373,4896,9374,4469, # 91529375,9376,9377,9378,9379,4897,9380,9381,9382,9383,9384,9385,9386,9387,9388,9389, # 91689390,9391,9392,9393,9394,9395,9396,9397,9398,9399,9400,9401,9402,9403,9404,9405, # 91849406,4470,9407,2751,9408,9409,3674,3552,9410,9411,9412,9413,9414,9415,9416,9417, # 
92009418,9419,9420,9421,4898,9422,9423,9424,9425,9426,9427,9428,9429,3959,9430,9431, # 92169432,9433,9434,9435,9436,4471,9437,9438,9439,9440,9441,9442,9443,9444,9445,9446, # 92329447,9448,9449,9450,3348,9451,9452,9453,9454,9455,9456,9457,9458,9459,9460,9461, # 92489462,9463,9464,9465,9466,9467,9468,9469,9470,9471,9472,4899,9473,9474,9475,9476, # 92649477,4900,9478,9479,9480,9481,9482,9483,9484,9485,9486,9487,9488,3349,9489,9490, # 92809491,9492,9493,9494,9495,9496,9497,9498,9499,9500,9501,9502,9503,9504,9505,9506, # 92969507,9508,9509,9510,9511,9512,9513,9514,9515,9516,9517,9518,9519,9520,4901,9521, # 93129522,9523,9524,9525,9526,4902,9527,9528,9529,9530,9531,9532,9533,9534,9535,9536, # 93289537,9538,9539,9540,9541,9542,9543,9544,9545,9546,9547,9548,9549,9550,9551,9552, # 93449553,9554,9555,9556,9557,9558,9559,9560,9561,9562,9563,9564,9565,9566,9567,9568, # 93609569,9570,9571,9572,9573,9574,9575,9576,9577,9578,9579,9580,9581,9582,9583,9584, # 93763805,9585,9586,9587,9588,9589,9590,9591,9592,9593,9594,9595,9596,9597,9598,9599, # 93929600,9601,9602,4903,9603,9604,9605,9606,9607,4904,9608,9609,9610,9611,9612,9613, # 94089614,4905,9615,9616,9617,9618,9619,9620,9621,9622,9623,9624,9625,9626,9627,9628, # 94249629,9630,9631,9632,4906,9633,9634,9635,9636,9637,9638,9639,9640,9641,9642,9643, # 94404907,9644,9645,9646,9647,9648,9649,9650,9651,9652,9653,9654,9655,9656,9657,9658, # 94569659,9660,9661,9662,9663,9664,9665,9666,9667,9668,9669,9670,9671,9672,4183,9673, # 94729674,9675,9676,9677,4908,9678,9679,9680,9681,4909,9682,9683,9684,9685,9686,9687, # 94889688,9689,9690,4910,9691,9692,9693,3675,9694,9695,9696,2945,9697,9698,9699,9700, # 95049701,9702,9703,9704,9705,4911,9706,9707,9708,9709,9710,9711,9712,9713,9714,9715, # 95209716,9717,9718,9719,9720,9721,9722,9723,9724,9725,9726,9727,9728,9729,9730,9731, # 95369732,9733,9734,9735,4912,9736,9737,9738,9739,9740,4913,9741,9742,9743,9744,9745, # 95529746,9747,9748,9749,9750,9751,9752,9753,9754,9755,9756,9757,9758,4914,9759,9760, 
# 95689761,9762,9763,9764,9765,9766,9767,9768,9769,9770,9771,9772,9773,9774,9775,9776, # 95849777,9778,9779,9780,9781,9782,4915,9783,9784,9785,9786,9787,9788,9789,9790,9791, # 96009792,9793,4916,9794,9795,9796,9797,9798,9799,9800,9801,9802,9803,9804,9805,9806, # 96169807,9808,9809,9810,9811,9812,9813,9814,9815,9816,9817,9818,9819,9820,9821,9822, # 96329823,9824,9825,9826,9827,9828,9829,9830,9831,9832,9833,9834,9835,9836,9837,9838, # 96489839,9840,9841,9842,9843,9844,9845,9846,9847,9848,9849,9850,9851,9852,9853,9854, # 96649855,9856,9857,9858,9859,9860,9861,9862,9863,9864,9865,9866,9867,9868,4917,9869, # 96809870,9871,9872,9873,9874,9875,9876,9877,9878,9879,9880,9881,9882,9883,9884,9885, # 96969886,9887,9888,9889,9890,9891,9892,4472,9893,9894,9895,9896,9897,3806,9898,9899, # 97129900,9901,9902,9903,9904,9905,9906,9907,9908,9909,9910,9911,9912,9913,9914,4918, # 97289915,9916,9917,4919,9918,9919,9920,9921,4184,9922,9923,9924,9925,9926,9927,9928, # 97449929,9930,9931,9932,9933,9934,9935,9936,9937,9938,9939,9940,9941,9942,9943,9944, # 97609945,9946,4920,9947,9948,9949,9950,9951,9952,9953,9954,9955,4185,9956,9957,9958, # 97769959,9960,9961,9962,9963,9964,9965,4921,9966,9967,9968,4473,9969,9970,9971,9972, # 97929973,9974,9975,9976,9977,4474,9978,9979,9980,9981,9982,9983,9984,9985,9986,9987, # 98089988,9989,9990,9991,9992,9993,9994,9995,9996,9997,9998,9999,10000,10001,10002,10003, # 982410004,10005,10006,10007,10008,10009,10010,10011,10012,10013,10014,10015,10016,10017,10018,10019, # 984010020,10021,4922,10022,4923,10023,10024,10025,10026,10027,10028,10029,10030,10031,10032,10033, # 985610034,10035,10036,10037,10038,10039,10040,10041,10042,10043,10044,10045,10046,10047,10048,4924, # 987210049,10050,10051,10052,10053,10054,10055,10056,10057,10058,10059,10060,10061,10062,10063,10064, # 988810065,10066,10067,10068,10069,10070,10071,10072,10073,10074,10075,10076,10077,10078,10079,10080, # 
990410081,10082,10083,10084,10085,10086,10087,4475,10088,10089,10090,10091,10092,10093,10094,10095, # 992010096,10097,4476,10098,10099,10100,10101,10102,10103,10104,10105,10106,10107,10108,10109,10110, # 993610111,2174,10112,10113,10114,10115,10116,10117,10118,10119,10120,10121,10122,10123,10124,10125, # 995210126,10127,10128,10129,10130,10131,10132,10133,10134,10135,10136,10137,10138,10139,10140,3807, # 99684186,4925,10141,10142,10143,10144,10145,10146,10147,4477,4187,10148,10149,10150,10151,10152, # 998410153,4188,10154,10155,10156,10157,10158,10159,10160,10161,4926,10162,10163,10164,10165,10166, #1000010167,10168,10169,10170,10171,10172,10173,10174,10175,10176,10177,10178,10179,10180,10181,10182, #1001610183,10184,10185,10186,10187,10188,10189,10190,10191,10192,3203,10193,10194,10195,10196,10197, #1003210198,10199,10200,4478,10201,10202,10203,10204,4479,10205,10206,10207,10208,10209,10210,10211, #1004810212,10213,10214,10215,10216,10217,10218,10219,10220,10221,10222,10223,10224,10225,10226,10227, #1006410228,10229,10230,10231,10232,10233,10234,4927,10235,10236,10237,10238,10239,10240,10241,10242, #1008010243,10244,10245,10246,10247,10248,10249,10250,10251,10252,10253,10254,10255,10256,10257,10258, #1009610259,10260,10261,10262,10263,10264,10265,10266,10267,10268,10269,10270,10271,10272,10273,4480, #101124928,4929,10274,10275,10276,10277,10278,10279,10280,10281,10282,10283,10284,10285,10286,10287, #1012810288,10289,10290,10291,10292,10293,10294,10295,10296,10297,10298,10299,10300,10301,10302,10303, #1014410304,10305,10306,10307,10308,10309,10310,10311,10312,10313,10314,10315,10316,10317,10318,10319, #1016010320,10321,10322,10323,10324,10325,10326,10327,10328,10329,10330,10331,10332,10333,10334,4930, #1017610335,10336,10337,10338,10339,10340,10341,10342,4931,10343,10344,10345,10346,10347,10348,10349, #1019210350,10351,10352,10353,10354,10355,3088,10356,2786,10357,10358,10359,10360,4189,10361,10362, 
#1020810363,10364,10365,10366,10367,10368,10369,10370,10371,10372,10373,10374,10375,4932,10376,10377, #1022410378,10379,10380,10381,10382,10383,10384,10385,10386,10387,10388,10389,10390,10391,10392,4933, #1024010393,10394,10395,4934,10396,10397,10398,10399,10400,10401,10402,10403,10404,10405,10406,10407, #1025610408,10409,10410,10411,10412,3446,10413,10414,10415,10416,10417,10418,10419,10420,10421,10422, #1027210423,4935,10424,10425,10426,10427,10428,10429,10430,4936,10431,10432,10433,10434,10435,10436, #1028810437,10438,10439,10440,10441,10442,10443,4937,10444,10445,10446,10447,4481,10448,10449,10450, #1030410451,10452,10453,10454,10455,10456,10457,10458,10459,10460,10461,10462,10463,10464,10465,10466, #1032010467,10468,10469,10470,10471,10472,10473,10474,10475,10476,10477,10478,10479,10480,10481,10482, #1033610483,10484,10485,10486,10487,10488,10489,10490,10491,10492,10493,10494,10495,10496,10497,10498, #1035210499,10500,10501,10502,10503,10504,10505,4938,10506,10507,10508,10509,10510,2552,10511,10512, #1036810513,10514,10515,10516,3447,10517,10518,10519,10520,10521,10522,10523,10524,10525,10526,10527, #1038410528,10529,10530,10531,10532,10533,10534,10535,10536,10537,10538,10539,10540,10541,10542,10543, #104004482,10544,4939,10545,10546,10547,10548,10549,10550,10551,10552,10553,10554,10555,10556,10557, #1041610558,10559,10560,10561,10562,10563,10564,10565,10566,10567,3676,4483,10568,10569,10570,10571, #1043210572,3448,10573,10574,10575,10576,10577,10578,10579,10580,10581,10582,10583,10584,10585,10586, #1044810587,10588,10589,10590,10591,10592,10593,10594,10595,10596,10597,10598,10599,10600,10601,10602, #1046410603,10604,10605,10606,10607,10608,10609,10610,10611,10612,10613,10614,10615,10616,10617,10618, #1048010619,10620,10621,10622,10623,10624,10625,10626,10627,4484,10628,10629,10630,10631,10632,4940, #1049610633,10634,10635,10636,10637,10638,10639,10640,10641,10642,10643,10644,10645,10646,10647,10648, 
#1051210649,10650,10651,10652,10653,10654,10655,10656,4941,10657,10658,10659,2599,10660,10661,10662, #1052810663,10664,10665,10666,3089,10667,10668,10669,10670,10671,10672,10673,10674,10675,10676,10677, #1054410678,10679,10680,4942,10681,10682,10683,10684,10685,10686,10687,10688,10689,10690,10691,10692, #1056010693,10694,10695,10696,10697,4485,10698,10699,10700,10701,10702,10703,10704,4943,10705,3677, #1057610706,10707,10708,10709,10710,10711,10712,4944,10713,10714,10715,10716,10717,10718,10719,10720, #1059210721,10722,10723,10724,10725,10726,10727,10728,4945,10729,10730,10731,10732,10733,10734,10735, #1060810736,10737,10738,10739,10740,10741,10742,10743,10744,10745,10746,10747,10748,10749,10750,10751, #1062410752,10753,10754,10755,10756,10757,10758,10759,10760,10761,4946,10762,10763,10764,10765,10766, #1064010767,4947,4948,10768,10769,10770,10771,10772,10773,10774,10775,10776,10777,10778,10779,10780, #1065610781,10782,10783,10784,10785,10786,10787,10788,10789,10790,10791,10792,10793,10794,10795,10796, #1067210797,10798,10799,10800,10801,10802,10803,10804,10805,10806,10807,10808,10809,10810,10811,10812, #1068810813,10814,10815,10816,10817,10818,10819,10820,10821,10822,10823,10824,10825,10826,10827,10828, #1070410829,10830,10831,10832,10833,10834,10835,10836,10837,10838,10839,10840,10841,10842,10843,10844, #1072010845,10846,10847,10848,10849,10850,10851,10852,10853,10854,10855,10856,10857,10858,10859,10860, #1073610861,10862,10863,10864,10865,10866,10867,10868,10869,10870,10871,10872,10873,10874,10875,10876, #1075210877,10878,4486,10879,10880,10881,10882,10883,10884,10885,4949,10886,10887,10888,10889,10890, #1076810891,10892,10893,10894,10895,10896,10897,10898,10899,10900,10901,10902,10903,10904,10905,10906, #1078410907,10908,10909,10910,10911,10912,10913,10914,10915,10916,10917,10918,10919,4487,10920,10921, #1080010922,10923,10924,10925,10926,10927,10928,10929,10930,10931,10932,4950,10933,10934,10935,10936, 
#1081610937,10938,10939,10940,10941,10942,10943,10944,10945,10946,10947,10948,10949,4488,10950,10951, #1083210952,10953,10954,10955,10956,10957,10958,10959,4190,10960,10961,10962,10963,10964,10965,10966, #1084810967,10968,10969,10970,10971,10972,10973,10974,10975,10976,10977,10978,10979,10980,10981,10982, #1086410983,10984,10985,10986,10987,10988,10989,10990,10991,10992,10993,10994,10995,10996,10997,10998, #1088010999,11000,11001,11002,11003,11004,11005,11006,3960,11007,11008,11009,11010,11011,11012,11013, #1089611014,11015,11016,11017,11018,11019,11020,11021,11022,11023,11024,11025,11026,11027,11028,11029, #1091211030,11031,11032,4951,11033,11034,11035,11036,11037,11038,11039,11040,11041,11042,11043,11044, #1092811045,11046,11047,4489,11048,11049,11050,11051,4952,11052,11053,11054,11055,11056,11057,11058, #109444953,11059,11060,11061,11062,11063,11064,11065,11066,11067,11068,11069,11070,11071,4954,11072, #1096011073,11074,11075,11076,11077,11078,11079,11080,11081,11082,11083,11084,11085,11086,11087,11088, #1097611089,11090,11091,11092,11093,11094,11095,11096,11097,11098,11099,11100,11101,11102,11103,11104, #1099211105,11106,11107,11108,11109,11110,11111,11112,11113,11114,11115,3808,11116,11117,11118,11119, #1100811120,11121,11122,11123,11124,11125,11126,11127,11128,11129,11130,11131,11132,11133,11134,4955, #1102411135,11136,11137,11138,11139,11140,11141,11142,11143,11144,11145,11146,11147,11148,11149,11150, #1104011151,11152,11153,11154,11155,11156,11157,11158,11159,11160,11161,4956,11162,11163,11164,11165, #1105611166,11167,11168,11169,11170,11171,11172,11173,11174,11175,11176,11177,11178,11179,11180,4957, #1107211181,11182,11183,11184,11185,11186,4958,11187,11188,11189,11190,11191,11192,11193,11194,11195, #1108811196,11197,11198,11199,11200,3678,11201,11202,11203,11204,11205,11206,4191,11207,11208,11209, #1110411210,11211,11212,11213,11214,11215,11216,11217,11218,11219,11220,11221,11222,11223,11224,11225, 
#1112011226,11227,11228,11229,11230,11231,11232,11233,11234,11235,11236,11237,11238,11239,11240,11241, #1113611242,11243,11244,11245,11246,11247,11248,11249,11250,11251,4959,11252,11253,11254,11255,11256, #1115211257,11258,11259,11260,11261,11262,11263,11264,11265,11266,11267,11268,11269,11270,11271,11272, #1116811273,11274,11275,11276,11277,11278,11279,11280,11281,11282,11283,11284,11285,11286,11287,11288, #1118411289,11290,11291,11292,11293,11294,11295,11296,11297,11298,11299,11300,11301,11302,11303,11304, #1120011305,11306,11307,11308,11309,11310,11311,11312,11313,11314,3679,11315,11316,11317,11318,4490, #1121611319,11320,11321,11322,11323,11324,11325,11326,11327,11328,11329,11330,11331,11332,11333,11334, #1123211335,11336,11337,11338,11339,11340,11341,11342,11343,11344,11345,11346,11347,4960,11348,11349, #1124811350,11351,11352,11353,11354,11355,11356,11357,11358,11359,11360,11361,11362,11363,11364,11365, #1126411366,11367,11368,11369,11370,11371,11372,11373,11374,11375,11376,11377,3961,4961,11378,11379, #1128011380,11381,11382,11383,11384,11385,11386,11387,11388,11389,11390,11391,11392,11393,11394,11395, #1129611396,11397,4192,11398,11399,11400,11401,11402,11403,11404,11405,11406,11407,11408,11409,11410, #1131211411,4962,11412,11413,11414,11415,11416,11417,11418,11419,11420,11421,11422,11423,11424,11425, #1132811426,11427,11428,11429,11430,11431,11432,11433,11434,11435,11436,11437,11438,11439,11440,11441, #1134411442,11443,11444,11445,11446,11447,11448,11449,11450,11451,11452,11453,11454,11455,11456,11457, #1136011458,11459,11460,11461,11462,11463,11464,11465,11466,11467,11468,11469,4963,11470,11471,4491, #1137611472,11473,11474,11475,4964,11476,11477,11478,11479,11480,11481,11482,11483,11484,11485,11486, #1139211487,11488,11489,11490,11491,11492,4965,11493,11494,11495,11496,11497,11498,11499,11500,11501, #1140811502,11503,11504,11505,11506,11507,11508,11509,11510,11511,11512,11513,11514,11515,11516,11517, 
#1142411518,11519,11520,11521,11522,11523,11524,11525,11526,11527,11528,11529,3962,11530,11531,11532, #1144011533,11534,11535,11536,11537,11538,11539,11540,11541,11542,11543,11544,11545,11546,11547,11548, #1145611549,11550,11551,11552,11553,11554,11555,11556,11557,11558,11559,11560,11561,11562,11563,11564, #114724193,4194,11565,11566,11567,11568,11569,11570,11571,11572,11573,11574,11575,11576,11577,11578, #1148811579,11580,11581,11582,11583,11584,11585,11586,11587,11588,11589,11590,11591,4966,4195,11592, #1150411593,11594,11595,11596,11597,11598,11599,11600,11601,11602,11603,11604,3090,11605,11606,11607, #1152011608,11609,11610,4967,11611,11612,11613,11614,11615,11616,11617,11618,11619,11620,11621,11622, #1153611623,11624,11625,11626,11627,11628,11629,11630,11631,11632,11633,11634,11635,11636,11637,11638, #1155211639,11640,11641,11642,11643,11644,11645,11646,11647,11648,11649,11650,11651,11652,11653,11654, #1156811655,11656,11657,11658,11659,11660,11661,11662,11663,11664,11665,11666,11667,11668,11669,11670, #1158411671,11672,11673,11674,4968,11675,11676,11677,11678,11679,11680,11681,11682,11683,11684,11685, #1160011686,11687,11688,11689,11690,11691,11692,11693,3809,11694,11695,11696,11697,11698,11699,11700, #1161611701,11702,11703,11704,11705,11706,11707,11708,11709,11710,11711,11712,11713,11714,11715,11716, #1163211717,11718,3553,11719,11720,11721,11722,11723,11724,11725,11726,11727,11728,11729,11730,4969, #1164811731,11732,11733,11734,11735,11736,11737,11738,11739,11740,4492,11741,11742,11743,11744,11745, #1166411746,11747,11748,11749,11750,11751,11752,4970,11753,11754,11755,11756,11757,11758,11759,11760, #1168011761,11762,11763,11764,11765,11766,11767,11768,11769,11770,11771,11772,11773,11774,11775,11776, #1169611777,11778,11779,11780,11781,11782,11783,11784,11785,11786,11787,11788,11789,11790,4971,11791, #1171211792,11793,11794,11795,11796,11797,4972,11798,11799,11800,11801,11802,11803,11804,11805,11806, 
#1172811807,11808,11809,11810,4973,11811,11812,11813,11814,11815,11816,11817,11818,11819,11820,11821, #1174411822,11823,11824,11825,11826,11827,11828,11829,11830,11831,11832,11833,11834,3680,3810,11835, #1176011836,4974,11837,11838,11839,11840,11841,11842,11843,11844,11845,11846,11847,11848,11849,11850, #1177611851,11852,11853,11854,11855,11856,11857,11858,11859,11860,11861,11862,11863,11864,11865,11866, #1179211867,11868,11869,11870,11871,11872,11873,11874,11875,11876,11877,11878,11879,11880,11881,11882, #1180811883,11884,4493,11885,11886,11887,11888,11889,11890,11891,11892,11893,11894,11895,11896,11897, #1182411898,11899,11900,11901,11902,11903,11904,11905,11906,11907,11908,11909,11910,11911,11912,11913, #1184011914,11915,4975,11916,11917,11918,11919,11920,11921,11922,11923,11924,11925,11926,11927,11928, #1185611929,11930,11931,11932,11933,11934,11935,11936,11937,11938,11939,11940,11941,11942,11943,11944, #1187211945,11946,11947,11948,11949,4976,11950,11951,11952,11953,11954,11955,11956,11957,11958,11959, #1188811960,11961,11962,11963,11964,11965,11966,11967,11968,11969,11970,11971,11972,11973,11974,11975, #1190411976,11977,11978,11979,11980,11981,11982,11983,11984,11985,11986,11987,4196,11988,11989,11990, #1192011991,11992,4977,11993,11994,11995,11996,11997,11998,11999,12000,12001,12002,12003,12004,12005, #1193612006,12007,12008,12009,12010,12011,12012,12013,12014,12015,12016,12017,12018,12019,12020,12021, #1195212022,12023,12024,12025,12026,12027,12028,12029,12030,12031,12032,12033,12034,12035,12036,12037, #1196812038,12039,12040,12041,12042,12043,12044,12045,12046,12047,12048,12049,12050,12051,12052,12053, #1198412054,12055,12056,12057,12058,12059,12060,12061,4978,12062,12063,12064,12065,12066,12067,12068, #1200012069,12070,12071,12072,12073,12074,12075,12076,12077,12078,12079,12080,12081,12082,12083,12084, #1201612085,12086,12087,12088,12089,12090,12091,12092,12093,12094,12095,12096,12097,12098,12099,12100, 
#1203212101,12102,12103,12104,12105,12106,12107,12108,12109,12110,12111,12112,12113,12114,12115,12116, #1204812117,12118,12119,12120,12121,12122,12123,4979,12124,12125,12126,12127,12128,4197,12129,12130, #1206412131,12132,12133,12134,12135,12136,12137,12138,12139,12140,12141,12142,12143,12144,12145,12146, #1208012147,12148,12149,12150,12151,12152,12153,12154,4980,12155,12156,12157,12158,12159,12160,4494, #1209612161,12162,12163,12164,3811,12165,12166,12167,12168,12169,4495,12170,12171,4496,12172,12173, #1211212174,12175,12176,3812,12177,12178,12179,12180,12181,12182,12183,12184,12185,12186,12187,12188, #1212812189,12190,12191,12192,12193,12194,12195,12196,12197,12198,12199,12200,12201,12202,12203,12204, #1214412205,12206,12207,12208,12209,12210,12211,12212,12213,12214,12215,12216,12217,12218,12219,12220, #1216012221,4981,12222,12223,12224,12225,12226,12227,12228,12229,12230,12231,12232,12233,12234,12235, #121764982,12236,12237,12238,12239,12240,12241,12242,12243,12244,12245,4983,12246,12247,12248,12249, #121924984,12250,12251,12252,12253,12254,12255,12256,12257,12258,12259,12260,12261,12262,12263,12264, #122084985,12265,4497,12266,12267,12268,12269,12270,12271,12272,12273,12274,12275,12276,12277,12278, #1222412279,12280,12281,12282,12283,12284,12285,12286,12287,4986,12288,12289,12290,12291,12292,12293, #1224012294,12295,12296,2473,12297,12298,12299,12300,12301,12302,12303,12304,12305,12306,12307,12308, #1225612309,12310,12311,12312,12313,12314,12315,12316,12317,12318,12319,3963,12320,12321,12322,12323, #1227212324,12325,12326,12327,12328,12329,12330,12331,12332,4987,12333,12334,12335,12336,12337,12338, #1228812339,12340,12341,12342,12343,12344,12345,12346,12347,12348,12349,12350,12351,12352,12353,12354, #1230412355,12356,12357,12358,12359,3964,12360,12361,12362,12363,12364,12365,12366,12367,12368,12369, #1232012370,3965,12371,12372,12373,12374,12375,12376,12377,12378,12379,12380,12381,12382,12383,12384, 
#1233612385,12386,12387,12388,12389,12390,12391,12392,12393,12394,12395,12396,12397,12398,12399,12400, #1235212401,12402,12403,12404,12405,12406,12407,12408,4988,12409,12410,12411,12412,12413,12414,12415, #1236812416,12417,12418,12419,12420,12421,12422,12423,12424,12425,12426,12427,12428,12429,12430,12431, #1238412432,12433,12434,12435,12436,12437,12438,3554,12439,12440,12441,12442,12443,12444,12445,12446, #1240012447,12448,12449,12450,12451,12452,12453,12454,12455,12456,12457,12458,12459,12460,12461,12462, #1241612463,12464,4989,12465,12466,12467,12468,12469,12470,12471,12472,12473,12474,12475,12476,12477, #1243212478,12479,12480,4990,12481,12482,12483,12484,12485,12486,12487,12488,12489,4498,12490,12491, #1244812492,12493,12494,12495,12496,12497,12498,12499,12500,12501,12502,12503,12504,12505,12506,12507, #1246412508,12509,12510,12511,12512,12513,12514,12515,12516,12517,12518,12519,12520,12521,12522,12523, #1248012524,12525,12526,12527,12528,12529,12530,12531,12532,12533,12534,12535,12536,12537,12538,12539, #1249612540,12541,12542,12543,12544,12545,12546,12547,12548,12549,12550,12551,4991,12552,12553,12554, #1251212555,12556,12557,12558,12559,12560,12561,12562,12563,12564,12565,12566,12567,12568,12569,12570, #1252812571,12572,12573,12574,12575,12576,12577,12578,3036,12579,12580,12581,12582,12583,3966,12584, #1254412585,12586,12587,12588,12589,12590,12591,12592,12593,12594,12595,12596,12597,12598,12599,12600, #1256012601,12602,12603,12604,12605,12606,12607,12608,12609,12610,12611,12612,12613,12614,12615,12616, #1257612617,12618,12619,12620,12621,12622,12623,12624,12625,12626,12627,12628,12629,12630,12631,12632, #1259212633,12634,12635,12636,12637,12638,12639,12640,12641,12642,12643,12644,12645,12646,4499,12647, #1260812648,12649,12650,12651,12652,12653,12654,12655,12656,12657,12658,12659,12660,12661,12662,12663, #1262412664,12665,12666,12667,12668,12669,12670,12671,12672,12673,12674,12675,12676,12677,12678,12679, 
#1264012680,12681,12682,12683,12684,12685,12686,12687,12688,12689,12690,12691,12692,12693,12694,12695, #1265612696,12697,12698,4992,12699,12700,12701,12702,12703,12704,12705,12706,12707,12708,12709,12710, #1267212711,12712,12713,12714,12715,12716,12717,12718,12719,12720,12721,12722,12723,12724,12725,12726, #1268812727,12728,12729,12730,12731,12732,12733,12734,12735,12736,12737,12738,12739,12740,12741,12742, #1270412743,12744,12745,12746,12747,12748,12749,12750,12751,12752,12753,12754,12755,12756,12757,12758, #1272012759,12760,12761,12762,12763,12764,12765,12766,12767,12768,12769,12770,12771,12772,12773,12774, #1273612775,12776,12777,12778,4993,2175,12779,12780,12781,12782,12783,12784,12785,12786,4500,12787, #1275212788,12789,12790,12791,12792,12793,12794,12795,12796,12797,12798,12799,12800,12801,12802,12803, #1276812804,12805,12806,12807,12808,12809,12810,12811,12812,12813,12814,12815,12816,12817,12818,12819, #1278412820,12821,12822,12823,12824,12825,12826,4198,3967,12827,12828,12829,12830,12831,12832,12833, #1280012834,12835,12836,12837,12838,12839,12840,12841,12842,12843,12844,12845,12846,12847,12848,12849, #1281612850,12851,12852,12853,12854,12855,12856,12857,12858,12859,12860,12861,4199,12862,12863,12864, #1283212865,12866,12867,12868,12869,12870,12871,12872,12873,12874,12875,12876,12877,12878,12879,12880, #1284812881,12882,12883,12884,12885,12886,12887,4501,12888,12889,12890,12891,12892,12893,12894,12895, #1286412896,12897,12898,12899,12900,12901,12902,12903,12904,12905,12906,12907,12908,12909,12910,12911, #1288012912,4994,12913,12914,12915,12916,12917,12918,12919,12920,12921,12922,12923,12924,12925,12926, #1289612927,12928,12929,12930,12931,12932,12933,12934,12935,12936,12937,12938,12939,12940,12941,12942, #1291212943,12944,12945,12946,12947,12948,12949,12950,12951,12952,12953,12954,12955,12956,1772,12957, #1292812958,12959,12960,12961,12962,12963,12964,12965,12966,12967,12968,12969,12970,12971,12972,12973, 
#1294412974,12975,12976,12977,12978,12979,12980,12981,12982,12983,12984,12985,12986,12987,12988,12989, #1296012990,12991,12992,12993,12994,12995,12996,12997,4502,12998,4503,12999,13000,13001,13002,13003, #129764504,13004,13005,13006,13007,13008,13009,13010,13011,13012,13013,13014,13015,13016,13017,13018, #1299213019,13020,13021,13022,13023,13024,13025,13026,13027,13028,13029,3449,13030,13031,13032,13033, #1300813034,13035,13036,13037,13038,13039,13040,13041,13042,13043,13044,13045,13046,13047,13048,13049, #1302413050,13051,13052,13053,13054,13055,13056,13057,13058,13059,13060,13061,13062,13063,13064,13065, #1304013066,13067,13068,13069,13070,13071,13072,13073,13074,13075,13076,13077,13078,13079,13080,13081, #1305613082,13083,13084,13085,13086,13087,13088,13089,13090,13091,13092,13093,13094,13095,13096,13097, #1307213098,13099,13100,13101,13102,13103,13104,13105,13106,13107,13108,13109,13110,13111,13112,13113, #1308813114,13115,13116,13117,13118,3968,13119,4995,13120,13121,13122,13123,13124,13125,13126,13127, #131044505,13128,13129,13130,13131,13132,13133,13134,4996,4506,13135,13136,13137,13138,13139,4997, #1312013140,13141,13142,13143,13144,13145,13146,13147,13148,13149,13150,13151,13152,13153,13154,13155, #1313613156,13157,13158,13159,4998,13160,13161,13162,13163,13164,13165,13166,13167,13168,13169,13170, #1315213171,13172,13173,13174,13175,13176,4999,13177,13178,13179,13180,13181,13182,13183,13184,13185, #1316813186,13187,13188,13189,13190,13191,13192,13193,13194,13195,13196,13197,13198,13199,13200,13201, #1318413202,13203,13204,13205,13206,5000,13207,13208,13209,13210,13211,13212,13213,13214,13215,13216, #1320013217,13218,13219,13220,13221,13222,13223,13224,13225,13226,13227,4200,5001,13228,13229,13230, #1321613231,13232,13233,13234,13235,13236,13237,13238,13239,13240,3969,13241,13242,13243,13244,3970, #1323213245,13246,13247,13248,13249,13250,13251,13252,13253,13254,13255,13256,13257,13258,13259,13260, 
#1324813261,13262,13263,13264,13265,13266,13267,13268,3450,13269,13270,13271,13272,13273,13274,13275, #1326413276,5002,13277,13278,13279,13280,13281,13282,13283,13284,13285,13286,13287,13288,13289,13290, #1328013291,13292,13293,13294,13295,13296,13297,13298,13299,13300,13301,13302,3813,13303,13304,13305, #1329613306,13307,13308,13309,13310,13311,13312,13313,13314,13315,13316,13317,13318,13319,13320,13321, #1331213322,13323,13324,13325,13326,13327,13328,4507,13329,13330,13331,13332,13333,13334,13335,13336, #1332813337,13338,13339,13340,13341,5003,13342,13343,13344,13345,13346,13347,13348,13349,13350,13351, #1334413352,13353,13354,13355,13356,13357,13358,13359,13360,13361,13362,13363,13364,13365,13366,13367, #133605004,13368,13369,13370,13371,13372,13373,13374,13375,13376,13377,13378,13379,13380,13381,13382, #1337613383,13384,13385,13386,13387,13388,13389,13390,13391,13392,13393,13394,13395,13396,13397,13398, #1339213399,13400,13401,13402,13403,13404,13405,13406,13407,13408,13409,13410,13411,13412,13413,13414, #1340813415,13416,13417,13418,13419,13420,13421,13422,13423,13424,13425,13426,13427,13428,13429,13430, #1342413431,13432,4508,13433,13434,13435,4201,13436,13437,13438,13439,13440,13441,13442,13443,13444, #1344013445,13446,13447,13448,13449,13450,13451,13452,13453,13454,13455,13456,13457,5005,13458,13459, #1345613460,13461,13462,13463,13464,13465,13466,13467,13468,13469,13470,4509,13471,13472,13473,13474, #1347213475,13476,13477,13478,13479,13480,13481,13482,13483,13484,13485,13486,13487,13488,13489,13490, #1348813491,13492,13493,13494,13495,13496,13497,13498,13499,13500,13501,13502,13503,13504,13505,13506, #1350413507,13508,13509,13510,13511,13512,13513,13514,13515,13516,13517,13518,13519,13520,13521,13522, #1352013523,13524,13525,13526,13527,13528,13529,13530,13531,13532,13533,13534,13535,13536,13537,13538, #1353613539,13540,13541,13542,13543,13544,13545,13546,13547,13548,13549,13550,13551,13552,13553,13554, 
#1355213555,13556,13557,13558,13559,13560,13561,13562,13563,13564,13565,13566,13567,13568,13569,13570, #1356813571,13572,13573,13574,13575,13576,13577,13578,13579,13580,13581,13582,13583,13584,13585,13586, #1358413587,13588,13589,13590,13591,13592,13593,13594,13595,13596,13597,13598,13599,13600,13601,13602, #1360013603,13604,13605,13606,13607,13608,13609,13610,13611,13612,13613,13614,13615,13616,13617,13618, #1361613619,13620,13621,13622,13623,13624,13625,13626,13627,13628,13629,13630,13631,13632,13633,13634, #1363213635,13636,13637,13638,13639,13640,13641,13642,5006,13643,13644,13645,13646,13647,13648,13649, #1364813650,13651,5007,13652,13653,13654,13655,13656,13657,13658,13659,13660,13661,13662,13663,13664, #1366413665,13666,13667,13668,13669,13670,13671,13672,13673,13674,13675,13676,13677,13678,13679,13680, #1368013681,13682,13683,13684,13685,13686,13687,13688,13689,13690,13691,13692,13693,13694,13695,13696, #1369613697,13698,13699,13700,13701,13702,13703,13704,13705,13706,13707,13708,13709,13710,13711,13712, #1371213713,13714,13715,13716,13717,13718,13719,13720,13721,13722,13723,13724,13725,13726,13727,13728, #1372813729,13730,13731,13732,13733,13734,13735,13736,13737,13738,13739,13740,13741,13742,13743,13744, #1374413745,13746,13747,13748,13749,13750,13751,13752,13753,13754,13755,13756,13757,13758,13759,13760, #1376013761,13762,13763,13764,13765,13766,13767,13768,13769,13770,13771,13772,13773,13774,3273,13775, #1377613776,13777,13778,13779,13780,13781,13782,13783,13784,13785,13786,13787,13788,13789,13790,13791, #1379213792,13793,13794,13795,13796,13797,13798,13799,13800,13801,13802,13803,13804,13805,13806,13807, #1380813808,13809,13810,13811,13812,13813,13814,13815,13816,13817,13818,13819,13820,13821,13822,13823, #1382413824,13825,13826,13827,13828,13829,13830,13831,13832,13833,13834,13835,13836,13837,13838,13839, #1384013840,13841,13842,13843,13844,13845,13846,13847,13848,13849,13850,13851,13852,13853,13854,13855, 
#1385613856,13857,13858,13859,13860,13861,13862,13863,13864,13865,13866,13867,13868,13869,13870,13871, #1387213872,13873,13874,13875,13876,13877,13878,13879,13880,13881,13882,13883,13884,13885,13886,13887, #1388813888,13889,13890,13891,13892,13893,13894,13895,13896,13897,13898,13899,13900,13901,13902,13903, #1390413904,13905,13906,13907,13908,13909,13910,13911,13912,13913,13914,13915,13916,13917,13918,13919, #1392013920,13921,13922,13923,13924,13925,13926,13927,13928,13929,13930,13931,13932,13933,13934,13935, #1393613936,13937,13938,13939,13940,13941,13942,13943,13944,13945,13946,13947,13948,13949,13950,13951, #1395213952,13953,13954,13955,13956,13957,13958,13959,13960,13961,13962,13963,13964,13965,13966,13967, #1396813968,13969,13970,13971,13972) #13973# flake8: noqa
######################## BEGIN LICENSE BLOCK ######################### This library is free software; you can redistribute it and/or# modify it under the terms of the GNU Lesser General Public# License as published by the Free Software Foundation; either# version 2.1 of the License, or (at your option) any later version.## This library is distributed in the hope that it will be useful,# but WITHOUT ANY WARRANTY; without even the implied warranty of# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU# Lesser General Public License for more details.## You should have received a copy of the GNU Lesser General Public# License along with this library; if not, write to the Free Software# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA# 02110-1301 USA######################### END LICENSE BLOCK #########################__version__ = "2.3.0"from sys import version_infodef detect(aBuf):if ((version_info < (3, 0) and isinstance(aBuf, unicode)) or(version_info >= (3, 0) and not isinstance(aBuf, bytes))):raise ValueError('Expected a bytes object, not a unicode object')from . import universaldetectoru = universaldetector.UniversalDetector()u.reset()u.feed(aBuf)u.close()return u.result
"""Stub module that aliases requests' vendored dependencies.

Debian and other distributions "unbundle" requests' vendored dependencies, and
rewrite all imports to use the global versions of ``urllib3`` and ``chardet``.
The problem with this is that not only requests itself imports those
dependencies, but third-party code outside of the distros' control too.

In reaction to these problems, the distro maintainers replaced
``requests.packages`` with a magical "stub module" that imports the correct
modules.  The implementations were varying in quality and all had severe
problems.  For example, a symlink (or hardlink) that links the correct modules
into place introduces problems regarding object identity, since you now have
two modules in ``sys.modules`` with the same API, but different identities::

    requests.packages.urllib3 is not urllib3

With version ``2.5.2``, requests started to maintain its own stub, so that
distro-specific breakage would be reduced to a minimum, even though the whole
issue is not requests' fault in the first place.  See
https://github.com/kennethreitz/requests/pull/2375 for the corresponding pull
request.
"""

from __future__ import absolute_import

import sys

# Prefer the vendored copy; on unbundled installs fall back to the global
# module and register it under the vendored name so that both import paths
# resolve to the very same module object.
try:
    from . import urllib3
except ImportError:
    import urllib3
    sys.modules['%s.urllib3' % __name__] = urllib3

try:
    from . import chardet
except ImportError:
    import chardet
    sys.modules['%s.chardet' % __name__] = chardet
# -*- coding: utf-8 -*-"""requests.models~~~~~~~~~~~~~~~This module contains the primary objects that power Requests."""import collectionsimport datetimefrom io import BytesIO, UnsupportedOperationfrom .hooks import default_hooksfrom .structures import CaseInsensitiveDictfrom .auth import HTTPBasicAuthfrom .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jarfrom .packages.urllib3.fields import RequestFieldfrom .packages.urllib3.filepost import encode_multipart_formdatafrom .packages.urllib3.util import parse_urlfrom .packages.urllib3.exceptions import (DecodeError, ReadTimeoutError, ProtocolError, LocationParseError)from .exceptions import (HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError,ContentDecodingError, ConnectionError, StreamConsumedError)from .utils import (guess_filename, get_auth_from_url, requote_uri,stream_decode_response_unicode, to_key_val_list, parse_header_links,iter_slices, guess_json_utf, super_len, to_native_string,check_header_validity)from .compat import (cookielib, urlunparse, urlsplit, urlencode, str, bytes, StringIO,is_py2, chardet, builtin_str, basestring)from .compat import json as complexjsonfrom .status_codes import codes#: The set of HTTP status codes that indicate an automatically#: processable redirect.REDIRECT_STATI = (codes.moved, # 301codes.found, # 302codes.other, # 303codes.temporary_redirect, # 307codes.permanent_redirect, # 308)DEFAULT_REDIRECT_LIMIT = 30CONTENT_CHUNK_SIZE = 10 * 1024ITER_CHUNK_SIZE = 512class RequestEncodingMixin(object):@propertydef path_url(self):"""Build the path URL to use."""url = []p = urlsplit(self.url)path = p.pathif not path:path = '/'url.append(path)query = p.queryif query:url.append('?')url.append(query)return ''.join(url)@staticmethoddef _encode_params(data):"""Encode parameters in a piece of data.Will successfully encode parameters when passed as a dict or a list of2-tuples. 
Order is retained if data is a list of 2-tuples but arbitraryif parameters are supplied as a dict."""if isinstance(data, (str, bytes)):return dataelif hasattr(data, 'read'):return dataelif hasattr(data, '__iter__'):result = []for k, vs in to_key_val_list(data):if isinstance(vs, basestring) or not hasattr(vs, '__iter__'):vs = [vs]for v in vs:if v is not None:result.append((k.encode('utf-8') if isinstance(k, str) else k,v.encode('utf-8') if isinstance(v, str) else v))return urlencode(result, doseq=True)else:return data@staticmethoddef _encode_files(files, data):"""Build the body for a multipart/form-data request.Will successfully encode files when passed as a dict or a list oftuples. Order is retained if data is a list of tuples but arbitraryif parameters are supplied as a dict.The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype)or 4-tuples (filename, fileobj, contentype, custom_headers)."""if (not files):raise ValueError("Files must be provided.")elif isinstance(data, basestring):raise ValueError("Data must not be a string.")new_fields = []fields = to_key_val_list(data or {})files = to_key_val_list(files or {})for field, val in fields:if isinstance(val, basestring) or not hasattr(val, '__iter__'):val = [val]for v in val:if v is not None:# Don't call str() on bytestrings: in Py3 it all goes wrong.if not isinstance(v, bytes):v = str(v)new_fields.append((field.decode('utf-8') if isinstance(field, bytes) else field,v.encode('utf-8') if isinstance(v, str) else v))for (k, v) in files:# support for explicit filenameft = Nonefh = Noneif isinstance(v, (tuple, list)):if len(v) == 2:fn, fp = velif len(v) == 3:fn, fp, ft = velse:fn, fp, ft, fh = velse:fn = guess_filename(v) or kfp = vif isinstance(fp, (str, bytes, bytearray)):fdata = fpelse:fdata = fp.read()rf = RequestField(name=k, data=fdata, filename=fn, headers=fh)rf.make_multipart(content_type=ft)new_fields.append(rf)body, content_type = encode_multipart_formdata(new_fields)return body, 
content_typeclass RequestHooksMixin(object):def register_hook(self, event, hook):"""Properly register a hook."""if event not in self.hooks:raise ValueError('Unsupported event specified, with event name "%s"' % (event))if isinstance(hook, collections.Callable):self.hooks[event].append(hook)elif hasattr(hook, '__iter__'):self.hooks[event].extend(h for h in hook if isinstance(h, collections.Callable))def deregister_hook(self, event, hook):"""Deregister a previously registered hook.Returns True if the hook existed, False if not."""try:self.hooks[event].remove(hook)return Trueexcept ValueError:return Falseclass Request(RequestHooksMixin):"""A user-created :class:`Request <Request>` object.Used to prepare a :class:`PreparedRequest <PreparedRequest>`, which is sent to the server.:param method: HTTP method to use.:param url: URL to send.:param headers: dictionary of headers to send.:param files: dictionary of {filename: fileobject} files to multipart upload.:param data: the body to attach to the request. 
If a dictionary is provided, form-encoding will take place.:param json: json for the body to attach to the request (if files or data is not specified).:param params: dictionary of URL parameters to append to the URL.:param auth: Auth handler or (user, pass) tuple.:param cookies: dictionary or CookieJar of cookies to attach to this request.:param hooks: dictionary of callback hooks, for internal usage.Usage::>>> import requests>>> req = requests.Request('GET', 'http://httpbin.org/get')>>> req.prepare()<PreparedRequest [GET]>"""def __init__(self, method=None, url=None, headers=None, files=None,data=None, params=None, auth=None, cookies=None, hooks=None, json=None):# Default empty dicts for dict params.data = [] if data is None else datafiles = [] if files is None else filesheaders = {} if headers is None else headersparams = {} if params is None else paramshooks = {} if hooks is None else hooksself.hooks = default_hooks()for (k, v) in list(hooks.items()):self.register_hook(event=k, hook=v)self.method = methodself.url = urlself.headers = headersself.files = filesself.data = dataself.json = jsonself.params = paramsself.auth = authself.cookies = cookiesdef __repr__(self):return '<Request [%s]>' % (self.method)def prepare(self):"""Constructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it."""p = PreparedRequest()p.prepare(method=self.method,url=self.url,headers=self.headers,files=self.files,data=self.data,json=self.json,params=self.params,auth=self.auth,cookies=self.cookies,hooks=self.hooks,)return pclass PreparedRequest(RequestEncodingMixin, RequestHooksMixin):"""The fully mutable :class:`PreparedRequest <PreparedRequest>` object,containing the exact bytes that will be sent to the server.Generated from either a :class:`Request <Request>` object or manually.Usage::>>> import requests>>> req = requests.Request('GET', 'http://httpbin.org/get')>>> r = req.prepare()<PreparedRequest [GET]>>>> s = requests.Session()>>> s.send(r)<Response 
[200]>"""def __init__(self):#: HTTP verb to send to the server.self.method = None#: HTTP URL to send the request to.self.url = None#: dictionary of HTTP headers.self.headers = None# The `CookieJar` used to create the Cookie header will be stored here# after prepare_cookies is calledself._cookies = None#: request body to send to the server.self.body = None#: dictionary of callback hooks, for internal usage.self.hooks = default_hooks()def prepare(self, method=None, url=None, headers=None, files=None,data=None, params=None, auth=None, cookies=None, hooks=None, json=None):"""Prepares the entire request with the given parameters."""self.prepare_method(method)self.prepare_url(url, params)self.prepare_headers(headers)self.prepare_cookies(cookies)self.prepare_body(data, files, json)self.prepare_auth(auth, url)# Note that prepare_auth must be last to enable authentication schemes# such as OAuth to work on a fully prepared request.# This MUST go after prepare_auth. Authenticators could add a hookself.prepare_hooks(hooks)def __repr__(self):return '<PreparedRequest [%s]>' % (self.method)def copy(self):p = PreparedRequest()p.method = self.methodp.url = self.urlp.headers = self.headers.copy() if self.headers is not None else Nonep._cookies = _copy_cookie_jar(self._cookies)p.body = self.bodyp.hooks = self.hooksreturn pdef prepare_method(self, method):"""Prepares the given HTTP method."""self.method = methodif self.method is not None:self.method = to_native_string(self.method.upper())def prepare_url(self, url, params):"""Prepares the given HTTP URL."""#: Accept objects that have string representations.#: We're unable to blindly call unicode/str functions#: as this will include the bytestring indicator (b'')#: on python 3.x.#: https://github.com/kennethreitz/requests/pull/2238if isinstance(url, bytes):url = url.decode('utf8')else:url = unicode(url) if is_py2 else str(url)# Don't do any URL preparation for non-HTTP schemes like `mailto`,# `data` etc to work around exceptions from 
`url_parse`, which# handles RFC 3986 only.if ':' in url and not url.lower().startswith('http'):self.url = urlreturn# Support for unicode domain names and paths.try:scheme, auth, host, port, path, query, fragment = parse_url(url)except LocationParseError as e:raise InvalidURL(*e.args)if not scheme:error = ("Invalid URL {0!r}: No schema supplied. Perhaps you meant http://{0}?")error = error.format(to_native_string(url, 'utf8'))raise MissingSchema(error)if not host:raise InvalidURL("Invalid URL %r: No host supplied" % url)# Only want to apply IDNA to the hostnametry:host = host.encode('idna').decode('utf-8')except UnicodeError:raise InvalidURL('URL has an invalid label.')# Carefully reconstruct the network locationnetloc = auth or ''if netloc:netloc += '@'netloc += hostif port:netloc += ':' + str(port)# Bare domains aren't valid URLs.if not path:path = '/'if is_py2:if isinstance(scheme, str):scheme = scheme.encode('utf-8')if isinstance(netloc, str):netloc = netloc.encode('utf-8')if isinstance(path, str):path = path.encode('utf-8')if isinstance(query, str):query = query.encode('utf-8')if isinstance(fragment, str):fragment = fragment.encode('utf-8')if isinstance(params, (str, bytes)):params = to_native_string(params)enc_params = self._encode_params(params)if enc_params:if query:query = '%s&%s' % (query, enc_params)else:query = enc_paramsurl = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment]))self.url = urldef prepare_headers(self, headers):"""Prepares the given HTTP headers."""self.headers = CaseInsensitiveDict()if headers:for header in headers.items():# Raise exception on invalid header value.check_header_validity(header)name, value = headerself.headers[to_native_string(name)] = valuedef prepare_body(self, data, files, json=None):"""Prepares the given HTTP body data."""# Check if file, fo, generator, iterator.# If not, run through normal process.# Nottin' on you.body = Nonecontent_type = Nonelength = Noneif not data and json is not None:# urllib3 
requires a bytes-like body. Python 2's json.dumps# provides this natively, but Python 3 gives a Unicode string.content_type = 'application/json'body = complexjson.dumps(json)if not isinstance(body, bytes):body = body.encode('utf-8')is_stream = all([hasattr(data, '__iter__'),not isinstance(data, (basestring, list, tuple, dict))])try:length = super_len(data)except (TypeError, AttributeError, UnsupportedOperation):length = Noneif is_stream:body = dataif files:raise NotImplementedError('Streamed bodies and files are mutually exclusive.')if length:self.headers['Content-Length'] = builtin_str(length)else:self.headers['Transfer-Encoding'] = 'chunked'else:# Multi-part file uploads.if files:(body, content_type) = self._encode_files(files, data)else:if data:body = self._encode_params(data)if isinstance(data, basestring) or hasattr(data, 'read'):content_type = Noneelse:content_type = 'application/x-www-form-urlencoded'self.prepare_content_length(body)# Add content-type if it wasn't explicitly provided.if content_type and ('content-type' not in self.headers):self.headers['Content-Type'] = content_typeself.body = bodydef prepare_content_length(self, body):if hasattr(body, 'seek') and hasattr(body, 'tell'):curr_pos = body.tell()body.seek(0, 2)end_pos = body.tell()self.headers['Content-Length'] = builtin_str(max(0, end_pos - curr_pos))body.seek(curr_pos, 0)elif body is not None:l = super_len(body)if l:self.headers['Content-Length'] = builtin_str(l)elif (self.method not in ('GET', 'HEAD')) and (self.headers.get('Content-Length') is None):self.headers['Content-Length'] = '0'def prepare_auth(self, auth, url=''):"""Prepares the given HTTP auth data."""# If no Auth is explicitly provided, extract it from the URL first.if auth is None:url_auth = get_auth_from_url(self.url)auth = url_auth if any(url_auth) else Noneif auth:if isinstance(auth, tuple) and len(auth) == 2:# special-case basic HTTP authauth = HTTPBasicAuth(*auth)# Allow auth to make its changes.r = auth(self)# Update self to 
reflect the auth changes.self.__dict__.update(r.__dict__)# Recompute Content-Lengthself.prepare_content_length(self.body)def prepare_cookies(self, cookies):"""Prepares the given HTTP cookie data.This function eventually generates a ``Cookie`` header from thegiven cookies using cookielib. Due to cookielib's design, the headerwill not be regenerated if it already exists, meaning this functioncan only be called once for the life of the:class:`PreparedRequest <PreparedRequest>` object. Any subsequent callsto ``prepare_cookies`` will have no actual effect, unless the "Cookie"header is removed beforehand."""if isinstance(cookies, cookielib.CookieJar):self._cookies = cookieselse:self._cookies = cookiejar_from_dict(cookies)cookie_header = get_cookie_header(self._cookies, self)if cookie_header is not None:self.headers['Cookie'] = cookie_headerdef prepare_hooks(self, hooks):"""Prepares the given hooks."""# hooks can be passed as None to the prepare method and to this# method. To prevent iterating over None, simply use an empty list# if hooks is False-yhooks = hooks or []for event in hooks:self.register_hook(event, hooks[event])class Response(object):"""The :class:`Response <Response>` object, which contains aserver's response to an HTTP request."""__attrs__ = ['_content', 'status_code', 'headers', 'url', 'history','encoding', 'reason', 'cookies', 'elapsed', 'request']def __init__(self):super(Response, self).__init__()self._content = Falseself._content_consumed = False#: Integer Code of responded HTTP Status, e.g. 
404 or 200.self.status_code = None#: Case-insensitive Dictionary of Response Headers.#: For example, ``headers['content-encoding']`` will return the#: value of a ``'Content-Encoding'`` response header.self.headers = CaseInsensitiveDict()#: File-like object representation of response (for advanced usage).#: Use of ``raw`` requires that ``stream=True`` be set on the request.# This requirement does not apply for use internally to Requests.self.raw = None#: Final URL location of Response.self.url = None#: Encoding to decode with when accessing r.text.self.encoding = None#: A list of :class:`Response <Response>` objects from#: the history of the Request. Any redirect responses will end#: up here. The list is sorted from the oldest to the most recent request.self.history = []#: Textual reason of responded HTTP Status, e.g. "Not Found" or "OK".self.reason = None#: A CookieJar of Cookies the server sent back.self.cookies = cookiejar_from_dict({})#: The amount of time elapsed between sending the request#: and the arrival of the response (as a timedelta).#: This property specifically measures the time taken between sending#: the first byte of the request and finishing parsing the headers. 
It#: is therefore unaffected by consuming the response content or the#: value of the ``stream`` keyword argument.self.elapsed = datetime.timedelta(0)#: The :class:`PreparedRequest <PreparedRequest>` object to which this#: is a response.self.request = Nonedef __getstate__(self):# Consume everything; accessing the content attribute makes# sure the content has been fully read.if not self._content_consumed:self.contentreturn dict((attr, getattr(self, attr, None))for attr in self.__attrs__)def __setstate__(self, state):for name, value in state.items():setattr(self, name, value)# pickled objects do not have .rawsetattr(self, '_content_consumed', True)setattr(self, 'raw', None)def __repr__(self):return '<Response [%s]>' % (self.status_code)def __bool__(self):"""Returns true if :attr:`status_code` is 'OK'."""return self.okdef __nonzero__(self):"""Returns true if :attr:`status_code` is 'OK'."""return self.okdef __iter__(self):"""Allows you to use a response as an iterator."""return self.iter_content(128)@propertydef ok(self):try:self.raise_for_status()except HTTPError:return Falsereturn True@propertydef is_redirect(self):"""True if this Response is a well-formed HTTP redirect that could havebeen processed automatically (by :meth:`Session.resolve_redirects`)."""return ('location' in self.headers and self.status_code in REDIRECT_STATI)@propertydef is_permanent_redirect(self):"""True if this Response one of the permanent versions of redirect"""return ('location' in self.headers and self.status_code in (codes.moved_permanently, codes.permanent_redirect))@propertydef apparent_encoding(self):"""The apparent encoding, provided by the chardet library"""return chardet.detect(self.content)['encoding']def iter_content(self, chunk_size=1, decode_unicode=False):"""Iterates over the response data. When stream=True is set on therequest, this avoids reading the content at once into memory forlarge responses. The chunk size is the number of bytes it shouldread into memory. 
This is not necessarily the length of each itemreturned as decoding can take place.chunk_size must be of type int or None. A value of None willfunction differently depending on the value of `stream`.stream=True will read data as it arrives in whatever size thechunks are received. If stream=False, data is returned asa single chunk.If decode_unicode is True, content will be decoded using the bestavailable encoding based on the response."""def generate():# Special case for urllib3.if hasattr(self.raw, 'stream'):try:for chunk in self.raw.stream(chunk_size, decode_content=True):yield chunkexcept ProtocolError as e:raise ChunkedEncodingError(e)except DecodeError as e:raise ContentDecodingError(e)except ReadTimeoutError as e:raise ConnectionError(e)else:# Standard file-like object.while True:chunk = self.raw.read(chunk_size)if not chunk:breakyield chunkself._content_consumed = Trueif self._content_consumed and isinstance(self._content, bool):raise StreamConsumedError()elif chunk_size is not None and not isinstance(chunk_size, int):raise TypeError("chunk_size must be an int, it is instead a %s." % type(chunk_size))# simulate reading small chunks of the contentreused_chunks = iter_slices(self._content, chunk_size)stream_chunks = generate()chunks = reused_chunks if self._content_consumed else stream_chunksif decode_unicode:chunks = stream_decode_response_unicode(chunks, self)return chunksdef iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=None, delimiter=None):"""Iterates over the response data, one line at a time. Whenstream=True is set on the request, this avoids reading thecontent at once into memory for large responses... 
note:: This method is not reentrant safe."""pending = Nonefor chunk in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode):if pending is not None:chunk = pending + chunkif delimiter:lines = chunk.split(delimiter)else:lines = chunk.splitlines()if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]:pending = lines.pop()else:pending = Nonefor line in lines:yield lineif pending is not None:yield pending@propertydef content(self):"""Content of the response, in bytes."""if self._content is False:# Read the contents.try:if self._content_consumed:raise RuntimeError('The content for this response was already consumed')if self.status_code == 0:self._content = Noneelse:self._content = bytes().join(self.iter_content(CONTENT_CHUNK_SIZE)) or bytes()except AttributeError:self._content = Noneself._content_consumed = True# don't need to release the connection; that's been handled by urllib3# since we exhausted the data.return self._content@propertydef text(self):"""Content of the response, in unicode.If Response.encoding is None, encoding will be guessed using``chardet``.The encoding of the response content is determined based solely on HTTPheaders, following RFC 2616 to the letter. 
If you can take advantage ofnon-HTTP knowledge to make a better guess at the encoding, you shouldset ``r.encoding`` appropriately before accessing this property."""# Try charset from content-typecontent = Noneencoding = self.encodingif not self.content:return str('')# Fallback to auto-detected encoding.if self.encoding is None:encoding = self.apparent_encoding# Decode unicode from given encoding.try:content = str(self.content, encoding, errors='replace')except (LookupError, TypeError):# A LookupError is raised if the encoding was not found which could# indicate a misspelling or similar mistake.## A TypeError can be raised if encoding is None## So we try blindly encoding.content = str(self.content, errors='replace')return contentdef json(self, **kwargs):"""Returns the json-encoded content of a response, if any.:param \*\*kwargs: Optional arguments that ``json.loads`` takes."""if not self.encoding and self.content and len(self.content) > 3:# No encoding set. JSON RFC 4627 section 3 states we should expect# UTF-8, -16 or -32. Detect which one to use; If the detection or# decoding fails, fall back to `self.text` (using chardet to make# a best guess).encoding = guess_json_utf(self.content)if encoding is not None:try:return complexjson.loads(self.content.decode(encoding), **kwargs)except UnicodeDecodeError:# Wrong UTF codec detected; usually because it's not UTF-8# but some other 8-bit codec. 
This is an RFC violation,# and the server didn't bother to tell us what codec *was*# used.passreturn complexjson.loads(self.text, **kwargs)@propertydef links(self):"""Returns the parsed header links of the response, if any."""header = self.headers.get('link')# l = MultiDict()l = {}if header:links = parse_header_links(header)for link in links:key = link.get('rel') or link.get('url')l[key] = linkreturn ldef raise_for_status(self):"""Raises stored :class:`HTTPError`, if one occurred."""http_error_msg = ''if isinstance(self.reason, bytes):reason = self.reason.decode('utf-8', 'ignore')else:reason = self.reasonif 400 <= self.status_code < 500:http_error_msg = u'%s Client Error: %s for url: %s' % (self.status_code, reason, self.url)elif 500 <= self.status_code < 600:http_error_msg = u'%s Server Error: %s for url: %s' % (self.status_code, reason, self.url)if http_error_msg:raise HTTPError(http_error_msg, response=self)def close(self):"""Releases the connection back to the pool. Once this method has beencalled the underlying ``raw`` object must not be accessed again.*Note: Should not normally need to be called explicitly.*"""if not self._content_consumed:self.raw.close()return self.raw.release_conn()
# -*- coding: utf-8 -*-

"""
requests.hooks
~~~~~~~~~~~~~~

This module provides the capabilities for the Requests hooks system.

Available hooks:

``response``:
    The response generated from a Request.
"""

# The set of events a hook may be registered for.  At present ``response``
# is the only one.
HOOKS = ['response']


def default_hooks():
    """Return a fresh hook registry: one empty list per known event."""
    return {event: [] for event in HOOKS}


def dispatch_hook(key, hooks, hook_data, **kwargs):
    """Dispatches a hook dictionary on a given piece of data.

    Each registered hook for ``key`` is called with ``hook_data``; if a
    hook returns a non-None value, that value replaces ``hook_data`` for
    the remaining hooks and is what gets returned.
    """
    registered = (hooks or {}).get(key)
    if registered:
        # A single callable is treated as a one-element hook list.
        if hasattr(registered, '__call__'):
            registered = [registered]
        for hook in registered:
            result = hook(hook_data, **kwargs)
            if result is not None:
                hook_data = result
    return hook_data
# -*- coding: utf-8 -*-

"""
requests.exceptions
~~~~~~~~~~~~~~~~~~~

This module contains the set of Requests' exceptions.
"""
from .packages.urllib3.exceptions import HTTPError as BaseHTTPError


class RequestException(IOError):
    """There was an ambiguous exception that occurred while handling your
    request.
    """

    def __init__(self, *args, **kwargs):
        """Initialize RequestException with `request` and `response` objects."""
        response = kwargs.pop('response', None)
        self.response = response
        self.request = kwargs.pop('request', None)
        # Fall back to the response's own request when the caller did not
        # supply one explicitly.
        if (response is not None and not self.request and
                hasattr(response, 'request')):
            self.request = self.response.request
        super(RequestException, self).__init__(*args, **kwargs)


class HTTPError(RequestException):
    """An HTTP error occurred."""


class ConnectionError(RequestException):
    """A Connection error occurred."""


class ProxyError(ConnectionError):
    """A proxy error occurred."""


class SSLError(ConnectionError):
    """An SSL error occurred."""


class Timeout(RequestException):
    """The request timed out.

    Catching this error will catch both
    :exc:`~requests.exceptions.ConnectTimeout` and
    :exc:`~requests.exceptions.ReadTimeout` errors.
    """


class ConnectTimeout(ConnectionError, Timeout):
    """The request timed out while trying to connect to the remote server.

    Requests that produced this error are safe to retry.
    """


class ReadTimeout(Timeout):
    """The server did not send any data in the allotted amount of time."""


class URLRequired(RequestException):
    """A valid URL is required to make a request."""


class TooManyRedirects(RequestException):
    """Too many redirects."""


class MissingSchema(RequestException, ValueError):
    """The URL schema (e.g. http or https) is missing."""


class InvalidSchema(RequestException, ValueError):
    """See defaults.py for valid schemas."""


class InvalidURL(RequestException, ValueError):
    """The URL provided was somehow invalid."""


class InvalidHeader(RequestException, ValueError):
    """The header value provided was somehow invalid."""


class ChunkedEncodingError(RequestException):
    """The server declared chunked encoding but sent an invalid chunk."""


class ContentDecodingError(RequestException, BaseHTTPError):
    """Failed to decode response content"""


class StreamConsumedError(RequestException, TypeError):
    """The content for this response was already consumed"""


class RetryError(RequestException):
    """Custom retries logic failed"""


# Warnings


class RequestsWarning(Warning):
    """Base warning for Requests."""
    pass


class FileModeWarning(RequestsWarning, DeprecationWarning):
    """A file was opened in text mode, but Requests determined its binary
    length.
    """
    pass
# -*- coding: utf-8 -*-

"""
requests.cookies
~~~~~~~~~~~~~~~~

Compatibility code to be able to use `cookielib.CookieJar` with requests.

requests.utils imports from here, so be careful with imports.
"""

import copy
import time
import calendar
import collections

from .compat import cookielib, urlparse, urlunparse, Morsel

try:
    import threading
    # grr, pyflakes: this fixes "redefinition of unused 'threading'"
    threading
except ImportError:
    import dummy_threading as threading


class MockRequest(object):
    """Wraps a `requests.Request` to mimic a `urllib2.Request`.

    The code in `cookielib.CookieJar` expects this interface in order to correctly
    manage cookie policies, i.e., determine whether a cookie can be set, given the
    domains of the request and the cookie.

    The original request object is read-only. The client is responsible for collecting
    the new headers via `get_new_headers()` and interpreting them appropriately. You
    probably want `get_cookie_header`, defined below.
    """

    def __init__(self, request):
        self._r = request
        self._new_headers = {}
        self.type = urlparse(self._r.url).scheme

    def get_type(self):
        return self.type

    def get_host(self):
        return urlparse(self._r.url).netloc

    def get_origin_req_host(self):
        return self.get_host()

    def get_full_url(self):
        # Only return the response's URL if the user hadn't set the Host
        # header
        if not self._r.headers.get('Host'):
            return self._r.url
        # If they did set it, retrieve it and reconstruct the expected domain
        host = self._r.headers['Host']
        parsed = urlparse(self._r.url)
        # Reconstruct the URL as we expect it
        return urlunparse([
            parsed.scheme, host, parsed.path, parsed.params, parsed.query,
            parsed.fragment])

    def is_unverifiable(self):
        return True

    def has_header(self, name):
        return name in self._r.headers or name in self._new_headers

    def get_header(self, name, default=None):
        return self._r.headers.get(name, self._new_headers.get(name, default))

    def add_header(self, key, val):
        """cookielib has no legitimate use for this method; add it back if you find one."""
        raise NotImplementedError("Cookie headers should be added with add_unredirected_header()")

    def add_unredirected_header(self, name, value):
        self._new_headers[name] = value

    def get_new_headers(self):
        return self._new_headers

    @property
    def unverifiable(self):
        return self.is_unverifiable()

    @property
    def origin_req_host(self):
        return self.get_origin_req_host()

    @property
    def host(self):
        return self.get_host()


class MockResponse(object):
    """Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`.

    ...what? Basically, expose the parsed HTTP headers from the server response
    the way `cookielib` expects to see them.
    """

    def __init__(self, headers):
        """Make a MockResponse for `cookielib` to read.

        :param headers: a httplib.HTTPMessage or analogous carrying the headers
        """
        self._headers = headers

    def info(self):
        return self._headers

    def getheaders(self, name):
        # BUG FIX: the original version dropped the return value and always
        # returned None, hiding the headers from any caller of getheaders().
        return self._headers.getheaders(name)


def extract_cookies_to_jar(jar, request, response):
    """Extract the cookies from the response into a CookieJar.

    :param jar: cookielib.CookieJar (not necessarily a RequestsCookieJar)
    :param request: our own requests.Request object
    :param response: urllib3.HTTPResponse object
    """
    if not (hasattr(response, '_original_response') and
            response._original_response):
        return
    # the _original_response field is the wrapped httplib.HTTPResponse object,
    req = MockRequest(request)
    # pull out the HTTPMessage with the headers and put it in the mock:
    res = MockResponse(response._original_response.msg)
    jar.extract_cookies(res, req)


def get_cookie_header(jar, request):
    """Produce an appropriate Cookie header string to be sent with `request`, or None.

    :rtype: str
    """
    r = MockRequest(request)
    jar.add_cookie_header(r)
    return r.get_new_headers().get('Cookie')


def remove_cookie_by_name(cookiejar, name, domain=None, path=None):
    """Unsets a cookie by name, by default over all domains and paths.

    Wraps CookieJar.clear(), is O(n).
    """
    clearables = []
    for cookie in cookiejar:
        if cookie.name != name:
            continue
        if domain is not None and domain != cookie.domain:
            continue
        if path is not None and path != cookie.path:
            continue
        clearables.append((cookie.domain, cookie.path, cookie.name))

    for domain, path, name in clearables:
        cookiejar.clear(domain, path, name)


class CookieConflictError(RuntimeError):
    """There are two cookies that meet the criteria specified in the cookie jar.
    Use .get and .set and include domain and path args in order to be more specific.
    """


class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping):
    """Compatibility class; is a cookielib.CookieJar, but exposes a dict
    interface.

    This is the CookieJar we create by default for requests and sessions that
    don't specify one, since some clients may expect response.cookies and
    session.cookies to support dict operations.

    Requests does not use the dict interface internally; it's just for
    compatibility with external client code. All requests code should work
    out of the box with externally provided instances of ``CookieJar``, e.g.
    ``LWPCookieJar`` and ``FileCookieJar``.

    Unlike a regular CookieJar, this class is pickleable.

    .. warning:: dictionary operations that are normally O(1) may be O(n).
    """

    def get(self, name, default=None, domain=None, path=None):
        """Dict-like get() that also supports optional domain and path args in
        order to resolve naming collisions from using one cookie jar over
        multiple domains.

        .. warning:: operation is O(n), not O(1).
        """
        try:
            return self._find_no_duplicates(name, domain, path)
        except KeyError:
            return default

    def set(self, name, value, **kwargs):
        """Dict-like set() that also supports optional domain and path args in
        order to resolve naming collisions from using one cookie jar over
        multiple domains.
        """
        # support client code that unsets cookies by assignment of a None value:
        if value is None:
            remove_cookie_by_name(self, name, domain=kwargs.get('domain'), path=kwargs.get('path'))
            return

        if isinstance(value, Morsel):
            c = morsel_to_cookie(value)
        else:
            c = create_cookie(name, value, **kwargs)
        self.set_cookie(c)
        return c

    def iterkeys(self):
        """Dict-like iterkeys() that returns an iterator of names of cookies
        from the jar.

        .. seealso:: itervalues() and iteritems().
        """
        for cookie in iter(self):
            yield cookie.name

    def keys(self):
        """Dict-like keys() that returns a list of names of cookies from the
        jar.

        .. seealso:: values() and items().
        """
        return list(self.iterkeys())

    def itervalues(self):
        """Dict-like itervalues() that returns an iterator of values of cookies
        from the jar.

        .. seealso:: iterkeys() and iteritems().
        """
        for cookie in iter(self):
            yield cookie.value

    def values(self):
        """Dict-like values() that returns a list of values of cookies from the
        jar.

        .. seealso:: keys() and items().
        """
        return list(self.itervalues())

    def iteritems(self):
        """Dict-like iteritems() that returns an iterator of name-value tuples
        from the jar.

        .. seealso:: iterkeys() and itervalues().
        """
        for cookie in iter(self):
            yield cookie.name, cookie.value

    def items(self):
        """Dict-like items() that returns a list of name-value tuples from the
        jar. Allows client-code to call ``dict(RequestsCookieJar)`` and get a
        vanilla python dict of key value pairs.

        .. seealso:: keys() and values().
        """
        return list(self.iteritems())

    def list_domains(self):
        """Utility method to list all the domains in the jar."""
        domains = []
        for cookie in iter(self):
            if cookie.domain not in domains:
                domains.append(cookie.domain)
        return domains

    def list_paths(self):
        """Utility method to list all the paths in the jar."""
        paths = []
        for cookie in iter(self):
            if cookie.path not in paths:
                paths.append(cookie.path)
        return paths

    def multiple_domains(self):
        """Returns True if there are multiple domains in the jar.
        Returns False otherwise.

        :rtype: bool
        """
        domains = []
        for cookie in iter(self):
            if cookie.domain is not None and cookie.domain in domains:
                return True
            domains.append(cookie.domain)
        return False  # there is only one domain in jar

    def get_dict(self, domain=None, path=None):
        """Takes as an argument an optional domain and path and returns a plain
        old Python dict of name-value pairs of cookies that meet the
        requirements.

        :rtype: dict
        """
        dictionary = {}
        for cookie in iter(self):
            if (domain is None or cookie.domain == domain) and (path is None
                                                                or cookie.path == path):
                dictionary[cookie.name] = cookie.value
        return dictionary

    def __contains__(self, name):
        try:
            return super(RequestsCookieJar, self).__contains__(name)
        except CookieConflictError:
            return True

    def __getitem__(self, name):
        """Dict-like __getitem__() for compatibility with client code. Throws
        exception if there are more than one cookie with name. In that case,
        use the more explicit get() method instead.

        .. warning:: operation is O(n), not O(1).
        """
        return self._find_no_duplicates(name)

    def __setitem__(self, name, value):
        """Dict-like __setitem__ for compatibility with client code. Throws
        exception if there is already a cookie of that name in the jar. In that
        case, use the more explicit set() method instead.
        """
        self.set(name, value)

    def __delitem__(self, name):
        """Deletes a cookie given a name. Wraps ``cookielib.CookieJar``'s
        ``remove_cookie_by_name()``.
        """
        remove_cookie_by_name(self, name)

    def set_cookie(self, cookie, *args, **kwargs):
        # Strip escaped quotes out of quoted cookie values before storing.
        if hasattr(cookie.value, 'startswith') and cookie.value.startswith('"') and cookie.value.endswith('"'):
            cookie.value = cookie.value.replace('\\"', '')
        return super(RequestsCookieJar, self).set_cookie(cookie, *args, **kwargs)

    def update(self, other):
        """Updates this jar with cookies from another CookieJar or dict-like"""
        if isinstance(other, cookielib.CookieJar):
            for cookie in other:
                self.set_cookie(copy.copy(cookie))
        else:
            super(RequestsCookieJar, self).update(other)

    def _find(self, name, domain=None, path=None):
        """Requests uses this method internally to get cookie values.

        If there are conflicting cookies, _find arbitrarily chooses one.
        See _find_no_duplicates if you want an exception thrown if there are
        conflicting cookies.

        :param name: a string containing name of cookie
        :param domain: (optional) string containing domain of cookie
        :param path: (optional) string containing path of cookie
        :return: cookie.value
        """
        for cookie in iter(self):
            if cookie.name == name:
                if domain is None or cookie.domain == domain:
                    if path is None or cookie.path == path:
                        return cookie.value

        raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))

    def _find_no_duplicates(self, name, domain=None, path=None):
        """Both ``__get_item__`` and ``get`` call this function: it's never
        used elsewhere in Requests.

        :param name: a string containing name of cookie
        :param domain: (optional) string containing domain of cookie
        :param path: (optional) string containing path of cookie
        :raises KeyError: if cookie is not found
        :raises CookieConflictError: if there are multiple cookies
            that match name and optionally domain and path
        :return: cookie.value
        """
        toReturn = None
        for cookie in iter(self):
            if cookie.name == name:
                if domain is None or cookie.domain == domain:
                    if path is None or cookie.path == path:
                        if toReturn is not None:  # if there are multiple cookies that meet passed in criteria
                            raise CookieConflictError('There are multiple cookies with name, %r' % (name))
                        toReturn = cookie.value  # we will eventually return this as long as no cookie conflict

        if toReturn:
            return toReturn
        raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))

    def __getstate__(self):
        """Unlike a normal CookieJar, this class is pickleable."""
        state = self.__dict__.copy()
        # remove the unpickleable RLock object
        state.pop('_cookies_lock')
        return state

    def __setstate__(self, state):
        """Unlike a normal CookieJar, this class is pickleable."""
        self.__dict__.update(state)
        if '_cookies_lock' not in self.__dict__:
            self._cookies_lock = threading.RLock()

    def copy(self):
        """Return a copy of this RequestsCookieJar."""
        new_cj = RequestsCookieJar()
        new_cj.update(self)
        return new_cj


def _copy_cookie_jar(jar):
    if jar is None:
        return None

    if hasattr(jar, 'copy'):
        # We're dealing with an instance of RequestsCookieJar
        return jar.copy()
    # We're dealing with a generic CookieJar instance
    new_jar = copy.copy(jar)
    new_jar.clear()
    for cookie in jar:
        new_jar.set_cookie(copy.copy(cookie))
    return new_jar


def create_cookie(name, value, **kwargs):
    """Make a cookie from underspecified parameters.

    By default, the pair of `name` and `value` will be set for the domain ''
    and sent on every request (this is sometimes called a "supercookie").
    """
    result = dict(
        version=0,
        name=name,
        value=value,
        port=None,
        domain='',
        path='/',
        secure=False,
        expires=None,
        discard=True,
        comment=None,
        comment_url=None,
        rest={'HttpOnly': None},
        rfc2109=False,
    )

    badargs = set(kwargs) - set(result)
    if badargs:
        err = 'create_cookie() got unexpected keyword arguments: %s'
        raise TypeError(err % list(badargs))

    result.update(kwargs)
    result['port_specified'] = bool(result['port'])
    result['domain_specified'] = bool(result['domain'])
    result['domain_initial_dot'] = result['domain'].startswith('.')
    result['path_specified'] = bool(result['path'])

    return cookielib.Cookie(**result)


def morsel_to_cookie(morsel):
    """Convert a Morsel object into a Cookie containing the one k/v pair."""

    expires = None
    if morsel['max-age']:
        try:
            expires = int(time.time() + int(morsel['max-age']))
        except ValueError:
            raise TypeError('max-age: %s must be integer' % morsel['max-age'])
    elif morsel['expires']:
        time_template = '%a, %d-%b-%Y %H:%M:%S GMT'
        expires = calendar.timegm(
            time.strptime(morsel['expires'], time_template)
        )
    return create_cookie(
        comment=morsel['comment'],
        comment_url=bool(morsel['comment']),
        discard=False,
        domain=morsel['domain'],
        expires=expires,
        name=morsel.key,
        path=morsel['path'],
        port=None,
        rest={'HttpOnly': morsel['httponly']},
        rfc2109=False,
        secure=bool(morsel['secure']),
        value=morsel.value,
        version=morsel['version'] or 0,
    )


def cookiejar_from_dict(cookie_dict, cookiejar=None, overwrite=True):
    """Returns a CookieJar from a key/value dictionary.

    :param cookie_dict: Dict of key/values to insert into CookieJar.
    :param cookiejar: (optional) A cookiejar to add the cookies to.
    :param overwrite: (optional) If False, will not replace cookies
        already in the jar with new ones.
    """
    if cookiejar is None:
        cookiejar = RequestsCookieJar()

    if cookie_dict is not None:
        names_from_jar = [cookie.name for cookie in cookiejar]
        for name in cookie_dict:
            if overwrite or (name not in names_from_jar):
                cookiejar.set_cookie(create_cookie(name, cookie_dict[name]))

    return cookiejar


def merge_cookies(cookiejar, cookies):
    """Add cookies to cookiejar and returns a merged CookieJar.

    :param cookiejar: CookieJar object to add the cookies to.
    :param cookies: Dictionary or CookieJar object to be added.
    """
    if not isinstance(cookiejar, cookielib.CookieJar):
        raise ValueError('You can only merge into CookieJar')

    if isinstance(cookies, dict):
        cookiejar = cookiejar_from_dict(
            cookies, cookiejar=cookiejar, overwrite=False)
    elif isinstance(cookies, cookielib.CookieJar):
        try:
            cookiejar.update(cookies)
        except AttributeError:
            for cookie_in_jar in cookies:
                cookiejar.set_cookie(cookie_in_jar)

    return cookiejar
# -*- coding: utf-8 -*-

"""
requests.compat
~~~~~~~~~~~~~~~

This module handles import compatibility issues between Python 2 and
Python 3.
"""

from .packages import chardet

import sys

# -------
# Pythons
# -------

# Syntax sugar.
_ver = sys.version_info

#: Python 2.x?
is_py2 = (_ver[0] == 2)

#: Python 3.x?
is_py3 = (_ver[0] == 3)

# Note: We've patched out simplejson support in pip because it prevents
#       upgrading simplejson on Windows.
import json

# ---------
# Specifics
# ---------

if is_py2:
    from urllib import (
        quote, unquote, quote_plus, unquote_plus, urlencode, getproxies,
        proxy_bypass)
    from urlparse import urlparse, urlunparse, urljoin, urlsplit, urldefrag
    from urllib2 import parse_http_list
    import cookielib
    from Cookie import Morsel
    from StringIO import StringIO
    from .packages.urllib3.packages.ordered_dict import OrderedDict

    builtin_str = str
    bytes = str
    str = unicode
    basestring = basestring
    numeric_types = (int, long, float)

elif is_py3:
    from urllib.parse import (
        urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote,
        quote_plus, unquote_plus, urldefrag)
    from urllib.request import parse_http_list, getproxies, proxy_bypass
    from http import cookiejar as cookielib
    from http.cookies import Morsel
    from io import StringIO
    from collections import OrderedDict

    builtin_str = str
    str = str
    bytes = bytes
    basestring = (str, bytes)
    numeric_types = (int, float)
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
requests.certs
~~~~~~~~~~~~~~

This module returns the preferred default CA certificate bundle.

If you are packaging Requests, e.g., for a Linux distribution or a managed
environment, you can change the definition of where() to return a separately
packaged CA bundle.
"""
import os.path

try:
    # Prefer the certifi package's bundle when it is installed.
    from certifi import where
except ImportError:
    def where():
        """Return the preferred certificate bundle."""
        # vendored bundle inside Requests
        return os.path.join(os.path.dirname(__file__), 'cacert.pem')

if __name__ == '__main__':
    print(where())
# Issuer: O=Equifax OU=Equifax Secure Certificate Authority# Subject: O=Equifax OU=Equifax Secure Certificate Authority# Label: "Equifax Secure CA"# Serial: 903804111# MD5 Fingerprint: 67:cb:9d:c0:13:24:8a:82:9b:b2:17:1e:d1:1b:ec:d4# SHA1 Fingerprint: d2:32:09:ad:23:d3:14:23:21:74:e4:0d:7f:9d:62:13:97:86:63:3a# SHA256 Fingerprint: 08:29:7a:40:47:db:a2:36:80:c7:31:db:6e:31:76:53:ca:78:48:e1:be:bd:3a:0b:01:79:a7:07:f9:2c:f1:78-----BEGIN CERTIFICATE-----MIIDIDCCAomgAwIBAgIENd70zzANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJVUzEQMA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2VydGlmaWNhdGUgQXV0aG9yaXR5MB4XDTk4MDgyMjE2NDE1MVoXDTE4MDgyMjE2NDE1MVowTjELMAkGA1UEBhMCVVMxEDAOBgNVBAoTB0VxdWlmYXgxLTArBgNVBAsTJEVxdWlmYXggU2VjdXJlIENlcnRpZmljYXRlIEF1dGhvcml0eTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAwV2xWGcIYu6gmi0fCG2RFGiYCh7+2gRvE4RiIcPRfM6fBeC4AfBONOziipUEZKzxa1NfBbPLZ4C/QgKO/t0BCezhABRP/PvwDN1Dulsr4R+AcJkVV5MW8Q+XarfCaCMczE1ZMKxRHjuvK9buY0V7xdlfUNLjUA86iOe/FP3gx7kCAwEAAaOCAQkwggEFMHAGA1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEQMA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2VydGlmaWNhdGUgQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMBoGA1UdEAQTMBGBDzIwMTgwODIyMTY0MTUxWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUSOZo+SvSspXXR9gjIBBPM5iQn9QwHQYDVR0OBBYEFEjmaPkr0rKV10fYIyAQTzOYkJ/UMAwGA1UdEwQFMAMBAf8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUAA4GBAFjOKer89961zgK5F7WF0bnj4JXMJTENAKaSbn+2kmOeUJXRmm/kEd5jhW6Y7qj/WsjTVbJmcVfewCHrPSqnI0kBBIZCe/zuf6IWUrVnZ9NA2zsmWLIodz2uFHdh1voqZiegDfqnc1zqcPGUIWVEX/r87yloqaKHee9570+sB3c4-----END CERTIFICATE-----# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA# Label: "GlobalSign Root CA"# Serial: 4835703278459707669005204# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c# SHA256 Fingerprint: 
eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99-----BEGIN CERTIFICATE-----MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jvb3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAwMDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxTaWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZjc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavpxy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdGsnUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJU26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N89iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0BAQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOzyj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymPAbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUadDKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbMEHMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A==-----END CERTIFICATE-----# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2# Label: "GlobalSign Root CA - R2"# Serial: 4835703278459682885658125# MD5 Fingerprint: 94:14:77:7e:3e:5e:fd:8f:30:bd:41:b0:cf:e7:d0:30# SHA1 Fingerprint: 75:e0:ab:b6:13:85:12:27:1c:04:f8:5f:dd:de:38:e4:b7:24:2e:fe# SHA256 Fingerprint: ca:42:dd:41:74:5f:d0:b8:1e:b9:02:36:2c:f9:d8:bf:71:9d:a1:bd:1b:1e:fc:94:6f:5b:4c:99:f4:2c:1b:9e-----BEGIN 
CERTIFICATE-----MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4GA1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMjETMBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6ErPLv4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8eoLrvozps6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklqtTleiDTsvHgMCJiEbKjNS7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzdC9XZzPnqJworc5HGnRusyMvo4KD0L5CLTfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pazq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6CygPCm48CAwEAAaOBnDCBmTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUm+IHV2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5nbG9iYWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG3lm0mi3f3BmGLjANBgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4GsJ0/WwbgcQ3izDJr86iw8bmEbTUsp9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu01yiPqFbQfXf5WRDLenVOavSot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG79G+dwfCMNYxdAfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg==-----END CERTIFICATE-----# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. 
- For authorized use only# Label: "Verisign Class 3 Public Primary Certification Authority - G3"# Serial: 206684696279472310254277870180966723415# MD5 Fingerprint: cd:68:b6:a7:c7:c4:ce:75:e0:1d:4f:57:44:61:92:09# SHA1 Fingerprint: 13:2d:0d:45:53:4b:69:97:cd:b2:d5:c3:39:e2:55:76:60:9b:5c:c6# SHA256 Fingerprint: eb:04:cf:5e:b1:f3:9a:fa:76:2f:2b:b1:20:f2:96:cb:a5:20:c1:b9:7d:b1:58:95:65:b8:1c:b9:a1:7b:72:44-----BEGIN CERTIFICATE-----MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQswCQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMu6nFL8eB8aHm8bN3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1EUGO+i2tKmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGukxUccLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBmCC+Vk7+qRy+oRpfwEuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJXwzw3sJ2zq/3avL6QaaiMxTJ5Xpj055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWuimi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAERSWwauSCPc/L8my/uRan2Te2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5fj267Cz3qWhMeDGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC/Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565pF4ErWjfJXir0xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGtTxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ==-----END CERTIFICATE-----# Issuer: CN=VeriSign Class 4 Public Primary Certification Authority - G3 O=VeriSign, Inc. 
OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only# Subject: CN=VeriSign Class 4 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only# Label: "Verisign Class 4 Public Primary Certification Authority - G3"# Serial: 314531972711909413743075096039378935511# MD5 Fingerprint: db:c8:f2:27:2e:b1:ea:6a:29:23:5d:fe:56:3e:33:df# SHA1 Fingerprint: c8:ec:8c:87:92:69:cb:4b:ab:39:e9:8d:7e:57:67:f3:14:95:73:9d# SHA256 Fingerprint: e3:89:36:0d:0f:db:ae:b3:d2:50:58:4b:47:30:31:4e:22:2f:39:c1:56:a0:20:14:4e:8d:96:05:61:79:15:06-----BEGIN CERTIFICATE-----MIIEGjCCAwICEQDsoKeLbnVqAc/EfMwvlF7XMA0GCSqGSIb3DQEBBQUAMIHKMQswCQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWduIENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWduIENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAK3LpRFpxlmr8Y+1GQ9Wzsy1HyDkniYlS+BzZYlZ3tCD5PUPtbut8XzoIfzk6AzufEUiGXaStBO3IFsJ+mGuqPKljYXCKtbeZjbSmwL0qJJgfJxptI8kHtCGUvYynEFYHiK9zUVilQhu0GbdU6LM8BDcVHOLBKFGMzNcF0C5nk3T875Vg+ixiY5afJqWIpA7iCXy0lOIAgwLePLmNxdLMEYH5IBtptiWLugs+BGzOA1mppvqySNb247i8xOOGlktqgLw7KSHZtzBP/XYufTsgsbSPZUd5cBPhMnZo0QoBmrXRazwa2rvTl/4EYIeOGM0ZlDUPpNz+jDDZq3/ky2X7wMCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAj/ola09b5KROJ1WrIhVZPMq1CtRK26vdoV9TxaBXOcLORyu+OshWv8LZJxA6sQU8wHcxuzrTBXttmhwwjIDLk5Mqg6sFUYICABFna/OIYUdfA5PVWw3g8dShMjWFsjrbsIKr0csKvE+MW8VLADsfKoKmfjaF3H48ZwC15DtS4KjrXRX5xm3wrR0OhbepmnMUWluPQSjA1egtTaRezarZ7c7c2NU8Qh0XwRJdRTjDOPP8hS6DRkiy1yBfkjaP53kPmF6Z6PDQpLv1U70qzlmwr25/bLvSH
gCwIe34QWKCudiyxLtGUPMxxY8BqHTr9Xgn2uf3ZkPznoM+IKrDNWCRzg==-----END CERTIFICATE-----# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited# Label: "Entrust.net Premium 2048 Secure Server CA"# Serial: 946069240# MD5 Fingerprint: ee:29:31:bc:32:7e:9a:e6:e8:b5:f7:51:b4:34:71:90# SHA1 Fingerprint: 50:30:06:09:1d:97:d4:f5:ae:39:f7:cb:e7:92:7d:7d:65:2d:34:31# SHA256 Fingerprint: 6d:c4:71:72:e0:1c:bc:b0:bf:62:58:0d:89:5f:e2:b8:ac:9a:d4:f8:73:80:1e:0c:10:b9:c8:37:d2:1e:b1:77-----BEGIN CERTIFICATE-----MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChMLRW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBpbmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRpZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0yOTA3MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxpYWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEGA1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgpMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQqK0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQesYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuXMlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVTXTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH4QIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJKoZIhvcNAQEFBQADggEBADubj1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPyT/4xmf3IDExoU8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6YfzX1XEC+bBAlahLVu2B064dae0
Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5bu/8j72gZyxKTJ1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+bYQLCIt+jerXmCHG8+c8eS9enNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/ErfF6adulZkMV8gzURZVE=-----END CERTIFICATE-----# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust# Label: "Baltimore CyberTrust Root"# Serial: 33554617# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb-----BEGIN CERTIFICATE-----MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJRTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYDVQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoXDTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9yZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVyVHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKrmD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjrIZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeKmpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSuXmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZydc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/yejl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT929hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3WgxjkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhzksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLSR9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp-----END CERTIFICATE-----# Issuer: CN=AddTrust Class 1 CA Root O=AddTrust AB OU=AddTrust TTP Network# Subject: CN=AddTrust 
Class 1 CA Root O=AddTrust AB OU=AddTrust TTP Network# Label: "AddTrust Low-Value Services Root"# Serial: 1# MD5 Fingerprint: 1e:42:95:02:33:92:6b:b9:5f:c0:7f:da:d6:b2:4b:fc# SHA1 Fingerprint: cc:ab:0e:a0:4c:23:01:d6:69:7b:dd:37:9f:cd:12:eb:24:e3:94:9d# SHA256 Fingerprint: 8c:72:09:27:9a:c0:4e:27:5e:16:d0:7f:d3:b7:75:e8:01:54:b5:96:80:46:e3:1f:52:dd:25:76:63:24:e9:a7-----BEGIN CERTIFICATE-----MIIEGDCCAwCgAwIBAgIBATANBgkqhkiG9w0BAQUFADBlMQswCQYDVQQGEwJTRTEUMBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3b3JrMSEwHwYDVQQDExhBZGRUcnVzdCBDbGFzcyAxIENBIFJvb3QwHhcNMDAwNTMwMTAzODMxWhcNMjAwNTMwMTAzODMxWjBlMQswCQYDVQQGEwJTRTEUMBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3b3JrMSEwHwYDVQQDExhBZGRUcnVzdCBDbGFzcyAxIENBIFJvb3QwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCWltQhSWDia+hBBwzexODcEyPNwTXH+9ZOEQpnXvUGW2ulCDtbKRY654eyNAbFvAWlA3yCyykQruGIgb3WntP+LVbBFc7jJp0VLhD7Bo8wBN6ntGO0/7Gcrjyvd7ZWxbWroulpOj0OM3kyP3CCkplhbY0wCI9xP6ZIVxn4JdxLZlyldI+Yrsj5wAYi56xz36Uu+1LcsRVlIPo1Zmne3yzxbrww2ywkEtvrNTVokMsAsJchPXQhI2U0K7t4WaPW4XY5mqRJjox0r26kmqPZm9I4XJuiGMx1I4S+6+JNM3GOGvDC+Mcdoq0Dlyz4zyXG9rgkMbFjXZJ/Y/AlyVMuH79NAgMBAAGjgdIwgc8wHQYDVR0OBBYEFJWxtPCUtr3H2tERCSG+wa9J/RB7MAsGA1UdDwQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MIGPBgNVHSMEgYcwgYSAFJWxtPCUtr3H2tERCSG+wa9J/RB7oWmkZzBlMQswCQYDVQQGEwJTRTEUMBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3b3JrMSEwHwYDVQQDExhBZGRUcnVzdCBDbGFzcyAxIENBIFJvb3SCAQEwDQYJKoZIhvcNAQEFBQADggEBACxtZBsfzQ3duQH6lmM0MkhHma6X7f1yFqZzR1r0693p9db7RcwpiURdv0Y5PejuvE1Uhh4dbOMXJ0PhiVYrqW9yTkkz43J8KiOavD7/KCrto/8cI7pDVwlnTUtiBi34/2ydYB7YHEt9tTEv2dB8Xfjea4MYeDdXL+gzB2ffHsdrKpV2ro9Xo/D0UrSpUwjP4E/TelOL/bscVjby/rK25Xa71SJlpz/+0WatC7xrmYbvP33zGDLKe8bjq2RGlfgmadlVg3sslgf/WSxEo8bl6ancoWOAWiFeIc9TVPC6b4nbqKqVz4vjccweGyBECMB6tkD9xOQ14R0WHNC8K47Wcdk=-----END CERTIFICATE-----# Issuer: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network# Subject: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network# 
Label: "AddTrust External Root"# Serial: 1# MD5 Fingerprint: 1d:35:54:04:85:78:b0:3f:42:42:4d:bf:20:73:0a:3f# SHA1 Fingerprint: 02:fa:f3:e2:91:43:54:68:60:78:57:69:4d:f5:e4:5b:68:85:18:68# SHA256 Fingerprint: 68:7f:a4:51:38:22:78:ff:f0:c8:b1:1f:8d:43:d5:76:67:1c:6e:b2:bc:ea:b4:13:fb:83:d9:65:d0:6d:2f:f2-----BEGIN CERTIFICATE-----MIIENjCCAx6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBvMQswCQYDVQQGEwJTRTEUMBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFkZFRydXN0IEV4dGVybmFsIFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBFeHRlcm5hbCBDQSBSb290MB4XDTAwMDUzMDEwNDgzOFoXDTIwMDUzMDEwNDgzOFowbzELMAkGA1UEBhMCU0UxFDASBgNVBAoTC0FkZFRydXN0IEFCMSYwJAYDVQQLEx1BZGRUcnVzdCBFeHRlcm5hbCBUVFAgTmV0d29yazEiMCAGA1UEAxMZQWRkVHJ1c3QgRXh0ZXJuYWwgQ0EgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALf3GjPm8gAELTngTlvtH7xsD821+iO2zt6bETOXpClMfZOfvUq8k+0DGuOPz+VtUFrWlymUWoCwSXrbLpX9uMq/NzgtHj6RQa1wVsfwTz/oMp50ysiQVOnGXw94nZpAPA6sYapeFI+eh6FqUNzXmk6vBbOmcZSccbNQYArHE504B4YCqOmoaSYYkKtMsE8jqzpPhNjfzp/haW+710LXa0Tkx63ubUFfclpxCDezeWWkWaCUN/cALw3CknLa0Dhy2xSoRcRdKn23tNbE7qzNE0S3ySvdQwAl+mG5aWpYIxG3pzOPVnVZ9c0p10a3CitlttNCbxWyuHv77+ldU9U0WicCAwEAAaOB3DCB2TAdBgNVHQ4EFgQUrb2YejS0Jvf6xCZU7wO94CTLVBowCwYDVR0PBAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wgZkGA1UdIwSBkTCBjoAUrb2YejS0Jvf6xCZU7wO94CTLVBqhc6RxMG8xCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRUcnVzdCBBQjEmMCQGA1UECxMdQWRkVHJ1c3QgRXh0ZXJuYWwgVFRQIE5ldHdvcmsxIjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENBIFJvb3SCAQEwDQYJKoZIhvcNAQEFBQADggEBALCb4IUlwtYj4g+WBpKdQZic2YR5gdkeWxQHIzZlj7DYd7usQWxHYINRsPkyPef89iYTx4AWpb9a/IfPeHmJIZriTAcKhjW88t5RxNKWt9x+Tu5w/Rw56wwCURQtjr0W4MHfRnXnJK3s9EK0hZNwEGe6nQY1ShjTK3rMUUKhemPR5ruhxSvCNr4TDea9Y355e6cJDUCrat2PisP29owaQgVR1EX1n6diIWgVIEM8med8vSTYqZEXc4g/VhsxOBi0cQ+azcgOno4uG+GMmIPLHzHxREzGBHNJdmAPx/i9F4BrLunMTA5amnkPIAou1Z5jJh5VkpTYghdae9C8x49OhgQ=-----END CERTIFICATE-----# Issuer: CN=AddTrust Public CA Root O=AddTrust AB OU=AddTrust TTP Network# Subject: CN=AddTrust Public CA Root O=AddTrust AB OU=AddTrust TTP Network# Label: "AddTrust Public Services Root"# Serial: 
1# MD5 Fingerprint: c1:62:3e:23:c5:82:73:9c:03:59:4b:2b:e9:77:49:7f# SHA1 Fingerprint: 2a:b6:28:48:5e:78:fb:f3:ad:9e:79:10:dd:6b:df:99:72:2c:96:e5# SHA256 Fingerprint: 07:91:ca:07:49:b2:07:82:aa:d3:c7:d7:bd:0c:df:c9:48:58:35:84:3e:b2:d7:99:60:09:ce:43:ab:6c:69:27-----BEGIN CERTIFICATE-----MIIEFTCCAv2gAwIBAgIBATANBgkqhkiG9w0BAQUFADBkMQswCQYDVQQGEwJTRTEUMBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3b3JrMSAwHgYDVQQDExdBZGRUcnVzdCBQdWJsaWMgQ0EgUm9vdDAeFw0wMDA1MzAxMDQxNTBaFw0yMDA1MzAxMDQxNTBaMGQxCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRUcnVzdCBBQjEdMBsGA1UECxMUQWRkVHJ1c3QgVFRQIE5ldHdvcmsxIDAeBgNVBAMTF0FkZFRydXN0IFB1YmxpYyBDQSBSb290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA6Rowj4OIFMEg2Dybjxt+A3S72mnTRqX4jsIMEZBRpS9mVEBV6tsfSlbunyNu9DnLoblv8n75XYcmYZ4c+OLspoH4IcUkzBEMP9smcnrHAZcHF/nXGCwwfQ56HmIexkvA/X1id9NEHif2P0tEs7c42TkfYNVRknMDtABp4/MUTu7R3AnPdzRGULD4EfL+OHn3Bzn+UZKXC1sIXzSGAa2Il+tmzV7R/9x98oTaunet3IAIx6eH1lWfl2royBFkuucZKT8Rs3iQhCBSWxHveNCD9tVIkNAwHM+A+WD+eeSI8t0A65RF62WUaUC6wNW0uLp9BBGo6zEFlpROWCGOn9Bg/QIDAQABo4HRMIHOMB0GA1UdDgQWBBSBPjfYkrAfd59ctKtzquf2NGAv+jALBgNVHQ8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zCBjgYDVR0jBIGGMIGDgBSBPjfYkrAfd59ctKtzquf2NGAv+qFopGYwZDELMAkGA1UEBhMCU0UxFDASBgNVBAoTC0FkZFRydXN0IEFCMR0wGwYDVQQLExRBZGRUcnVzdCBUVFAgTmV0d29yazEgMB4GA1UEAxMXQWRkVHJ1c3QgUHVibGljIENBIFJvb3SCAQEwDQYJKoZIhvcNAQEFBQADggEBAAP3FUr4JNojVhaTdt02KLmuG7jD8WS6IBh4lSknVwW8fCr0uVFV2ocC3g8WFzH4qnkuCRO7r7IgGRLlk/lL+YPoRNWyQSW/iHVv/xD8SlTQX/D67zZzfRs2RcYhbbQVuE7PnFylPVoAjgbjPGsye/Kf8Lb93/AoGEjwxrzQvzSAlsJKsW2Ox5BF3i9nrEUEo3rcVZLJR2bYGozH7ZxOmuASu7VqTITh4SINhwBk/ox9Yjllpu9CtoAlEmEBqCQTcAARJl/6NVDFSMwGR+gn2HCNX2TmoUQmXiLsks3/QppEIW1cxeMiHV9HEufOX1362KqxMy3ZdvJOOjMMK7MtkAY=-----END CERTIFICATE-----# Issuer: CN=AddTrust Qualified CA Root O=AddTrust AB OU=AddTrust TTP Network# Subject: CN=AddTrust Qualified CA Root O=AddTrust AB OU=AddTrust TTP Network# Label: "AddTrust Qualified Certificates Root"# Serial: 1# MD5 Fingerprint: 27:ec:39:47:cd:da:5a:af:e2:9a:01:65:21:a9:4c:bb# 
SHA1 Fingerprint: 4d:23:78:ec:91:95:39:b5:00:7f:75:8f:03:3b:21:1e:c5:4d:8b:cf# SHA256 Fingerprint: 80:95:21:08:05:db:4b:bc:35:5e:44:28:d8:fd:6e:c2:cd:e3:ab:5f:b9:7a:99:42:98:8e:b8:f4:dc:d0:60:16-----BEGIN CERTIFICATE-----MIIEHjCCAwagAwIBAgIBATANBgkqhkiG9w0BAQUFADBnMQswCQYDVQQGEwJTRTEUMBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3b3JrMSMwIQYDVQQDExpBZGRUcnVzdCBRdWFsaWZpZWQgQ0EgUm9vdDAeFw0wMDA1MzAxMDQ0NTBaFw0yMDA1MzAxMDQ0NTBaMGcxCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRUcnVzdCBBQjEdMBsGA1UECxMUQWRkVHJ1c3QgVFRQIE5ldHdvcmsxIzAhBgNVBAMTGkFkZFRydXN0IFF1YWxpZmllZCBDQSBSb290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA5B6a/twJWoekn0e+EV+vhDTbYjx5eLfpMLXsDBwqxBb/4Oxx64r1EW7tTw2R0hIYLUkVAcKkIhPHEWT/IhKauY5cLwjPcWqzZwFZ8V1G87B4pfYOQnrjfxvM0PC3KP0q6p6zsLkEqv32x7SxuCqg+1jxGaBvcCV+PmlKfw8i2O+tCBGaKZnhqkRFmhJePp1tUvznoD1oL/BLcHwTOK28FSXx1s6rosAx1i+f4P8UWfyEk9mHfExUE+uf0S0R+Bg6Ot4l2ffTQO2kBhLEO+GRwVY18BTcZTYJbqukB8c10cIDMzZbdSZtQvESa0NvS3GU+jQd7RNuyoB/mC9suWXY6QIDAQABo4HUMIHRMB0GA1UdDgQWBBQ5lYtii1zJ1IC6WA+XPxUIQ8yYpzALBgNVHQ8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zCBkQYDVR0jBIGJMIGGgBQ5lYtii1zJ1IC6WA+XPxUIQ8yYp6FrpGkwZzELMAkGA1UEBhMCU0UxFDASBgNVBAoTC0FkZFRydXN0IEFCMR0wGwYDVQQLExRBZGRUcnVzdCBUVFAgTmV0d29yazEjMCEGA1UEAxMaQWRkVHJ1c3QgUXVhbGlmaWVkIENBIFJvb3SCAQEwDQYJKoZIhvcNAQEFBQADggEBABmrder4i2VhlRO6aQTvhsoToMeqT2QbPxj2qC0sVY8FtzDqQmodwCVRLae/DLPt7wh/bDxGGuoYQ992zPlmhpwsaPXpF/gxsxjE1kh9I0xowX67ARRvxdlu3rsEQmr49lx95dr6h+sNNVJn0J6XdgWTP5XHAeZpVTh/EGGZyeNfpso+gmNIquIISD6q8rKFYqa0p9m9N5xotS1WfbC3P6CxB9bpT9zeRXEwMn8bLgn5v1Kh7sKAPgZcLlVAwRv1cEWw3F369nJad9Jjzc9YiQBCYz95OdBEsIJuQRno3eDBiFrRHnGTHyQwdOUeqN48Jzd/g66ed8/wMLH/S5noxqE=-----END CERTIFICATE-----# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc.# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. 
OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc.# Label: "Entrust Root Certification Authority"# Serial: 1164660820# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c-----BEGIN CERTIFICATE-----MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMWKGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIwNTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkwNwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSByZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNVBAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFoNu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf44LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGIrb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOBsDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAigA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRokORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uEvW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUAA4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9tO1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6ZuaAGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m0vdXcDazv/wor3ElhVsT/h5/WrQ8-----E
ND CERTIFICATE-----# Issuer: O=RSA Security Inc OU=RSA Security 2048 V3# Subject: O=RSA Security Inc OU=RSA Security 2048 V3# Label: "RSA Security 2048 v3"# Serial: 13297492616345471454730593562152402946# MD5 Fingerprint: 77:0d:19:b1:21:fd:00:42:9c:3e:0c:a5:dd:0b:02:8e# SHA1 Fingerprint: 25:01:90:19:cf:fb:d9:99:1c:b7:68:25:74:8d:94:5f:30:93:95:42# SHA256 Fingerprint: af:8b:67:62:a1:e5:28:22:81:61:a9:5d:5c:55:9e:e2:66:27:8f:75:d7:9e:83:01:89:a5:03:50:6a:bd:6b:4c-----BEGIN CERTIFICATE-----MIIDYTCCAkmgAwIBAgIQCgEBAQAAAnwAAAAKAAAAAjANBgkqhkiG9w0BAQUFADA6MRkwFwYDVQQKExBSU0EgU2VjdXJpdHkgSW5jMR0wGwYDVQQLExRSU0EgU2VjdXJpdHkgMjA0OCBWMzAeFw0wMTAyMjIyMDM5MjNaFw0yNjAyMjIyMDM5MjNaMDoxGTAXBgNVBAoTEFJTQSBTZWN1cml0eSBJbmMxHTAbBgNVBAsTFFJTQSBTZWN1cml0eSAyMDQ4IFYzMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAt49VcdKA3XtpeafwGFAyPGJn9gqVB93mG/Oe2dJBVGutn3y+Gc37RqtBaB4Y6lXIL5F4iSj7Jylg/9+PjDvJSZu1pJTOAeo+tWN7fyb9Gd3AIb2E0S1PRsNO3Ng3OTsor8udGuorryGlwSMiuLgbWhOHV4PR8CDn6E8jQrAApX2J6elhc5SYcSa8LWrg903w8bYqODGBDSnhAMFRD0xS+ARaqn1y07iHKrtjEAMqs6FPDVpeRrc9DvV07Jmf+T0kgYim3WBU6JU2PcYJk5qjEoAAVZkZR73QpXzDuvsf9/UP+Ky5tfQ3mBMY3oVbtwyCO4dvlTlYMNpuAWgXIszACwIDAQABo2MwYTAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBQHw1EwpKrpRa41JPr/JCwz0LGdjDAdBgNVHQ4EFgQUB8NRMKSq6UWuNST6/yQsM9CxnYwwDQYJKoZIhvcNAQEFBQADggEBAF8+hnZuuDU8TjYcHnmYv/3VEhF5Ug7uMYm83X/50cYVIeiKAVQNOvtUudZj1LGqlk2iQk3UUx+LEN5/Zb5gEydxiKRz44Rj0aRV4VCT5hsOedBnvEbIvz8XDZXmxpBp3ue0L96VfdASPz0+f00/FGj1EVDVwfSQpQgdMWD/YIwjVAqv/qFuxdF6Kmh4zx6CCiC0H63lhbJqaHVOrSU3lIW+vaHU6rcMSzyd6BIA8F+sDeGscGNz9395nzIlQnQFgCi/vcEkllgVsRch6YlL2weIZ/QVrXA+L02FO8K32/6YaCOJ4XQP3vTFhGMpG8zLB8kApKnXwiJPZ9d37CAFYd4=-----END CERTIFICATE-----# Issuer: CN=GeoTrust Global CA O=GeoTrust Inc.# Subject: CN=GeoTrust Global CA O=GeoTrust Inc.# Label: "GeoTrust Global CA"# Serial: 144470# MD5 Fingerprint: f7:75:ab:29:fb:51:4e:b7:77:5e:ff:05:3c:99:8e:f5# SHA1 Fingerprint: de:28:f4:a4:ff:e5:b9:2f:a3:c5:03:d1:a3:49:a7:f9:96:2a:82:12# SHA256 Fingerprint: 
ff:85:6a:2d:25:1d:cd:88:d3:66:56:f4:50:12:67:98:cf:ab:aa:de:40:79:9c:72:2d:e4:d2:b5:db:36:a7:3a-----BEGIN CERTIFICATE-----MIIDVDCCAjygAwIBAgIDAjRWMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9iYWwgQ0EwHhcNMDIwNTIxMDQwMDAwWhcNMjIwNTIxMDQwMDAwWjBCMQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEbMBkGA1UEAxMSR2VvVHJ1c3QgR2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2swYYzD99BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9mOSm9BXiLnTjoBbdqfnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIuT8rxh0PBFpVXLVDviS2Aelet8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6cJmTM386DGXHKTubU1XupGc1V3sjs0l44U+VcT4wt/lAjNvxm5suOpDkZALeVAjmRCw7+OC7RHQWa9k0+bw8HHa8sHo9gOeL6NlMTOdReJivbPagUvTLrGAMoUgRx5aszPeE4uwc2hGKceeoWMPRfwCvocWvk+QIDAQABo1MwUTAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTAephojYn7qwVkDBF9qn1luMrMTjAfBgNVHSMEGDAWgBTAephojYn7qwVkDBF9qn1luMrMTjANBgkqhkiG9w0BAQUFAAOCAQEANeMpauUvXVSOKVCUn5kaFOSPeCpilKInZ57QzxpeR+nBsqTP3UEaBU6bS+5Kb1VSsyShNwrrZHYqLizz/Tt1kL/6cdjHPTfStQWVYrmm3ok9Nns4d0iXrKYgjy6myQzCsplFAMfOEVEiIuCl6rYVSAlk6l5PdPcFPseKUgzbFbS9bZvlxrFUaKnjaZC2mqUPuLk/IH2uSrW4nOQdtqvmlKXBx4Ot2/Unhw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6pXE0zX5IJL4hmXXeXxx12E6nV5fEWCRE11azbJHFwLJhWC9kXtNHjUStedejV0NxPNO3CBWaAocvmMw==-----END CERTIFICATE-----# Issuer: CN=GeoTrust Global CA 2 O=GeoTrust Inc.# Subject: CN=GeoTrust Global CA 2 O=GeoTrust Inc.# Label: "GeoTrust Global CA 2"# Serial: 1# MD5 Fingerprint: 0e:40:a7:6c:de:03:5d:8f:d1:0f:e4:d1:8d:f9:6c:a9# SHA1 Fingerprint: a9:e9:78:08:14:37:58:88:f2:05:19:b0:6d:2b:0d:2b:60:16:90:7d# SHA256 Fingerprint: ca:2d:82:a0:86:77:07:2f:8a:b6:76:4f:f0:35:67:6c:fe:3e:5e:32:5e:01:21:72:df:3f:92:09:6d:b7:9b:85-----BEGIN 
CERTIFICATE-----MIIDZjCCAk6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBEMQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEdMBsGA1UEAxMUR2VvVHJ1c3QgR2xvYmFsIENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMTkwMzA0MDUwMDAwWjBEMQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEdMBsGA1UEAxMUR2VvVHJ1c3QgR2xvYmFsIENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDvPE1APRDfO1MA4Wf+lGAVPoWI8YkNkMgoI5kF6CsgncbzYEbYwbLVjDHZ3CB5JIG/NTL8Y2nbsSpr7iFY8gjpeMtvy/wWUsiRxP89c96xPqfCfWbB9X5SJBri1WeR0IIQ13hLTytCOb1kLUCgsBDTOEhGiKEMuzozKmKY+wCdE1l/bztyqu6mD4b5BWHqZ38MN5aL5mkWRxHCJ1kDs6ZgwiFAVvqgx306E+PsV8ez1q6diYD3Aecs9pYrEw15LNnA5IZ7S4wMcoKK+xfNAGw6EzywhIdLFnopsk/bHdQL82Y3vdj2V7teJHq4PIu5+pIaGoSe2HSPqht/XvT+RSIhAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFHE4NvICMVNHK266ZUapEBVYIAUJMB8GA1UdIwQYMBaAFHE4NvICMVNHK266ZUapEBVYIAUJMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQUFAAOCAQEAA/e1K6tdEPx7srJerJsOflN4WT5CBP51o62sgU7XAotexC3IUnbHLB/8gTKY0UvGkpMzNTEv/NgdRN3ggX+d6YvhZJFiCzkIjKx0nVnZellSlxG5FntvRdOW2TF9AjYPnDtuzywNA0ZF66D0f0hExghAzN4bcLUprbqLOzRldRtxIR0sFAqwlpW41uryZfspuk/qkZN0abby/+Ea0AzRdoXLiiW9l14sbxWZJue2Kf8i7MkCx1YAzUm5s2x7UwQa4qjJqhIFI8LO57sEAszAR6LkxCkvW0VXiVHuPOtSCP8HNR6fNWpHSlaY0VqFH4z1Ir+rzoPz4iIprn2DQKi6bA==-----END CERTIFICATE-----# Issuer: CN=GeoTrust Universal CA O=GeoTrust Inc.# Subject: CN=GeoTrust Universal CA O=GeoTrust Inc.# Label: "GeoTrust Universal CA"# Serial: 1# MD5 Fingerprint: 92:65:58:8b:a2:1a:31:72:73:68:5c:b4:a5:7a:07:48# SHA1 Fingerprint: e6:21:f3:35:43:79:05:9a:4b:68:30:9d:8a:2f:74:22:15:87:ec:79# SHA256 Fingerprint: a0:45:9b:9f:63:b2:25:59:f5:fa:5d:4c:6d:b3:f9:f7:2f:f1:93:42:03:35:78:f0:73:bf:1d:1b:46:cb:b9:12-----BEGIN 
CERTIFICATE-----MIIFaDCCA1CgAwIBAgIBATANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEeMBwGA1UEAxMVR2VvVHJ1c3QgVW5pdmVyc2FsIENBMB4XDTA0MDMwNDA1MDAwMFoXDTI5MDMwNDA1MDAwMFowRTELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xHjAcBgNVBAMTFUdlb1RydXN0IFVuaXZlcnNhbCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAKYVVaCjxuAfjJ0hUNfBvitbtaSeodlyWL0AG0y/YckUHUWCq8YdgNY96xCcOq9tJPi8cQGeBvV8Xx7BDlXKg5pZMK4ZyzBIle0iN430SppyZj6tlcDgFgDgEB8rMQ7XlFTTQjOgNB0eRXbdT8oYN+yFFXoZCPzVx5zw8qkuEKmS5j1YPakWaDwvdSEYfyh3peFhF7em6fgemdtzbvQKoiFs7tqqhZJmr/Z6a4LauiIINQ/PQvE1+mrufislzDoR5G2vc7J2Ha3QsnhnGqQ5HFELZ1aD/ThdDc7d8Lsrlh/eezJS/R27tQahsiFepdaVaH/wmZ7cRQg+59IJDTWU3YBOU5fXtQlEIGQWFwMCTFMNaN7VqnJNk22CDtucvc+081xdVHppCZbW2xHBjXWotM85yM48vCR85mLK4b19p71XZQvk/iXttmkQ3CgaRr0BHdCXteGYO8A3ZNY9lO4L4fUorgtWv3GLIylBjobFS1J72HGrH4oVpjuDWtdYAVHGTEHZf9hBZ3KiKN9gg6meyHv8U3NyWfWTehd2Ds735VzZC1U0oqpbtWpU5xPKV+yXbfReBi9Fi1jUIxaS5BZuKGNZMN9QAZxjiRqf2xeUgnA3wySemkfWWspOqGmJch+RbNt+nhutxx9z3SxPGWX9f5NAEC7S8O08ni4oPmkmM8V7AgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNq7LqqwDLiIJlF0XG0D08DYj3rWMB8GA1UdIwQYMBaAFNq7LqqwDLiIJlF0XG0D08DYj3rWMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQUFAAOCAgEAMXjmx7XfuJRAyXHEqDXsRh3ChfMoWIawC/yOsjmPRFWrZIRcaanQmjg8+uUfNeVE44B5lGiku8SfPeE0zTBGi1QrlaXv9z+ZhP015s8xxtxqv6fXIwjhmF7DWgh2qaavdy+3YL1ERmrvl/9zlcGO6JP7/TG37FcREUWbMPEaiDnBTzynANXH/KttgCJwpQzgXQQpAvvLoJHRfNbDflDVnVi+QTjruXU8FdmbyUqDWcDaU/0zuzYYm4UPFd3uLax2k7nZAY1IEKj79TiG8dsKxr2EoyNB3tZ3b4XUhRxQ4K5RirqNPnbiucon8l+f725ZDQbYKxek0nxru18UGkiPGkzns0ccjkxFKyDuSN/n3QmOGKjaQI2SJhFTYXNd673nxE0pN2HrrDktZy4W1vUAg4WhzH92xH3kt0tm7wNFYGm2DFKWkoRepqO1pD4r2czYG0eq8kTaT/kD6PAUyz/zg97QwVTjt+gKN02LIFkDMBmhLMi9ER/frslKxfMnZmaGrGiR/9nmUxwPi1xpZQomyB40w11Re9epnAahNt3ViZS82eQtDF4JbAiXfKM9fJP/P6EUp8+1Xevb2xzEdt+Iub1FBZUbrvxGakyvSOPOrg/SfuvmbJxPgWp6ZKy7PtXny3YuxadIwVyQD8vIP/rmMuGNG2+k5o7Y+SlIis5z/iw=-----END CERTIFICATE-----# Issuer: CN=GeoTrust Universal CA 2 O=GeoTrust Inc.# Subject: CN=GeoTrust Universal CA 2 O=GeoTrust Inc.# 
Label: "GeoTrust Universal CA 2"# Serial: 1# MD5 Fingerprint: 34:fc:b8:d0:36:db:9e:14:b3:c2:f2:db:8f:e4:94:c7# SHA1 Fingerprint: 37:9a:19:7b:41:85:45:35:0c:a6:03:69:f3:3c:2e:af:47:4f:20:79# SHA256 Fingerprint: a0:23:4f:3b:c8:52:7c:a5:62:8e:ec:81:ad:5d:69:89:5d:a5:68:0d:c9:1d:1c:b8:47:7f:33:f8:78:b9:5b:0b-----BEGIN CERTIFICATE-----MIIFbDCCA1SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBHMQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1c3QgVW5pdmVyc2FsIENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMjkwMzA0MDUwMDAwWjBHMQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1c3QgVW5pdmVyc2FsIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCzVFLByT7y2dyxUxpZKeexw0Uo5dfR7cXFS6GqdHtXr0om/Nj1XqduGdt0DE81WzILAePb63p3NeqqWuDW6KFXlPCQo3RWlEQwAx5cTiuFJnSCegx2oG9NzkEtoBUGFF+3Qs17j1hhNNwqCPkuwwGmIkQcTAeC5lvO0Ep8BNMZcyfwqph/Lq9O64ceJHdqXbboW0W63MOhBW9Wjo8QJqVJwy7XQYci4E+GymC16qFjwAGXEHm9ADwSbSsVsaxLse4YuU6W3Nx2/zu+z18DwPw76L5GG//aQMJS9/7jOvdqdzXQ2o3rXhhqMcceujwbKNZrVMaqW9eiLBsZzKIC9ptZvTdrhrVtgrrY6slWvKk2WP0+GfPtDCapkzj4T8FdIgbQl+rhrcZV4IErKIM6+vR7IVEAvlI4zs1meaj0gVbi0IMJR1FbUGrP20gaXT73y/Zl92zxlfgCOzJWgjl6W70viRu/obTo/3+NjN8D8WBOWBFM66M/ECuDmgFz2ZRthAAnZqzwcEAJQpKtT5MNYQlRJNiS1QuUYbKHsu3/mjX/hVTK7URDrBs8FmtISgocQIgfksILAAX/8sgCSqSqqcyZlpwvWOB94b67B9xfBHJcMTTD7F8t4D1kkCLm0ey4Lt1ZrtmhN79UNdxzMk+MBB4zsslG8dhcyFVQyWi9qLo2CQIDAQABo2MwYTAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAfBgNVHSMEGDAWgBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQEFBQADggIBAGbBxiPz2eAubl/oz66wsCVNK/g7WJtAJDday6sWSf+zdXkzoS9tcBc0kf5nfo/sm+VegqlVHy/c1FEHEv6sFj4sNcZj/NwQ6w2jqtB8zNHQL1EuxBRa3ugZ4T7GzKQp5y6EqgYweHZUcyiYWTjgAA1i00J9IZ+uPTqM1fp3DRgrFg5fNuH8KrUwJM/gYwx7WBr+mbpCErGR9Hxo4sjoryzqyX6uuyo9DRXcNJW2GHSoag/HtPQTxORb7QrSpJdMKu0vbBKJPfEncKpqA1Ihn0CoZ1Dy81of398j9tx4TuaYT1U6U+Pv8vSfx3zYWK8pIpe44L2RLrB27FcRz+8pRPPphXpgY+RdM4kX2TGq2tbzGDVyz4crL2MjhF2EjD9XoIj8mZEoJmmZ1I+XRL6O1UixpCgp8RW04eWe3fiPpm8m1wk8OhwRDqZsN/etRIcsKMfYdIKz0G9KV7s1KSegi+ghp4dkNl3M2Basx7InQJJVOCiN
UW7dFGdTbHFcJoRNdVq2fmBWqU2t+5sel/MN2dKXVHfaPRK34B7vCAas+YWH6aLcr34YEoP9VhdBLtUpgn2Z9DH2canPLAEnpQW5qrJITirvn5NSUZU8UnOOVkwXQMAJKOSLakhT2+zNVVXxxvjpoixMptEmX36vWkzaH6byHCx+rgIW0lbQL1dTR+iS-----END CERTIFICATE-----# Issuer: CN=Visa eCommerce Root O=VISA OU=Visa International Service Association# Subject: CN=Visa eCommerce Root O=VISA OU=Visa International Service Association# Label: "Visa eCommerce Root"# Serial: 25952180776285836048024890241505565794# MD5 Fingerprint: fc:11:b8:d8:08:93:30:00:6d:23:f9:7e:eb:52:1e:02# SHA1 Fingerprint: 70:17:9b:86:8c:00:a4:fa:60:91:52:22:3f:9f:3e:32:bd:e0:05:62# SHA256 Fingerprint: 69:fa:c9:bd:55:fb:0a:c7:8d:53:bb:ee:5c:f1:d5:97:98:9f:d0:aa:ab:20:a2:51:51:bd:f1:73:3e:e7:d1:22-----BEGIN CERTIFICATE-----MIIDojCCAoqgAwIBAgIQE4Y1TR0/BvLB+WUF1ZAcYjANBgkqhkiG9w0BAQUFADBrMQswCQYDVQQGEwJVUzENMAsGA1UEChMEVklTQTEvMC0GA1UECxMmVmlzYSBJbnRlcm5hdGlvbmFsIFNlcnZpY2UgQXNzb2NpYXRpb24xHDAaBgNVBAMTE1Zpc2EgZUNvbW1lcmNlIFJvb3QwHhcNMDIwNjI2MDIxODM2WhcNMjIwNjI0MDAxNjEyWjBrMQswCQYDVQQGEwJVUzENMAsGA1UEChMEVklTQTEvMC0GA1UECxMmVmlzYSBJbnRlcm5hdGlvbmFsIFNlcnZpY2UgQXNzb2NpYXRpb24xHDAaBgNVBAMTE1Zpc2EgZUNvbW1lcmNlIFJvb3QwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvV95WHm6h2mCxlCfLF9sHP4CFT8icttD0b0/Pmdjh28JIXDqsOTPHH2qLJj0rNfVIsZHBAk4ElpF7sDPwsRROEW+1QK8bRaVK7362rPKgH1g/EkZgPI2h4H3PVz4zHvtH8aoVlwdVZqW1LS7YgFmypw23RuwhY/81q6UCzyr0TP579ZRdhE2o8mCP2w4lPJ9zcc+U30rq299yOIzzlr3xF7zSujtFWsan9sYXiwGd/BmoKoMWuDpI/k4+oKsGGelT84ATB+0tvz8KPFUgOSwsAGl0lUq8ILKpeeUYiZGo3BxN77t+Nwtd/jmliFKMAGzsGHxBvfaLdXe6YJ2E5/4tAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBQVOIMPPyw/cDMezUb+B4wg4NfDtzANBgkqhkiG9w0BAQUFAAOCAQEAX/FBfXxcCLkr4NWSR/pnXKUTwwMhmytMiUbPWU3J/qVAtmPN3XEolWcRzCSs00Rsca4BIGsDoo8Ytyk6feUWYFN4PMCvFYP3j1IzJL1kk5fui/fbGKhtcbP3LBfQdCVp9/5rPJS+TUtBjE7ic9DjkCJzQ83z7+pzzkWKsKZJ/0x9nXGIxHYdkFsd7v3M9+79YKWxehZx0RbQfBI8bGmX265fOZpwLwU8GUYEmSA20GBuYQa7FkKMcPcw++DbZqMAAb3mLNqRX6BGi01qnD093QVG/na/oAo85ADmJ7f/hC3euiInlhBx6yLt398znM/jra6O1I7mT1GvFpLgXPYHDw==-----END 
CERTIFICATE-----# Issuer: CN=Certum CA O=Unizeto Sp. z o.o.# Subject: CN=Certum CA O=Unizeto Sp. z o.o.# Label: "Certum Root CA"# Serial: 65568# MD5 Fingerprint: 2c:8f:9f:66:1d:18:90:b1:47:26:9d:8e:86:82:8c:a9# SHA1 Fingerprint: 62:52:dc:40:f7:11:43:a2:2f:de:9e:f7:34:8e:06:42:51:b1:81:18# SHA256 Fingerprint: d8:e0:fe:bc:1d:b2:e3:8d:00:94:0f:37:d2:7d:41:34:4d:99:3e:73:4b:99:d5:65:6d:97:78:d4:d8:14:36:24-----BEGIN CERTIFICATE-----MIIDDDCCAfSgAwIBAgIDAQAgMA0GCSqGSIb3DQEBBQUAMD4xCzAJBgNVBAYTAlBMMRswGQYDVQQKExJVbml6ZXRvIFNwLiB6IG8uby4xEjAQBgNVBAMTCUNlcnR1bSBDQTAeFw0wMjA2MTExMDQ2MzlaFw0yNzA2MTExMDQ2MzlaMD4xCzAJBgNVBAYTAlBMMRswGQYDVQQKExJVbml6ZXRvIFNwLiB6IG8uby4xEjAQBgNVBAMTCUNlcnR1bSBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAM6xwS7TT3zNJc4YPk/EjG+AanPIW1H4m9LcuwBcsaD8dQPugfCI7iNS6eYVM42sLQnFdvkrOYCJ5JdLkKWoePhzQ3ukYbDYWMzhbGZ+nPMJXlVjhNWo7/OxLjBos8Q82KxujZlakE403Daaj4GIULdtlkIJ89eVgw1BS7Bqa/j8D35in2fE7SZfECYPCE/wpFcozo+47UX2bu4lXapuOb7kky/ZR6By6/qmW6/KUz/iDsaWVhFu9+lmqSbYf5VT7QqFiLpPKaVCjF62/IUgAKpoC6EahQGcxEZjgoi2IrHu/qpGWX7PNSzVttpd90gzFFS269lvzs2I1qsb2pY7HVkCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAuI3O7+cUus/usESSbLQ5PqKEbq24IXfS1HeCh+YgQYHu4vgRt2PRFze+GXYkHAQaTOs9qmdvLdTN/mUxcMUbpgIKumB7bVjCmkn+YzILa+M6wKyrO7Do0wlRjBCDxjTgxSvgGrZgFCdsMneMvLJymM/NzD+5yCRCFNZX/OYmQ6kd5YCQzgNUKD73P9P4Te1qCjqTE5s7FCMTY5w/0YcneeVMUeMBrYVdGjux1XMQpNPyvG5k9VpWkKjHDkx0Dy5xO/fIR/RpbxXyEV6DHpx8Uq79AtoSqFlnGNu8cN2bsWntgM6JQEhqDjXKKWYVIZQs6GAqm4VKQPNriiTsBhYscw==-----END CERTIFICATE-----# Issuer: CN=AAA Certificate Services O=Comodo CA Limited# Subject: CN=AAA Certificate Services O=Comodo CA Limited# Label: "Comodo AAA Services root"# Serial: 1# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4-----BEGIN 
CERTIFICATE-----MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEbMBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRowGAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmljYXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezELMAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMMGEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQuaBtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZRrOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cmez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQUoBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20vQUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29tb2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUFAAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1QGE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLzRt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsil2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg==-----END CERTIFICATE-----# Issuer: CN=Secure Certificate Services O=Comodo CA Limited# Subject: CN=Secure Certificate Services O=Comodo CA Limited# Label: "Comodo Secure Services root"# Serial: 1# MD5 Fingerprint: d3:d9:bd:ae:9f:ac:67:24:b3:c8:1b:52:e1:b9:a9:bd# SHA1 Fingerprint: 4a:65:d5:f4:1d:ef:39:b8:b8:90:4a:4a:d3:64:81:33:cf:c7:a1:d1# SHA256 Fingerprint: bd:81:ce:3b:4f:65:91:d1:1a:67:b5:fc:7a:47:fd:ef:25:52:1b:f9:aa:4e:18:b9:e3:df:2e:34:a7:80:3b:e8-----BEGIN 
CERTIFICATE-----MIIEPzCCAyegAwIBAgIBATANBgkqhkiG9w0BAQUFADB+MQswCQYDVQQGEwJHQjEbMBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRowGAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEkMCIGA1UEAwwbU2VjdXJlIENlcnRpZmljYXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowfjELMAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxJDAiBgNVBAMMG1NlY3VyZSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMBxM4KK0HDrc4eCQNUd5MvJDkKQ+d40uaG6EfQlhfPMcm3ye5drswfxdySRXyWP9nQ95IDC+DwN879A6vfIUtFyb+/Iq0G4bi4XKpVpDM3SHpR7LZQdqnXXs5jLrLxkU0C8j6ysNstcrbvd4JQX7NFc0L/vpZXJkMWwrPsbQ996CF23uPJAGysnnlDOXmWCiIxe004MeuoIkbY2qitC++rCoznl2yY4rYsK7hljxxwk3wN42ubqwUcaCwtGCd0C/N7Lh1/XMGNooa7cMqG6vv5Eq2i2pRcV/b3Vp6ea5EQz6YiO/O1R65NxTq0B50SOqy3LqP4BSUjwwN3HaNiS/j0CAwEAAaOBxzCBxDAdBgNVHQ4EFgQUPNiTiMLAggnMAZkGkyDpnnAJY08wDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wgYEGA1UdHwR6MHgwO6A5oDeGNWh0dHA6Ly9jcmwuY29tb2RvY2EuY29tL1NlY3VyZUNlcnRpZmljYXRlU2VydmljZXMuY3JsMDmgN6A1hjNodHRwOi8vY3JsLmNvbW9kby5uZXQvU2VjdXJlQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwDQYJKoZIhvcNAQEFBQADggEBAIcBbSMdflsXfcFhMs+P5/OKlFlm4J4oqF7Tt/Q05qo5spcWxYJvMqTpjOev/e/C6LlLqqP05tqNZSH7uoDrJiiFGv45jN5bBAS0VPmjZ55B+glSzAVIqMk/IQQezkhr/IXownuvf7fM+F86/TXGDe+X3EyrEeFryzHRbPtIgKvcnDe4IRRLDXE97IMzbtFuMhbsmMcWi1mmNKsFVy2T96oTy9IT4rcuO81rUBcJaD61JlfutuC23bkpgHl9j6PwpCikFcSF9CfUa7/lXORlAnZUtOM3ZiTTGWHIUhDlizeauan5Hb/qmZJhlv8BzaFfDbxxvA6sCx1HRR3B7Hzs/Sk=-----END CERTIFICATE-----# Issuer: CN=Trusted Certificate Services O=Comodo CA Limited# Subject: CN=Trusted Certificate Services O=Comodo CA Limited# Label: "Comodo Trusted Services root"# Serial: 1# MD5 Fingerprint: 91:1b:3f:6e:cd:9e:ab:ee:07:fe:1f:71:d2:b3:61:27# SHA1 Fingerprint: e1:9f:e3:0e:8b:84:60:9e:80:9b:17:0d:72:a8:c5:ba:6e:14:09:bd# SHA256 Fingerprint: 3f:06:e5:56:81:d4:96:f5:be:16:9e:b5:38:9f:9f:2b:8f:f6:1e:17:08:df:68:81:72:48:49:cd:5d:27:cb:69-----BEGIN 
CERTIFICATE-----MIIEQzCCAyugAwIBAgIBATANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJHQjEbMBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRowGAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDElMCMGA1UEAwwcVHJ1c3RlZCBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczAeFw0wNDAxMDEwMDAwMDBaFw0yODEyMzEyMzU5NTlaMH8xCzAJBgNVBAYTAkdCMRswGQYDVQQIDBJHcmVhdGVyIE1hbmNoZXN0ZXIxEDAOBgNVBAcMB1NhbGZvcmQxGjAYBgNVBAoMEUNvbW9kbyBDQSBMaW1pdGVkMSUwIwYDVQQDDBxUcnVzdGVkIENlcnRpZmljYXRlIFNlcnZpY2VzMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA33FvNlhTWvI2VFeAxHQIIO0Yfyod5jWaHiWsnOWWfnJSoBVC21ndZHoa0Lh73TkVvFVIxO06AOoxEbrycXQaZ7jPM8yoMa+j49d/vzMtTGo87IvDktJTdyR0nAducPy9C1t2ul/y/9c3S0pgePfw+spwtOpZqqPOSC+pw7ILfhdyFgymBwwbOM/JYrc/oJOlh0Hyt3BAd9i+FHzjqMB6juljatEPmsbS9Is6FARW1O24zG71++IsWL1/T2sr92AkWCTOJu80kTrV44HQsvAEAtdbtz6SrGsSivnkBbA7kUlcsutT6vifR4buv5XAwAaf0lteERv0xwQ1KdJVXOTt6wIDAQABo4HJMIHGMB0GA1UdDgQWBBTFe1i97doladL3WRaoszLAeydb9DAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zCBgwYDVR0fBHwwejA8oDqgOIY2aHR0cDovL2NybC5jb21vZG9jYS5jb20vVHJ1c3RlZENlcnRpZmljYXRlU2VydmljZXMuY3JsMDqgOKA2hjRodHRwOi8vY3JsLmNvbW9kby5uZXQvVHJ1c3RlZENlcnRpZmljYXRlU2VydmljZXMuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQDIk4E7ibSvuIQSTI3S8NtwuleGFTQQuS9/HrCoiWChisJ3DFBKmwCL2Iv0QeLQg4pKHBQGsKNoBXAxMKdTmw7pSqBYaWcOrp32pSxBvzwGa+RZzG0Q8ZZvH9/0BAKkn0U+yNj6NkZEUD+Cl5EfKNsYEYwq5GWDVxISjBc/lDb+XbDABHcTuPQV1T84zJQ6VdCsmPW6AF/ghhmBeC8owH7TzEIK9a5QoNE+xqFx7D+gIIxmOom0jtTYsU0lR+4viMi14QVFwL4Ucd56/Y57fU0IlqUSc/AtyjcndBInTMu2l+nZrghtWjlA3QVHdWpaIbOjGM9O9y5Xt5hwXsjEeLBi-----END CERTIFICATE-----# Issuer: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority# Subject: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority# Label: "QuoVadis Root CA"# Serial: 985026699# MD5 Fingerprint: 27:de:36:fe:72:b7:00:03:00:9d:f4:f0:1e:6c:04:24# SHA1 Fingerprint: de:3f:40:bd:50:93:d3:9b:6c:60:f6:da:bc:07:62:01:00:89:76:c9# SHA256 Fingerprint: 
a4:5e:de:3b:bb:f0:9c:8a:e1:5c:72:ef:c0:72:68:d6:93:a2:1c:99:6f:d5:1e:67:ca:07:94:60:fd:6d:88:73-----BEGIN CERTIFICATE-----MIIF0DCCBLigAwIBAgIEOrZQizANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJCTTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDElMCMGA1UECxMcUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEuMCwGA1UEAxMlUXVvVmFkaXMgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wMTAzMTkxODMzMzNaFw0yMTAzMTcxODMzMzNaMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMSUwIwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYDVQQDEyVRdW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAv2G1lVO6V/z68mcLOhrfEYBklbTRvM16z/Ypli4kVEAkOPcahdxYTMukJ0KX0J+DisPkBgNbAKVRHnAEdOLB1Dqr1607BxgFjv2DrOpm2RgbaIr1VxqYuvXtdj182d6UajtLF8HVj71lODqV0D1VNk7feVcxKh7YWWVJWCCYfqtffp/p1k3sg3Spx2zY7ilKhSoGFPlU5tPaZQeLYzcS19Dsw3sgQUSj7cugF+FxZc4dZjH3dgEZyH0DWLaVSR2mEiboxgx24ONmy+pdpibu5cxfvWenAScOospUxbF6lR1xHkopigPcakXBpBlebzbNw6Kwt/5cOOJSvPhEQ+aQuwIDAQABo4ICUjCCAk4wPQYIKwYBBQUHAQEEMTAvMC0GCCsGAQUFBzABhiFodHRwczovL29jc3AucXVvdmFkaXNvZmZzaG9yZS5jb20wDwYDVR0TAQH/BAUwAwEB/zCCARoGA1UdIASCAREwggENMIIBCQYJKwYBBAG+WAABMIH7MIHUBggrBgEFBQcCAjCBxxqBxFJlbGlhbmNlIG9uIHRoZSBRdW9WYWRpcyBSb290IENlcnRpZmljYXRlIGJ5IGFueSBwYXJ0eSBhc3N1bWVzIGFjY2VwdGFuY2Ugb2YgdGhlIHRoZW4gYXBwbGljYWJsZSBzdGFuZGFyZCB0ZXJtcyBhbmQgY29uZGl0aW9ucyBvZiB1c2UsIGNlcnRpZmljYXRpb24gcHJhY3RpY2VzLCBhbmQgdGhlIFF1b1ZhZGlzIENlcnRpZmljYXRlIFBvbGljeS4wIgYIKwYBBQUHAgEWFmh0dHA6Ly93d3cucXVvdmFkaXMuYm0wHQYDVR0OBBYEFItLbe3TKbkGGew5Oanwl4Rqy+/fMIGuBgNVHSMEgaYwgaOAFItLbe3TKbkGGew5Oanwl4Rqy+/foYGEpIGBMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMSUwIwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYDVQQDEyVRdW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggQ6tlCLMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOCAQEAitQUtf70mpKnGdSkfnIYj9lofFIk3WdvOXrEql494liwTXCYhGHoG+NpGA7O+0dQoE7/8CQfvbLO9Sf87C9TqnN7Az10buYWnuulLsS/VidQK2K6vkscPFVcQR0kvoIgR13VRH56FmjffU1RcHhXHTMe/QKZnAzNCgVPx7uOpHX6Sm2xgI4JVrmcGmD+XcHXetwReNDWXcG31a0ymQM6isxUJTkxgXsTIlG6Rm
yhu576BGxJJnSP0nPrzDCi5upZIof4l/UO/erMkqQWxFIY6iHOsfHmhIHluqmGKPJDWl0Snawe2ajlCmqnf6CHKc/yiU3U7MXi5nrQNiOKSnQ2+Q==-----END CERTIFICATE-----# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited# Label: "QuoVadis Root CA 2"# Serial: 1289# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86-----BEGIN CERTIFICATE-----MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJvb3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9WYWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCaGMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxgFyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55JWpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bBrrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp+ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/iUcw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIizPtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UHoycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuIyV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1UdEwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2fBluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzng/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2BlfF/nJrP3MpCYUNQ3cVX2
kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5KWWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0HaB0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozchLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPRTUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWDmbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0ZohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u-----END CERTIFICATE-----# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited# Label: "QuoVadis Root CA 3"# Serial: 1478# MD5 Fingerprint: 31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35-----BEGIN CERTIFICATE-----MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJvb3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9WYWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDMV0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUrH556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9CabwvvWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLTmZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhebtfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjcT5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDtWAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZc6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlT
CCAZEwDwYDVR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMGCCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVudC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2NwczALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4GA1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJCTTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMgUm9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM07ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSemd1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd+LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadNt54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6xDYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6szHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0jWy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeTmJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK4SVhM7JZG+Ju1zdXtg2pEto=-----END CERTIFICATE-----# Issuer: O=SECOM Trust.net OU=Security Communication RootCA1# Subject: O=SECOM Trust.net OU=Security Communication RootCA1# Label: "Security Communication Root CA"# Serial: 0# MD5 Fingerprint: f1:bc:63:6a:54:e0:b5:27:f5:cd:e7:1a:e3:4d:6e:4a# SHA1 Fingerprint: 36:b1:2b:49:f9:81:9e:d7:4c:9e:bc:38:0f:c6:56:8f:5d:ac:b2:f7# SHA256 Fingerprint: e7:5e:72:ed:9f:56:0e:ec:6e:b4:80:00:73:a4:3f:c3:ad:19:19:5a:39:22:82:01:78:95:97:4a:99:02:6b:6c-----BEGIN 
CERTIFICATE-----MIIDWjCCAkKgAwIBAgIBADANBgkqhkiG9w0BAQUFADBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEwHhcNMDMwOTMwMDQyMDQ5WhcNMjMwOTMwMDQyMDQ5WjBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCzs/5/022x7xZ8V6UMbXaKL0u/ZPtM7orw8yl89f/uKuDp6bpbZCKamm8sOiZpUQWZJtzVHGpxxpp9Hp3dfGzGjGdnSj74cbAZJ6kJDKaVv0uMDPpVmDvY6CKhS3E4eayXkmmziX7qIWgGmBSWh9JhNrxtJ1aeV+7AwFb9Ms+k2Y7CI9eNqPPYJayX5HA49LY6tJ07lyZDo6G8SVlyTCMwhwFY9k6+HGhWZq/NQV3Is00qVUarH9oe4kA92819uZKAnDfdDJZkndwi92SL32HeFZRSFaB9UslLqCHJxrHty8OVYNEP8Ktw+N/LTX7s1vqr2b1/VPKl6Xn62dZ2JChzAgMBAAGjPzA9MB0GA1UdDgQWBBSgc0mZaNyFW2XjmygvV5+9M7wHSDALBgNVHQ8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAaECpqLvkT115swW1F7NgE+vGkl3g0dNq/vu+m22/xwVtWSDEHPC32oRYAmP6SBbvT6UL90qY8j+eG61Ha2POCEfrUj94nK9NrvjVT8+amCoQQTlSxN3Zmw7vkwGusi7KaEIkQmywszo+zenaSMQVy+n5Bw+SUEmK3TGXX8npN6o7WWWXlDLJs58+OmJYxUmtYg5xpTKqL8aJdkNAExNnPaJUJRDL8Try2frbSVa7pv6nQTXD4IhhyYjH3zYQIphZ6rBK+1YWc26sTfcioU+tHXotRSflMMFe8toTyyVCUZVHA4xsIcx0Qu1T/zOLjw9XARYvz6buyXAiFL39vmwLAw==-----END CERTIFICATE-----# Issuer: CN=Sonera Class2 CA O=Sonera# Subject: CN=Sonera Class2 CA O=Sonera# Label: "Sonera Class 2 Root CA"# Serial: 29# MD5 Fingerprint: a3:ec:75:0f:2e:88:df:fa:48:01:4e:0b:5c:48:6f:fb# SHA1 Fingerprint: 37:f7:6d:e6:07:7c:90:c5:b1:3e:93:1a:b7:41:10:b4:f2:e4:9a:27# SHA256 Fingerprint: 79:08:b4:03:14:c1:38:10:0b:51:8d:07:35:80:7f:fb:fc:f8:51:8a:00:95:33:71:05:ba:38:6b:15:3d:d9:27-----BEGIN 
CERTIFICATE-----MIIDIDCCAgigAwIBAgIBHTANBgkqhkiG9w0BAQUFADA5MQswCQYDVQQGEwJGSTEPMA0GA1UEChMGU29uZXJhMRkwFwYDVQQDExBTb25lcmEgQ2xhc3MyIENBMB4XDTAxMDQwNjA3Mjk0MFoXDTIxMDQwNjA3Mjk0MFowOTELMAkGA1UEBhMCRkkxDzANBgNVBAoTBlNvbmVyYTEZMBcGA1UEAxMQU29uZXJhIENsYXNzMiBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAJAXSjWdyvANlsdE+hY3/Ei9vX+ALTU74W+oZ6m/AxxNjG8yR9VBaKQTBME1DJqEQ/xcHf+Js+gXGM2RX/uJ4+q/Tl18GybTdXnt5oTjV+WtKcT0OijnpXuENmmz/V52vaMtmdOQTiMofRhj8VQ7Jp12W5dCsv+u8E7s3TmVToMGf+dJQMjFAbJUWmYdPfz56TwKnoG4cPABi+QjVHzIrviQHgCWctRUz2EjvOr7nQKV0ba5cTppCD8PtOFCx4j1P5iop7oc4HFx71hXgVB6XGt0Rg6DA5jDjqhu8nYybieDwnPz3BjotJPqdURrBGAgcVeHnfO+oJAjPYok4doh28MCAwEAAaMzMDEwDwYDVR0TAQH/BAUwAwEB/zARBgNVHQ4ECgQISqCqWITTXjwwCwYDVR0PBAQDAgEGMA0GCSqGSIb3DQEBBQUAA4IBAQBazof5FnIVV0sd2ZvnoiYw7JNn39Yt0jSv9zilzqsWuasvfDXLrNAPtEwr/IDva4yRXzZ299uzGxnq9LIR/WFxRL8oszodv7ND6J+/3DEIcbCdjdY0RzKQxmUk96BKfARzjzlvF4xytb1LyHr4e4PDKE6cCepnP7JnBBvDFNr450kkkdAdavphOe9r5yF1BgfYErQhIHBCcYHaPJo2vqZbDWpsmh+Re/n570K6Tk6ezAyNlNzZRZxe7EJQY670XcSxEtzKO6gunRRaBXW37Ndj4ro1tgQIkejanZz2ZrUYrAqmVCY0M9IbwdR/GjqOC6oybtv8TyWf2TLHllpwrN9M-----END CERTIFICATE-----# Issuer: CN=Staat der Nederlanden Root CA O=Staat der Nederlanden# Subject: CN=Staat der Nederlanden Root CA O=Staat der Nederlanden# Label: "Staat der Nederlanden Root CA"# Serial: 10000010# MD5 Fingerprint: 60:84:7c:5a:ce:db:0c:d4:cb:a7:e9:fe:02:c6:a9:c0# SHA1 Fingerprint: 10:1d:fa:3f:d5:0b:cb:bb:9b:b5:60:0c:19:55:a4:1a:f4:73:3a:04# SHA256 Fingerprint: d4:1d:82:9e:8c:16:59:82:2a:f9:3f:ce:62:bf:fc:de:26:4f:c8:4e:8b:95:0c:5f:f2:75:d0:52:35:46:95:a3-----BEGIN 
CERTIFICATE-----MIIDujCCAqKgAwIBAgIEAJiWijANBgkqhkiG9w0BAQUFADBVMQswCQYDVQQGEwJOTDEeMBwGA1UEChMVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSYwJAYDVQQDEx1TdGFhdCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQTAeFw0wMjEyMTcwOTIzNDlaFw0xNTEyMTYwOTE1MzhaMFUxCzAJBgNVBAYTAk5MMR4wHAYDVQQKExVTdGFhdCBkZXIgTmVkZXJsYW5kZW4xJjAkBgNVBAMTHVN0YWF0IGRlciBOZWRlcmxhbmRlbiBSb290IENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAmNK1URF6gaYUmHFtvsznExvWJw56s2oYHLZhWtVhCb/ekBPHZ+7d89rFDBKeNVU+LCeIQGv33N0iYfXCxw719tV2U02PjLwYdjeFnejKScfST5gTCaI+Ioicf9byEGW07l8Y1Rfj+MX94p2i71MOhXeiD+EwR+4A5zN9RGcaC1Hoi6CeUJhoNFIfLm0B8mBF8jHrqTFoKbt6QZ7GGX+UtFE5A3+y3qcym7RHjm+0Sq7lr7HcsBthvJly3uSJt3omXdozSVtSnA71iq3DuD3oBmrC1SoLbHuEvVYFy4ZlkuxEK7COudxwC0barbxjiDn622r+I/q85Ej0ZytqERAhSQIDAQABo4GRMIGOMAwGA1UdEwQFMAMBAf8wTwYDVR0gBEgwRjBEBgRVHSAAMDwwOgYIKwYBBQUHAgEWLmh0dHA6Ly93d3cucGtpb3ZlcmhlaWQubmwvcG9saWNpZXMvcm9vdC1wb2xpY3kwDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSofeu8Y6R0E3QA7Jbg0zTBLL9s+DANBgkqhkiG9w0BAQUFAAOCAQEABYSHVXQ2YcG70dTGFagTtJ+k/rvuFbQvBgwp8qiSpGEN/KtcCFtREytNwiphyPgJWPwtArI5fZlmgb9uXJVFIGzmeafR2Bwp/MIgJ1HI8XxdNGdphREwxgDS1/PTfLbwMVcoEoJz6TMvplW0C5GUR5z6u3pCMuiufi3IvKwUv9kP2Vv8wfl6leF9fpb8cbDCTMjfRTTJzg3ynGQI0DvDKcWy7ZAEwbEpkcUwb8GpcjPM/l0WFywRaed+/sWDCN+83CI6LiBpIzlWYGeQiy52OfsRiJf2fL1LuCAWZwWN4jvBcj+UlTfHXbme2JOhF4//DGYVwSR8MnwDHTuhWEUykw==-----END CERTIFICATE-----# Issuer: CN=UTN - DATACorp SGC O=The USERTRUST Network OU=http://www.usertrust.com# Subject: CN=UTN - DATACorp SGC O=The USERTRUST Network OU=http://www.usertrust.com# Label: "UTN DATACorp SGC Root CA"# Serial: 91374294542884689855167577680241077609# MD5 Fingerprint: b3:a5:3e:77:21:6d:ac:4a:c0:c9:fb:d5:41:3d:ca:06# SHA1 Fingerprint: 58:11:9f:0e:12:82:87:ea:50:fd:d9:87:45:6f:4f:78:dc:fa:d6:d4# SHA256 Fingerprint: 85:fb:2f:91:dd:12:27:5a:01:45:b6:36:53:4f:84:02:4a:d6:8b:69:b8:ee:88:68:4f:f7:11:37:58:05:b3:48-----BEGIN 
CERTIFICATE-----MIIEXjCCA0agAwIBAgIQRL4Mi1AAIbQR0ypoBqmtaTANBgkqhkiG9w0BAQUFADCBkzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2UgQ2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExhodHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xGzAZBgNVBAMTElVUTiAtIERBVEFDb3JwIFNHQzAeFw05OTA2MjQxODU3MjFaFw0xOTA2MjQxOTA2MzBaMIGTMQswCQYDVQQGEwJVUzELMAkGA1UECBMCVVQxFzAVBgNVBAcTDlNhbHQgTGFrZSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxITAfBgNVBAsTGGh0dHA6Ly93d3cudXNlcnRydXN0LmNvbTEbMBkGA1UEAxMSVVROIC0gREFUQUNvcnAgU0dDMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA3+5YEKIrblXEjr8uRgnn4AgPLit6E5Qbvfa2gI5lBZMAHryv4g+OGQ0SR+ysraP6LnD43m77VkIVni5c7yPeIbkFdicZD0/Ww5y0vpQZY/KmEQrrU0icvvIpOxboGqBMpsn0GFlowHDyUwDAXlCCpVZvNvlK4ESGoE1O1kduSUrLZ9emxAW5jh70/P/N5zbgnAVssjMiFdC04MwXwLLA9P4yPykqlXvY8qdOD1R8oQ2AswkDwf9c3V6aPryuvEeKaq5xyh+xKrhfQgUL7EYw0XILyulWbfXv33i+Ybqypa4ETLyorGkVl73v67SMvzX41MPRKA5cOp9wGDMgd8SirwIDAQABo4GrMIGoMAsGA1UdDwQEAwIBxjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRTMtGzz3/64PGgXYVOktKeRR20TzA9BgNVHR8ENjA0MDKgMKAuhixodHRwOi8vY3JsLnVzZXJ0cnVzdC5jb20vVVROLURBVEFDb3JwU0dDLmNybDAqBgNVHSUEIzAhBggrBgEFBQcDAQYKKwYBBAGCNwoDAwYJYIZIAYb4QgQBMA0GCSqGSIb3DQEBBQUAA4IBAQAnNZcAiosovcYzMB4p/OL31ZjUQLtgyr+rFywJNn9Q+kHcrpY6CiM+iVnJowftGzet/Hy+UUla3joKVAgWRcKZsYfNjGjgaQPpxE6YsjuMFrMOoAyYUJuTqXAJyCyjj98C5OBxOvG0I3KgqgHf35g+FFCgMSa9KOlaMCZ1+XtgHI3zzVAmbQQnmt/VDUVHKWss5nbZqSl9Mt3JNjy9rjXxEZ4du5A/EkdOjtd+D2JzHVImOBwYSf0wdJrE5SIv2MCN7ZF6TACPcn9d2t0bi0Vr591pl6jFVkwPDPafepE39peC4N1xaf92P2BNPM/3mfnGV/TJVTl4uix5yaaIK/QI-----END CERTIFICATE-----# Issuer: CN=UTN-USERFirst-Hardware O=The USERTRUST Network OU=http://www.usertrust.com# Subject: CN=UTN-USERFirst-Hardware O=The USERTRUST Network OU=http://www.usertrust.com# Label: "UTN USERFirst Hardware Root CA"# Serial: 91374294542884704022267039221184531197# MD5 Fingerprint: 4c:56:41:e5:0d:bb:2b:e8:ca:a3:ed:18:08:ad:43:39# SHA1 Fingerprint: 04:83:ed:33:99:ac:36:08:05:87:22:ed:bc:5e:46:00:e3:be:f9:d7# SHA256 Fingerprint: 
6e:a5:47:41:d0:04:66:7e:ed:1b:48:16:63:4a:a3:a7:9e:6e:4b:96:95:0f:82:79:da:fc:8d:9b:d8:81:21:37-----BEGIN CERTIFICATE-----MIIEdDCCA1ygAwIBAgIQRL4Mi1AAJLQR0zYq/mUK/TANBgkqhkiG9w0BAQUFADCBlzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2UgQ2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExhodHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3QtSGFyZHdhcmUwHhcNOTkwNzA5MTgxMDQyWhcNMTkwNzA5MTgxOTIyWjCBlzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2UgQ2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExhodHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3QtSGFyZHdhcmUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCx98M4P7Sof885glFn0G2f0v9Y8+efK+wNiVSZuTiZFvfgIXlIwrthdBKWHTxqctU8EGc6Oe0rE81m65UJM6Rsl7HoxuzBdXmcRl6Nq9Bq/bkqVRcQVLMZ8Jr28bFdtqdt++BxF2uiiPsA3/4aMXcMmgF6sTLjKwEHOG7DpV4jvEWbe1DByTCP2+UretNb+zNAHqDVmBe8i4fDidNdoI6yqqr2jmmIBsX6iSHzCJ1pLgkzmykNRg+MzEk0sGlRvfkGzWitZky8PqxhvQqIDsjfPe58BEydCl5rkdbux+0ojatNh4lz0G6k0B4WixThdkQDf2Os5M1JnMWS9KsyoUhbAgMBAAGjgbkwgbYwCwYDVR0PBAQDAgHGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFKFyXyYbKJhDlV0HN9WFlp1L0sNFMEQGA1UdHwQ9MDswOaA3oDWGM2h0dHA6Ly9jcmwudXNlcnRydXN0LmNvbS9VVE4tVVNFUkZpcnN0LUhhcmR3YXJlLmNybDAxBgNVHSUEKjAoBggrBgEFBQcDAQYIKwYBBQUHAwUGCCsGAQUFBwMGBggrBgEFBQcDBzANBgkqhkiG9w0BAQUFAAOCAQEARxkP3nTGmZev/K0oXnWO6y1n7k57K9cM//bey1WiCuFMVGWTYGufEpytXoMs61quwOQt9ABjHbjAbPLPSbtNk28GpgoiskliCE7/yMgUsogWXecB5BKV5UU0s4tpvc+0hY91UZ59Ojg6FEgSxvunOxqNDYJAB+gECJChicsZUN/KHAG8HQQZexB2lzvukJDKxA4fFm517zP4029bHpbj4HR3dHuKom4t3XbWOTCC8KucUvIqx69JXn7HaOWCgchqJ/kniCrVWFCVH/A7HFe7fRQ5YiuayZSSKqMiDP+JJn1fIytH1xUdqWqeUQ0qUZ6B+dQ7XnASfxAynB67nfhmqA==-----END CERTIFICATE-----# Issuer: CN=Chambers of Commerce Root O=AC Camerfirma SA CIF A82743287 OU=http://www.chambersign.org# Subject: CN=Chambers of Commerce Root O=AC Camerfirma SA CIF A82743287 OU=http://www.chambersign.org# Label: "Camerfirma Chambers of Commerce Root"# Serial: 0# MD5 Fingerprint: 
b0:01:ee:14:d9:af:29:18:94:76:8e:f1:69:33:2a:84# SHA1 Fingerprint: 6e:3a:55:a4:19:0c:19:5c:93:84:3c:c0:db:72:2e:31:30:61:f0:b1# SHA256 Fingerprint: 0c:25:8a:12:a5:67:4a:ef:25:f2:8b:a7:dc:fa:ec:ee:a3:48:e5:41:e6:f5:cc:4e:e6:3b:71:b3:61:60:6a:c3-----BEGIN CERTIFICATE-----MIIEvTCCA6WgAwIBAgIBADANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJFVTEnMCUGA1UEChMeQUMgQ2FtZXJmaXJtYSBTQSBDSUYgQTgyNzQzMjg3MSMwIQYDVQQLExpodHRwOi8vd3d3LmNoYW1iZXJzaWduLm9yZzEiMCAGA1UEAxMZQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdDAeFw0wMzA5MzAxNjEzNDNaFw0zNzA5MzAxNjEzNDRaMH8xCzAJBgNVBAYTAkVVMScwJQYDVQQKEx5BQyBDYW1lcmZpcm1hIFNBIENJRiBBODI3NDMyODcxIzAhBgNVBAsTGmh0dHA6Ly93d3cuY2hhbWJlcnNpZ24ub3JnMSIwIAYDVQQDExlDaGFtYmVycyBvZiBDb21tZXJjZSBSb290MIIBIDANBgkqhkiG9w0BAQEFAAOCAQ0AMIIBCAKCAQEAtzZV5aVdGDDg2olUkfzIx1L4L1DZ77F1c2VHfRtbunXF/KGIJPov7coISjlUxFF6tdpg6jg8gbLL8bvZkSM/SAFwdakFKq0fcfPJVD0dBmpAPrMMhe5cG3nCYsS4No41XQEMIwRHNaqbYE6gZj3LJgqcQKH0XZi/caulAGgq7YN6D6IUtdQis4CwPAxaUWktWBiP7Zme8a7ileb2R6jWDA+wWFjbw2Y3npuRVDM30pQcakjJyfKl2qUMI/cjDpwyVV5xnIQFUZot/eZOKjRa3spAN2cMVCFVd9oKDMyXroDclDZK9D7ONhMeU+SsTjoF7Nuucpw4i9A5O4kKPnf+dQIBA6OCAUQwggFAMBIGA1UdEwEB/wQIMAYBAf8CAQwwPAYDVR0fBDUwMzAxoC+gLYYraHR0cDovL2NybC5jaGFtYmVyc2lnbi5vcmcvY2hhbWJlcnNyb290LmNybDAdBgNVHQ4EFgQU45T1sU3p26EpW1eLTXYGduHRooowDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzAnBgNVHREEIDAegRxjaGFtYmVyc3Jvb3RAY2hhbWJlcnNpZ24ub3JnMCcGA1UdEgQgMB6BHGNoYW1iZXJzcm9vdEBjaGFtYmVyc2lnbi5vcmcwWAYDVR0gBFEwTzBNBgsrBgEEAYGHLgoDATA+MDwGCCsGAQUFBwIBFjBodHRwOi8vY3BzLmNoYW1iZXJzaWduLm9yZy9jcHMvY2hhbWJlcnNyb290Lmh0bWwwDQYJKoZIhvcNAQEFBQADggEBAAxBl8IahsAifJ/7kPMa0QOx7xP5IV8EnNrJpY0nbJaHkb5BkAFyk+cefV/2icZdp0AJPaxJRUXcLo0waLIJuvvDL8y6C98/d3tGfToSJI6WjzwFCm/SlCgdbQzALogi1djPHRPH8EjX1wWnz8dHnjs8NMiAT9QUu/wNUPf6s+xCX6ndbcj0dc97wXImsQEcXCz9ek60AcUFV7nnPKoF2YjpB0ZBzu9Bga5Y34OirsrXdx/nADydb47kMgkdTXg0eDQ8lJsm7U9xxhl6vSAiSFr+S30Dt+dYvsYyTnQeaN2oaFuzPu5ifdmA6Ap1erfutGWaIZDgqtCYvDi1czyL+Nw=-----END CERTIFICATE-----# Issuer: CN=Global Chambersign Root O=AC Camerfirma SA CIF A82743287 
OU=http://www.chambersign.org# Subject: CN=Global Chambersign Root O=AC Camerfirma SA CIF A82743287 OU=http://www.chambersign.org# Label: "Camerfirma Global Chambersign Root"# Serial: 0# MD5 Fingerprint: c5:e6:7b:bf:06:d0:4f:43:ed:c4:7a:65:8a:fb:6b:19# SHA1 Fingerprint: 33:9b:6b:14:50:24:9b:55:7a:01:87:72:84:d9:e0:2f:c3:d2:d8:e9# SHA256 Fingerprint: ef:3c:b4:17:fc:8e:bf:6f:97:87:6c:9e:4e:ce:39:de:1e:a5:fe:64:91:41:d1:02:8b:7d:11:c0:b2:29:8c:ed-----BEGIN CERTIFICATE-----MIIExTCCA62gAwIBAgIBADANBgkqhkiG9w0BAQUFADB9MQswCQYDVQQGEwJFVTEnMCUGA1UEChMeQUMgQ2FtZXJmaXJtYSBTQSBDSUYgQTgyNzQzMjg3MSMwIQYDVQQLExpodHRwOi8vd3d3LmNoYW1iZXJzaWduLm9yZzEgMB4GA1UEAxMXR2xvYmFsIENoYW1iZXJzaWduIFJvb3QwHhcNMDMwOTMwMTYxNDE4WhcNMzcwOTMwMTYxNDE4WjB9MQswCQYDVQQGEwJFVTEnMCUGA1UEChMeQUMgQ2FtZXJmaXJtYSBTQSBDSUYgQTgyNzQzMjg3MSMwIQYDVQQLExpodHRwOi8vd3d3LmNoYW1iZXJzaWduLm9yZzEgMB4GA1UEAxMXR2xvYmFsIENoYW1iZXJzaWduIFJvb3QwggEgMA0GCSqGSIb3DQEBAQUAA4IBDQAwggEIAoIBAQCicKLQn0KuWxfH2H3PFIP8T8mhtxOviteePgQKkotgVvq0Mi+ITaFgCPS3CU6gSS9J1tPfnZdan5QEcOw/Wdm3zGaLmFIoCQLfxS+EjXqXd7/sQJ0lcqu1PzKY+7e3/HKE5TWH+VX6ox8Oby4o3Wmg2UIQxvi1RMLQQ3/bvOSiPGpVeAp3qdjqGTK3L/5cPxvusZjsyq16aUXjlg9V9ubtdepl6DJWk0aJqCWKZQbua795B9Dxt6/tLE2Su8CoX6dnfQTyFQhwrJLWfQTSM/tMtgsL+xrJxI0DqX5c8lCrEqWhz0hQpe/SyBoT+rB/sYIcd2oPX9wLlY/vQ37mRQklAgEDo4IBUDCCAUwwEgYDVR0TAQH/BAgwBgEB/wIBDDA/BgNVHR8EODA2MDSgMqAwhi5odHRwOi8vY3JsLmNoYW1iZXJzaWduLm9yZy9jaGFtYmVyc2lnbnJvb3QuY3JsMB0GA1UdDgQWBBRDnDafsJ4wTcbOX60Qq+UDpfqpFDAOBgNVHQ8BAf8EBAMCAQYwEQYJYIZIAYb4QgEBBAQDAgAHMCoGA1UdEQQjMCGBH2NoYW1iZXJzaWducm9vdEBjaGFtYmVyc2lnbi5vcmcwKgYDVR0SBCMwIYEfY2hhbWJlcnNpZ25yb290QGNoYW1iZXJzaWduLm9yZzBbBgNVHSAEVDBSMFAGCysGAQQBgYcuCgEBMEEwPwYIKwYBBQUHAgEWM2h0dHA6Ly9jcHMuY2hhbWJlcnNpZ24ub3JnL2Nwcy9jaGFtYmVyc2lnbnJvb3QuaHRtbDANBgkqhkiG9w0BAQUFAAOCAQEAPDtwkfkEVCeR4e3t/mh/YV3lQWVPMvEYBZRqHN4fcNs+ezICNLUMbKGKfKX0j//U2K0X1S0E0T9YgOKBWYi+wONGkyT+kL0mojAt6JcmVzWJdJYY9hXiryQZVgICsroPFOrGimbBhkVVi76SvpykBMdJPJ7oKXqJ1/6v/2j1pReQvayZzKWGVwlnRtvWFsJG8eSpUPWP0ZIV018+xgBJOm5YstHRJw0lyDL4IBHNfT
IzSJRUTN3cecQwn+uOuFW114hcxWokPbLTBQNRxgfvzBRydD1ucs4YKIxKoHflCStFREest2d/AYoFWpO+ocH/+OcOZ6RHSXZddZAa9SaP8A==-----END CERTIFICATE-----# Issuer: CN=NetLock Kozjegyzoi (Class A) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok# Subject: CN=NetLock Kozjegyzoi (Class A) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok# Label: "NetLock Notary (Class A) Root"# Serial: 259# MD5 Fingerprint: 86:38:6d:5e:49:63:6c:85:5c:db:6d:dc:94:b7:d0:f7# SHA1 Fingerprint: ac:ed:5f:65:53:fd:25:ce:01:5f:1f:7a:48:3b:6a:74:9f:61:78:c6# SHA256 Fingerprint: 7f:12:cd:5f:7e:5e:29:0e:c7:d8:51:79:d5:b7:2c:20:a5:be:75:08:ff:db:5b:f8:1a:b9:68:4a:7f:c9:f6:67-----BEGIN CERTIFICATE-----MIIGfTCCBWWgAwIBAgICAQMwDQYJKoZIhvcNAQEEBQAwga8xCzAJBgNVBAYTAkhVMRAwDgYDVQQIEwdIdW5nYXJ5MREwDwYDVQQHEwhCdWRhcGVzdDEnMCUGA1UEChMeTmV0TG9jayBIYWxvemF0Yml6dG9uc2FnaSBLZnQuMRowGAYDVQQLExFUYW51c2l0dmFueWtpYWRvazE2MDQGA1UEAxMtTmV0TG9jayBLb3pqZWd5em9pIChDbGFzcyBBKSBUYW51c2l0dmFueWtpYWRvMB4XDTk5MDIyNDIzMTQ0N1oXDTE5MDIxOTIzMTQ0N1owga8xCzAJBgNVBAYTAkhVMRAwDgYDVQQIEwdIdW5nYXJ5MREwDwYDVQQHEwhCdWRhcGVzdDEnMCUGA1UEChMeTmV0TG9jayBIYWxvemF0Yml6dG9uc2FnaSBLZnQuMRowGAYDVQQLExFUYW51c2l0dmFueWtpYWRvazE2MDQGA1UEAxMtTmV0TG9jayBLb3pqZWd5em9pIChDbGFzcyBBKSBUYW51c2l0dmFueWtpYWRvMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvHSMD7tM9DceqQWC2ObhbHDqeLVu0ThEDaiDzl3S1tWBxdRL51uUcCbbO51qTGL3cfNk1mE7PetzozfZz+qMkjvN9wfcZnSX9EUi3fRc4L9t875lM+QVOr/bmJBVOMTtplVjC7B4BPTjbsE/jvxReB+SnoPC/tmwqcm8WgD/qaiYdPv2LD4VOQ22BFWoDpggQrOxJa1+mm9dU7GrDPzr4PN6s6iz/0b2Y6LYOph7tqyF/7AlT3Rj5xMHpQqPBffAZG9+pyeAlt7ULoZgx2srXnN7F+eRP2QM2EsiNCubMvJIH5+hCoR64sKtlz2O1cH5VqNQ6ca0+pii7pXmKgOM3wIDAQABo4ICnzCCApswDgYDVR0PAQH/BAQDAgAGMBIGA1UdEwEB/wQIMAYBAf8CAQQwEQYJYIZIAYb4QgEBBAQDAgAHMIICYAYJYIZIAYb4QgENBIICURaCAk1GSUdZRUxFTSEgRXplbiB0YW51c2l0dmFueSBhIE5ldExvY2sgS2Z0LiBBbHRhbGFub3MgU3pvbGdhbHRhdGFzaSBGZWx0ZXRlbGVpYmVuIGxlaXJ0IGVsamFyYXNvayBhbGFwamFuIGtlc3p1bHQuIEEgaGl0ZWxlc2l0ZXMgZm9seWFtYXRhdCBhIE5ldExvY2sgS2Z0LiB0ZXJtZWtmZWxlbG9zc2VnLWJpen
Rvc2l0YXNhIHZlZGkuIEEgZGlnaXRhbGlzIGFsYWlyYXMgZWxmb2dhZGFzYW5hayBmZWx0ZXRlbGUgYXogZWxvaXJ0IGVsbGVub3J6ZXNpIGVsamFyYXMgbWVndGV0ZWxlLiBBeiBlbGphcmFzIGxlaXJhc2EgbWVndGFsYWxoYXRvIGEgTmV0TG9jayBLZnQuIEludGVybmV0IGhvbmxhcGphbiBhIGh0dHBzOi8vd3d3Lm5ldGxvY2submV0L2RvY3MgY2ltZW4gdmFneSBrZXJoZXRvIGF6IGVsbGVub3J6ZXNAbmV0bG9jay5uZXQgZS1tYWlsIGNpbWVuLiBJTVBPUlRBTlQhIFRoZSBpc3N1YW5jZSBhbmQgdGhlIHVzZSBvZiB0aGlzIGNlcnRpZmljYXRlIGlzIHN1YmplY3QgdG8gdGhlIE5ldExvY2sgQ1BTIGF2YWlsYWJsZSBhdCBodHRwczovL3d3dy5uZXRsb2NrLm5ldC9kb2NzIG9yIGJ5IGUtbWFpbCBhdCBjcHNAbmV0bG9jay5uZXQuMA0GCSqGSIb3DQEBBAUAA4IBAQBIJEb3ulZv+sgoA0BO5TE5ayZrU3/b39/zcT0mwBQOxmd7I6gMc90Bu8bKbjc5VdXHjFYgDigKDtIqpLBJUsY4B/6+CgmM0ZjPytoUMaFP0jn8DxEsQ8Pdq5PHVT5HfBgaANzze9jyf1JsIPQLX2lS9O74silg6+NJMSEN1rUQQeJBCWziGppWS3cC9qCbmieH6FUpccKQn0V4GuEVZD3QDtigdp+uxdAu6tYPVuxkf1qbFFgBJ34TUMdrKuZoPL9coAob4Q566eKAw+np9v1sEZ7Q5SgnK1QyQhSCdeZK8CtmdWOMovsEPoMOmzbwGOQmIMOM8CgHrTwXZoi1/baI-----END CERTIFICATE-----# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com# Label: "XRamp Global CA Root"# Serial: 107108908803651509692980124233745014957# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2-----BEGIN 
CERTIFICATE-----MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcxNDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS638eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCPKZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7QDxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRaJSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNViPvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASsjVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQADggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfARvbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxtqZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLaIR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSyi6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQO+7ETPTsJ3xCwnR8gooJybQDJbw=-----END CERTIFICATE-----# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority# Subject: O=The Go Daddy Group, Inc. 
OU=Go Daddy Class 2 Certification Authority# Label: "Go Daddy Class 2 CA"# Serial: 0# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4-----BEGIN CERTIFICATE-----MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRoZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3MgMiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggENADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCAPVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6wwdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXiEqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMYavx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLEsNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNyOO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7PTMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQHmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mERdEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5CufReYNnyicsbkqWletNw+vHX/bvZ8=-----END CERTIFICATE-----# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority# Subject: O=Starfield Technologies, Inc. 
OU=Starfield Class 2 Certification Authority# Label: "Starfield Class 2 CA"# Serial: 0# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58-----BEGIN CERTIFICATE-----MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzElMCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQwNjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN+lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aaK4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0GA1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fRzt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56Deruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJlxy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynpVSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEYWQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q=-----END CERTIFICATE-----# Issuer: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing# Subject: CN=StartCom Certification Authority O=StartCom Ltd. 
OU=Secure Digital Certificate Signing# Label: "StartCom Certification Authority"# Serial: 1# MD5 Fingerprint: 22:4d:8f:8a:fc:f7:35:c2:bb:57:34:90:7b:8b:22:16# SHA1 Fingerprint: 3e:2b:f7:f2:03:1b:96:f3:8c:e6:c4:d8:a8:5d:3e:2d:58:47:6a:0f# SHA256 Fingerprint: c7:66:a9:be:f2:d4:07:1c:86:3a:31:aa:49:20:e8:13:b2:d1:98:60:8c:b7:b7:cf:e2:11:43:b8:36:df:09:ea-----BEGIN CERTIFICATE-----MIIHyTCCBbGgAwIBAgIBATANBgkqhkiG9w0BAQUFADB9MQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDYwOTE3MTk0NjM2WhcNMzYwOTE3MTk0NjM2WjB9MQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDBiNsJvGxGfHiflXu1M5DycmLWwTYgIiRezul38kMKogZkpMyONvg45iPwbm2xPN1yo4UcodM9tDMr0y+v/uqwQVlntsQGfQqedIXWeUyAN3rfOQVSWff0G0ZDpNKFhdLDcfN1YjS6LIp/Ho/u7TTQEceWzVI9ujPW3U3eCztKS5/CJi/6tRYccjV3yjxd5srhJosaNnZcAdt0FCX+7bWgiA/deMotHweXMAEtcnn6RtYTKqi5pquDSR3l8u/d5AGOGAqPY1MWhWKpDhk6zLVmpsJrdAfkK+F2PrRt2PZE4XNiHzvEvqBTViVsUQn3qqvKv3b9bZvzndu/PWa8DFaqr5hIlTpL36dYUNk4dalb6kMMAv+Z6+hsTXBbKWWc3apdzK8BMewM69KN6Oqce+Zu9ydmDBpI125C4z/eIT574Q1w+2OqqGwaVLRcJXrJosmLFqa7LH4XXgVNWG4SHQHuEhANxjJ/GP/89PrNbpHoNkm+Gkhpi8KWTRoSsmkXwQqQ1vp5Iki/untp+HDH+no32NgN0nZPV/+Qt+OR0t3vwmC3Zzrd/qqc8NSLf3Iizsafl7b4r4qgEKjZ+xjGtrVcUjyJthkqcwEKDwOzEmDyei+B26Nu/yYwl/WL3YlXtq09s68rxbd2AvCl1iuahhQqcvbjM4xdCUsT37uMdBNSSwIDAQABo4ICUjCCAk4wDAYDVR0TBAUwAwEB/zALBgNVHQ8EBAMCAa4wHQYDVR0OBBYEFE4L7xqkQFulF2mHMMo0aEPQQa7yMGQGA1UdHwRdMFswLKAqoCiGJmh0dHA6Ly9jZXJ0LnN0YXJ0Y29tLm9yZy9zZnNjYS1jcmwuY3JsMCugKaAnhiVodHRwOi8vY3JsLnN0YXJ0Y29tLm9yZy9zZnNjYS1jcmwuY3JsMIIBXQYDVR0gBIIBVDCCAVAwggFMBgsrBgEEAYG1NwEBATCCATswLwYIKwYBBQUHAgEWI2h0dHA6Ly9jZXJ0LnN0YXJ0Y29tLm9yZy9wb2xpY3kucGRmMDUGCCsGAQUFBwIBFilodHRwOi8vY2VydC5zdGFydGNvbS5vcmcvaW50ZXJtZWRpYXRlLnBkZjCB0AYIKwYBBQUHAgIwgcMwJxYgU3RhcnQgQ29tbWVyY2lhbCAo
U3RhcnRDb20pIEx0ZC4wAwIBARqBl0xpbWl0ZWQgTGlhYmlsaXR5LCByZWFkIHRoZSBzZWN0aW9uICpMZWdhbCBMaW1pdGF0aW9ucyogb2YgdGhlIFN0YXJ0Q29tIENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFBvbGljeSBhdmFpbGFibGUgYXQgaHR0cDovL2NlcnQuc3RhcnRjb20ub3JnL3BvbGljeS5wZGYwEQYJYIZIAYb4QgEBBAQDAgAHMDgGCWCGSAGG+EIBDQQrFilTdGFydENvbSBGcmVlIFNTTCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTANBgkqhkiG9w0BAQUFAAOCAgEAFmyZ9GYMNPXQhV59CuzaEE44HF7fpiUFS5Eyweg78T3dRAlbB0mKKctmArexmvclmAk8jhvh3TaHK0u7aNM5Zj2gJsfyOZEdUauCe37Vzlrk4gNXcGmXCPleWKYK34wGmkUWFjgKXlf2Ysd6AgXmvB618p70qSmD+LIU424oh0TDkBreOKk8rENNZEXO3SipXPJzewT4F+irsfMuXGRuczE6Eri8sxHkfY+BUZo7jYn0TZNmezwD7dOaHZrzZVD1oNB1ny+v8OqCQ5j4aZyJecRDjkZy42Q2Eq/3JR44iZB3fsNrarnDy0RLrHiQi+fHLB5LEUTINFInzQpdn4XBidUaePKVEFMy3YCEZnXZtWgo+2EuvoSoOMCZEoalHmdkrQYuL6lwhceWD3yJZfWOQ1QOq92lgDmUYMA0yZZwLKMS9R9Ie70cfmu3nZD0Ijuu+PwqyvqCUqDvr0tVk+vBtfAii6w0TiYiBKGHLHVKt+V9E9e4DGTANtLJL4YSjCMJwRuCO3NJo2pXh5Tl1njFmUNj403gdy3hZZlyaQQaRwnmDwFWJPsfvw55qVguucQJAX6Vum0ABj6y6koQOdjQK/W/7HW/lwLFCRsI3FU34oH7N4RDYiDK51ZLZer+bMEkkyShNOsF/5oirpt9P/FlUQqmMGqz9IgcgA38corog14=-----END CERTIFICATE-----# Issuer: O=Government Root Certification Authority# Subject: O=Government Root Certification Authority# Label: "Taiwan GRCA"# Serial: 42023070807708724159991140556527066870# MD5 Fingerprint: 37:85:44:53:32:45:1f:20:f0:f3:95:e1:25:c4:43:4e# SHA1 Fingerprint: f4:8b:11:bf:de:ab:be:94:54:20:71:e6:41:de:6b:be:88:2b:40:b9# SHA256 Fingerprint: 76:00:29:5e:ef:e8:5b:9e:1f:d6:24:db:76:06:2a:aa:ae:59:81:8a:54:d2:77:4c:d4:c0:b2:c0:11:31:e1:b3-----BEGIN 
CERTIFICATE-----MIIFcjCCA1qgAwIBAgIQH51ZWtcvwgZEpYAIaeNe9jANBgkqhkiG9w0BAQUFADA/MQswCQYDVQQGEwJUVzEwMC4GA1UECgwnR292ZXJubWVudCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTAyMTIwNTEzMjMzM1oXDTMyMTIwNTEzMjMzM1owPzELMAkGA1UEBhMCVFcxMDAuBgNVBAoMJ0dvdmVybm1lbnQgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAJoluOzMonWoe/fOW1mKydGGEghU7Jzy50b2iPN86aXfTEc2pBsBHH8eV4qNw8XRIePaJD9IK/ufLqGU5ywck9G/GwGHU5nOp/UKIXZ3/6m3xnOUT0b3EEk3+qhZSV1qgQdW8or5BtD3cCJNtLdBuTK4sfCxw5w/cP1T3YGq2GN49thTbqGsaoQkclSGxtKyyhwOeYHWtXBiCAEuTk8O1RGvqa/lmr/czIdtJuTJV6L7lvnM4T9TjGxMfptTCAtsF/tnyMKtsc2AtJfcdgEWFelq16TheEfOhtX7MfP6Mb40qij7cEwdScevLJ1tZqa2jWR+tSBqnTuBto9AAGdLiYa4zGX+FVPpBMHWXx1E1wovJ5pGfaENda1UhhXcSTvxls4Pm6Dso3pdvtUqdULle96ltqqvKKyskKw4t9VoNSZ63Pc78/1Fm9G7Q3hub/FCVGqY8A2tl+lSXunVanLeavcbYBT0peS2cWeqH+riTcFCQP5nRhc4L0c/cZyu5SHKYS1tB6iEfC3uUSXxY5Ce/eFXiGvviiNtsea9P63RPZYLhY3Naye7twWb7LuRqQoHEgKXTiCQ8P8NHuJBO9NAOueNXdpm5AKwB1KYXA6OM5zCppX7VRluTI6uSw+9wThNXo+EHWbNxWCWtFJaBYmOlXqYwZE8lSOyDvR5tMl8wUohAgMBAAGjajBoMB0GA1UdDgQWBBTMzO/MKWCkO7GStjz6MmKPrCUVOzAMBgNVHRMEBTADAQH/MDkGBGcqBwAEMTAvMC0CAQAwCQYFKw4DAhoFADAHBgVnKgMAAAQUA5vwIhP/lSg209yewDL7MTqKUWUwDQYJKoZIhvcNAQEFBQADggIBAECASvomyc5eMN1PhnR2WPWus4MzeKR6dBcZTulStbngCnRiqmjKeKBMmo4sIy7VahIkv9Ro04rQ2JyftB8M3jh+Vzj8jeJPXgyfqzvS/3WXy6TjZwj/5cAWtUgBfen5Cv8b5Wppv3ghqMKnI6mGq3ZW6A4M9hPdKmaKZEk9GhiHkASfQlK3T8v+R0F2Ne//AHY2RTKbxkaFXeIksB7jSJaYV0eUVXoPQbFEJPPB/hprv4j9wabak2BegUqZIJxIZhm1AHlUD7gsL0u8qV1bYH+Mh6XgUmMqvtg7hUAV/h62ZT/FS9p+tXo1KaMuephgIqP0fSdOLeq0dDzpD6QzDxARvBMB1uUO07+1EqLhRSPAzAhuYbeJq4PjJB7mXQfnHyA+z2fI56wwbSdLaG5LKlwCCDTb+HbkZ6MmnD+iMsJKxYEYMRBWqoTvLQr/uB930r+lWKBi5NdLkXWNiYCYfm3LU05er/ayl4WXudpVBrkk7tfGOB5jGxI7leFYrPLfhNVfmS8NVVvmONsuP3LpSIXLuykTjx44VbnzssQwmSNOXfJIoRIM3BKQCZBUkQM8R+XVyWXgt0t97EfTsws+rZ7QdAAO671RrcDeLMDDav7v3Aun+kbfYNucpllQdSNpc5Oy+fwC00fmcc4QAu4njIT/rEUNE1yDMuAlpYYsfPQS-----END CERTIFICATE-----# Issuer: CN=Swisscom Root CA 1 O=Swisscom OU=Digital Certificate Services# Subject: 
CN=Swisscom Root CA 1 O=Swisscom OU=Digital Certificate Services# Label: "Swisscom Root CA 1"# Serial: 122348795730808398873664200247279986742# MD5 Fingerprint: f8:38:7c:77:88:df:2c:16:68:2e:c2:e2:52:4b:b8:f9# SHA1 Fingerprint: 5f:3a:fc:0a:8b:64:f6:86:67:34:74:df:7e:a9:a2:fe:f9:fa:7a:51# SHA256 Fingerprint: 21:db:20:12:36:60:bb:2e:d4:18:20:5d:a1:1e:e7:a8:5a:65:e2:bc:6e:55:b5:af:7e:78:99:c8:a2:66:d9:2e-----BEGIN CERTIFICATE-----MIIF2TCCA8GgAwIBAgIQXAuFXAvnWUHfV8w/f52oNjANBgkqhkiG9w0BAQUFADBkMQswCQYDVQQGEwJjaDERMA8GA1UEChMIU3dpc3Njb20xJTAjBgNVBAsTHERpZ2l0YWwgQ2VydGlmaWNhdGUgU2VydmljZXMxGzAZBgNVBAMTElN3aXNzY29tIFJvb3QgQ0EgMTAeFw0wNTA4MTgxMjA2MjBaFw0yNTA4MTgyMjA2MjBaMGQxCzAJBgNVBAYTAmNoMREwDwYDVQQKEwhTd2lzc2NvbTElMCMGA1UECxMcRGlnaXRhbCBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczEbMBkGA1UEAxMSU3dpc3Njb20gUm9vdCBDQSAxMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA0LmwqAzZuz8h+BvVM5OAFmUgdbI9m2BtRsiMMW8Xw/qabFbtPMWRV8PNq5ZJkCoZSx6jbVfd8StiKHVFXqrWW/oLJdihFvkcxC7mlSpnzNApbjyFNDhhSbEAn9Y6cV9Nbc5fuankiX9qUvrKm/LcqfmdmUc/TilftKaNXXsLmREDA/7n29uj/x2lzZAeAR81sH8A25Bvxn570e56eqeqDFdvpG3FEzuwpdntMhy0XmeLVNxzh+XTF3xmUHJd1BpYwdnP2IkCb6dJtDZd0KTeByy2dbcokdaXvij1mB7qWybJvbCXc9qukSbraMH5ORXWZ0sKbU/Lz7DkQnGMU3nn7uHbHaBuHYwadzVcFh4rUx80i9Fs/PJnB3r1re3WmquhsUvhzDdf/X/NTa64H5xD+SpYVUNFvJbNcA78yeNmuk6NO4HLFWR7uZToXTNShXEuT46iBhFRyePLoW4xCGQMwtI89Tbo19AOeCMgkckkKmUpWyL3Ic6DXqTz3kvTaI9GdVyDCW4pa8RwjPWd1yAv/0bSKzjCL3UcPX7ape8eYIVpQtPM+GP+HkM5haa2Y0EQs3MevNP6yn0WR+Kn1dCjigoIlmJWbjTb2QK5MHXjBNLnj8KwEUAKrNVxAmKLMb7dxiNYMUJDLXT5xp6mig/p/r+D5kNXJLrvRjSq1xIBOO0CAwEAAaOBhjCBgzAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0hBBYwFDASBgdghXQBUwABBgdghXQBUwABMBIGA1UdEwEB/wQIMAYBAf8CAQcwHwYDVR0jBBgwFoAUAyUv3m+CATpcLNwroWm1Z9SM0/0wHQYDVR0OBBYEFAMlL95vggE6XCzcK6FptWfUjNP9MA0GCSqGSIb3DQEBBQUAA4ICAQA1EMvspgQNDQ/NwNurqPKIlwzfky9NfEBWMXrrpA9gzXrzvsMnjgM+pN0S734edAY8PzHyHHuRMSG08NBsl9Tpl7IkVh5WwzW9iAUPWxAaZOHHgjD5Mq2eUCzneAXQMbFamIp1TpBcahQq4FJHgmDmHtqBsfsUC1rxn9KVuj7QG9YVHaO+htXbD8BJZLsuUBlL0iT43R4HVtA4oJVwIHaM190e3p9xxCPvgxNcoyQVTSlAPGrEqdi3pkSlD
fTgnXceQHAm/NrZNuR55LU/vJtlvrsRls/bxig5OgjOR1tTWsWZ/l2p3e9M1MalrQLmjAcSHm8D0W+go/MpvRLHUKKwf4ipmXeascClOS5cfGniLLDqN2qk4Vrh9VDlg++luyqI54zb/W1elxmofmZ1a3Hqv7HHb6D0jqTsNFFbjCYDcKF31QESVwA12yPeDooomf2xEG9L/zgtYE4snOtnta1J7ksfrK/7DZBaZmBwXarNeNQk7shBoJMBkpxqnvy5JMWzFYJ+vq6VK+uxwNrjAWALXmmshFZhvnEX/h0TD/7Gh0Xp/jKgGg0TpJRVcaUWi7rKibCyx/yP2FS1k2Kdzs9Z+z0YzirLNRWCXf9UIltxUvu3yf5gmwBBZPCqKuy2QkPOiWaByIufOVQDJdMWNY6E0F/6MBr1mmz0DlP5OlvRHA==-----END CERTIFICATE-----# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com# Label: "DigiCert Assured ID Root CA"# Serial: 17154717934120587862167794914071425081# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c-----BEGIN CERTIFICATE-----MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBlMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7cJpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYPmDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYunpyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRCdWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxn
mrEthngYTffwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cmNW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPxH2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe+o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g==-----END CERTIFICATE-----# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com# Label: "DigiCert Global Root CA"# Serial: 10944719598952040374951832963794454346# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61-----BEGIN CERTIFICATE-----MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBhMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBDQTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVTMRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsBCSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7PT19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbRTLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUwDQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/EsrhMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJFPnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0lsYSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzD
AGySj4dzp30d8tbQkCAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4=-----END CERTIFICATE-----# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com# Label: "DigiCert High Assurance EV Root CA"# Serial: 3553400076410547919724730734378100087# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf-----BEGIN CERTIFICATE-----MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBsMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5jZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDELMAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2UgRVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm+9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTWPNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEMxChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFBIk5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsgEsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaAFLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3NecnzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6zeM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jFhS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCevEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep+OkuE6N36B9K-----END CERTIFICATE-----# Issuer: CN=Class 2 Primary CA O=Certplus# 
Subject: CN=Class 2 Primary CA O=Certplus# Label: "Certplus Class 2 Primary CA"# Serial: 177770208045934040241468760488327595043# MD5 Fingerprint: 88:2c:8c:52:b8:a2:3c:f3:f7:bb:03:ea:ae:ac:42:0b# SHA1 Fingerprint: 74:20:74:41:72:9c:dd:92:ec:79:31:d8:23:10:8d:c2:81:92:e2:bb# SHA256 Fingerprint: 0f:99:3c:8a:ef:97:ba:af:56:87:14:0e:d5:9a:d1:82:1b:b4:af:ac:f0:aa:9a:58:b5:d5:7a:33:8a:3a:fb:cb-----BEGIN CERTIFICATE-----MIIDkjCCAnqgAwIBAgIRAIW9S/PY2uNp9pTXX8OlRCMwDQYJKoZIhvcNAQEFBQAwPTELMAkGA1UEBhMCRlIxETAPBgNVBAoTCENlcnRwbHVzMRswGQYDVQQDExJDbGFzcyAyIFByaW1hcnkgQ0EwHhcNOTkwNzA3MTcwNTAwWhcNMTkwNzA2MjM1OTU5WjA9MQswCQYDVQQGEwJGUjERMA8GA1UEChMIQ2VydHBsdXMxGzAZBgNVBAMTEkNsYXNzIDIgUHJpbWFyeSBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANxQltAS+DXSCHh6tlJw/W/uz7kRy1134ezpfgSN1sxvc0NXYKwzCkTsA18cgCSR5aiRVhKC9+Ar9NuuYS6JEI1rbLqzAr3VNsVINyPi8Fo3UjMXEuLRYE2+L0ER4/YXJQyLkcAbmXuZVg2v7tK8R1fjeUl7NIknJITesezpWE7+Tt9avkGtrAjFGA7v0lPubNCdEgETjdyAYveVqUSISnFOYFWe2yMZeVYHDD9jC1yw4r5+FfyUM1hBOHTE4Y+L3yasH7WLO7dDWWuwJKZtkIvEcupdM5i3y95ee++U8Rs+yskhwcWYAqqi9lt3m/V+llU0HGdpwPFC40es/CgcZlUCAwEAAaOBjDCBiTAPBgNVHRMECDAGAQH/AgEKMAsGA1UdDwQEAwIBBjAdBgNVHQ4EFgQU43Mt38sOKAze3bOkynm4jrvoMIkwEQYJYIZIAYb4QgEBBAQDAgEGMDcGA1UdHwQwMC4wLKAqoCiGJmh0dHA6Ly93d3cuY2VydHBsdXMuY29tL0NSTC9jbGFzczIuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQCnVM+IRBnL39R/AN9WM2K191EBkOvDP9GIROkkXe/nFL0gt5o8AP5tn9uQ3Nf0YtaLcF3n5QRIqWh8yfFC82x/xXp8HVGIutIKPidd3i1RTtMTZGnkLuPT55sJmabglZvOGtd/vjzOUrMRFcEPF80Du5wlFbqidon8BvEY0JNLDnyCt6X09l/+7UCmnYR0ObncHoUW2ikbhiMAybuJfm6AiB4vFLQDJKgybwOaRywwvlbGp0ICcBvqQNi6BQNwB6SW//1IMwrh3KWBkJtN3X3n57LNXMhqlfil9o3EXXgIvnsG1knPGTZQIy4I5p4FTUcY1Rbpsda2ENW7l7+ijrRU-----END CERTIFICATE-----# Issuer: CN=DST Root CA X3 O=Digital Signature Trust Co.# Subject: CN=DST Root CA X3 O=Digital Signature Trust Co.# Label: "DST Root CA X3"# Serial: 91299735575339953335919266965803778155# MD5 Fingerprint: 41:03:52:dc:0f:f7:50:1b:16:f0:02:8e:ba:6f:45:c5# SHA1 Fingerprint: 
da:c9:02:4f:54:d8:f6:df:94:93:5f:b1:73:26:38:ca:6a:d7:7c:13# SHA256 Fingerprint: 06:87:26:03:31:a7:24:03:d9:09:f1:05:e6:9b:cf:0d:32:e1:bd:24:93:ff:c6:d9:20:6d:11:bc:d6:77:07:39-----BEGIN CERTIFICATE-----MIIDSjCCAjKgAwIBAgIQRK+wgNajJ7qJMDmGLvhAazANBgkqhkiG9w0BAQUFADA/MSQwIgYDVQQKExtEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdCBDby4xFzAVBgNVBAMTDkRTVCBSb290IENBIFgzMB4XDTAwMDkzMDIxMTIxOVoXDTIxMDkzMDE0MDExNVowPzEkMCIGA1UEChMbRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3QgQ28uMRcwFQYDVQQDEw5EU1QgUm9vdCBDQSBYMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAN+v6ZdQCINXtMxiZfaQguzH0yxrMMpb7NnDfcdAwRgUi+DoM3ZJKuM/IUmTrE4Orz5Iy2Xu/NMhD2XSKtkyj4zl93ewEnu1lcCJo6m67XMuegwGMoOifooUMM0RoOEqOLl5CjH9UL2AZd+3UWODyOKIYepLYYHsUmu5ouJLGiifSKOeDNoJjj4XLh7dIN9bxiqKqy69cK3FCxolkHRyxXtqqzTWMIn/5WgTe1QLyNau7Fqckh49ZLOMxt+/yUFw7BZy1SbsOFU5Q9D8/RhcQPGX69Wam40dutolucbY38EVAjqr2m7xPi71XAicPNaDaeQQmxkqtilX4+U9m5/wAl0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFMSnsaR7LHH62+FLkHX/xBVghYkQMA0GCSqGSIb3DQEBBQUAA4IBAQCjGiybFwBcqR7uKGY3Or+Dxz9LwwmglSBd49lZRNI+DT69ikugdB/OEIKcdBodfpga3csTS7MgROSR6cz8faXbauX+5v3gTt23ADq1cEmv8uXrAvHRAosZy5Q6XkjEGB5YGV8eAlrwDPGxrancWYaLbumR9YbK+rlmM6pZW87ipxZzR8srzJmwN0jP41ZL9c8PDHIyh8bwRLtTcm1D9SZImlJnt1ir/md2cXjbDaJWFBM5JDGFoqgCWjBH4d1QB7wCCZAA62RjYJsWvIjJEubSfZGL+T0yjWW06XyxV3bqxbYoOb8VZRzI9neWagqNdwvYkQsEjgfbKbYK7p2CNTUQ-----END CERTIFICATE-----# Issuer: CN=DST ACES CA X6 O=Digital Signature Trust OU=DST ACES# Subject: CN=DST ACES CA X6 O=Digital Signature Trust OU=DST ACES# Label: "DST ACES CA X6"# Serial: 17771143917277623872238992636097467865# MD5 Fingerprint: 21:d8:4c:82:2b:99:09:33:a2:eb:14:24:8d:8e:5f:e8# SHA1 Fingerprint: 40:54:da:6f:1c:3f:40:74:ac:ed:0f:ec:cd:db:79:d1:53:fb:90:1d# SHA256 Fingerprint: 76:7c:95:5a:76:41:2c:89:af:68:8e:90:a1:c7:0f:55:6c:fd:6b:60:25:db:ea:10:41:6d:7e:b6:83:1f:8c:40-----BEGIN 
CERTIFICATE-----MIIECTCCAvGgAwIBAgIQDV6ZCtadt3js2AdWO4YV2TANBgkqhkiG9w0BAQUFADBbMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3QxETAPBgNVBAsTCERTVCBBQ0VTMRcwFQYDVQQDEw5EU1QgQUNFUyBDQSBYNjAeFw0wMzExMjAyMTE5NThaFw0xNzExMjAyMTE5NThaMFsxCzAJBgNVBAYTAlVTMSAwHgYDVQQKExdEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdDERMA8GA1UECxMIRFNUIEFDRVMxFzAVBgNVBAMTDkRTVCBBQ0VTIENBIFg2MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuT31LMmU3HWKlV1j6IR3dma5WZFcRt2SPp/5DgO0PWGSvSMmtWPuktKe1jzIDZBfZIGxqAgNTNj50wUoUrQBJcWVHAx+PhCEdc/BGZFjz+iokYi5Q1K7gLFViYsx+tC3dr5BPTCapCIlF3PoHuLTrCq9Wzgh1SpL11V94zpVvddtawJXa+ZHfAjIgrrep4c9oW24MFbCswKBXy314powGCi4ZtPLAZZv6opFVdbgnf9nKxcCpk4aahELfrd755jWjHZvwTvbUJN+5dCOHze4vbrGn2zpfDPyMjwmR/onJALJfh1biEITajV8fTXpLmaRcpPVMibEdPVTo7NdmvYJywIDAQABo4HIMIHFMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgHGMB8GA1UdEQQYMBaBFHBraS1vcHNAdHJ1c3Rkc3QuY29tMGIGA1UdIARbMFkwVwYKYIZIAWUDAgEBATBJMEcGCCsGAQUFBwIBFjtodHRwOi8vd3d3LnRydXN0ZHN0LmNvbS9jZXJ0aWZpY2F0ZXMvcG9saWN5L0FDRVMtaW5kZXguaHRtbDAdBgNVHQ4EFgQUCXIGThhDD+XWzMNqizF7eI+og7gwDQYJKoZIhvcNAQEFBQADggEBAKPYjtay284F5zLNAdMEA+V25FYrnJmQ6AgwbN99Pe7lv7UkQIRJ4dEorsTCOlMwiPH1d25Ryvr/ma8kXxug/fKshMrfqfBfBC6tFr8hlxCBPeP/h40y3JTlR4peahPJlJU90u7INJXQgNStMgiAVDzgvVJT11J8smk/f3rPanTK+gQqnExaBqXpIK1FZg9p8d2/6eMyi/rgwYZNcjwu2JN4Cir42NInPRmJX1p7ijvMDNpRrscL9yuwNwXsvFcj4jjSm2jzVhKIT0J8uDHEtdvkyCE06UgRNe76x5JXxZ805Mf29w4LTJxoeHtxMcfrHuBnQfO3oKfN5XozNmr6mis=-----END CERTIFICATE-----# Issuer: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş. (c) Kasım 2005# Subject: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş. 
(c) Kasım 2005# Label: "TURKTRUST Certificate Services Provider Root 2"# Serial: 1# MD5 Fingerprint: 37:a5:6e:d4:b1:25:84:97:b7:fd:56:15:7a:f9:a2:00# SHA1 Fingerprint: b4:35:d4:e1:11:9d:1c:66:90:a7:49:eb:b3:94:bd:63:7b:a7:82:b7# SHA256 Fingerprint: c4:70:cf:54:7e:23:02:b9:77:fb:29:dd:71:a8:9a:7b:6c:1f:60:77:7b:03:29:f5:60:17:f3:28:bf:4f:6b:e6-----BEGIN CERTIFICATE-----MIIEPDCCAySgAwIBAgIBATANBgkqhkiG9w0BAQUFADCBvjE/MD0GA1UEAww2VMOcUktUUlVTVCBFbGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sxc8SxMQswCQYDVQQGEwJUUjEPMA0GA1UEBwwGQW5rYXJhMV0wWwYDVQQKDFRUw5xSS1RSVVNUIEJpbGdpIMSwbGV0acWfaW0gdmUgQmlsacWfaW0gR8O8dmVubGnEn2kgSGl6bWV0bGVyaSBBLsWeLiAoYykgS2FzxLFtIDIwMDUwHhcNMDUxMTA3MTAwNzU3WhcNMTUwOTE2MTAwNzU3WjCBvjE/MD0GA1UEAww2VMOcUktUUlVTVCBFbGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sxc8SxMQswCQYDVQQGEwJUUjEPMA0GA1UEBwwGQW5rYXJhMV0wWwYDVQQKDFRUw5xSS1RSVVNUIEJpbGdpIMSwbGV0acWfaW0gdmUgQmlsacWfaW0gR8O8dmVubGnEn2kgSGl6bWV0bGVyaSBBLsWeLiAoYykgS2FzxLFtIDIwMDUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCpNn7DkUNMwxmYCMjHWHtPFoylzkkBH3MOrHUTpvqeLCDe2JAOCtFp0if7qnefJ1Il4std2NiDUBd9irWCPwSOtNXwSadktx4uXyCcUHVPr+G1QRT0mJKIx+XlZEdhR3n9wFHxwZnn3M5q+6+1ATDcRhzviuyV79z/rxAc653YsKpqhRgNF8k+v/Gb0AmJQv2gQrSdiVFVKc8bcLyEVK3BEx+Y9C52YItdP5qtygy/p1Zbj3e41Z55SZI/4PGXJHpsmxcPbe9TmJEr5A++WXkHeLuXlfSfadRYhwqp48y2WBmfJiGxxFmNskF1wK1pzpwACPI2/z7woQ8arBT9pmAPAgMBAAGjQzBBMB0GA1UdDgQWBBTZN7NOBf3Zz58SFq62iS/rJTqIHDAPBgNVHQ8BAf8EBQMDBwYAMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEBAHJglrfJ3NgpXiOFX7KzLXb7iNcX/nttRbj2hWyfIvwqECLsqrkw9qtY1jkQMZkpAL2JZkH7dN6RwRgLn7Vhy506vvWolKMiVW4XSf/SKfE4Jl3vpao6+XF75tpYHdN0wgH6PmlYX63LaL4ULptswLbcoCb6dxriJNoaN+BnrdFzgw2lGh1uEpJ+hGIAF728JRhX8tepb1mIvDS3LoV4nZbcFMMsilKbloxSZj2GFotHuFEJjOp9zYhys2AzsfAKRO8P9Qk3iCQOLGsgOqL6EfJANZxEaGM7rDNvY7wsu/LSy3Z9fYjYHcgFHW68lKlmjHdxx/qR+i9Rnuk5UrbnBEI=-----END CERTIFICATE-----# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG# Label: "SwissSign Gold CA - G2"# Serial: 
13492815561806991280# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95-----BEGIN CERTIFICATE-----MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNVBAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2lnbiBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBFMQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZTd2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/876LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqEemA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJdMmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdtMDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02yMszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69yFGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPiaG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxMgI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe645R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczOUYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCCbwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yvGPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCChdiDyyJkvC24JdVUorgG6q2SpCS
gwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid392qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEppLd6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+wZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+httQc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ-----END CERTIFICATE-----# Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG# Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG# Label: "SwissSign Silver CA - G2"# Serial: 5700383053117599563# MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13# SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb# SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5-----BEGIN CERTIFICATE-----MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0NlowRzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0MvFz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7brYT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieFnbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZteJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJMoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRHHTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTfjNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb65i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOBrDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUF6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0cwpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0cDovL3JlcG9zaXRvcnkuc3dpc3NzaWdu
LmNvbS8wDQYJKoZIhvcNAQEFBQADggIBAHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShpWJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZIseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2Xem1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQRdAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLytGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u-----END CERTIFICATE-----# Issuer: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc.# Subject: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc.# Label: "GeoTrust Primary Certification Authority"# Serial: 32798226551256963324313806436981982369# MD5 Fingerprint: 02:26:c3:01:5e:08:30:37:43:a9:d0:7d:cf:37:e6:bf# SHA1 Fingerprint: 32:3c:11:8e:1b:f7:b8:b6:52:54:e2:e2:10:0d:d6:02:90:37:f0:96# SHA256 Fingerprint: 37:d5:10:06:c5:12:ea:ab:62:64:21:f1:ec:8c:92:01:3f:c5:f8:2a:e9:8e:e5:33:eb:46:19:b8:de:b4:d0:6c-----BEGIN 
CERTIFICATE-----MIIDfDCCAmSgAwIBAgIQGKy1av1pthU6Y2yv2vrEoTANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjExMC8GA1UEAxMoR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjExMjcwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMFgxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTEwLwYDVQQDEyhHZW9UcnVzdCBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvrgVe//UfH1nrYNke8hCUy3f9oQIIGHWAVlqnEQRr+92/ZV+zmEwu3qDXwK9AWbK7hWNb6EwnL2hhZ6UOvNWiAAxz9juapYC2e0DjPt1befquFUWBRaa9OBesYjAZIVcFU2Ix7e64HXprQU9nceJSOC7KMgD4TCTZF5SwFlwIjVXiIrxlQqD17wxcwE07e9GceBrAqg1cmuXm2bgyxx5X9gaBGgeRwLmnWDiNpcB3841kt++Z8dtd1k7j53WkBWUvEI0EME5+bEnPn7WinXFsq+W06Lem+SYvn3h6YGttm/81w7a4DSwDRp35+MImO9Y+pyEtzavwt+s0vQQBnBxNQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQULNVQQZcVi/CPNmFbSvtr2ZnJM5IwDQYJKoZIhvcNAQEFBQADggEBAFpwfyzdtzRP9YZRqSa+S7iq8XEN3GHHoOo0Hnp3DwQ16CePbJC/kRYkRj5KTs4rFtULUh38H2eiAkUxT87z+gOneZ1TatnaYzr4gNfTmeGl4b7UVXGYNTq+k+qurUKykG/g/CFNNWMziUnWm07Kx+dOCQD32sfvmWKZd7aVIl6KoKv0uHiYyjgZmclynnjNS6yvGaBzEi38wkG6gZHaFloxt/m0cYASSJlyc1pZU8FjUjPtp8nSOQJw+uCxQmYpqptR7TBUIhRf2asdweSU8Pj1K/fqynhG1riR/aYNKxoUAT6A8EKglQdebc3MS6RFjasS6LPeWuWgfOgPIh1a6Vk=-----END CERTIFICATE-----# Issuer: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only# Subject: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. 
- For authorized use only# Label: "thawte Primary Root CA"# Serial: 69529181992039203566298953787712940909# MD5 Fingerprint: 8c:ca:dc:0b:22:ce:f5:be:72:ac:41:1a:11:a8:d8:12# SHA1 Fingerprint: 91:c6:d6:ee:3e:8a:c8:63:84:e5:48:c2:99:29:5c:75:6c:81:7b:81# SHA256 Fingerprint: 8d:72:2f:81:a9:c1:13:c0:79:1d:f1:36:a2:96:6d:b2:6c:95:0a:97:1d:b4:6b:41:99:f4:ea:54:b7:8b:fb:9f-----BEGIN CERTIFICATE-----MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsoPD7gFnUnMekz52hWXMJEEUMDSxuaPFsW0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ1CRfBsDMRJSUjQJib+ta3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGcq/gcfomk6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6Sk/KaAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94JNqR32HuHUETVPm4pafs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XPr87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUFAAOCAQEAeRHAS7ORtvzw6WfUDW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeEuzLlQRHAd9mzYJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAXxPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2/qxAeeWsEG89jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/LHbTY5xZ3Y+m4Q6gLkH3LpVHz7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7jVaMaA==-----END CERTIFICATE-----# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. 
- For authorized use only# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only# Label: "VeriSign Class 3 Public Primary Certification Authority - G5"# Serial: 33037644167568058970164719475676101450# MD5 Fingerprint: cb:17:e4:31:67:3e:e2:09:fe:45:57:93:f3:0a:fa:1c# SHA1 Fingerprint: 4e:b6:d5:78:49:9b:1c:cf:5f:58:1e:ad:56:be:3d:9b:67:44:a5:e5# SHA256 Fingerprint: 9a:cf:ab:7e:43:c8:d8:80:d0:6b:26:2a:94:de:ee:e4:b4:65:99:89:c3:d0:ca:f1:9b:af:64:05:e4:1a:b7:df-----BEGIN CERTIFICATE-----MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCByjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCByjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvJAgIKXo1nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKzj/i5Vbext0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIzSdhDY2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQGBO+QueQA5N06tRn/Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+rCpSx4/VBEnkjWNHiDxpg8v+R70rfk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEwHzAHBgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVyaXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKvMzEzMA0GCSqGSIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzEp6B4Eq1iDkVwZMXnl2YtmAl+X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+
bW48DW7Y5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKEKQsTb47bDN0lAtukixlE0kF6BWlKWE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiCKm0oHw0LxOXnGiYZ4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vEZV8NhnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq-----END CERTIFICATE-----# Issuer: CN=SecureTrust CA O=SecureTrust Corporation# Subject: CN=SecureTrust CA O=SecureTrust Corporation# Label: "SecureTrust CA"# Serial: 17199774589125277788362757014266862032# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73-----BEGIN CERTIFICATE-----MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBIMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24xFzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIzMTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENvcnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEzZum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIaowW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjATBgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCegJYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt36Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkmD5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPSCPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7
SN5ShLHZ4swrhovO0C7jE=-----END CERTIFICATE-----# Issuer: CN=Secure Global CA O=SecureTrust Corporation# Subject: CN=Secure Global CA O=SecureTrust Corporation# Label: "Secure Global CA"# Serial: 9751836167731051554232119481456978597# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de# SHA1 Fingerprint: 3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69-----BEGIN CERTIFICATE-----MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkxMjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJiQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJjnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnIHmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0wgZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCswKaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsGAQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0LURYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXOH0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9MmI50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbYiNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xcf8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW-----END CERTIFICATE-----# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited# Subject: CN=COMODO Certification Authority O=COMODO CA Limited# Label: "COMODO Certification Authority"# 
Serial: 104350513648249232941998508985834464573# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66-----BEGIN CERTIFICATE-----MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCBgTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNVBAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAwMDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01PRE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp+2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5OnKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6gPKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9uQXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIYSdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAvIC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5ddBA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IBZQ==-----END CERTIFICATE-----# Issuer: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C.# Subject: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C.# Label: "Network Solutions Certificate Authority"# Serial: 
116697915152937497490437556386812487904# MD5 Fingerprint: d3:f3:a6:16:c0:fa:6b:1d:59:b1:2d:96:4d:0e:11:2e# SHA1 Fingerprint: 74:f8:a3:c3:ef:e7:b3:90:06:4b:83:90:3c:21:64:60:20:e5:df:ce# SHA256 Fingerprint: 15:f0:ba:00:a3:ac:7a:f3:ac:88:4c:07:2b:10:11:a0:77:bd:77:c0:97:f4:01:64:b2:f8:59:8a:bd:83:86:0c-----BEGIN CERTIFICATE-----MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBiMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMxMjM1OTU5WjBiMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwzc7MEL7xxjOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPPOCwGJgl6cvf6UDL4wpPTaaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rlmGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXTcrA/vGp97Eh/jcOrqnErU2lBUzS1sLnFBgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc/Qzpf14Dl847ABSHJ3A4qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMBAAGjgZcwgZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwubmV0c29sc3NsLmNvbS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3JpdHkuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc86fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q4LqILPxFzBiwmZVRDuwduIj/h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/GGUsyfJj4akH/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3HtvwKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHNpGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey-----END CERTIFICATE-----# Issuer: CN=WellsSecure Public Root Certificate Authority O=Wells Fargo WellsSecure OU=Wells Fargo Bank NA# Subject: CN=WellsSecure Public Root Certificate Authority O=Wells Fargo WellsSecure OU=Wells Fargo Bank NA# Label: "WellsSecure Public Root Certificate Authority"# Serial: 1# MD5 Fingerprint: 
15:ac:a5:c2:92:2d:79:bc:e8:7f:cb:67:ed:02:cf:36# SHA1 Fingerprint: e7:b4:f6:9d:61:ec:90:69:db:7e:90:a7:40:1a:3c:f4:7d:4f:e8:ee# SHA256 Fingerprint: a7:12:72:ae:aa:a3:cf:e8:72:7f:7f:b3:9f:0f:b3:d1:e5:42:6e:90:60:b0:6e:e6:f1:3e:9a:3c:58:33:cd:43-----BEGIN CERTIFICATE-----MIIEvTCCA6WgAwIBAgIBATANBgkqhkiG9w0BAQUFADCBhTELMAkGA1UEBhMCVVMxIDAeBgNVBAoMF1dlbGxzIEZhcmdvIFdlbGxzU2VjdXJlMRwwGgYDVQQLDBNXZWxscyBGYXJnbyBCYW5rIE5BMTYwNAYDVQQDDC1XZWxsc1NlY3VyZSBQdWJsaWMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwHhcNMDcxMjEzMTcwNzU0WhcNMjIxMjE0MDAwNzU0WjCBhTELMAkGA1UEBhMCVVMxIDAeBgNVBAoMF1dlbGxzIEZhcmdvIFdlbGxzU2VjdXJlMRwwGgYDVQQLDBNXZWxscyBGYXJnbyBCYW5rIE5BMTYwNAYDVQQDDC1XZWxsc1NlY3VyZSBQdWJsaWMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDub7S9eeKPCCGeOARBJe+rWxxTkqxtnt3CxC5FlAM1iGd0V+PfjLindo8796jE2yljDpFoNoqXjopxaAkH5OjUDk/41itMpBb570OYj7OeUt9tkTmPOL13i0Nj67eT/DBMHAGTthP796EfvyXhdDcsHqRePGj4S78NuR4uNuip5Kf4D8uCdXw1LSLWwr8L87T8bJVhHlfXBIEyg1J55oNjz7fLY4sR4r1e6/aN7ZVyKLSsEmLpSjPmgzKuBXWVvYSV2ypcm44uDLiBK0HmOFafSZtsdvqKXfcBeYF8wYNABf5x/Qw/zE5gCQ5lRxAvAcAFP4/4s0HvWkJ+We/SlwxlAgMBAAGjggE0MIIBMDAPBgNVHRMBAf8EBTADAQH/MDkGA1UdHwQyMDAwLqAsoCqGKGh0dHA6Ly9jcmwucGtpLndlbGxzZmFyZ28uY29tL3dzcHJjYS5jcmwwDgYDVR0PAQH/BAQDAgHGMB0GA1UdDgQWBBQmlRkQ2eihl5H/3BnZtQQ+0nMKajCBsgYDVR0jBIGqMIGngBQmlRkQ2eihl5H/3BnZtQQ+0nMKaqGBi6SBiDCBhTELMAkGA1UEBhMCVVMxIDAeBgNVBAoMF1dlbGxzIEZhcmdvIFdlbGxzU2VjdXJlMRwwGgYDVQQLDBNXZWxscyBGYXJnbyBCYW5rIE5BMTYwNAYDVQQDDC1XZWxsc1NlY3VyZSBQdWJsaWMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHmCAQEwDQYJKoZIhvcNAQEFBQADggEBALkVsUSRzCPIK0134/iaeycNzXK7mQDKfGYZUMbVmO2rvwNa5U3lHshPcZeG1eMd/ZDJPHV3V3p9+N701NX3leZ0bh08rnyd2wIDBSxxSyU+B+NemvVmFymIGjifz6pBA4SXa5M4esowRBskRDPQ5NHcKDj0E0M1NSljqHyita04pO2t/caaH/+Xc/77szWnk4bGdpEA5qxRFsQnMlzbc9qlk1eOPm01JghZ1edE13YgY+esE2fDbbFwRnzVlhE9iW9dqKHrjQrawx0zbKPqZxmamX9LPYNRKh3KL4YMon4QLSvUFpULB6ouFJJJtylv2G0xffX8oRAHh84vWdw+WNs=-----END CERTIFICATE-----# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited# Subject: 
CN=COMODO ECC Certification Authority O=COMODO CA Limited# Label: "COMODO ECC Certification Authority"# Serial: 41578283867086692638256921589707938090# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7-----BEGIN CERTIFICATE-----MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSRFtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0JcfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQWBBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDmfQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdvGDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY=-----END CERTIFICATE-----# Issuer: CN=IGC/A O=PM/SGDN OU=DCSSI# Subject: CN=IGC/A O=PM/SGDN OU=DCSSI# Label: "IGC/A"# Serial: 245102874772# MD5 Fingerprint: 0c:7f:dd:6a:f4:2a:b9:c8:9b:bd:20:7e:a9:db:5c:37# SHA1 Fingerprint: 60:d6:89:74:b5:c2:65:9e:8a:0f:c1:88:7c:88:d2:46:69:1b:18:2c# SHA256 Fingerprint: b9:be:a7:86:0a:96:2e:a3:61:1d:ab:97:ab:6d:a3:e2:1c:10:68:b9:7d:55:57:5e:d0:e1:12:79:c1:1c:89:32-----BEGIN 
CERTIFICATE-----MIIEAjCCAuqgAwIBAgIFORFFEJQwDQYJKoZIhvcNAQEFBQAwgYUxCzAJBgNVBAYTAkZSMQ8wDQYDVQQIEwZGcmFuY2UxDjAMBgNVBAcTBVBhcmlzMRAwDgYDVQQKEwdQTS9TR0ROMQ4wDAYDVQQLEwVEQ1NTSTEOMAwGA1UEAxMFSUdDL0ExIzAhBgkqhkiG9w0BCQEWFGlnY2FAc2dkbi5wbS5nb3V2LmZyMB4XDTAyMTIxMzE0MjkyM1oXDTIwMTAxNzE0MjkyMlowgYUxCzAJBgNVBAYTAkZSMQ8wDQYDVQQIEwZGcmFuY2UxDjAMBgNVBAcTBVBhcmlzMRAwDgYDVQQKEwdQTS9TR0ROMQ4wDAYDVQQLEwVEQ1NTSTEOMAwGA1UEAxMFSUdDL0ExIzAhBgkqhkiG9w0BCQEWFGlnY2FAc2dkbi5wbS5nb3V2LmZyMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsh/R0GLFMzvABIaIs9z4iPf930Pfeo2aSVz2TqrMHLmh6yeJ8kbpO0px1R2OLc/mratjUMdUC24SyZA2xtgv2pGqaMVy/hcKshd+ebUyiHDKcMCWSo7kVc0dJ5S/znIq7Fz5cyD+vfcuiWe4u0dzEvfRNWk68gq5rv9GQkaiv6GFGvm/5P9JhfejcIYyHF2fYPepraX/z9E0+X1bF8bc1g4oa8Ld8fUzaJ1O/Id8NhLWo4DoQw1VYZTqZDdH6nfK0LJYBcNdfrGoRpAxVs5wKpayMLh35nnAvSk7/ZR3TL0gzUEl4C7HG7vupARB0l2tEmqKm0f7yd1GQOGdPDPQtQIDAQABo3cwdTAPBgNVHRMBAf8EBTADAQH/MAsGA1UdDwQEAwIBRjAVBgNVHSAEDjAMMAoGCCqBegF5AQEBMB0GA1UdDgQWBBSjBS8YYFDCiQrdKyFP/45OqDAxNjAfBgNVHSMEGDAWgBSjBS8YYFDCiQrdKyFP/45OqDAxNjANBgkqhkiG9w0BAQUFAAOCAQEABdwm2Pp3FURo/C9mOnTgXeQp/wYHE4RKq89toB9RlPhJy3Q2FLwV3duJL92PoF189RLrn544pEfMs5bZvpwlqwN+Mw+VgQ39FuCIvjfwbF3QMZsyK10XZZOYYLxuj7GoPB7ZHPOpJkL5ZB3C55L29B5aqhlSXa/oovdgoPaN8In1buAKBQGVyYsgCrpa/JosPL3Dt8ldeCUFP1YUmwza+zpI/pdpXsoQhvdOlgQITeywvl3cO45Pwf2aNjSaTFR+FwNIlQgRHAdvhQh+XU3Endv7rs6y0bO4g2wdsrN58dhwmX7wEwLOXt1R0982gaEbeC9xs/FZTEYYKKuF0mBWWg==-----END CERTIFICATE-----# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication EV RootCA1# Subject: O=SECOM Trust Systems CO.,LTD. 
OU=Security Communication EV RootCA1# Label: "Security Communication EV RootCA1"# Serial: 0# MD5 Fingerprint: 22:2d:a6:01:ea:7c:0a:f7:f0:6c:56:43:3f:77:76:d3# SHA1 Fingerprint: fe:b8:c4:32:dc:f9:76:9a:ce:ae:3d:d8:90:8f:fd:28:86:65:64:7d# SHA256 Fingerprint: a2:2d:ba:68:1e:97:37:6e:2d:39:7d:72:8a:ae:3a:9b:62:96:b9:fd:ba:60:bc:2e:11:f6:47:f2:c6:75:fb:37-----BEGIN CERTIFICATE-----MIIDfTCCAmWgAwIBAgIBADANBgkqhkiG9w0BAQUFADBgMQswCQYDVQQGEwJKUDElMCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEqMCgGA1UECxMhU2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBFViBSb290Q0ExMB4XDTA3MDYwNjAyMTIzMloXDTM3MDYwNjAyMTIzMlowYDELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRydXN0IFN5c3RlbXMgQ08uLExURC4xKjAoBgNVBAsTIVNlY3VyaXR5IENvbW11bmljYXRpb24gRVYgUm9vdENBMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALx/7FebJOD+nLpCeamIivqA4PUHKUPqjgo0No0c+qe1OXj/l3X3L+SqawSERMqm4miO/VVQYg+kcQ7OBzgtQoVQrTyWb4vVog7P3kmJPdZkLjjlHmy1V4qe70gOzXppFodEtZDkBp2uoQSXWHnvIEqCa4wiv+wfD+mEce3xDuS4GBPMVjZd0ZoeUWs5bmB2iDQL87PRsJ3KYeJkHcFGB7hj3R4zZbOOCVVSPbW9/wfrrWFVGCypaZhKqkDFMxRldAD5kd6vA0jFQFTcD4SQaCDFkpbcLuUCRarAX1T4bepJz11sS6/vmsJWXMY1VkJqMF/Cq/biPT+zyRGPMUzXn0kCAwEAAaNCMEAwHQYDVR0OBBYEFDVK9U2vP9eCOKyrcWUXdYydVZPmMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBBQUAA4IBAQCoh+ns+EBnXcPBZsdAS5f8hxOQWsTvoMpfi7ent/HWtWS3irO4G8za+6xmiEHO6Pzk2x6Ipu0nUBsCMCRGef4Eh3CXQHPRwMFXGZpppSeZq51ihPZRwSzJIxXYKLerJRO1RuGGAv8mjMSIkh1W/hln8lXkgKNrnKt34VFxDSDbEJrbvXZ5B3eZKK2aXtqxT0QsNY6llsf9g/BYxnnWmHyojf6GPgcWkuF75x3sM3Z+Qi5KhfmRiWiEA4Glm5q+4zfFVKtWOxgtQaQM+ELbmaDgcm+7XeEWT1MKZPlO9L9OVL14bIjqv5wTJMJwaaJ/D8g8rQjJsJhAoyrniIPtd490-----END CERTIFICATE-----# Issuer: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed# Subject: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed# Label: "OISTE WISeKey Global Root GA CA"# Serial: 86718877871133159090080555911823548314# MD5 Fingerprint: bc:6c:51:33:a7:e9:d3:66:63:54:15:72:1b:21:92:93# SHA1 Fingerprint: 
59:22:a1:e1:5a:ea:16:35:21:f8:98:39:6a:46:46:b0:44:1b:0f:a9# SHA256 Fingerprint: 41:c9:23:86:6a:b4:ca:d6:b7:ad:57:80:81:58:2e:02:07:97:a6:cb:df:4f:ff:78:ce:83:96:b3:89:37:d7:f5-----BEGIN CERTIFICATE-----MIID8TCCAtmgAwIBAgIQQT1yx/RrH4FDffHSKFTfmjANBgkqhkiG9w0BAQUFADCBijELMAkGA1UEBhMCQ0gxEDAOBgNVBAoTB1dJU2VLZXkxGzAZBgNVBAsTEkNvcHlyaWdodCAoYykgMjAwNTEiMCAGA1UECxMZT0lTVEUgRm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwgUm9vdCBHQSBDQTAeFw0wNTEyMTExNjAzNDRaFw0zNzEyMTExNjA5NTFaMIGKMQswCQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEbMBkGA1UECxMSQ29weXJpZ2h0IChjKSAyMDA1MSIwIAYDVQQLExlPSVNURSBGb3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2JhbCBSb290IEdBIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAy0+zAJs9Nt350UlqaxBJH+zYK7LG+DKBKUOVTJoZIyEVRd7jyBxRVVuuk+g3/ytr6dTqvirdqFEr12bDYVxgAsj1znJ7O7jyTmUIms2kahnBAbtzptf2w93NvKSLtZlhuAGio9RN1AU9ka34tAhxZK9w8RxrfvbDd50kc3vkDIzh2TbhmYsFmQvtRTEJysIA2/dyoJaqlYfQjse2YXMNdmaM3Bu0Y6Kff5MTMPGhJ9vZ/yxViJGg4E8HsChWjBgbl0SOid3gF27nKu+POQoxhILYQBRJLnpB5Kf+42TMwVlxSywhp1t94B3RLoGbw9ho972WG6xwsRYUC9tguSYBBQIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUswN+rja8sHnR3JQmthG+IbJphpQwEAYJKwYBBAGCNxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBAEuh/wuHbrP5wUOxSPMowB0uyQlB+pQAHKSkq0lPjz0e701vvbyk9vImMMkQyh2I+3QZH4VFvbBsUfk2ftv1TDI6QU9bR8/oCy22xBmddMVHxjtqD6wU2zz0c5ypBd8A3HR4+vg1YFkCExh8vPtNsCBtQ7tgMHpnM1zFmdH4LTlSc/uMqpclXHLZCB6rTjzjgTGfA6b7wP4piFXahNVQA7bihKOmNqoROgHhGEvWRGizPflTdISzRpFGlgC3gCy24eMQ4tui5yiPAZZiFj4A4xylNoEYokxSdsARo27mHbrjWr42U8U+dY+GaSlYU7Wcu2+fXMUY7N0v4ZjJ/L7fCg0=-----END CERTIFICATE-----# Issuer: CN=Microsec e-Szigno Root CA O=Microsec Ltd. OU=e-Szigno CA# Subject: CN=Microsec e-Szigno Root CA O=Microsec Ltd. 
OU=e-Szigno CA# Label: "Microsec e-Szigno Root CA"# Serial: 272122594155480254301341951808045322001# MD5 Fingerprint: f0:96:b6:2f:c5:10:d5:67:8e:83:25:32:e8:5e:2e:e5# SHA1 Fingerprint: 23:88:c9:d3:71:cc:9e:96:3d:ff:7d:3c:a7:ce:fc:d6:25:ec:19:0d# SHA256 Fingerprint: 32:7a:3d:76:1a:ba:de:a0:34:eb:99:84:06:27:5c:b1:a4:77:6e:fd:ae:2f:df:6d:01:68:ea:1c:4f:55:67:d0-----BEGIN CERTIFICATE-----MIIHqDCCBpCgAwIBAgIRAMy4579OKRr9otxmpRwsDxEwDQYJKoZIhvcNAQEFBQAwcjELMAkGA1UEBhMCSFUxETAPBgNVBAcTCEJ1ZGFwZXN0MRYwFAYDVQQKEw1NaWNyb3NlYyBMdGQuMRQwEgYDVQQLEwtlLVN6aWdubyBDQTEiMCAGA1UEAxMZTWljcm9zZWMgZS1Temlnbm8gUm9vdCBDQTAeFw0wNTA0MDYxMjI4NDRaFw0xNzA0MDYxMjI4NDRaMHIxCzAJBgNVBAYTAkhVMREwDwYDVQQHEwhCdWRhcGVzdDEWMBQGA1UEChMNTWljcm9zZWMgTHRkLjEUMBIGA1UECxMLZS1Temlnbm8gQ0ExIjAgBgNVBAMTGU1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDtyADVgXvNOABHzNuEwSFpLHSQDCHZU4ftPkNEU6+r+ICbPHiN1I2uuO/TEdyB5s87lozWbxXGd36hL+BfkrYn13aaHUM86tnsL+4582pnS4uCzyL4ZVX+LMsvfUh6PXX5qqAnu3jCBspRwn5mS6/NoqdNAoI/gqyFxuEPkEeZlApxcpMqyabAvjxWTHOSJ/FrtfX9/DAFYJLG65Z+AZHCabEeHXtTRbjcQR/Ji3HWVBTji1R4P770Yjtb9aPs1ZJ04nQw7wHb4dSrmZsqa/i9phyGI0Jf7Enemotb9HI6QMVJPqW+jqpx62z69Rrkav17fVVA71hu5tnVvCSrwe+3AgMBAAGjggQ3MIIEMzBnBggrBgEFBQcBAQRbMFkwKAYIKwYBBQUHMAGGHGh0dHBzOi8vcmNhLmUtc3ppZ25vLmh1L29jc3AwLQYIKwYBBQUHMAKGIWh0dHA6Ly93d3cuZS1zemlnbm8uaHUvUm9vdENBLmNydDAPBgNVHRMBAf8EBTADAQH/MIIBcwYDVR0gBIIBajCCAWYwggFiBgwrBgEEAYGoGAIBAQEwggFQMCgGCCsGAQUFBwIBFhxodHRwOi8vd3d3LmUtc3ppZ25vLmh1L1NaU1ovMIIBIgYIKwYBBQUHAgIwggEUHoIBEABBACAAdABhAG4A+gBzAO0AdAB2AOEAbgB5ACAA6QByAHQAZQBsAG0AZQB6AOkAcwDpAGgAZQB6ACAA6QBzACAAZQBsAGYAbwBnAGEAZADhAHMA4QBoAG8AegAgAGEAIABTAHoAbwBsAGcA4QBsAHQAYQB0APMAIABTAHoAbwBsAGcA4QBsAHQAYQB0AOEAcwBpACAAUwB6AGEAYgDhAGwAeQB6AGEAdABhACAAcwB6AGUAcgBpAG4AdAAgAGsAZQBsAGwAIABlAGwAagDhAHIAbgBpADoAIABoAHQAdABwADoALwAvAHcAdwB3AC4AZQAtAHMAegBpAGcAbgBvAC4AaAB1AC8AUwBaAFMAWgAvMIHIBgNVHR8EgcAwgb0wgbqggbeggbSGIWh0dHA6Ly93d3cuZS1zemlnbm8uaHUvUm9vdENBLmNybIaBjmxkYXA6Ly9sZGFwLmUtc3ppZ25vLmh1L0NOPU1pY3Jvc2VjJTIw
ZS1Temlnbm8lMjBSb290JTIwQ0EsT1U9ZS1Temlnbm8lMjBDQSxPPU1pY3Jvc2VjJTIwTHRkLixMPUJ1ZGFwZXN0LEM9SFU/Y2VydGlmaWNhdGVSZXZvY2F0aW9uTGlzdDtiaW5hcnkwDgYDVR0PAQH/BAQDAgEGMIGWBgNVHREEgY4wgYuBEGluZm9AZS1zemlnbm8uaHWkdzB1MSMwIQYDVQQDDBpNaWNyb3NlYyBlLVN6aWduw7MgUm9vdCBDQTEWMBQGA1UECwwNZS1TemlnbsOzIEhTWjEWMBQGA1UEChMNTWljcm9zZWMgS2Z0LjERMA8GA1UEBxMIQnVkYXBlc3QxCzAJBgNVBAYTAkhVMIGsBgNVHSMEgaQwgaGAFMegSXUWYYTbMUuE0vE3QJDvTtz3oXakdDByMQswCQYDVQQGEwJIVTERMA8GA1UEBxMIQnVkYXBlc3QxFjAUBgNVBAoTDU1pY3Jvc2VjIEx0ZC4xFDASBgNVBAsTC2UtU3ppZ25vIENBMSIwIAYDVQQDExlNaWNyb3NlYyBlLVN6aWdubyBSb290IENBghEAzLjnv04pGv2i3GalHCwPETAdBgNVHQ4EFgQUx6BJdRZhhNsxS4TS8TdAkO9O3PcwDQYJKoZIhvcNAQEFBQADggEBANMTnGZjWS7KXHAM/IO8VbH0jgdsZifOwTsgqRy7RlRw7lrMoHfqaEQn6/Ip3Xep1fvj1KcExJW4C+FEaGAHQzAxQmHl7tnlJNUb3+FKG6qfx1/4ehHqE5MAyopYse7tDk2016g2JnzgOsHVV4Lxdbb9iV/a86g4nzUGCM4ilb7N1fy+W955a9x6qWVmvrElWl/tftOsRm1M9DKHtCAE4Gx4sHfRhUZLphK3dehKyVZs15KrnfVJONJPU+NVkBHbmJbGSfI+9J8b4PeI3CVimUTYc78/MPMMNz7UwiiAc7EBt51alhQBS6kRnSlqLtBdgcDPsiBDxwPgN05dCtxZICU=-----END CERTIFICATE-----# Issuer: CN=Certigna O=Dhimyotis# Subject: CN=Certigna O=Dhimyotis# Label: "Certigna"# Serial: 18364802974209362175# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d-----BEGIN 
CERTIFICATE-----MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNVBAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4XDTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxnygQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbwzBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEwZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzjAQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8hbV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFncfca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuuHWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6wt0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/QwWyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg==-----END CERTIFICATE-----# Issuer: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center# Subject: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center# Label: "Deutsche Telekom Root CA 2"# Serial: 38# MD5 Fingerprint: 74:01:4a:91:b1:08:c4:58:ce:47:cd:f0:dd:11:53:08# SHA1 Fingerprint: 85:a4:08:c0:9c:19:3e:5d:51:58:7d:cd:d6:13:30:fd:8c:de:37:bf# SHA256 Fingerprint: b6:19:1a:50:d0:c3:97:7f:7d:a9:9b:cd:aa:c8:6a:22:7d:ae:b9:67:9e:c7:0b:a3:b0:c9:d9:22:71:c1:70:d3-----BEGIN 
CERTIFICATE-----MIIDnzCCAoegAwIBAgIBJjANBgkqhkiG9w0BAQUFADBxMQswCQYDVQQGEwJERTEcMBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxlU2VjIFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290IENBIDIwHhcNOTkwNzA5MTIxMTAwWhcNMTkwNzA5MjM1OTAwWjBxMQswCQYDVQQGEwJERTEcMBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxlU2VjIFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290IENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCrC6M14IspFLEUha88EOQ5bzVdSq7d6mGNlUn0b2SjGmBmpKlAIoTZ1KXleJMOaAGtuU1cOs7TuKhCQN/Po7qCWWqSG6wcmtoIKyUn+WkjR/Hg6yx6m/UTAtB+NHzCnjwAWav12gz1MjwrrFDa1sPeg5TKqAyZMg4ISFZbavva4VhYAUlfckE8FQYBjl2tqriTtM2e66foai1SNNs671x1Udrb8zH57nGYMsRUFUQM+ZtV7a3fGAigo4aKSe5TBY8ZTNXeWHmb0mocQqvF1afPaA+W5OFhmHZhyJF81j4A4pFQh+GdCuatl9Idxjp9y7zaAzTVjlsB9WoHtxa2bkp/AgMBAAGjQjBAMB0GA1UdDgQWBBQxw3kbuvVT1xfgiXotF2wKsyudMzAPBgNVHRMECDAGAQH/AgEFMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOCAQEAlGRZrTlk5ynrE/5aw4sTV8gEJPB0d8Bg42f76Ymmg7+Wgnxu1MM9756AbrsptJh6sTtU6zkXR34ajgv8HzFZMQSyzhfzLMdiNlXiItiJVbSYSKpk+tYcNthEeFpaIzpXl/V6ME+un2pMSyuOoAPjPuCp1NJ70rOo4nI8rZ7/gFnkm0W09juwzTkZmDLl6iFhkOQxIY40sfcvNUqFENrnijchvllj4PKFiDFT1FQUhXB59C4Gdyd1Lx+4ivn+xbrYNuSD7Odlt79jWvNGr4GUN9RBjNYj1h7P9WgbRGOiWrqnNVmh5XAFmw4jV5mUCm26OWMohpLzGITY+9HPBVZkVw==-----END CERTIFICATE-----# Issuer: CN=Cybertrust Global Root O=Cybertrust, Inc# Subject: CN=Cybertrust Global Root O=Cybertrust, Inc# Label: "Cybertrust Global Root"# Serial: 4835703278459682877484360# MD5 Fingerprint: 72:e4:4a:87:e3:69:40:80:77:ea:bc:e3:f4:ff:f0:e1# SHA1 Fingerprint: 5f:43:e5:b1:bf:f8:78:8c:ac:1c:c7:ca:4a:9a:c6:22:2b:cc:34:c6# SHA256 Fingerprint: 96:0a:df:00:63:e9:63:56:75:0c:29:65:dd:0a:08:67:da:0b:9c:bd:6e:77:71:4a:ea:fb:23:49:ab:39:3d:a3-----BEGIN 
CERTIFICATE-----MIIDoTCCAomgAwIBAgILBAAAAAABD4WqLUgwDQYJKoZIhvcNAQEFBQAwOzEYMBYGA1UEChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2JhbCBSb290MB4XDTA2MTIxNTA4MDAwMFoXDTIxMTIxNTA4MDAwMFowOzEYMBYGA1UEChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2JhbCBSb290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA+Mi8vRRQZhP/8NN57CPytxrHjoXxEnOmGaoQ25yiZXRadz5RfVb23CO21O1fWLE3TdVJDm71aofW0ozSJ8bi/zafmGWgE07GKmSb1ZASzxQG9Dvj1Ci+6A74q05IlG2OlTEQXO2iLb3VOm2yHLtgwEZLAfVJrn5GitB0jaEMAs7u/OePuGtm839EAL9mJRQr3RAwHQeWP032a7iPt3sMpTjr3kfb1V05/Iin89cqdPHoWqI7n1C6poxFNcJQZZXcY4Lv3b93TZxiyWNzFtApD0mpSPCzqrdsxacwOUBdrsTiXSZT8M4cIwhhqJQZugRiQOwfOHB3EgZxpzAYXSUnpQIDAQABo4GlMIGiMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBS2CHsNesysIEyGVjJez6tuhS1wVzA/BgNVHR8EODA2MDSgMqAwhi5odHRwOi8vd3d3Mi5wdWJsaWMtdHJ1c3QuY29tL2NybC9jdC9jdHJvb3QuY3JsMB8GA1UdIwQYMBaAFLYIew16zKwgTIZWMl7Pq26FLXBXMA0GCSqGSIb3DQEBBQUAA4IBAQBW7wojoFROlZfJ+InaRcHUowAl9B8Tq7ejhVhpwjCt2BWKLePJzYFa+HMjWqd8BfP9IjsO0QbE2zZMcwSO5bAi5MXzLqXZI+O4Tkogp24CJJ8iYGd7ix1yCcUxXOl5n4BHPa2hCwcUPUf/A2kaDAtE52Mlp3+yybh2hO0j9n0Hq0V+09+zv+mKts2oomcrUtW3ZfA5TGOgkXmTUg9U3YO7n9GPp1Nzw8v/MOx8BLjYRB+TX3EJIrduPuocA06dGiBh+4E37F78CkWr1+cXVdCg6mCbpvbjjFspwgZgFJ0tl0ypkxWdYcQBX0jWWL1WMRJOEcgh4LMRkWXbtKaIOM5V-----END CERTIFICATE-----# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority# Subject: O=Chunghwa Telecom Co., Ltd. 
OU=ePKI Root Certification Authority# Label: "ePKI Root Certification Authority"# Serial: 28956088682735189655030529057352760477# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5-----BEGIN CERTIFICATE-----MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBeMQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMwIQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBLSSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAHSyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAhijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3XDZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJfzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffAsgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uUWH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLSnT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pHdmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJipNiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDECAwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQFMAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLHClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGBuvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6YlPwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkPJXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZC
c7b3jajWvY9+rGNm65ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUBo2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2zGp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTEW9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+DhNQ+IIX3Sj0rnP0qCglN6oH4EZw=-----END CERTIFICATE-----# Issuer: CN=TÜBİTAK UEKAE Kök Sertifika Hizmet Sağlayıcısı - Sürüm 3 O=Türkiye Bilimsel ve Teknolojik Araştırma Kurumu - TÜBİTAK OU=Ulusal Elektronik ve Kriptoloji Araştırma Enstitüsü - UEKAE/Kamu Sertifikasyon Merkezi# Subject: CN=TÜBİTAK UEKAE Kök Sertifika Hizmet Sağlayıcısı - Sürüm 3 O=Türkiye Bilimsel ve Teknolojik Araştırma Kurumu - TÜBİTAK OU=Ulusal Elektronik ve Kriptoloji Araştırma Enstitüsü - UEKAE/Kamu Sertifikasyon Merkezi# Label: "T\xc3\x9c\x42\xC4\xB0TAK UEKAE K\xC3\xB6k Sertifika Hizmet Sa\xC4\x9Flay\xc4\xb1\x63\xc4\xb1s\xc4\xb1 - S\xC3\xBCr\xC3\xBCm 3"# Serial: 17# MD5 Fingerprint: ed:41:f5:8c:50:c5:2b:9c:73:e6:ee:6c:eb:c2:a8:26# SHA1 Fingerprint: 1b:4b:39:61:26:27:6b:64:91:a2:68:6d:d7:02:43:21:2d:1f:1d:96# SHA256 Fingerprint: e4:c7:34:30:d7:a5:b5:09:25:df:43:37:0a:0d:21:6e:9a:79:b9:d6:db:83:73:a0:c6:9e:b1:cc:31:c7:c5:2a-----BEGIN 
CERTIFICATE-----MIIFFzCCA/+gAwIBAgIBETANBgkqhkiG9w0BAQUFADCCASsxCzAJBgNVBAYTAlRSMRgwFgYDVQQHDA9HZWJ6ZSAtIEtvY2FlbGkxRzBFBgNVBAoMPlTDvHJraXllIEJpbGltc2VsIHZlIFRla25vbG9qaWsgQXJhxZ90xLFybWEgS3VydW11IC0gVMOcQsSwVEFLMUgwRgYDVQQLDD9VbHVzYWwgRWxla3Ryb25payB2ZSBLcmlwdG9sb2ppIEFyYcWfdMSxcm1hIEVuc3RpdMO8c8O8IC0gVUVLQUUxIzAhBgNVBAsMGkthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppMUowSAYDVQQDDEFUw5xCxLBUQUsgVUVLQUUgS8O2ayBTZXJ0aWZpa2EgSGl6bWV0IFNhxJ9sYXnEsWPEsXPEsSAtIFPDvHLDvG0gMzAeFw0wNzA4MjQxMTM3MDdaFw0xNzA4MjExMTM3MDdaMIIBKzELMAkGA1UEBhMCVFIxGDAWBgNVBAcMD0dlYnplIC0gS29jYWVsaTFHMEUGA1UECgw+VMO8cmtpeWUgQmlsaW1zZWwgdmUgVGVrbm9sb2ppayBBcmHFn3TEsXJtYSBLdXJ1bXUgLSBUw5xCxLBUQUsxSDBGBgNVBAsMP1VsdXNhbCBFbGVrdHJvbmlrIHZlIEtyaXB0b2xvamkgQXJhxZ90xLFybWEgRW5zdGl0w7xzw7wgLSBVRUtBRTEjMCEGA1UECwwaS2FtdSBTZXJ0aWZpa2FzeW9uIE1lcmtlemkxSjBIBgNVBAMMQVTDnELEsFRBSyBVRUtBRSBLw7ZrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sxc8SxIC0gU8O8csO8bSAzMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAim1L/xCIOsP2fpTo6iBkcK4hgb46ezzb8R1Sf1n68yJMlaCQvEhOEav7t7WNeoMojCZG2E6VQIdhn8WebYGHV2yKO7Rm6sxA/OOqbLLLAdsyv9Lrhc+hDVXDWzhXcLh1xnnRFDDtG1hba+818qEhTsXOfJlfbLm4IpNQp81McGq+agV/E5wrHur+R84EpW+sky58K5+eeROR6Oqeyjh1jmKwlZMq5d/pXpduIF9fhHpEORlAHLpVK/swsoHvhOPc7Jg4OQOFCKlUAwUp8MmPi+oLhmUZEdPpCSPeaJMDyTYcIW7OjGbxmTDY17PDHfiBLqi9ggtm/oLL4eAagsNAgQIDAQABo0IwQDAdBgNVHQ4EFgQUvYiHyY/2pAoLquvF/pEjnatKijIwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEBAB18+kmPNOm3JpIWmgV050vQbTlswyb2zrgxvMTfvCr4N5EY3ATIZJkrGG2AA1nJrvhY0D7twyOfaTyGOBye79oneNGEN3GKPEs5z35FBtYt2IpNeBLWrcLTy9LQQfMmNkqblWwM7uXRQydmwYj3erMgbOqwaSvHIOgMA8RBBZniP+Rr+KCGgceExh/VS4ESshYhLBOhgLJeDEoTniDYYkCrkOpkSi+sDQESeUWoL4cZaMjihccwsnX5OD+ywJO0a+IDRM5noN+J1q2MdqMTw5RhK2vZbMEHCiIHhWyFJEapvj+LeISCfiQMnf2BN+MlqO02TpUsyZyQ2uypQjyttgI=-----END CERTIFICATE-----# Issuer: CN=Buypass Class 2 CA 1 O=Buypass AS-983163327# Subject: CN=Buypass Class 2 CA 1 O=Buypass AS-983163327# Label: "Buypass Class 2 CA 1"# Serial: 1# MD5 Fingerprint: 
b8:08:9a:f0:03:cc:1b:0d:c8:6c:0b:76:a1:75:64:23# SHA1 Fingerprint: a0:a1:ab:90:c9:fc:84:7b:3b:12:61:e8:97:7d:5f:d3:22:61:d3:cc# SHA256 Fingerprint: 0f:4e:9c:dd:26:4b:02:55:50:d1:70:80:63:40:21:4f:e9:44:34:c9:b0:2f:69:7e:c7:10:fc:5f:ea:fb:5e:38-----BEGIN CERTIFICATE-----MIIDUzCCAjugAwIBAgIBATANBgkqhkiG9w0BAQUFADBLMQswCQYDVQQGEwJOTzEdMBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxHTAbBgNVBAMMFEJ1eXBhc3MgQ2xhc3MgMiBDQSAxMB4XDTA2MTAxMzEwMjUwOVoXDTE2MTAxMzEwMjUwOVowSzELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MR0wGwYDVQQDDBRCdXlwYXNzIENsYXNzIDIgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAIs8B0XY9t/mx8q6jUPFR42wWsE425KEHK8T1A9vNkYgxC7McXA0ojTTNy7Y3Tp3L8DrKehc0rWpkTSHIln+zNvnma+WwajHQN2lFYxuyHyXA8vmIPLXl18xoS830r7uvqmtqEyeIWZDO6i88wmjONVZJMHCR3axiFyCO7srpgTXjAePzdVBHfCuuCkslFJgNJQ72uA40Z0zPhX0kzLFANq1KWYOOngPIVJfAuWSeyXTkh4vFZ2B5J2O6O+JzhRMVB0cgRJNcKi+EAUXfh/RuFdV7c27UsKwHnjCTTZoy1YmwVLBvXb3WNVyfh9EdrsAiR0WnVE1703CVu9r4Iw7DekCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUP42aWYv8e3uco684sDntkHGA1sgwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBBQUAA4IBAQAVGn4TirnoB6NLJzKyQJHyIdFkhb5jatLPgcIV1Xp+DCmsNx4cfHZSldq1fyOhKXdlyTKdqC5Wq2B2zha0jX94wNWZUYN/Xtm+DKhQ7SLHrQVMdvvt7h5HZPb3J31cKA9FxVxiXqaakZG3Uxcu3K1gnZZkOb1naLKuBctN518fV4bVIJwo+28TOPX2EZL2fZleHwzoq0QkKXJAPTZSr4xYkHPB7GEseaHsh7U/2k3ZIQAw3pDaDtMaSKk+hQsUi4y8QZ5q9w5wwDX3OaJdZtB7WZ+oRxKaJyOkLY4ng5IgodcVf/EuGO70SH8vf/GhGLWhC5SgYiAynB321O+/TIho-----END CERTIFICATE-----# Issuer: CN=EBG Elektronik Sertifika Hizmet Sağlayıcısı O=EBG Bilişim Teknolojileri ve Hizmetleri A.Ş.# Subject: CN=EBG Elektronik Sertifika Hizmet Sağlayıcısı O=EBG Bilişim Teknolojileri ve Hizmetleri A.Ş.# Label: "EBG Elektronik Sertifika Hizmet Sa\xC4\x9Flay\xc4\xb1\x63\xc4\xb1s\xc4\xb1"# Serial: 5525761995591021570# MD5 Fingerprint: 2c:20:26:9d:cb:1a:4a:00:85:b5:b7:5a:ae:c2:01:37# SHA1 Fingerprint: 8c:96:ba:eb:dd:2b:07:07:48:ee:30:32:66:a0:f3:98:6e:7c:ae:58# SHA256 Fingerprint: 
35:ae:5b:dd:d8:f7:ae:63:5c:ff:ba:56:82:a8:f0:0b:95:f4:84:62:c7:10:8e:e9:a0:e5:29:2b:07:4a:af:b2-----BEGIN CERTIFICATE-----MIIF5zCCA8+gAwIBAgIITK9zQhyOdAIwDQYJKoZIhvcNAQEFBQAwgYAxODA2BgNVBAMML0VCRyBFbGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sxc8SxMTcwNQYDVQQKDC5FQkcgQmlsacWfaW0gVGVrbm9sb2ppbGVyaSB2ZSBIaXptZXRsZXJpIEEuxZ4uMQswCQYDVQQGEwJUUjAeFw0wNjA4MTcwMDIxMDlaFw0xNjA4MTQwMDMxMDlaMIGAMTgwNgYDVQQDDC9FQkcgRWxla3Ryb25payBTZXJ0aWZpa2EgSGl6bWV0IFNhxJ9sYXnEsWPEsXPEsTE3MDUGA1UECgwuRUJHIEJpbGnFn2ltIFRla25vbG9qaWxlcmkgdmUgSGl6bWV0bGVyaSBBLsWeLjELMAkGA1UEBhMCVFIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDuoIRh0DpqZhAy2DE4f6en5f2h4fuXd7hxlugTlkaDT7byX3JWbhNgpQGR4lvFzVcfd2NR/y8927k/qqk153nQ9dAktiHq6yOU/im/+4mRDGSaBUorzAzu8T2bgmmkTPiab+ci2hC6X5L8GCcKqKpE+i4stPtGmggDg3KriORqcsnlZR9uKg+ds+g75AxuetpX/dfreYteIAbTdgtsApWjluTLdlHRKJ2hGvxEok3MenaoDT2/F08iiFD9rrbskFBKW5+VQarKD7JK/oCZTqNGFav4c0JqwmZ2sQomFd2TkuzbqV9UIlKRcF0T6kjsbgNs2d1s/OsNA/+mgxKb8amTD8UmTDGyY5lhcucqZJnSuOl14nypqZoaqsNW2xCaPINStnuWt6yHd6i58mcLlEOzrz5z+kI2sSXFCjEmN1ZnuqMLfdb3ic1nobc6HmZP9qBVFCVMLDMNpkGMvQQxahByCp0OLna9XvNRiYuoP1Vzv9s6xiQFlpJIqkuNKgPlV5EQ9GooFW5Hd4RcUXSfGenmHmMWOeMRFeNYGkS9y8RsZteEBt8w9DeiQyJ50hBs37vmExH8nYQKE3vwO9D8owrXieqWfo1IhR5kX9tUoqzVegJ5a9KK8GfaZXINFHDk6Y54jzJ0fFfy1tb0Nokb+Clsi7n2l9GkLqq+CxnCRelwXQIDAJ3Zo2MwYTAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQU587GT/wWZ5b6SqMHwQSny2re2kcwHwYDVR0jBBgwFoAU587GT/wWZ5b6SqMHwQSny2re2kcwDQYJKoZIhvcNAQEFBQADggIBAJuYml2+8ygjdsZs93/mQJ7ANtyVDR2tFcU22NU57/IeIl6zgrRdu0waypIN30ckHrMk2pGI6YNw3ZPX6bqz3xZaPt7gyPvT/Wwp+BVGoGgmzJNSroIBk5DKd8pNSe/iWtkqvTDOTLKBtjDOWU/aWR1qeqRFsIImgYZ29fUQALjuswnoT4cCB64kXPBfrAowzIpAoHMEwfuJJPaaHFy3PApnNgUIMbOv2AFoKuB4j3TeuFGkjGwgPaL7s9QJ/XvCgKqTbCmYIai7FvOpEl90tYeY8pUm3zTvilORiF0alKM/fCL414i6poyWqD1SNGKfAB5UVUJnxk1Gj7sURT0KlhaOEKGXmdXTMIXM3rRyt7yKPBgpaP3ccQfuJDlq+u2lrDgv+R4QDgZxGhBM/nV+/x5XOULK1+EVoVZVWRvRo68R2E7DpSvvkL/A7IITW43WciyTTo9qKd+FPNMN4KIYEsxVL0e3p5sC/kH2iExt2qkBR4NkJ2IQgtYSe14DHzSpyZH+r11thie3I6p1GMog57AP14kOpm
ciY/SDQSsGS7tY1dHXt7kQY9iJSrSq3RZj9W6+YKH47ejWkE8axsWgKdOnIaj1Wjz3x0miIZpKlVIglnKaZsv30oZDfCK+lvm9AahH3eU7QPl1K5srRmSGjR70j/sHd9DqSaIcjVIUpgqT-----END CERTIFICATE-----# Issuer: O=certSIGN OU=certSIGN ROOT CA# Subject: O=certSIGN OU=certSIGN ROOT CA# Label: "certSIGN ROOT CA"# Serial: 35210227249154# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb-----BEGIN CERTIFICATE-----MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYTAlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQUySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5dRdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQOA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwvJoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0OBBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJLjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecYMnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6IJd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNwi/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN9u6wWk5JRFRYX0KD-----END CERTIFICATE-----# Issuer: CN=CNNIC ROOT O=CNNIC# Subject: CN=CNNIC ROOT O=CNNIC# Label: "CNNIC ROOT"# Serial: 1228079105# MD5 Fingerprint: 21:bc:82:ab:49:c4:13:3b:4b:b2:2b:5c:6b:90:9c:19# SHA1 Fingerprint: 8b:af:4c:9b:1d:f0:2a:92:f7:da:12:8e:b9:1b:ac:f4:98:60:4b:6f# SHA256 Fingerprint: 
e2:83:93:77:3d:a8:45:a6:79:f2:08:0c:c7:fb:44:a3:b7:a1:c3:79:2c:b7:eb:77:29:fd:cb:6a:8d:99:ae:a7-----BEGIN CERTIFICATE-----MIIDVTCCAj2gAwIBAgIESTMAATANBgkqhkiG9w0BAQUFADAyMQswCQYDVQQGEwJDTjEOMAwGA1UEChMFQ05OSUMxEzARBgNVBAMTCkNOTklDIFJPT1QwHhcNMDcwNDE2MDcwOTE0WhcNMjcwNDE2MDcwOTE0WjAyMQswCQYDVQQGEwJDTjEOMAwGA1UEChMFQ05OSUMxEzARBgNVBAMTCkNOTklDIFJPT1QwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDTNfc/c3et6FtzF8LRb+1VvG7q6KR5smzDo+/hn7E7SIX1mlwhIhAsxYLO2uOabjfhhyzcuQxauohV3/2q2x8x6gHx3zkBwRP9SFIhxFXf2tizVHa6dLG3fdfA6PZZxU3Iva0fFNrfWEQlMhkqx35+jq44sDB7R3IJMfAw28Mbdim7aXZOV/kbZKKTVrdvmW7bCgScEeOAH8tjlBAKqeFkgjH5jCftppkA9nCTGPihNIaj3XrCGHn2emU1z5DrvTOTn1OrczvmmzQgLx3vqR1jGqCA2wMv+SYahtKNu6m+UjqHZ0gNv7Sg2Ca+I19zN38m5pIEo3/PIKe38zrKy5nLAgMBAAGjczBxMBEGCWCGSAGG+EIBAQQEAwIABzAfBgNVHSMEGDAWgBRl8jGtKvf33VKWCscCwQ7vptU7ETAPBgNVHRMBAf8EBTADAQH/MAsGA1UdDwQEAwIB/jAdBgNVHQ4EFgQUZfIxrSr3991SlgrHAsEO76bVOxEwDQYJKoZIhvcNAQEFBQADggEBAEs17szkrr/Dbq2flTtLP1se31cpolnKOOK5Gv+e5m4y3R6u6jW39ZORTtpC4cMXYFDy0VwmuYK36m3knITnA3kXr5g9lNvHugDnuL8BV8F3RTIMO/G0HAiw/VGgod2aHRM2mm23xzy54cXZF/qD1T0VoDy7HgviyJA/qIYM/PmLXoXLT1tLYhFHxUV8BS9BsZ4QaRuZluBVeftOhpm4lNqGOGqTo+fLbuXf6iFViZx9fX+Y9QCJ7uOEwFyWtcVG6kbghVW2G8kS1sHNzYDzAgE8yGnLRUhj2JTQ7IUOO04RZfSCjKY9ri4ilAnIXOo8gV0WKgOXFlUJ24pBgp5mmxE=-----END CERTIFICATE-----# Issuer: O=Japanese Government OU=ApplicationCA# Subject: O=Japanese Government OU=ApplicationCA# Label: "ApplicationCA - Japanese Government"# Serial: 49# MD5 Fingerprint: 7e:23:4e:5b:a7:a5:b4:25:e9:00:07:74:11:62:ae:d6# SHA1 Fingerprint: 7f:8a:b0:cf:d0:51:87:6a:66:f3:36:0f:47:c8:8d:8c:d3:35:fc:74# SHA256 Fingerprint: 2d:47:43:7d:e1:79:51:21:5a:12:f3:c5:8e:51:c7:29:a5:80:26:ef:1f:cc:0a:5f:b3:d9:dc:01:2f:60:0d:19-----BEGIN 
CERTIFICATE-----MIIDoDCCAoigAwIBAgIBMTANBgkqhkiG9w0BAQUFADBDMQswCQYDVQQGEwJKUDEcMBoGA1UEChMTSmFwYW5lc2UgR292ZXJubWVudDEWMBQGA1UECxMNQXBwbGljYXRpb25DQTAeFw0wNzEyMTIxNTAwMDBaFw0xNzEyMTIxNTAwMDBaMEMxCzAJBgNVBAYTAkpQMRwwGgYDVQQKExNKYXBhbmVzZSBHb3Zlcm5tZW50MRYwFAYDVQQLEw1BcHBsaWNhdGlvbkNBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAp23gdE6Hj6UG3mii24aZS2QNcfAKBZuOquHMLtJqO8F6tJdhjYq+xpqcBrSGUeQ3DnR4fl+Kf5Sk10cI/VBaVuRorChzoHvpfxiSQE8tnfWuREhzNgaeZCw7NCPbXCbkcXmP1G55IrmTwcrNwVbtiGrXoDkhBFcsovW8R0FPXjQilbUfKW1eSvNNcr5BViCH/OlQR9cwFO5cjFW6WY2H/CPek9AEjP3vbb3QesmlOmpyM8ZKDQUXKi17safY1vC+9D/qDihtQWEjdnjDuGWk81quzMKq2edY3rZ+nYVunyoKb58DKTCXKB28t89UKU5RMfkntigm/qJj5kEW8DOYRwIDAQABo4GeMIGbMB0GA1UdDgQWBBRUWssmP3HMlEYNllPqa0jQk/5CdTAOBgNVHQ8BAf8EBAMCAQYwWQYDVR0RBFIwUKROMEwxCzAJBgNVBAYTAkpQMRgwFgYDVQQKDA/ml6XmnKzlm73mlL/lupwxIzAhBgNVBAsMGuOCouODl+ODquOCseODvOOCt+ODp+ODs0NBMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEBADlqRHZ3ODrso2dGD/mLBqj7apAxzn7s2tGJfHrrLgy9mTLnsCTWw//1sogJhyzjVOGjprIIC8CFqMjSnHH2HZ9g/DgzE+Ge3Atf2hZQKXsvcJEPmbo0NI2VdMV+eKlmXb3KIXdCEKxmJj3ekav9FfBv7WxfEPjzFvYDio+nEhEMy/0/ecGc/WLuo89UDNErXxc+4z6/wCs+CZv+iKZ+tJIX/COUgb1up8WMwusRRdv4QcmWdupwX3kSa+SjB1oF7ydJzyGfikwJcGapJsErEU4z0g781mzSDjJkaP+tBXhfAx2o45CsJOAPQKdLrosot4LKGAfmt1t06SAZf7IbiVQ=-----END CERTIFICATE-----# Issuer: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only# Subject: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. 
- For authorized use only# Label: "GeoTrust Primary Certification Authority - G3"# Serial: 28809105769928564313984085209975885599# MD5 Fingerprint: b5:e8:34:36:c9:10:44:58:48:70:6d:2e:83:d4:b8:05# SHA1 Fingerprint: 03:9e:ed:b8:0b:e7:a0:3c:69:53:89:3b:20:d2:d9:32:3a:4c:2a:fd# SHA256 Fingerprint: b4:78:b8:12:25:0d:f8:78:63:5c:2a:a7:ec:7d:15:5e:aa:62:5e:e8:29:16:e2:cd:29:43:61:88:6c:d1:fb:d4-----BEGIN CERTIFICATE-----MIID/jCCAuagAwIBAgIQFaxulBmyeUtB9iepwxgPHzANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsTMChjKSAyMDA4IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTE2MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEczMB4XDTA4MDQwMjAwMDAwMFoXDTM3MTIwMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykgMjAwOCBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0BgNVBAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANziXmJYHTNXOTIz+uvLh4yn1ErdBojqZI4xmKU4kB6Yzy5jK/BGvESyiaHAKAxJcCGVn2TAppMSAmUmhsalifD614SgcK9PGpc/BkTVyetyEH3kMSj7HGHmKAdEc5IiaacDiGydY8hS2pgn5whMcD60yRLBxWeDXTPzAxHsatBT4tG6NmCUgLthY2xbF37fQJQeqw3CIShwiP/WJmxsYAQlTlV+fe+/lEjetx3dcI0FX4ilm/LC7urRQEFtYjgdVgbFA0dRIBn8exALDmKudlW/X3e+PkkBUz2YJQN2JFodtNuJ6nnltrM7P7pMKEF/BqxqjsHQ9gUdfeZChuOl1UcCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFMR5yo6hTgMdHNxr2zFblD4/MH8tMA0GCSqGSIb3DQEBCwUAA4IBAQAtxRPPVoB7eni9n64smefv2t+UXglpp+duaIy9cr5HqQ6XErhK8WTTOd8lNNTBzU6B8A8ExCSzNJbGpqow32hhc9f5joWJ7w5elShKKiePEI4ufIbEAp7aDHdlDkQNkv39sxY2+hENHYwOB4lqKVb3cvTdFZx3NWZXqxNT2I7BQMXXExZacse3aQHEerGDAWh9jUGhlBjBJVz88P6DAod8DQ3PLghcSkANPuyBYeYk28rgDi0Hsj5W3I31QYUHSJsMC8tJP33st/3LjWeJGqvtux6jAAgIFyqCXDFdRootD4abdNlF+9RAsXqqaC2Gspki4cErx5z481+oghLrGREt-----END CERTIFICATE-----# Issuer: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only# Subject: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. 
- For authorized use only# Label: "thawte Primary Root CA - G2"# Serial: 71758320672825410020661621085256472406# MD5 Fingerprint: 74:9d:ea:60:24:c4:fd:22:53:3e:cc:3a:72:d9:29:4f# SHA1 Fingerprint: aa:db:bc:22:23:8f:c4:01:a1:27:bb:38:dd:f4:1d:db:08:9e:f0:12# SHA256 Fingerprint: a4:31:0d:50:af:18:a6:44:71:90:37:2a:86:af:af:8b:95:1f:fb:43:1d:83:7f:1e:56:88:b4:59:71:ed:15:57-----BEGIN CERTIFICATE-----MIICiDCCAg2gAwIBAgIQNfwmXNmET8k9Jj1Xm67XVjAKBggqhkjOPQQDAzCBhDELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjE4MDYGA1UECxMvKGMpIDIwMDcgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAiBgNVBAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMjAeFw0wNzExMDUwMDAwMDBaFw0zODAxMTgyMzU5NTlaMIGEMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhhd3RlLCBJbmMuMTgwNgYDVQQLEy8oYykgMjAwNyB0aGF3dGUsIEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9vdCBDQSAtIEcyMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEotWcgnuVnfFSeIf+iha/BebfowJPDQfGAFG6DAJSLSKkQjnE/o/qycG+1E3/n3qe4rF8mq2nhglzh9HnmuN6papu+7qzcMBniKI11KOasf2twu8x+qi58/sIxpHR+ymVo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUmtgAMADna3+FGO6Lts6KDPgR4bswCgYIKoZIzj0EAwMDaQAwZgIxAN344FdHW6fmCsO99YCKlzUNG4k8VIZ3KMqh9HneteY4sPBlcIx/AlTCv//YoT7ZzwIxAMSNlPzcU9LcnXgWHxUzI1NS41oxXZ3Krr0TKUQNJ1uo52icEvdYPy5yAlejj6EULg==-----END CERTIFICATE-----# Issuer: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only# Subject: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. 
- For authorized use only# Label: "thawte Primary Root CA - G3"# Serial: 127614157056681299805556476275995414779# MD5 Fingerprint: fb:1b:5d:43:8a:94:cd:44:c6:76:f2:43:4b:47:e7:31# SHA1 Fingerprint: f1:8b:53:8d:1b:e9:03:b6:a6:f0:56:43:5b:17:15:89:ca:f3:6b:f2# SHA256 Fingerprint: 4b:03:f4:58:07:ad:70:f2:1b:fc:2c:ae:71:c9:fd:e4:60:4c:06:4c:f5:ff:b6:86:ba:e5:db:aa:d7:fd:d3:4c-----BEGIN CERTIFICATE-----MIIEKjCCAxKgAwIBAgIQYAGXt0an6rS0mtZLL/eQ+zANBgkqhkiG9w0BAQsFADCBrjELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIwMDggdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAiBgNVBAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMzAeFw0wODA0MDIwMDAwMDBaFw0zNzEyMDEyMzU5NTlaMIGuMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhhd3RlLCBJbmMuMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9uIFNlcnZpY2VzIERpdmlzaW9uMTgwNgYDVQQLEy8oYykgMjAwOCB0aGF3dGUsIEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9vdCBDQSAtIEczMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsr8nLPvb2FvdeHsbnndmgcs+vHyu86YnmjSjaDFxODNi5PNxZnmxqWWjpYvVj2AtP0LMqmsywCPLLEHd5N/8YZzic7IilRFDGF/Eth9XbAoFWCLINkw6fKXRz4aviKdEAhN0cXMKQlkC+BsUa0Lfb1+6a4KinVvnSr0eAXLbS3ToO39/fR8EtCab4LRarEc9VbjXsCZSKAExQGbY2SS99irY7CFJXJv2eul/VTV+lmuNk5Mny5K76qxAwJ/C+IDPXfRa3M50hqY+bAtTyr2SzhkGcuYMXDhpxwTWvGzOW/b3aJzcJRVIiKHpqfiYnODz1TEoYRFsZ5aNOZnLwkUkOQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUrWyqlGCc7eT/+j4KdCtjA/e2Wb8wDQYJKoZIhvcNAQELBQADggEBABpA2JVlrAmSicY59BDlqQ5mU1143vokkbvnRFHfxhY0Cu9qRFHqKweKA3rD6z8KLFIWoCtDuSWQP3CpMyVtRRooOyfPqsMpQhvfO0zAMzRbQYi/aytlryjvsvXDqmbOe1but8jLZ8HJnBoYuMTDSQPxYA5QzUbF83d597YV4Djbxy8ooAw/dyZ02SUS2jHaGh7cKUGRIjxpp7sC8rZcJwOJ9Abqm+RyguOhCcHpABnTPtRwa7pxpqpYrvS76Wy274fMm7v/OeZWYdMKp8RcTGB7BXcmer/YB1IsYvdwY9k5vG8cwnncdimvzsUsZAReiDZuMdRAGmI0Nj81Aa6sY6A=-----END CERTIFICATE-----# Issuer: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. 
- For authorized use only# Subject: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only# Label: "GeoTrust Primary Certification Authority - G2"# Serial: 80682863203381065782177908751794619243# MD5 Fingerprint: 01:5e:d8:6b:bd:6f:3d:8e:a1:31:f8:12:e0:98:73:6a# SHA1 Fingerprint: 8d:17:84:d5:37:f3:03:7d:ec:70:fe:57:8b:51:9a:99:e6:10:d7:b0# SHA256 Fingerprint: 5e:db:7a:c4:3b:82:a0:6a:87:61:e8:d7:be:49:79:eb:f2:61:1f:7d:d7:9b:f9:1c:1c:6b:56:6a:21:9e:d7:66-----BEGIN CERTIFICATE-----MIICrjCCAjWgAwIBAgIQPLL0SAoA4v7rJDteYD7DazAKBggqhkjOPQQDAzCBmDELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsTMChjKSAyMDA3IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTE2MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMB4XDTA3MTEwNTAwMDAwMFoXDTM4MDExODIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykgMjAwNyBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0BgNVBAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBHMjB2MBAGByqGSM49AgEGBSuBBAAiA2IABBWx6P0DFUPlrOuHNxFi79KDNlJ9RVcLSo17VDs6bl8VAsBQps8lL33KSLjHUGMcKiEIfJo22Av+0SbFWDEwKCXzXV2juLaltJLtbCyf691DiaI8S0iRHVDsJt/WYC69IaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBVfNVdRVfslsq0DafwBo/q+EVXVMAoGCCqGSM49BAMDA2cAMGQCMGSWWaboCd6LuvpaiIjwH5HTRqjySkwCY/tsXzjbLkGTqQ7mndwxHLKgpxgceeHHNgIwOlavmnRs9vuD4DPTCF+hnMJbn0bWtsuRBmOiBuczrD6ogRLQy7rQkgu2npaqBA+K-----END CERTIFICATE-----# Issuer: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only# Subject: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. 
- For authorized use only# Label: "VeriSign Universal Root Certification Authority"# Serial: 85209574734084581917763752644031726877# MD5 Fingerprint: 8e:ad:b5:01:aa:4d:81:e4:8c:1d:d1:e1:14:00:95:19# SHA1 Fingerprint: 36:79:ca:35:66:87:72:30:4d:30:a5:fb:87:3b:0f:a7:7b:b7:0d:54# SHA256 Fingerprint: 23:99:56:11:27:a5:71:25:de:8c:ef:ea:61:0d:df:2f:a0:78:b5:c8:06:7f:4e:82:82:90:bf:b8:60:e8:4b:3c-----BEGIN CERTIFICATE-----MIIEuTCCA6GgAwIBAgIQQBrEZCGzEyEDDrvkEhrFHTANBgkqhkiG9w0BAQsFADCBvTELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwOCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MTgwNgYDVQQDEy9WZXJpU2lnbiBVbml2ZXJzYWwgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wODA0MDIwMDAwMDBaFw0zNzEyMDEyMzU5NTlaMIG9MQswCQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAyMDA4IFZlcmlTaWduLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxODA2BgNVBAMTL1ZlcmlTaWduIFVuaXZlcnNhbCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAx2E3XrEBNNti1xWb/1hajCMj1mCOkdeQmIN65lgZOIzF9uVkhbSicfvtvbnazU0AtMgtc6XHaXGVHzk8skQHnOgO+k1KxCHfKWGPMiJhgsWHH26MfF8WIFFE0XBPV+rjHOPMee5Y2A7Cs0WTwCznmhcrewA3ekEzeOEz4vMQGn+HLL729fdC4uW/h2KJXwBL38Xd5HVEMkE6HnFuacsLdUYI0crSK5XQz/u5QGtkjFdN/BMReYTtXlT2NJ8IAfMQJQYXStrxHXpma5hgZqTZ79IugvHw7wnqRMkVauIDbjPTrJ9VAMf2CGqUuV/c4DPxhGD5WycRtPwW8rtWaoAljQIDAQABo4GyMIGvMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMG0GCCsGAQUFBwEMBGEwX6FdoFswWTBXMFUWCWltYWdlL2dpZjAhMB8wBwYFKw4DAhoEFI/l0xqGrI2Oa8PPgGrUSBgsexkuMCUWI2h0dHA6Ly9sb2dvLnZlcmlzaWduLmNvbS92c2xvZ28uZ2lmMB0GA1UdDgQWBBS2d/ppSEefUxLVwuoHMnYH0ZcHGTANBgkqhkiG9w0BAQsFAAOCAQEASvj4sAPmLGd75JR3Y8xuTPl9Dg3cyLk1uXBPY/ok+myDjEedO2Pzmvl2MpWRsXe8rJq+seQxIcaBlVZaDrHC1LGmWazxY8u4TB1ZkErvkBYoH1quEPuBUDgMbMzxPcP1Y+Oz4yHJJDnp/RVmRvQbEdBNc6N9Rvk97ahfYtTxP/jgdFcrGJ2BtMQo2pSXpXDrrB2+BxHw1dvd5Yzw1TKwg+ZX4o+/vqGqvz0dtdQ46tewXDpPaj+PwGZsY6rp2aQW9IHRlRQOfc2VNNnSj3BzgXucfr2YYdhFh5iQxeuGMMY1v/D/
w1WIg0vvBZIGcfK4mJO37M2CYfE45k+XmCpajQ==-----END CERTIFICATE-----# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only# Label: "VeriSign Class 3 Public Primary Certification Authority - G4"# Serial: 63143484348153506665311985501458640051# MD5 Fingerprint: 3a:52:e1:e7:fd:6f:3a:e3:6f:f3:6f:99:1b:f9:22:41# SHA1 Fingerprint: 22:d5:d8:df:8f:02:31:d1:8d:f7:9d:b7:cf:8a:2d:64:c9:3f:6c:3a# SHA256 Fingerprint: 69:dd:d7:ea:90:bb:57:c9:3e:13:5d:c8:5e:a6:fc:d5:48:0b:60:32:39:bd:c4:54:fc:75:8b:2a:26:cf:7f:79-----BEGIN CERTIFICATE-----MIIDhDCCAwqgAwIBAgIQL4D+I4wOIg9IZxIokYesszAKBggqhkjOPQQDAzCByjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzQwHhcNMDcxMTA1MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCByjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASnVnp8Utpkmw4tXNherJI9/gHmGUo9FANL+mAnINmDiWn6VMaaGF5VKmTeBvaNSjutEDxlPZCIBIngMGGzrl0Bp3vefLK+ymVhAIau2o970ImtTR1ZmkGxvEeA3J5iw/mjgbIwga8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEwHzAHBgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVyaXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFLMWkf3upm7ktS5Jj4d4gYDs5bG1MAoGCCqGSM49BAMDA2gAMGUCMGYhDBgmYFo4e1ZC4Kf8NoRRkSAsdk1DPcQdhCPQrNZ8NQbOzWm9kA3bbEhCHQ6qQgIxAJw9SDkjOVgaFRJZap7v1VmyHVIsmXHNxynfGyphe3HR3vPA5Q06Sq
otp9iGKt0uEA==-----END CERTIFICATE-----# Issuer: CN=NetLock Arany (Class Gold) Főtanúsítvány O=NetLock Kft. OU=Tanúsítványkiadók (Certification Services)# Subject: CN=NetLock Arany (Class Gold) Főtanúsítvány O=NetLock Kft. OU=Tanúsítványkiadók (Certification Services)# Label: "NetLock Arany (Class Gold) Főtanúsítvány"# Serial: 80544274841616# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98-----BEGIN CERTIFICATE-----MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNlcnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWRdGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCBpzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRMb2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNzIEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrTlF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrzAZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRGILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAGAQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2MU9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRhbvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C+C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzCbLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2FuLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2XjG4Kvte9nHfRCaexOYNkbQu
dZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E=-----END CERTIFICATE-----# Issuer: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden# Subject: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden# Label: "Staat der Nederlanden Root CA - G2"# Serial: 10000012# MD5 Fingerprint: 7c:a5:0f:f8:5b:9a:7d:6d:30:ae:54:5a:e3:42:a2:8a# SHA1 Fingerprint: 59:af:82:79:91:86:c7:b4:75:07:cb:cf:03:57:46:eb:04:dd:b7:16# SHA256 Fingerprint: 66:8c:83:94:7d:a6:3b:72:4b:ec:e1:74:3c:31:a0:e6:ae:d0:db:8e:c5:b3:1b:e3:77:bb:78:4f:91:b6:71:6f-----BEGIN CERTIFICATE-----MIIFyjCCA7KgAwIBAgIEAJiWjDANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJOTDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFhdCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEcyMB4XDTA4MDMyNjExMTgxN1oXDTIwMDMyNTExMDMxMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRlciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJvb3QgQ0EgLSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMVZ5291qj5LnLW4rJ4L5PnZyqtdj7U5EILXr1HgO+EASGrP2uEGQxGZqhQlEq0i6ABtQ8SpuOUfiUtnvWFI7/3S4GCI5bkYYCjDdyutsDeqN95kWSpGV+RLufg3fNU254DBtvPUZ5uW6M7XxgpT0GtJlvOjCwV3SPcl5XCsMBQgJeN/dVrlSPhOewMHBPqCYYdu8DvEpMfQ9XQ+pV0aCPKbJdL2rAQmPlU6Yiile7Iwr/g3wtG61jj99O9JMDeZJiFIhQGp5Rbn3JBV3w/oOM2ZNyFPXfUib2rFEhZgF1XyZWampzCROME4HYYEhLoaJXhena/MUGDWE4dS7WMfbWV9whUYdMrhfmQpjHLYFhN9C0lK8SgbIHRrxT3dsKpICT0ugpTNGmXZK4iambwYfp/ufWZ8Pr2UuIHOzZgweMFvZ9C+X+Bo7d7iscksWXiSqt8rYGPy5V6548r6f1CGPqI0GAwJaCgRHOThuVw+R7oyPxjMW4T182t0xHJ04eOLoEq9jWYv6q012iDTiIJh8BIitrzQ1aTsr1SIJSQ8p22xcik/Plemf1WvbibG/ufMQFxRRIEKeN5KzlW/HdXZt1bv8Hb/C3m1r737qWmRRpdogBQ2HbN/uymYNqUg+oJgYjOk7Na6B6duxc8UpufWkjTYgfX8HV2qXB72o007uPc5AgMBAAGjgZcwgZQwDwYDVR0TAQH/BAUwAwEB/zBSBgNVHSAESzBJMEcGBFUdIAAwPzA9BggrBgEFBQcCARYxaHR0cDovL3d3dy5wa2lvdmVyaGVpZC5ubC9wb2xpY2llcy9yb290LXBvbGljeS1HMjAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJFoMocVHYnitfGsNig0jQt8YojrMA0GCSqGSIb3DQEBCwUAA4ICAQCoQUpnKpKBglBu4dfYszk78wIVCVBR7y29JHuIhjv5tLySCZa59sCrI2AGeYwRTlHSeYAz+51IvuxBQ4EffkdAHOV6CMqqi3WtFMTC6GY8ggen5ieCWxjmD27ZUD6KQhgpxrRW/FYQoAUXvQwjf/ST
7ZwaUb7dRUG/kSS0H4zpX897IZmflZ85OkYcbPnNe5yQzSipx6lVu6xiNGI1E0sUOlWDuYaNkqbG9AclVMwWVxJKgnjIFNkXgiYtXSAfea7+1HAWFpWD2DU5/1JddRwWxRNVz0fMdWVSSt7wsKfkCpYL+63C4iWEst3kvX5ZbJvw8NjnyvLplzh+ib7M+zkXYT9y2zqR2GUBGR2tUKRXCnxLvJxxcypFURmFzI79R6d0lR2o0a9OF7FpJsKqeFdbxU2n5Z4FF5TKsl+gSRiNNOkmbEgeqmiSBeGCc1qb3AdbCG19ndeNIdn8FCCqwkXfP+cAslHkwvgFuXkajDTznlvkN1trSt8sV4pAWja63XVECDdCcAz+3F4hoKOKwJCcaNpQ5kUQR3i2TtJlycM33+FCY7BXN0Ute4qcvwXqZVUz9zkQxSgqIXobisQk+T8VyJoVIPVVYpbtbZNQvOSqeK3Zywplh6ZmwcSBo3c6WB4L7oOLnR7SUqTMHW+wmG2UMbX4cQrcufx9MmDm66+KAQ==-----END CERTIFICATE-----# Issuer: CN=CA Disig O=Disig a.s.# Subject: CN=CA Disig O=Disig a.s.# Label: "CA Disig"# Serial: 1# MD5 Fingerprint: 3f:45:96:39:e2:50:87:f7:bb:fe:98:0c:3c:20:98:e6# SHA1 Fingerprint: 2a:c8:d5:8b:57:ce:bf:2f:49:af:f2:fc:76:8f:51:14:62:90:7a:41# SHA256 Fingerprint: 92:bf:51:19:ab:ec:ca:d0:b1:33:2d:c4:e1:d0:5f:ba:75:b5:67:90:44:ee:0c:a2:6e:93:1f:74:4f:2f:33:cf-----BEGIN CERTIFICATE-----MIIEDzCCAvegAwIBAgIBATANBgkqhkiG9w0BAQUFADBKMQswCQYDVQQGEwJTSzETMBEGA1UEBxMKQnJhdGlzbGF2YTETMBEGA1UEChMKRGlzaWcgYS5zLjERMA8GA1UEAxMIQ0EgRGlzaWcwHhcNMDYwMzIyMDEzOTM0WhcNMTYwMzIyMDEzOTM0WjBKMQswCQYDVQQGEwJTSzETMBEGA1UEBxMKQnJhdGlzbGF2YTETMBEGA1UEChMKRGlzaWcgYS5zLjERMA8GA1UEAxMIQ0EgRGlzaWcwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCS9jHBfYj9mQGp2HvycXXxMcbzdWb6UShGhJd4NLxs/LxFWYgmGErENx+hSkS943EE9UQX4j/8SFhvXJ56CbpRNyIjZkMhsDxkovhqFQ4/61HhVKndBpnXmjxUizkDPw/Fzsbrg3ICqB9x8y34dQjbYkzo+s7552oftms1grrijxaSfQUMbEYDXcDtab86wYqg6I7ZuUUohwjstMoVvoLdtUSLLa2GDGhibYVW8qwUYzrG0ZmsNHhWS8+2rT+MitcE5eN4TPWGqvWP+j1scaMtymfraHtuM6kMgiioTGohQBUgDCZbg8KpFhXAJIJdKxatymP2dACw30PEEGBWZ2NFAgMBAAGjgf8wgfwwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUjbJJaJ1yCCW5wCf1UJNWSEZx+Y8wDgYDVR0PAQH/BAQDAgEGMDYGA1UdEQQvMC2BE2Nhb3BlcmF0b3JAZGlzaWcuc2uGFmh0dHA6Ly93d3cuZGlzaWcuc2svY2EwZgYDVR0fBF8wXTAtoCugKYYnaHR0cDovL3d3dy5kaXNpZy5zay9jYS9jcmwvY2FfZGlzaWcuY3JsMCygKqAohiZodHRwOi8vY2EuZGlzaWcuc2svY2EvY3JsL2NhX2Rpc2lnLmNybDAaBgNVHSAEEzARMA8GDSuBHpGT5goAAAABAQEwDQYJKoZIhvcNAQEFBQADggEBAF0
0dGFMrzvY/59tWDYcPQuBDRIrRhCA/ec8J9B6yKm2fnQwM6M6int0wHl5QpNt/7EpFIKrIYwvF/k/Ji/1WcbvgAa3mkkp7M5+cTxqEEHA9tOasnxakZzArFvITV734VP/Q3f8nktnbNfzg9Gg4H8l37iYC5oyOGwwoPP/CBUz91BKez6jPiCp3C9WgArtQVCwyfTssuMmRAAOb54GvCKWU3BlxFAKRmukLyeBEicTXxChds6KezfqwzlhA5WYOudsiCUI/HloDYd9Yvi0X/vF2Ey9WLw/Q1vUHgFNPGO+I++MzVpQuGhU+QqZMxEA4Z7CRneC9VkGjCFMhwnN5ag=-----END CERTIFICATE-----# Issuer: CN=Juur-SK O=AS Sertifitseerimiskeskus# Subject: CN=Juur-SK O=AS Sertifitseerimiskeskus# Label: "Juur-SK"# Serial: 999181308# MD5 Fingerprint: aa:8e:5d:d9:f8:db:0a:58:b7:8d:26:87:6c:82:35:55# SHA1 Fingerprint: 40:9d:4b:d9:17:b5:5c:27:b6:9b:64:cb:98:22:44:0d:cd:09:b8:89# SHA256 Fingerprint: ec:c3:e9:c3:40:75:03:be:e0:91:aa:95:2f:41:34:8f:f8:8b:aa:86:3b:22:64:be:fa:c8:07:90:15:74:e9:39-----BEGIN CERTIFICATE-----MIIE5jCCA86gAwIBAgIEO45L/DANBgkqhkiG9w0BAQUFADBdMRgwFgYJKoZIhvcNAQkBFglwa2lAc2suZWUxCzAJBgNVBAYTAkVFMSIwIAYDVQQKExlBUyBTZXJ0aWZpdHNlZXJpbWlza2Vza3VzMRAwDgYDVQQDEwdKdXVyLVNLMB4XDTAxMDgzMDE0MjMwMVoXDTE2MDgyNjE0MjMwMVowXTEYMBYGCSqGSIb3DQEJARYJcGtpQHNrLmVlMQswCQYDVQQGEwJFRTEiMCAGA1UEChMZQVMgU2VydGlmaXRzZWVyaW1pc2tlc2t1czEQMA4GA1UEAxMHSnV1ci1TSzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAIFxNj4zB9bjMI0TfncyRsvPGbJgMUaXhvSYRqTCZUXP00B841oiqBB4M8yIsdOBSvZiF3tfTQou0M+LI+5PAk676w7KvRhj6IAcjeEcjT3g/1tf6mTll+g/mX8MCgkzABpTpyHhOEvWgxutr2TC+Rx6jGZITWYfGAriPrsfB2WThbkasLnE+w0R9vXW+RvHLCu3GFH+4Hv2qEivbDtPL+/40UceJlfwUR0zlv/vWT3aTdEVNMfqPxZIe5EcgEMPPbgFPtGzlc3Yyg/CQ2fbt5PgIoIuvvVoKIO5wTtpeyDaTpxt4brNj3pssAki14sL2xzVWiZbDcDq5WDQn/413z8CAwEAAaOCAawwggGoMA8GA1UdEwEB/wQFMAMBAf8wggEWBgNVHSAEggENMIIBCTCCAQUGCisGAQQBzh8BAQEwgfYwgdAGCCsGAQUFBwICMIHDHoHAAFMAZQBlACAAcwBlAHIAdABpAGYAaQBrAGEAYQB0ACAAbwBuACAAdgDkAGwAagBhAHMAdABhAHQAdQBkACAAQQBTAC0AaQBzACAAUwBlAHIAdABpAGYAaQB0AHMAZQBlAHIAaQBtAGkAcwBrAGUAcwBrAHUAcwAgAGEAbABhAG0ALQBTAEsAIABzAGUAcgB0AGkAZgBpAGsAYQBhAHQAaQBkAGUAIABrAGkAbgBuAGkAdABhAG0AaQBzAGUAawBzMCEGCCsGAQUFBwIBFhVodHRwOi8vd3d3LnNrLmVlL2Nwcy8wKwYDVR0fBCQwIjAgoB6gHIYaaHR0cDovL3d3dy5zay5lZS9qdXVyL2NybC8wHQYDVR0
OBBYEFASqekej5ImvGs8KQKcYP2/v6X2+MB8GA1UdIwQYMBaAFASqekej5ImvGs8KQKcYP2/v6X2+MA4GA1UdDwEB/wQEAwIB5jANBgkqhkiG9w0BAQUFAAOCAQEAe8EYlFOiCfP+JmeaUOTDBS8rNXiRTHyoERF5TElZrMj3hWVcRrs7EKACr81Ptcw2Kuxd/u+gkcm2k298gFTsxwhwDY77guwqYHhpNjbRxZyLabVAyJRld/JXIWY7zoVAtjNjGr95HvxcHdMdkxuLDF2FvZkwMhgJkVLpfKG6/2SSmuz+Ne6ML678IIbsSt4beDI3poHSna9aEhbKmVv8b20OxaAehsmR0FyYgl9jDIpaq9iVpszLita/ZEuOyoqysOkhMp6qqIWYNIE5ITuoOlIyPfZrN4YGWhWY3PARZv40ILcD9EEQfTmEeZZyY7aWAuVrua0ZTbvGRNs2yyqcjg==-----END CERTIFICATE-----# Issuer: CN=Hongkong Post Root CA 1 O=Hongkong Post# Subject: CN=Hongkong Post Root CA 1 O=Hongkong Post# Label: "Hongkong Post Root CA 1"# Serial: 1000# MD5 Fingerprint: a8:0d:6f:39:78:b9:43:6d:77:42:6d:98:5a:cc:23:ca# SHA1 Fingerprint: d6:da:a8:20:8d:09:d2:15:4d:24:b5:2f:cb:34:6e:b2:58:b2:8a:58# SHA256 Fingerprint: f9:e6:7d:33:6c:51:00:2a:c0:54:c6:32:02:2d:66:dd:a2:e7:e3:ff:f1:0a:d0:61:ed:31:d8:bb:b4:10:cf:b2-----BEGIN CERTIFICATE-----MIIDMDCCAhigAwIBAgICA+gwDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UEBhMCSEsxFjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdrb25nIFBvc3QgUm9vdCBDQSAxMB4XDTAzMDUxNTA1MTMxNFoXDTIzMDUxNTA0NTIyOVowRzELMAkGA1UEBhMCSEsxFjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdrb25nIFBvc3QgUm9vdCBDQSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArP84tulmAknjorThkPlAj3n54r15/gK97iSSHSL22oVyaf7XPwnU3ZG1ApzQjVrhVcNQhrkpJsLj2aDxaQMoIIBFIi1WpztUlVYiWR8o3x8gPW2iNr4joLFutbEnPzlTCeqrauh0ssJlXI6/fMN4hM2eFvz1Lk8gKgifd/PFHsSaUmYeSF7jEAaPIpjhZY4bXSNmO7ilMlHIhqqhqZ5/dpTCpmy3QfDVyAY45tQM4vM7TG1QjMSDJ8EThFk9nnV0ttgCXjqQesBCNnLsak3c78QA3xMYV18meMjWCnl3v/evt3a5pQuEF10Q6m/hq5URX208o1xNg1vysxmKgIsLhwIDAQABoyYwJDASBgNVHRMBAf8ECDAGAQH/AgEDMA4GA1UdDwEB/wQEAwIBxjANBgkqhkiG9w0BAQUFAAOCAQEADkbVPK7ih9legYsCmEEIjEy82tvuJxuC52pF7BaLT4Wg87JwvVqWuspube5Gi27nKi6Wsxkz67SfqLI37piol7Yutmcn1KZJ/RyTZXaeQi/cImyaT/JaFTmxcdcrUehtHJjA2Sr0oYJ71clBoiMBdDhViw+5LmeiIAQ32pwL0xch4I+XeTRvhEgCIDMb5jREn5Fw9IBehEPCKdJsEhTkYY2sEJCehFC78JZvRZ+K88psT/oROhUVRsPNH4NbLUES7VBnQRM9IauUiqpOfMGx+6fWtScvl6tu4B3i0RwsH0Ti/L6RoZz71ilT
c4afU9hDDl3WY4JxHYB0yvbiAmvZWg==-----END CERTIFICATE-----# Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc.# Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc.# Label: "SecureSign RootCA11"# Serial: 1# MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26# SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3# SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12-----BEGIN CERTIFICATE-----MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDErMCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoGA1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZpY2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RDQTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJszi1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOVMdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsCh8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEBAKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xmKbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQX5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWrQbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmNQSdJQO7e5iNEOdyhIta6A/I=-----END CERTIFICATE-----# Issuer: CN=ACEDICOM Root O=EDICOM OU=PKI# Subject: CN=ACEDICOM Root O=EDICOM OU=PKI# Label: "ACEDICOM Root"# Serial: 7029493972724711941# MD5 Fingerprint: 42:81:a0:e2:1c:e3:55:10:de:55:89:42:65:96:22:e6# SHA1 Fingerprint: 
e0:b4:32:2e:b2:f6:a5:68:b6:54:53:84:48:18:4a:50:36:87:43:84# SHA256 Fingerprint: 03:95:0f:b4:9a:53:1f:3e:19:91:94:23:98:df:a9:e0:ea:32:d7:ba:1c:dd:9b:c8:5d:b5:7e:d9:40:0b:43:4a-----BEGIN CERTIFICATE-----MIIFtTCCA52gAwIBAgIIYY3HhjsBggUwDQYJKoZIhvcNAQEFBQAwRDEWMBQGA1UEAwwNQUNFRElDT00gUm9vdDEMMAoGA1UECwwDUEtJMQ8wDQYDVQQKDAZFRElDT00xCzAJBgNVBAYTAkVTMB4XDTA4MDQxODE2MjQyMloXDTI4MDQxMzE2MjQyMlowRDEWMBQGA1UEAwwNQUNFRElDT00gUm9vdDEMMAoGA1UECwwDUEtJMQ8wDQYDVQQKDAZFRElDT00xCzAJBgNVBAYTAkVTMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA/5KV4WgGdrQsyFhIyv2AVClVYyT/kGWbEHV7w2rbYgIB8hiGtXxaOLHkWLn709gtn70yN78sFW2+tfQh0hOR2QetAQXW8713zl9CgQr5auODAKgrLlUTY4HKRxx7XBZXehuDYAQ6PmXDzQHe3qTWDLqO3tkE7hdWIpuPY/1NFgu3e3eM+SW10W2ZEi5PGrjm6gSSrj0RuVFCPYewMYWveVqc/udOXpJPQ/yrOq2lEiZmueIM15jO1FillUAKt0SdE3QrwqXrIhWYENiLxQSfHY9g5QYbm8+5eaA9oiM/Qj9r+hwDezCNzmzAv+YbX79nuIQZ1RXve8uQNjFiybwCq0Zfm/4aaJQ0PZCOrfbkHQl/Sog4P75n/TSW9R28MHTLOO7VbKvU/PQAtwBbhTIWdjPp2KOZnQUAqhbm84F9b32qhm2tFXTTxKJxqvQUfecyuB+81fFOvW8XAjnXDpVCOscAPukmYxHqC9FK/xidstd7LzrZlvvoHpKuE1XI2Sf23EgbsCTBheN3nZqk8wwRHQ3ItBTutYJXCb8gWH8vIiPYcMt5bMlL8qkqyPyHK9caUPgn6C9D4zq92Fdx/c6mUlv53U3t5fZvie27k5x2IXXwkkwp9y+cAS7+UEaeZAwUswdbxcJzbPEHXEUkFDWug/FqTYl6+rPYLWbwNof1K1MCAwEAAaOBqjCBpzAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKaz4SsrSbbXc6GqlPUB53NlTKxQMA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUprPhKytJttdzoaqU9QHnc2VMrFAwRAYDVR0gBD0wOzA5BgRVHSAAMDEwLwYIKwYBBQUHAgEWI2h0dHA6Ly9hY2VkaWNvbS5lZGljb21ncm91cC5jb20vZG9jMA0GCSqGSIb3DQEBBQUAA4ICAQDOLAtSUWImfQwng4/F9tqgaHtPkl7qpHMyEVNEskTLnewPeUKzEKbHDZ3Ltvo/Onzqv4hTGzz3gvoFNTPhNahXwOf9jU8/kzJPeGYDdwdY6ZXIfj7QeQCM8htRM5u8lOk6e25SLTKeI6RF+7YuE7CLGLHdztUdp0J/Vb77W7tH1PwkzQSulgUV1qzOMPPKC8W64iLgpq0i5ALudBF/TP94HTXa5gI06xgSYXcGCRZj6hitoocf8seACQl1ThCojz2GuHURwCRiipZ7SkXp7FnFvmuD5uHorLUwHv4FB4D54SMNUI8FmP8sX+g7tq3PgbUhh8oIKiMnMCArz+2UW6yyetLHKKGKC5tNSixthT8Jcjxn4tncB7rrZXtaAWPWkFtPF2Y9fwsZo5NjEFIqnxQWWOLcpfShFosOkYuByptZ+thrkQdlVV9SH686+5DdaaVbnG0OLLb6zqylfDJKZ0DcMDQj3dcEI2bw/FWAp/tmGYI1Z2JwOV5vx+qQQEQIHriy1tvuWacNGHk0v
FQYXlPKNFHtRQrmjseCNj6nOGOpMCwXEGCSn1WHElkQwg9naRHMTh5+Spqtr0CodaxWkHS4oJyleW/c6RrIaQXpuvoDs3zk4E7Czp3otkYNbn5XOmeUwssfnHdKZ05phkOTOPu220+DkdRgfks+KzgHVZhepA==-----END CERTIFICATE-----# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd.# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd.# Label: "Microsec e-Szigno Root CA 2009"# Serial: 14014712776195784473# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78-----BEGIN CERTIFICATE-----MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0yOTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvPkd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tccbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4UfIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbCxkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1+rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqGSIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8hmLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGkddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocw
TZ8jx5tHMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW-----END CERTIFICATE-----# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3# Label: "GlobalSign Root CA - R3"# Serial: 4835703278459759426209954# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b-----BEGIN CERTIFICATE-----MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4GA1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsTgHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmmKPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zdQQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZXriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+oLkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZURUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMpjjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQXmcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecsMx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpHWD9f-----END CERTIFICATE-----# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068# Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068"# Serial: 6047274297262753887# MD5 Fingerprint: 
73:3a:74:7a:ec:bb:a3:96:a6:c2:e4:e2:c8:9b:c0:c3# SHA1 Fingerprint: ae:c5:fb:3f:c8:e1:bf:c4:e5:4f:03:07:5a:9a:e8:00:b7:f7:b6:fa# SHA256 Fingerprint: 04:04:80:28:bf:1f:28:64:d4:8f:9a:d4:d8:32:94:36:6a:82:88:56:55:3f:3b:14:30:3f:90:14:7f:5d:40:ef-----BEGIN CERTIFICATE-----MIIGFDCCA/ygAwIBAgIIU+w77vuySF8wDQYJKoZIhvcNAQEFBQAwUTELMAkGA1UEBhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1hcHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0wOTA1MjAwODM4MTVaFw0zMDEyMzEwODM4MTVaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUgQ2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQMcas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefGL9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15iNA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/hX68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2bm8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCyZ/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirjaEbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/TKI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVhOSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMBIGA1UdEwEB/wQIMAYBAf8CAQEwDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRlzeurNR4APn7VdMActHNHDhpkLzCBpgYDVR0gBIGeMIGbMIGYBgRVHSAAMIGPMC8GCCsGAQUFBwIBFiNodHRwOi8vd3d3LmZpcm1hcHJvZmVzaW9uYWwuY29tL2NwczBcBggrBgEFBQcCAjBQHk4AUABhAHMAZQBvACAAZABlACAAbABhACAAQgBvAG4AYQBuAG8AdgBhACAANAA3ACAAQgBhAHIAYwBlAGwAbwBuAGEAIAAwADgAMAAxADcwDQYJKoZIhvcNAQEFBQADggIBABd9oPm03cXF661LJLWhAqvdpYhKsg9VSytXjDvlMd3+xDLx51tkljYyGOylMnfX40S2wBEqgLk9am58m9Ot/MPWo+ZkKXzR4Tgegiv/J2Wv+xYVxC5xhOW1//qkR71kMrv2JYSiJ0L1ILDCExARzRAVukKQKtJE4ZYm6zFIEv0q2skGz3QeqUvVhyj5eTSSPi5E6PaPT481PyWzOdxjKpBrIF/EUhJOlywqrJ2X3kjyo2bbwtKDlaZmp54lD+kLM5FlClrD2VQS3a/DTg4fJl4N3LON7NWBcN7STyQF82xO9UxJZo3R/9ILJUFI/lGExkKvgATP0H5kSe
Ty36LssUzAKh3ntLFlosS88Zj0qnAHY7S42jtM+kAiMFsRpvAFDsYCA0irhpuF3dvd6qJ2gHN99ZwExEWN57kci57q13XRcrHedUTnQn3iV2t93Jm8PYMo6oCTjcVMZcFwgbg4/EMxsvYDNEeyrPsiBsse3RdHHF9mudMaotoRsaS8I8nkvof/uZS2+F0gStRf571oe2XyFR7SOqkt6dhrJKyXWERHrVkY8SFlcN7ONGCoQPHzPKTDKCOM/iczQ0CgFzzr6juwcqajuUpLXhZI9LK8yIySxZ2frHI2vDSANGupi5LAuBft7HZT9SQBjLMi6Et8Vcad+qMUu2WFbm5PEn4KPJ2V-----END CERTIFICATE-----# Issuer: CN=Izenpe.com O=IZENPE S.A.# Subject: CN=Izenpe.com O=IZENPE S.A.# Label: "Izenpe.com"# Serial: 917563065490389241595536686991402621# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f-----BEGIN CERTIFICATE-----MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5jb20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVqscIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaOxdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6HLmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFXuaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQDyCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60QrLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyNBjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8Lhij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIBQFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2luZm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYgQTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBBBgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG
9yYmlkZWEgMTQgLSAwMTAxMCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUAA4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWblaQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwoJNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lwLDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCTVyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGkLhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJbUjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGlsQyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw==-----END CERTIFICATE-----# Issuer: CN=Chambers of Commerce Root - 2008 O=AC Camerfirma S.A.# Subject: CN=Chambers of Commerce Root - 2008 O=AC Camerfirma S.A.# Label: "Chambers of Commerce Root - 2008"# Serial: 11806822484801597146# MD5 Fingerprint: 5e:80:9e:84:5a:0e:65:0b:17:02:f3:55:18:2a:3e:d7# SHA1 Fingerprint: 78:6a:74:ac:76:ab:14:7f:9c:6a:30:50:ba:9e:a8:7e:fe:9a:ce:3c# SHA256 Fingerprint: 06:3e:4a:fa:c4:91:df:d3:32:f3:08:9b:85:42:e9:46:17:d8:93:d7:fe:94:4e:10:a7:93:7e:e2:9d:96:93:c0-----BEGIN 
CERTIFICATE-----MIIHTzCCBTegAwIBAgIJAKPaQn6ksa7aMA0GCSqGSIb3DQEBBQUAMIGuMQswCQYDVQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xKTAnBgNVBAMTIENoYW1iZXJzIG9mIENvbW1lcmNlIFJvb3QgLSAyMDA4MB4XDTA4MDgwMTEyMjk1MFoXDTM4MDczMTEyMjk1MFowga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpNYWRyaWQgKHNlZSBjdXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29tL2FkZHJlc3MpMRIwEAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVyZmlybWEgUy5BLjEpMCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAtIDIwMDgwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCvAMtwNyuAWko6bHiUfaN/Gh/2NdW928sNRHI+JrKQUrpjOyhYb6WzbZSm891kDFX29ufyIiKAXuFixrYp4YFs8r/lfTJqVKAyGVn+H4vXPWCGhSRv4xGzdz4gljUha7MI2XAuZPeEklPWDrCQiorjh40G072QDuKZoRuGDtqaCrsLYVAGUvGef3bsyw/QHg3PmTA9HMRFEFis1tPo1+XqxQEHd9ZR5gN/ikilTWh1uem8nk4ZcfUyS5xtYBkL+8ydddy/Js2Pk3g5eXNeJQ7KXOt3EgfLZEFHcpOrUMPrCXZkNNI5t3YRCQ12RcSprj1qr7V9ZS+UWBDsXHyvfuK2GNnQm05aSd+pZgvMPMZ4fKecHePOjlO+Bd5gD2vlGts/4+EhySnB8esHnFIbAURRPHsl18TlUlRdJQfKFiC4reRB7noI/plvg6aRArBsNlVq5331lubKgdaX8ZSD6e2wsWsSaR6s+12pxZjptFtYer49okQ6Y1nUCyXeG0+95QGezdIp1Z8XGQpvvwyQ0wlf2eOKNcx5Wk0ZN5K3xMGtr/R5JJqyAQuxr1yW84Ay+1w9mPGgP0revq+ULtlVmhduYJ1jbLhjya6BXBg14JC7vjxPNyK5fuvPnnchpj04gftI2jE9K+OJ9dC1vX7gUMQSibMjmhAxhduub+84Mxh2EQIDAQABo4IBbDCCAWgwEgYDVR0TAQH/BAgwBgEB/wIBDDAdBgNVHQ4EFgQU+SSsD7K1+HnA+mCIG8TZTQKeFxkwgeMGA1UdIwSB2zCB2IAU+SSsD7K1+HnA+mCIG8TZTQKeFxmhgbSkgbEwga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpNYWRyaWQgKHNlZSBjdXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29tL2FkZHJlc3MpMRIwEAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVyZmlybWEgUy5BLjEpMCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAtIDIwMDiCCQCj2kJ+pLGu2jAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRVHSAAMCowKAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20wDQYJKoZIhvcNAQEFBQADggIBAJASryI1wqM58C7e6bXpeHxIvj99RZJe6dqxGfwWPJ+0W2aeaufDuV2I6A+tzyMP3iU6XsxPpcG1Lawk0lgH3qLPaYRgM+gQDROpI9CF5Y57pp49chNyM/WqfcZjHwj0/gF/JM8rLFQJ3uIrbZLGOU8W6jx+ekbURWpGqOt1
glanq6B8aBMz9p0w8G8nOSQjKpD9kCk18pPfNKXG9/jvjA9iSnyu0/VU+I22mlaHFoI6M6taIgj3grrqLuBHmrS1RaMFO9ncLkVAO+rcf+g769HsJtg1pDDFOqxXnrN2pSB7+R5KBWIBpih1YJeSDW4+TTdDDZIVnBgizVGZoCkaPF+KMjNbMMeJL0eYD6MDxvbxrN8y8NmBGuScvfaAFPDRLLmF9dijscilIeUcE5fuDr3fKanvNFNb0+RqE4QGtjICxFKuItLcsiFCGtpA8CnJ7AoMXOLQusxI0zcKzBIKinmwPQN/aUv0NCB9szTqjktk9T79syNnFQ0EuPAtwQlRPLJsFfClI9eDdOTlLsn+mCdCxqvGnrDQWzilm1DefhiYtUU79nm06PcaewaD+9CL2rvHvRirCG88gGtAPxkZumWK5r7VXNM21+9AUiRgOGcEMeyP84LG3rlV8zsxkVrctQgVrXYlCg17LofiDKYGvCYQbTed7N14jHyAxfDZd0jQ-----END CERTIFICATE-----# Issuer: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A.# Subject: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A.# Label: "Global Chambersign Root - 2008"# Serial: 14541511773111788494# MD5 Fingerprint: 9e:80:ff:78:01:0c:2e:c1:36:bd:fe:96:90:6e:08:f3# SHA1 Fingerprint: 4a:bd:ee:ec:95:0d:35:9c:89:ae:c7:52:a1:2c:5b:29:f6:d6:aa:0c# SHA256 Fingerprint: 13:63:35:43:93:34:a7:69:80:16:a0:d3:24:de:72:28:4e:07:9d:7b:52:20:bb:8f:bd:74:78:16:ee:be:ba:ca-----BEGIN 
CERTIFICATE-----MIIHSTCCBTGgAwIBAgIJAMnN0+nVfSPOMA0GCSqGSIb3DQEBBQUAMIGsMQswCQYDVQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAlBgNVBAMTHkdsb2JhbCBDaGFtYmVyc2lnbiBSb290IC0gMjAwODAeFw0wODA4MDExMjMxNDBaFw0zODA3MzExMjMxNDBaMIGsMQswCQYDVQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAlBgNVBAMTHkdsb2JhbCBDaGFtYmVyc2lnbiBSb290IC0gMjAwODCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMDfVtPkOpt2RbQT2//BthmLN0EYlVJH6xedKYiONWwGMi5HYvNJBL99RDaxccy9Wglz1dmFRP+RVyXfXjaOcNFccUMd2drvXNL7G706tcuto8xEpw2uIRU/uXpbknXYpBI4iRmKt4DS4jJvVpyR1ogQC7N0ZJJ0YPP2zxhPYLIj0Mc7zmFLmY/CDNBAspjcDahOo7kKrmCgrUVSY7pmvWjg+b4aqIG7HkF4ddPB/gBVsIdU6CeQNR1MM62X/JcumIS/LMmjv9GYERTtY/jKmIhYF5ntRQOXfjyGHoiMvvKRhI9lNNgATH23MRdaKXoKGCQwoze1eqkBfSbW+Q6OWfH9GzO1KTsXO0G2Id3UwD2ln58fQ1DJu7xsepeY7s2MH/ucUa6LcL0nn3HAa6x9kGbo1106DbDVwo3VyJ2dwW3Q0L9R5OP4wzg2rtandeavhENdk5IMagfeOx2YItaswTXbo6Al/3K1dh3ebeksZixShNBFks4c5eUzHdwHU1SjqoI7mjcv3N2gZOnm3b2u/GSFHTynyQbehP9r6GsaPMWis0L7iwk+XwhSx2LE1AVxv8Rk5Pihg+g+EpuoHtQ2TS9x9o0o9oOpE9JhwZG7SMA0j0GMS0zbaRL/UJScIINZc+18ofLx/d33SdNDWKBWY8o9PeU1VlnpDsogzCtLkykPAgMBAAGjggFqMIIBZjASBgNVHRMBAf8ECDAGAQH/AgEMMB0GA1UdDgQWBBS5CcqcHtvTbDprru1U8VuTBjUuXjCB4QYDVR0jBIHZMIHWgBS5CcqcHtvTbDprru1U8VuTBjUuXqGBsqSBrzCBrDELMAkGA1UEBhMCRVUxQzBBBgNVBAcTOk1hZHJpZCAoc2VlIGN1cnJlbnQgYWRkcmVzcyBhdCB3d3cuY2FtZXJmaXJtYS5jb20vYWRkcmVzcykxEjAQBgNVBAUTCUE4Mjc0MzI4NzEbMBkGA1UEChMSQUMgQ2FtZXJmaXJtYSBTLkEuMScwJQYDVQQDEx5HbG9iYWwgQ2hhbWJlcnNpZ24gUm9vdCAtIDIwMDiCCQDJzdPp1X0jzjAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRVHSAAMCowKAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20wDQYJKoZIhvcNAQEFBQADggIBAICIf3DekijZBZRG/5BXqfEv3xoNa/p8DhxJJHkn2EaqbylZUohwEurdPfWbU1Rv4WCiqAm57OtZfMY18dwY6fFn5a+6ReAJ3spED8IXDneRRXozX1+WLGiLwUePmJs9wOzL9dWCkoQ10b42OFZyMVtHLaoXpGNR6woBrX/sdZ7LoR/x
fxKxueRkf2fWIyr0uDldmOghp+G9PUIadJpwr2hsUF1Jz//7Dl3mLEfXgTpZALVza2Mg9jFFCDkO9HB+QHBaP9BrQql0PSgvAm11cpUJjUhjxsYjV5KTXjXBjfkK9yydYhz2rXzdpjEetrHHfoUm+qRqtdpjMNHvkzeyZi99Bffnt0uYlDXA2TopwZ2yUDMdSqlapskD7+3056huirRXhOukP9DuqqqHW2Pok+JrqNS4cnhrG+055F3Lm6qH1U9OAP7Zap88MQ8oAgF9mOinsKJknnn4SPIVqczmyETrP3iZ8ntxPjzxmKfFGBI/5rsoM0LpRQp8bfKGeS/Fghl9CYl8slR2iK7ewfPM4W7bMdaTrpmg7yVqc5iJWzouE4gev8CSlDQb4ye3ix5vQv/n6TebUB0tovkC7stYWDpxvGjjqsGvHCgfotwjZT+B6q6Z09gwzxMNTxXJhLynSC34MCN32EZLeW32jO06f2ARePTpm67VVMB0gNELQp/B-----END CERTIFICATE-----# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc.# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc.# Label: "Go Daddy Root Certificate Authority - G2"# Serial: 0# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da-----BEGIN 
CERTIFICATE-----MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMxEDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoTEUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UEAxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKDE6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7RnwyDfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVhGkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGRtDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmXWWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTrgIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPOLPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI4uJEvlz36hz1-----END CERTIFICATE-----# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc.# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc.# Label: "Starfield Root Certificate Authority - G2"# Serial: 0# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5-----BEGIN 
CERTIFICATE-----MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMxEDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoTHFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVsZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMgnLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/NHwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dNdloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0GCSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjUsHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu34jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/KpL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0-----END CERTIFICATE-----# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc.# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc.# Label: "Starfield Services Root Certificate Authority - G2"# Serial: 0# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5-----BEGIN 
CERTIFICATE-----MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMxEDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoTHFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVsZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2VydmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20pOsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm28xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1KTs9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufehRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+qAdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMIbw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXBve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1zqwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkdiEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCNsSi6-----END CERTIFICATE-----# Issuer: CN=AffirmTrust Commercial O=AffirmTrust# Subject: CN=AffirmTrust Commercial O=AffirmTrust# Label: "AffirmTrust Commercial"# Serial: 8608355977964138876# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7-----BEGIN 
CERTIFICATE-----MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVzdCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDELMAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKPHx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yrba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPALMeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqrVwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYGXUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNjvbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivtZ8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9gN53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YCnlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8=-----END CERTIFICATE-----# Issuer: CN=AffirmTrust Networking O=AffirmTrust# Subject: CN=AffirmTrust Networking O=AffirmTrust# Label: "AffirmTrust Networking"# Serial: 8957382827206547757# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b-----BEGIN 
CERTIFICATE-----MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVzdCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDELMAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3yYJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbuakCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRLQESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndGyH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6iQLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfOtDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzuQY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZLgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4uolu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s=-----END CERTIFICATE-----# Issuer: CN=AffirmTrust Premium O=AffirmTrust# Subject: CN=AffirmTrust Premium O=AffirmTrust# Label: "AffirmTrust Premium"# Serial: 7893706540734352110# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a-----BEGIN 
CERTIFICATE-----MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVzdCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLfqV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQJG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ+jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrSs8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d770O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauGV+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+SqHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4IaC1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TXOwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYEFJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMgNt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQMKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQu4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMFu+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpHYoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaORtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6eKeC2uAloGRwYQw==-----END CERTIFICATE-----# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust# Label: "AffirmTrust Premium ECC"# Serial: 
8401224907861490260# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23-----BEGIN CERTIFICATE-----MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQcmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJBgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJtVHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0GA1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/VsaobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6Iflc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ==-----END CERTIFICATE-----# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. 
OU=Certum Certification Authority# Label: "Certum Trusted Network CA"# Serial: 279744# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e-----BEGIN CERTIFICATE-----MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBMMSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBUcnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIwIAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rHUV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LMTXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVUBBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brMkUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8xAcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15ysHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfLI9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qYVoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw=-----END CERTIFICATE-----# Issuer: CN=Certinomis - Autorité Racine O=Certinomis OU=0002 433998903# Subject: CN=Certinomis - Autorité Racine O=Certinomis OU=0002 433998903# Label: "Certinomis - Autorité Racine"# Serial: 1# MD5 Fingerprint: 7f:30:78:8c:03:e3:ca:c9:0a:e2:c9:ea:1e:aa:55:1a# SHA1 Fingerprint: 
2e:14:da:ec:28:f0:fa:1e:8e:38:9a:4e:ab:eb:26:c0:0a:d3:83:c3# SHA256 Fingerprint: fc:bf:e2:88:62:06:f7:2b:27:59:3c:8b:07:02:97:e1:2d:76:9e:d1:0e:d7:93:07:05:a8:09:8e:ff:c1:4d:17-----BEGIN CERTIFICATE-----MIIFnDCCA4SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJGUjETMBEGA1UEChMKQ2VydGlub21pczEXMBUGA1UECxMOMDAwMiA0MzM5OTg5MDMxJjAkBgNVBAMMHUNlcnRpbm9taXMgLSBBdXRvcml0w6kgUmFjaW5lMB4XDTA4MDkxNzA4Mjg1OVoXDTI4MDkxNzA4Mjg1OVowYzELMAkGA1UEBhMCRlIxEzARBgNVBAoTCkNlcnRpbm9taXMxFzAVBgNVBAsTDjAwMDIgNDMzOTk4OTAzMSYwJAYDVQQDDB1DZXJ0aW5vbWlzIC0gQXV0b3JpdMOpIFJhY2luZTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAJ2Fn4bT46/HsmtuM+Cet0I0VZ35gb5j2CN2DpdUzZlMGvE5x4jYF1AMnmHawE5V3udauHpOd4cN5bjr+p5eex7Ezyh0x5P1FMYiKAT5kcOrJ3NqDi5N8y4oH3DfVS9O7cdxbwlyLu3VMpfQ8Vh30WC8Tl7bmoT2R2FFK/ZQpn9qcSdIhDWerP5pqZ56XjUl+rSnSTV3lqc2W+HN3yNw2F1MpQiD8aYkOBOo7C+ooWfHpi2GR+6K/OybDnT0K0kCe5B1jPyZOQE51kqJ5Z52qz6WKDgmi92NjMD2AR5vpTESOH2VwnHu7XSu5DaiQ3XV8QCb4uTXzEIDS3h65X27uK4uIJPT5GHfceF2Z5c/tt9qc1pkIuVC28+BA5PY9OMQ4HL2AHCs8MF6DwV/zzRpRbWT5BnbUhYjBYkOjUjkJW+zeL9i9Qf6lSTClrLooyPCXQP8w9PlfMl1I9f09bze5N/NgL+RiH2nE7Q5uiy6vdFrzPOlKO1Enn1So2+WLhl+HPNbxxaOu2B9d2ZHVIIAEWBsMsGoOBvrbpgT1u449fCfDu/+MYHB0iSVL1N6aaLwD4ZFjliCK0wi1F6g530mJ0jfJUaNSih8hp75mxpZuWW/Bd22Ql095gBIgl4g9xGC3srYn+Y3RyYe63j3YcNBZFgCQfna4NH4+ej9Uji29YnfAgMBAAGjWzBZMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBQNjLZh2kS40RR9w759XkjwzspqsDAXBgNVHSAEEDAOMAwGCiqBegFWAgIAAQEwDQYJKoZIhvcNAQEFBQADggIBACQ+YAZ+He86PtvqrxyaLAEL9MW12Ukx9F1BjYkMTv9sov3/4gbIOZ/xWqndIlgVqIrTseYyCYIDbNc/CMf4uboAbbnW/FIyXaR/pDGUu7ZMOH8oMDX/nyNTt7buFHAAQCvaR6s0fl6nVjBhK4tDrP22iCj1a7Y+YEq6QpA0Z43q619FVDsXrIvkxmUP7tCMXWY5zjKn2BCXwH40nJ+U8/aGH88bc62UeYdocMMzpXDn2NU4lG9jeeu/Cg4I58UvD0KgKxRA/yHgBcUn4YQRE7rWhh1BCxMjidPJC+iKunqjo3M3NYB9Ergzd0A4wPpeMNLytqOx1qKVl4GbUu1pTP+A5FPbVFsDbVRfsbjvJL1vnxHDx2TCDyhihWZeGnuyt++uNckZM6i4J9szVb9o4XVIRFb7zdNIu0eJOqxp9YDG5ERQL1TEqkPFMTFYvZbF6nVsmnWxTfj3l/+WFvKXTej28xH5On2KOG4Ey+HTRRWqpdEdnV1j6CTmNhTih60bWfVEm/vXd3wfAXBioSAaosUaKPQhA+4u2cGA6rnZgtZbdsLLO
7XSAPCjDuGtbkD326C00EauFddEwk01+dIL8hf2rGbVJLJP0RyZwG71fet0BLj5TXcJ17TPBzAJ8bgAVtkXFhYKK4bfjwEZGuW7gmP/vgt2Fl43N+bYdJeimUV5-----END CERTIFICATE-----# Issuer: CN=Root CA Generalitat Valenciana O=Generalitat Valenciana OU=PKIGVA# Subject: CN=Root CA Generalitat Valenciana O=Generalitat Valenciana OU=PKIGVA# Label: "Root CA Generalitat Valenciana"# Serial: 994436456# MD5 Fingerprint: 2c:8c:17:5e:b1:54:ab:93:17:b5:36:5a:db:d1:c6:f2# SHA1 Fingerprint: a0:73:e5:c5:bd:43:61:0d:86:4c:21:13:0a:85:58:57:cc:9c:ea:46# SHA256 Fingerprint: 8c:4e:df:d0:43:48:f3:22:96:9e:7e:29:a4:cd:4d:ca:00:46:55:06:1c:16:e1:b0:76:42:2e:f3:42:ad:63:0e-----BEGIN CERTIFICATE-----MIIGizCCBXOgAwIBAgIEO0XlaDANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJFUzEfMB0GA1UEChMWR2VuZXJhbGl0YXQgVmFsZW5jaWFuYTEPMA0GA1UECxMGUEtJR1ZBMScwJQYDVQQDEx5Sb290IENBIEdlbmVyYWxpdGF0IFZhbGVuY2lhbmEwHhcNMDEwNzA2MTYyMjQ3WhcNMjEwNzAxMTUyMjQ3WjBoMQswCQYDVQQGEwJFUzEfMB0GA1UEChMWR2VuZXJhbGl0YXQgVmFsZW5jaWFuYTEPMA0GA1UECxMGUEtJR1ZBMScwJQYDVQQDEx5Sb290IENBIEdlbmVyYWxpdGF0IFZhbGVuY2lhbmEwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDGKqtXETcvIorKA3Qdyu0togu8M1JAJke+WmmmO3I2F0zo37i7L3bhQEZ0ZQKQUgi0/6iMweDHiVYQOTPvaLRfX9ptI6GJXiKjSgbwJ/BXufjpTjJ3Cj9BZPPrZe52/lSqfR0grvPXdMIKX/UIKFIIzFVd0g/bmoGlu6GzwZTNVOAydTGRGmKy3nXiz0+J2ZGQD0EbtFpKd71ng+CT516nDOeB0/RSrFOyA8dEJvt55cs0YFAQexvba9dHq198aMpunUEDEO5rmXteJajCq+TA81yc477OMUxkHl6AovWDfgzWyoxVjr7gvkkHD6MkQXpYHYTqWBLI4bft75PelAgxAgMBAAGjggM7MIIDNzAyBggrBgEFBQcBAQQmMCQwIgYIKwYBBQUHMAGGFmh0dHA6Ly9vY3NwLnBraS5ndmEuZXMwEgYDVR0TAQH/BAgwBgEB/wIBAjCCAjQGA1UdIASCAiswggInMIICIwYKKwYBBAG/VQIBADCCAhMwggHoBggrBgEFBQcCAjCCAdoeggHWAEEAdQB0AG8AcgBpAGQAYQBkACAAZABlACAAQwBlAHIAdABpAGYAaQBjAGEAYwBpAPMAbgAgAFIAYQDtAHoAIABkAGUAIABsAGEAIABHAGUAbgBlAHIAYQBsAGkAdABhAHQAIABWAGEAbABlAG4AYwBpAGEAbgBhAC4ADQAKAEwAYQAgAEQAZQBjAGwAYQByAGEAYwBpAPMAbgAgAGQAZQAgAFAAcgDhAGMAdABpAGMAYQBzACAAZABlACAAQwBlAHIAdABpAGYAaQBjAGEAYwBpAPMAbgAgAHEAdQBlACAAcgBpAGcAZQAgAGUAbAAgAGYAdQBuAGMAaQBvAG4AYQBtAGkAZQBuAHQAbwAgAGQAZQAgAGwAYQAgAHAAcgBlAHMAZQBuAHQAZQ
AgAEEAdQB0AG8AcgBpAGQAYQBkACAAZABlACAAQwBlAHIAdABpAGYAaQBjAGEAYwBpAPMAbgAgAHMAZQAgAGUAbgBjAHUAZQBuAHQAcgBhACAAZQBuACAAbABhACAAZABpAHIAZQBjAGMAaQDzAG4AIAB3AGUAYgAgAGgAdAB0AHAAOgAvAC8AdwB3AHcALgBwAGsAaQAuAGcAdgBhAC4AZQBzAC8AYwBwAHMwJQYIKwYBBQUHAgEWGWh0dHA6Ly93d3cucGtpLmd2YS5lcy9jcHMwHQYDVR0OBBYEFHs100DSHHgZZu90ECjcPk+yeAT8MIGVBgNVHSMEgY0wgYqAFHs100DSHHgZZu90ECjcPk+yeAT8oWykajBoMQswCQYDVQQGEwJFUzEfMB0GA1UEChMWR2VuZXJhbGl0YXQgVmFsZW5jaWFuYTEPMA0GA1UECxMGUEtJR1ZBMScwJQYDVQQDEx5Sb290IENBIEdlbmVyYWxpdGF0IFZhbGVuY2lhbmGCBDtF5WgwDQYJKoZIhvcNAQEFBQADggEBACRhTvW1yEICKrNcda3FbcrnlD+laJWIwVTAEGmiEi8YPyVQqHxK6sYJ2fR1xkDar1CdPaUWu20xxsdzCkj+IHLtb8zog2EWRpABlUt9jppSCS/2bxzkoXHPjCpaF3ODR00PNvsETUlR4hTJZGH71BTg9J63NI8KJr2XXPR5OkowGcytT6CYirQxlyric21+eLj4iIlPsSKRZEv1UN4D2+XFducTZnV+ZfsBn5OHiJ35Rld8TWCvmHMTI6QgkYH60GFmuH3Rr9ZvHmw96RH9qfmCIoaZM3Fa6hlXPZHNqcCjbgcTpsnt+GijnsNacgmHKNHEc8RzGF9QdRYxn7fofMM=-----END CERTIFICATE-----# Issuer: CN=A-Trust-nQual-03 O=A-Trust Ges. f. Sicherheitssysteme im elektr. Datenverkehr GmbH OU=A-Trust-nQual-03# Subject: CN=A-Trust-nQual-03 O=A-Trust Ges. f. Sicherheitssysteme im elektr. 
Datenverkehr GmbH OU=A-Trust-nQual-03# Label: "A-Trust-nQual-03"# Serial: 93214# MD5 Fingerprint: 49:63:ae:27:f4:d5:95:3d:d8:db:24:86:b8:9c:07:53# SHA1 Fingerprint: d3:c0:63:f2:19:ed:07:3e:34:ad:5d:75:0b:32:76:29:ff:d5:9a:f2# SHA256 Fingerprint: 79:3c:bf:45:59:b9:fd:e3:8a:b2:2d:f1:68:69:f6:98:81:ae:14:c4:b0:13:9a:c7:88:a7:8a:1a:fc:ca:02:fb-----BEGIN CERTIFICATE-----MIIDzzCCAregAwIBAgIDAWweMA0GCSqGSIb3DQEBBQUAMIGNMQswCQYDVQQGEwJBVDFIMEYGA1UECgw/QS1UcnVzdCBHZXMuIGYuIFNpY2hlcmhlaXRzc3lzdGVtZSBpbSBlbGVrdHIuIERhdGVudmVya2VociBHbWJIMRkwFwYDVQQLDBBBLVRydXN0LW5RdWFsLTAzMRkwFwYDVQQDDBBBLVRydXN0LW5RdWFsLTAzMB4XDTA1MDgxNzIyMDAwMFoXDTE1MDgxNzIyMDAwMFowgY0xCzAJBgNVBAYTAkFUMUgwRgYDVQQKDD9BLVRydXN0IEdlcy4gZi4gU2ljaGVyaGVpdHNzeXN0ZW1lIGltIGVsZWt0ci4gRGF0ZW52ZXJrZWhyIEdtYkgxGTAXBgNVBAsMEEEtVHJ1c3QtblF1YWwtMDMxGTAXBgNVBAMMEEEtVHJ1c3QtblF1YWwtMDMwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtPWFuA/OQO8BBC4SAzewqo51ru27CQoT3URThoKgtUaNR8t4j8DRE/5TrzAUjlUC5B3ilJfYKvUWG6Nm9wASOhURh73+nyfrBJcyFLGM/BWBzSQXgYHiVEEvc+RFZznF/QJuKqiTfC0Li21a8StKlDJu3Qz7dg9MmEALP6iPESU7l0+m0iKsMrmKS1GWH2WrX9IWf5DMiJaXlyDO6w8dB3F/GaswADm0yqLaHNgBid5seHzTLkDx4iHQF63n1k3Flyp3HaxgtPVxO59X4PzF9j4fsCiIvI+n+u33J4PTs63zEsMMtYrWacdaxaujs2e3Vcuy+VwHOBVWf3tFgiBCzAgMBAAGjNjA0MA8GA1UdEwEB/wQFMAMBAf8wEQYDVR0OBAoECERqlWdVeRFPMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOCAQEAVdRU0VlIXLOThaq/Yy/kgM40ozRiPvbY7meIMQQDbwvUB/tOdQ/TLtPAF8fGKOwGDREkDg6lXb+MshOWcdzUzg4NCmgybLlBMRmrsQd7TZjTXLDR8KdCoLXEjq/+8T/0709GAHbrAvv5ndJAlseIOrifEXnzgGWovR/TeIGgUUw3tKZdJXDRZslo+S4RFGjxVJgIrCaSD96JntT6s3kr0qN51OyLrIdTaEJMUVF0HhsnLuP1Hyl0Te2v9+GSmYHovjrHF1D2t8b8m7CKa9aIA5GPBnc6hQLdmNVDeD/GMBWsm2vLV7eJUYs66MmEDNuxUCAKGkq6ahq97BvIxYSazQ==-----END CERTIFICATE-----# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA# Label: "TWCA Root Certification Authority"# Serial: 1# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79# SHA1 Fingerprint: 
cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44-----BEGIN CERTIFICATE-----MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMzWhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFEAcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HHK3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeXRfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/zrX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkqhkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeCMErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdlsXebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62Dlhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvnaspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZYiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw==-----END CERTIFICATE-----# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2# Subject: O=SECOM Trust Systems CO.,LTD. 
OU=Security Communication RootCA2# Label: "Security Communication RootCA2"# Serial: 0# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6-----BEGIN CERTIFICATE-----MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDElMCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMeU2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoXDTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRydXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmljYXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAVOVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGrzbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVMVAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQhNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWOojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSwawNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5csOPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpFcoJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXcokgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03-----END CERTIFICATE-----# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority# Subject: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. 
Authority# Label: "Hellenic Academic and Research Institutions RootCA 2011"# Serial: 0# MD5 Fingerprint: 73:9f:4c:4b:73:5b:79:e9:fa:ba:1c:ef:6e:cb:d5:c9# SHA1 Fingerprint: fe:45:65:9b:79:03:5b:98:a1:61:b5:51:2e:ac:da:58:09:48:22:4d# SHA256 Fingerprint: bc:10:4f:15:a4:8b:e7:09:dc:a5:42:a7:e1:d4:b9:df:6f:05:45:27:e8:02:ea:a9:2d:59:54:44:25:8a:fe:71-----BEGIN CERTIFICATE-----MIIEMTCCAxmgAwIBAgIBADANBgkqhkiG9w0BAQUFADCBlTELMAkGA1UEBhMCR1IxRDBCBgNVBAoTO0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5MUAwPgYDVQQDEzdIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIFJvb3RDQSAyMDExMB4XDTExMTIwNjEzNDk1MloXDTMxMTIwMTEzNDk1MlowgZUxCzAJBgNVBAYTAkdSMUQwQgYDVQQKEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIENlcnQuIEF1dGhvcml0eTFAMD4GA1UEAxM3SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBSb290Q0EgMjAxMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKlTAOMupvaO+mDYLZU++CwqVE7NuYRhlFhPjz2L5EPzdYmNUeTDN9KKiE15HrcS3UN4SoqS5tdI1Q+kOilENbgH9mgdVc04UfCMJDGFr4PJfel3r+0ae50X+bOdOFAPplp5kYCvN66m0zH7tSYJnTxa71HFK9+WXesyHgLacEnsbgzImjeN9/E2YEsmLIKe0HjzDQ9jpFEw4fkrJxIH2Oq9GGKYsFk3fb7u8yBRQlqD75O6aRXxYp2fmTmCobd0LovUxQt7L/DICto9eQqakxylKHJzkUOap9FNhYS5qXSPFEDH3N6sQWRstBmbAmNtJGSPRLIl6s5ddAxjMlyNh+UCAwEAAaOBiTCBhjAPBgNVHRMBAf8EBTADAQH/MAsGA1UdDwQEAwIBBjAdBgNVHQ4EFgQUppFC/RNhSiOeCKQp5dgTBCPuQSUwRwYDVR0eBEAwPqA8MAWCAy5ncjAFggMuZXUwBoIELmVkdTAGggQub3JnMAWBAy5ncjAFgQMuZXUwBoEELmVkdTAGgQQub3JnMA0GCSqGSIb3DQEBBQUAA4IBAQAf73lB4XtuP7KMhjdCSk4cNx6NZrokgclPEg8hwAOXhiVtXdMiKahsog2p6z0GW5k6x8zDmjR/qw7IThzh+uTczQ2+vyT+bOdrwg3IBp5OjWEopmr95fZi6hg8TqBTnbI6nOulnJEWtk2C4AwFSKls9cz4y51JtPACpf1wA+2KIaWuE4ZJwzNzvoc7dIsXRSZMFpGD/md9zU1jZ/rzAxKWeAaNsWftjj++n08C9bMJL/NMh98qy5V8AcysNnq/onN694/BtZqhFLKPM58N7yLcZnuEvUUXBj08yrl3NI/K6s8/MT7jiOOASSXIl7WdmplNsDz4SgCbZN2fOUvRJ9e4-----END CERTIFICATE-----# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967# Label: "Actalis 
Authentication Root CA"# Serial: 6271844772424770508# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66-----BEGIN CERTIFICATE-----MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UEBhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENBMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNvUTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1YprbrxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2Fbe8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxeKF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4Fv6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbnfpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7YnzezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbtifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQALe3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDzWochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4VSM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9jpwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyXX04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842
/6+OkfcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7RK4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btUZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJULysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaTLnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg==-----END CERTIFICATE-----# Issuer: O=Trustis Limited OU=Trustis FPS Root CA# Subject: O=Trustis Limited OU=Trustis FPS Root CA# Label: "Trustis FPS Root CA"# Serial: 36053640375399034304724988975563710553# MD5 Fingerprint: 30:c9:e7:1e:6b:e6:14:eb:65:b2:16:69:20:31:67:4d# SHA1 Fingerprint: 3b:c0:38:0b:33:c3:f6:a6:0c:86:15:22:93:d9:df:f5:4b:81:c0:04# SHA256 Fingerprint: c1:b4:82:99:ab:a5:20:8f:e9:63:0a:ce:55:ca:68:a0:3e:da:5a:51:9c:88:02:a0:d3:a6:73:be:8f:8e:55:7d-----BEGIN CERTIFICATE-----MIIDZzCCAk+gAwIBAgIQGx+ttiD5JNM2a/fH8YygWTANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJHQjEYMBYGA1UEChMPVHJ1c3RpcyBMaW1pdGVkMRwwGgYDVQQLExNUcnVzdGlzIEZQUyBSb290IENBMB4XDTAzMTIyMzEyMTQwNloXDTI0MDEyMTExMzY1NFowRTELMAkGA1UEBhMCR0IxGDAWBgNVBAoTD1RydXN0aXMgTGltaXRlZDEcMBoGA1UECxMTVHJ1c3RpcyBGUFMgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMVQe547NdDfxIzNjpvto8A2mfRC6qc+gIMPpqdZh8mQRUN+AOqGeSoDvT03mYlmt+WKVoaTnGhLaASMk5MCPjDSNzoiYYkchU59j9WvezX2fihHiTHcDnlkH5nSW7r+f2C/revnPDgpai/lkQtV/+xvWNUtyd5MZnGPDNcE2gfmHhjjvSkCqPoc4Vu5g6hBSLwacY3nYuUtsuvffM/bq1rKMfFMIvMFE/eC+XN5DL7XSxzA0RU8k0Fk0ea+IxciAIleH2ulrG6nS4zto3Lmr2NNL4XSFDWaLk6M6jKYKIahkQlBOrTh4/L68MkKokHdqeMDx4gVOxzUGpTXn2RZEm0CAwEAAaNTMFEwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBS6+nEleYtXQSUhhgtx67JkDoshZzAdBgNVHQ4EFgQUuvpxJXmLV0ElIYYLceuyZA6LIWcwDQYJKoZIhvcNAQEFBQADggEBAH5Y//01GX2cGE+esCu8jowU/yyg2kdbw++BLa8F6nRIW/M+TgfHbcWzk88iNVy2P3UnXwmWzaD+vkAMXBJV+JOCyinpXj9WV4s4NvdFGkwozZ5BuO1WTISkQMi4sKUraXAEasP41BIy+Q7DsdwyhEQsb8tGD+pmQQ9P8Vilpg0ND2HepZ5dfWWhPBfnqFVO76DH7cZEf1T1o+CP8HxVIo8ptoGj4W1OLBuAZ+ytIJ8MYmHVl/9D7S3B2l0pKoU/rGXuhg8FjZBf3+6f9L/uHfuY5H+QK4R4EA5sSVPvFVtlRkpdr7r7OnIdzfYliB6XzCGcKQENZetX2fNXlrtIzYE=-----END CERTIFICATE-----# 
Issuer: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing# Subject: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing# Label: "StartCom Certification Authority"# Serial: 45# MD5 Fingerprint: c9:3b:0d:84:41:fc:a4:76:79:23:08:57:de:10:19:16# SHA1 Fingerprint: a3:f1:33:3f:e2:42:bf:cf:c5:d1:4e:8f:39:42:98:40:68:10:d1:a0# SHA256 Fingerprint: e1:78:90:ee:09:a3:fb:f4:f4:8b:9c:41:4a:17:d6:37:b7:a5:06:47:e9:bc:75:23:22:72:7f:cc:17:42:a9:11-----BEGIN CERTIFICATE-----MIIHhzCCBW+gAwIBAgIBLTANBgkqhkiG9w0BAQsFADB9MQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDYwOTE3MTk0NjM3WhcNMzYwOTE3MTk0NjM2WjB9MQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDBiNsJvGxGfHiflXu1M5DycmLWwTYgIiRezul38kMKogZkpMyONvg45iPwbm2xPN1yo4UcodM9tDMr0y+v/uqwQVlntsQGfQqedIXWeUyAN3rfOQVSWff0G0ZDpNKFhdLDcfN1YjS6LIp/Ho/u7TTQEceWzVI9ujPW3U3eCztKS5/CJi/6tRYccjV3yjxd5srhJosaNnZcAdt0FCX+7bWgiA/deMotHweXMAEtcnn6RtYTKqi5pquDSR3l8u/d5AGOGAqPY1MWhWKpDhk6zLVmpsJrdAfkK+F2PrRt2PZE4XNiHzvEvqBTViVsUQn3qqvKv3b9bZvzndu/PWa8DFaqr5hIlTpL36dYUNk4dalb6kMMAv+Z6+hsTXBbKWWc3apdzK8BMewM69KN6Oqce+Zu9ydmDBpI125C4z/eIT574Q1w+2OqqGwaVLRcJXrJosmLFqa7LH4XXgVNWG4SHQHuEhANxjJ/GP/89PrNbpHoNkm+Gkhpi8KWTRoSsmkXwQqQ1vp5Iki/untp+HDH+no32NgN0nZPV/+Qt+OR0t3vwmC3Zzrd/qqc8NSLf3Iizsafl7b4r4qgEKjZ+xjGtrVcUjyJthkqcwEKDwOzEmDyei+B26Nu/yYwl/WL3YlXtq09s68rxbd2AvCl1iuahhQqcvbjM4xdCUsT37uMdBNSSwIDAQABo4ICEDCCAgwwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFE4L7xqkQFulF2mHMMo0aEPQQa7yMB8GA1UdIwQYMBaAFE4L7xqkQFulF2mHMMo0aEPQQa7yMIIBWgYDVR0gBIIBUTCCAU0wggFJBgsrBgEEAYG1NwEBATCCATgwLgYIKwYBBQUHAgEWImh0dHA6Ly93d3cuc3RhcnRzc2wuY29tL3BvbGljeS5wZGYwNAYIKwYBBQUHAgEWKGh0dHA6Ly93d3cuc3RhcnRzc2wuY29
tL2ludGVybWVkaWF0ZS5wZGYwgc8GCCsGAQUFBwICMIHCMCcWIFN0YXJ0IENvbW1lcmNpYWwgKFN0YXJ0Q29tKSBMdGQuMAMCAQEagZZMaW1pdGVkIExpYWJpbGl0eSwgcmVhZCB0aGUgc2VjdGlvbiAqTGVnYWwgTGltaXRhdGlvbnMqIG9mIHRoZSBTdGFydENvbSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSBQb2xpY3kgYXZhaWxhYmxlIGF0IGh0dHA6Ly93d3cuc3RhcnRzc2wuY29tL3BvbGljeS5wZGYwEQYJYIZIAYb4QgEBBAQDAgAHMDgGCWCGSAGG+EIBDQQrFilTdGFydENvbSBGcmVlIFNTTCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTANBgkqhkiG9w0BAQsFAAOCAgEAjo/n3JR5fPGFf59Jb2vKXfuM/gTFwWLRfUKKvFO3lANmMD+x5wqnUCBVJX92ehQN6wQOQOY+2IirByeDqXWmN3PH/UvSTa0XQMhGvjt/UfzDtgUx3M2FIk5xt/JxXrAaxrqTi3iSSoX4eA+D/i+tLPfkpLst0OcNOrg+zvZ49q5HJMqjNTbOx8aHmNrs++myziebiMMEofYLWWivydsQD032ZGNcpRJvkrKTlMeIFw6Ttn5ii5B/q06f/ON1FE8qMt9bDeD1e5MNq6HPh+GlBEXoPBKlCcWw0bdT82AUuoVpaiF8H3VhFyAXe2w7QSlc4axa0c2Mm+tgHRns9+Ww2vl5GKVFP0lDV9LdJNUso/2RjSe15esUBppMeyG7Oq0wBhjA2MFrLH9ZXF2RsXAiV+uKa0hK1Q8p7MZAwC+ITGgBF3f0JBlPvfrhsiAhS90a2Cl9qrjeVOwhVYBsHvUwyKMQ5bLmKhQxw4UtjJixhlpPiVktucf3HMiKf8CdBUrmQk9io20ppB+Fq9vlgcitKj1MXVuEJnHEhV5xJMqlG2zYYdMa4FTbzrqpMrUi9nNBCV24F10OD5mQ1kfabwo6YigUZ4LZ8dCAWZvLMdibD4x3TrVoivJs9iQOLWxwxXPR3hTQcY+203sC9uO41Alua551hDnmfyWl8kgAwKQB2j8=-----END CERTIFICATE-----# Issuer: CN=StartCom Certification Authority G2 O=StartCom Ltd.# Subject: CN=StartCom Certification Authority G2 O=StartCom Ltd.# Label: "StartCom Certification Authority G2"# Serial: 59# MD5 Fingerprint: 78:4b:fb:9e:64:82:0a:d3:b8:4c:62:f3:64:f2:90:64# SHA1 Fingerprint: 31:f1:fd:68:22:63:20:ee:c6:3b:3f:9d:ea:4a:3e:53:7c:7c:39:17# SHA256 Fingerprint: c7:ba:65:67:de:93:a7:98:ae:1f:aa:79:1e:71:2d:37:8f:ae:1f:93:c4:39:7f:ea:44:1b:b7:cb:e6:fd:59:95-----BEGIN 
CERTIFICATE-----MIIFYzCCA0ugAwIBAgIBOzANBgkqhkiG9w0BAQsFADBTMQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjEsMCoGA1UEAxMjU3RhcnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgRzIwHhcNMTAwMTAxMDEwMDAxWhcNMzkxMjMxMjM1OTAxWjBTMQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjEsMCoGA1UEAxMjU3RhcnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgRzIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2iTZbB7cgNr2Cu+EWIAOVeq8Oo1XJJZlKxdBWQYeQTSFgpBSHO839sj60ZwNq7eEPS8CRhXBF4EKe3ikj1AENoBB5uNsDvfOpL9HG4A/LnooUCri99lZi8cVytjIl2bLzvWXFDSxu1ZJvGIsAQRSCb0AgJnooD/Uefyf3lLE3PbfHkffiAez9lInhzG7TNtYKGXmu1zSCZf98Qru23QumNK9LYP5/Q0kGi4xDuFby2X8hQxfqp0iVAXV16iulQ5XqFYSdCI0mblWbq9zSOdIxHWDirMxWRST1HFSr7obdljKF+ExP6JV2tgXdNiNnvP8V4so75qbsO+wmETRIjfaAKxojAuuKHDp2KntWFhxyKrOq42ClAJ8Em+JvHhRYW6Vsi1g8w7pOOlz34ZYrPu8HvKTlXcxNnw3h3Kq74W4a7I/htkxNeXJdFzULHdfBR9qWJODQcqhaX2YtENwvKhOuJv4KHBnM0D4LnMgJLvlblnpHnOl68wVQdJVznjAJ85eCXuaPOQgeWeU1FEIT/wCc976qUM/iUUjXuG+v+E5+M5iSFGI6dWPPe/regjupuznixL0sAA7IF6wT700ljtizkC+p2il9Ha90OrInwMEePnWjFqmveiJdnxMaz6eg6+OGCtP95paV1yPIN93EfKo2rJgaErHgTuixO/XWb/Ew1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUS8W0QGutHLOlHGVuRjaJhwUMDrYwDQYJKoZIhvcNAQELBQADggIBAHNXPyzVlTJ+N9uWkusZXn5T50HsEbZH77Xe7XRcxfGOSeD8bpkTzZ+K2s06Ctg6Wgk/XzTQLwPSZh0avZyQN8gMjgdalEVGKua+etqhqaRpEpKwfTbURIfXUfEpY9Z1zRbkJ4kd+MIySP3bmdCPX1R0zKxnNBFi2QwKN4fRoxdIjtIXHfbX/dtl6/2o1PXWT6RbdejF0mCy2wl+JYt7ulKSnj7oxXehPOBKc2thz4bcQ///If4jXSRK9dNtD2IEBVeC2m6kMyV5Sy5UGYvMLD0w6dEG/+gyRr61M3Z3qAFdlsHB1b6uJcDJHgoJIIihDsnzb02CVAAgp9KP5DlUFy6NHrgbuxu9mk47EDTcnIhT76IxW1hPkWLIwpqazRVdOKnWvvgTtZ8SafJQYqz7Fzf07rh1Z2AQ+4NQ+US1dZxAF7L+/XldblhYXzD8AK6vM8EOTmy6p6ahfzLbOOCxchcKK5HsamMm7YnUeMx0HgX4a/6ManY5Ka5lIxKVCCIcl85bBu4M4ru8H0ST9tg4RQUh7eStqxK2A6RCLi3ECToDZ2mEmuFZkIoohdVddLHRDiBYmxOlsGOm7XtH/UVVMKTumtTm4ofvmMkyghEpIrwACjFeLQ/Ajulrso8uBtjRkcfGEvRM/TAXw8HaOFvjqermobp573PYtlNXLfbQ4ddI-----END CERTIFICATE-----# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327# Subject: CN=Buypass Class 2 Root CA O=Buypass 
AS-983163327# Label: "Buypass Class 2 Root CA"# Serial: 2# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48-----BEGIN CERTIFICATE-----MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEdMBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3MgQ2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1owTjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAwHgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPVL4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC911K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHxMlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZQmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkBarcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clrUs3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLiFRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRSP/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxPAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1RpzzuvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462sA20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3tOluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo+fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0UsH8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQI+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGO
YKbeaP4NK75t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPzY11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA=-----END CERTIFICATE-----# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327# Label: "Buypass Class 3 Root CA"# Serial: 2# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57# SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d-----BEGIN CERTIFICATE-----MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEdMBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3MgQ2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFowTjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAwHgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8YZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3EN3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8XKhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTYzIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvSO1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgPK9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwvTg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAjQTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdVcSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXSIGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2
HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsaO5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36udmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkEkbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg413OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvDu79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc=-----END CERTIFICATE-----# Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center# Label: "T-TeleSec GlobalRoot Class 3"# Serial: 1# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd-----BEGIN 
CERTIFICATE-----MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltMEnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOyWL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW306gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuImle9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4pTpPDpFQUWw==-----END CERTIFICATE-----# Issuer: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus# Subject: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus# Label: "EE Certification Centre Root CA"# Serial: 112324828676200291871926431888494945866# MD5 Fingerprint: 43:5e:88:d4:7d:1a:4a:7e:fd:84:2e:52:eb:01:d4:6f# SHA1 Fingerprint: c9:a8:b9:e7:55:80:5e:58:e3:53:77:a7:25:eb:af:c3:7b:27:cc:d7# SHA256 Fingerprint: 3e:84:ba:43:42:90:85:16:e7:75:73:c0:99:2f:09:79:ca:08:4e:46:85:68:1f:f1:95:cc:ba:8a:22:9b:8a:76-----BEGIN 
CERTIFICATE-----MIIEAzCCAuugAwIBAgIQVID5oHPtPwBMyonY43HmSjANBgkqhkiG9w0BAQUFADB1MQswCQYDVQQGEwJFRTEiMCAGA1UECgwZQVMgU2VydGlmaXRzZWVyaW1pc2tlc2t1czEoMCYGA1UEAwwfRUUgQ2VydGlmaWNhdGlvbiBDZW50cmUgUm9vdCBDQTEYMBYGCSqGSIb3DQEJARYJcGtpQHNrLmVlMCIYDzIwMTAxMDMwMTAxMDMwWhgPMjAzMDEyMTcyMzU5NTlaMHUxCzAJBgNVBAYTAkVFMSIwIAYDVQQKDBlBUyBTZXJ0aWZpdHNlZXJpbWlza2Vza3VzMSgwJgYDVQQDDB9FRSBDZXJ0aWZpY2F0aW9uIENlbnRyZSBSb290IENBMRgwFgYJKoZIhvcNAQkBFglwa2lAc2suZWUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDIIMDs4MVLqwd4lfNE7vsLDP90jmG7sWLqI9iroWUyeuuOF0+W2Ap7kaJjbMeMTC55v6kF/GlclY1i+blw7cNRfdCT5mzrMEvhvH2/UpvObntl8jixwKIy72KyaOBhU8E2lf/slLo2rpwcpzIP5Xy0xm90/XsY6KxX7QYgSzIwWFv9zajmofxwvI6Sc9uXp3whrj3B9UiHbCe9nyV0gVWw93X2PaRka9ZP585ArQ/dMtO8ihJTmMmJ+xAdTX7Nfh9WDSFwhfYggx/2uh8Ej+p3iDXE/+pOoYtNP2MbRMNE1CV2yreN1x5KZmTNXMWcg+HCCIia7E6j8T4cLNlsHaFLAgMBAAGjgYowgYcwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBLyWj7qVhy/zQas8fElyalL1BSZMEUGA1UdJQQ+MDwGCCsGAQUFBwMCBggrBgEFBQcDAQYIKwYBBQUHAwMGCCsGAQUFBwMEBggrBgEFBQcDCAYIKwYBBQUHAwkwDQYJKoZIhvcNAQEFBQADggEBAHv25MANqhlHt01Xo/6tu7Fq1Q+e2+RjxY6hUFaTlrg4wCQiZrxTFGGVv9DHKpY5P30osxBAIWrEr7BSdxjhlthWXePdNl4dp1BUoMUq5KqMlIpPnTX/dqQGE5Gion0ARD9V04I8GtVbvFZMIi5GQ4okQC3zErg7cBqklrkar4dBGmoYDQZPxz5uuSlNDUmJEYcyW+ZLBMjkXOZ0c5RdFpgTlf7727FE5TpwrDdr5rMzcijJs1eg9gIWiAYLtqZLICjU3j2LrTcFU3T+bsy8QxdxXvnFzBqpYe73dgzzcvRyrc9yAjYHR8/vGVCJYMzpJJUPwssd8m92kMfMdcGWxZ0=-----END CERTIFICATE-----# Issuer: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş. (c) Aralık 2007# Subject: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş. 
(c) Aralık 2007# Label: "TURKTRUST Certificate Services Provider Root 2007"# Serial: 1# MD5 Fingerprint: 2b:70:20:56:86:82:a0:18:c8:07:53:12:28:70:21:72# SHA1 Fingerprint: f1:7f:6f:b6:31:dc:99:e3:a3:c8:7f:fe:1c:f1:81:10:88:d9:60:33# SHA256 Fingerprint: 97:8c:d9:66:f2:fa:a0:7b:a7:aa:95:00:d9:c0:2e:9d:77:f2:cd:ad:a6:ad:6b:a7:4a:f4:b9:1c:66:59:3c:50-----BEGIN CERTIFICATE-----MIIEPTCCAyWgAwIBAgIBATANBgkqhkiG9w0BAQUFADCBvzE/MD0GA1UEAww2VMOcUktUUlVTVCBFbGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sxc8SxMQswCQYDVQQGEwJUUjEPMA0GA1UEBwwGQW5rYXJhMV4wXAYDVQQKDFVUw5xSS1RSVVNUIEJpbGdpIMSwbGV0acWfaW0gdmUgQmlsacWfaW0gR8O8dmVubGnEn2kgSGl6bWV0bGVyaSBBLsWeLiAoYykgQXJhbMSxayAyMDA3MB4XDTA3MTIyNTE4MzcxOVoXDTE3MTIyMjE4MzcxOVowgb8xPzA9BgNVBAMMNlTDnFJLVFJVU1QgRWxla3Ryb25payBTZXJ0aWZpa2EgSGl6bWV0IFNhxJ9sYXnEsWPEsXPEsTELMAkGA1UEBhMCVFIxDzANBgNVBAcMBkFua2FyYTFeMFwGA1UECgxVVMOcUktUUlVTVCBCaWxnaSDEsGxldGnFn2ltIHZlIEJpbGnFn2ltIEfDvHZlbmxpxJ9pIEhpem1ldGxlcmkgQS7Fni4gKGMpIEFyYWzEsWsgMjAwNzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKu3PgqMyKVYFeaK7yc9SrToJdPNM8Ig3BnuiD9NYvDdE3ePYakqtdTyuTFYKTsvP2qcb3N2Je40IIDu6rfwxArNK4aUyeNgsURSsloptJGXg9i3phQvKUmi8wUG+7RP2qFsmmaf8EMJyupyj+sA1zU511YXRxcw9L6/P8JorzZAwan0qafoEGsIiveGHtyaKhUG9qPw9ODHFNRRf8+0222vR5YXm3dx2KdxnSQM9pQ/hTEST7ruToK4uT6PIzdezKKqdfcYbwnTrqdUKDT74eA7YH2gvnmJhsifLfkKS8RQouf9eRbHegsYz85M733WB2+Y8a+xwXrXgTW4qhe04MsCAwEAAaNCMEAwHQYDVR0OBBYEFCnFkKslrxHkYb+j/4hhkeYO/pyBMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBBQUAA4IBAQAQDdr4Ouwo0RSVgrESLFF6QSU2TJ/sPx+EnWVUXKgWAkD6bho3hO9ynYYKVZ1WKKxmLNA6VpM0ByWtCLCPyA8JWcqdmBzlVPi5RX9ql2+IaE1KBiY3iAIOtsbWcpnOa3faYjGkVh+uX4132l32iPwa2Z61gfAyuOOI0JzzaqC5mxRZNTZPz/OOXl0XrRWV2N2y1RVuAE6zS89mlOTgzbUF2mNXi+WzqtvALhyQRNsaXRik7r4EW5nVcV9VZWRi1aKbBFmGyGJ353yCRWo9F7/snXUMrqNvWtMvmDb08PUZqxFdyKbjKlhqQgnDvZImZjINXQhVdP+MmNAKpoRq0Tl9-----END CERTIFICATE-----# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH# Label: "D-TRUST Root Class 3 
CA 2 2009"# Serial: 623603# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1-----BEGIN CERTIFICATE-----MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NThaME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9RySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsMlFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4GA1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUyMENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRlcmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3JsL2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQELBQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeniacfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4KzCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3YJohw1+qRzT65ysCQblrGXnRl11z+o+I=-----END CERTIFICATE-----# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH# Label: "D-TRUST Root Class 3 CA 2 EV 2009"# Serial: 623604# MD5 Fingerprint: 
aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81-----BEGIN CERTIFICATE-----MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUwNDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpnljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM03TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6ZqQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lRp75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQwggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9NteaHNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFwOi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xhc3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1ERT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDkuY3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNFCSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7naxpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqXKVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1-----END CERTIFICATE-----# Issuer: CN=Autoridad de Certificacion Raiz del Estado Venezolano O=Sistema Nacional de Certificacion Electronica OU=Superintendencia de Servicios de Certificacion Electronica# Subject: CN=PSCProcert O=Sistema Nacional de Certificacion 
Electronica OU=Proveedor de Certificados PROCERT# Label: "PSCProcert"# Serial: 11# MD5 Fingerprint: e6:24:e9:12:01:ae:0c:de:8e:85:c4:ce:a3:12:dd:ec# SHA1 Fingerprint: 70:c1:8d:74:b4:28:81:0a:e4:fd:a5:75:d7:01:9f:99:b0:3d:50:74# SHA256 Fingerprint: 3c:fc:3c:14:d1:f6:84:ff:17:e3:8c:43:ca:44:0c:00:b9:67:ec:93:3e:8b:fe:06:4c:a1:d7:2c:90:f2:ad:b0-----BEGIN CERTIFICATE-----MIIJhjCCB26gAwIBAgIBCzANBgkqhkiG9w0BAQsFADCCAR4xPjA8BgNVBAMTNUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIFJhaXogZGVsIEVzdGFkbyBWZW5lem9sYW5vMQswCQYDVQQGEwJWRTEQMA4GA1UEBxMHQ2FyYWNhczEZMBcGA1UECBMQRGlzdHJpdG8gQ2FwaXRhbDE2MDQGA1UEChMtU2lzdGVtYSBOYWNpb25hbCBkZSBDZXJ0aWZpY2FjaW9uIEVsZWN0cm9uaWNhMUMwQQYDVQQLEzpTdXBlcmludGVuZGVuY2lhIGRlIFNlcnZpY2lvcyBkZSBDZXJ0aWZpY2FjaW9uIEVsZWN0cm9uaWNhMSUwIwYJKoZIhvcNAQkBFhZhY3JhaXpAc3VzY2VydGUuZ29iLnZlMB4XDTEwMTIyODE2NTEwMFoXDTIwMTIyNTIzNTk1OVowgdExJjAkBgkqhkiG9w0BCQEWF2NvbnRhY3RvQHByb2NlcnQubmV0LnZlMQ8wDQYDVQQHEwZDaGFjYW8xEDAOBgNVBAgTB01pcmFuZGExKjAoBgNVBAsTIVByb3ZlZWRvciBkZSBDZXJ0aWZpY2Fkb3MgUFJPQ0VSVDE2MDQGA1UEChMtU2lzdGVtYSBOYWNpb25hbCBkZSBDZXJ0aWZpY2FjaW9uIEVsZWN0cm9uaWNhMQswCQYDVQQGEwJWRTETMBEGA1UEAxMKUFNDUHJvY2VydDCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANW39KOUM6FGqVVhSQ2oh3NekS1wwQYalNo97BVCwfWMrmoX8Yqt/ICV6oNEolt6Vc5Pp6XVurgfoCfAUFM+jbnADrgV3NZs+J74BCXfgI8Qhd19L3uA3VcAZCP4bsm+lU/hdezgfl6VzbHvvnpC2Mks0+saGiKLt38GieU89RLAu9MLmV+QfI4tL3czkkohRqipCKzx9hEC2ZUWno0vluYC3XXCFCpa1sl9JcLB/KpnheLsvtF8PPqv1W7/U0HU9TI4seJfxPmOEO8GqQKJ/+MMbpfg353bIdD0PghpbNjU5Db4g7ayNo+c7zo3Fn2/omnXO1ty0K+qP1xmk6wKImG20qCZyFSTXai20b1dCl53lKItwIKOvMoDKjSuc/HUtQy9vmebVOvh+qBa7Dh+PsHMosdEMXXqP+UH0quhJZb25uSgXTcYOWEAM11G1ADEtMo88aKjPvM6/2kwLkDd9p+cJsmWN63nOaK/6mnbVSKVUyqUtd+tFjiBdWbjxywbk5yqjKPK2Ww8F22c3HxT4CAnQzb5EuE8XL1mv6JpIzi4mWCZDlZTOpx+FIywBm/xhnaQr/2v/pDGj59/i5IjnOcVdo/Vi5QTcmn7K2FjiO/mpF7moxdqWEfLcU8UC17IAggmosvpr2uKGcfLFFb14dq12fy/czja+eevbqQ34gcnAgMBAAGjggMXMIIDEzASBgNVHRMBAf8ECDAGAQH/AgEBMDcGA1UdEgQwMC6CD3N1c2NlcnRlLmdvYi52ZaAbBgVghl4CAqASDBBSSUYtRy0yMDAwNDAzNi0wMB0GA1UdDgQWBBRBDxk4qpl/Qguk1y
eYVKIXTC1RVDCCAVAGA1UdIwSCAUcwggFDgBStuyIdxuDSAaj9dlBSk+2YwU2u06GCASakggEiMIIBHjE+MDwGA1UEAxM1QXV0b3JpZGFkIGRlIENlcnRpZmljYWNpb24gUmFpeiBkZWwgRXN0YWRvIFZlbmV6b2xhbm8xCzAJBgNVBAYTAlZFMRAwDgYDVQQHEwdDYXJhY2FzMRkwFwYDVQQIExBEaXN0cml0byBDYXBpdGFsMTYwNAYDVQQKEy1TaXN0ZW1hIE5hY2lvbmFsIGRlIENlcnRpZmljYWNpb24gRWxlY3Ryb25pY2ExQzBBBgNVBAsTOlN1cGVyaW50ZW5kZW5jaWEgZGUgU2VydmljaW9zIGRlIENlcnRpZmljYWNpb24gRWxlY3Ryb25pY2ExJTAjBgkqhkiG9w0BCQEWFmFjcmFpekBzdXNjZXJ0ZS5nb2IudmWCAQowDgYDVR0PAQH/BAQDAgEGME0GA1UdEQRGMESCDnByb2NlcnQubmV0LnZloBUGBWCGXgIBoAwMClBTQy0wMDAwMDKgGwYFYIZeAgKgEgwQUklGLUotMzE2MzUzNzMtNzB2BgNVHR8EbzBtMEagRKBChkBodHRwOi8vd3d3LnN1c2NlcnRlLmdvYi52ZS9sY3IvQ0VSVElGSUNBRE8tUkFJWi1TSEEzODRDUkxERVIuY3JsMCOgIaAfhh1sZGFwOi8vYWNyYWl6LnN1c2NlcnRlLmdvYi52ZTA3BggrBgEFBQcBAQQrMCkwJwYIKwYBBQUHMAGGG2h0dHA6Ly9vY3NwLnN1c2NlcnRlLmdvYi52ZTBBBgNVHSAEOjA4MDYGBmCGXgMBAjAsMCoGCCsGAQUFBwIBFh5odHRwOi8vd3d3LnN1c2NlcnRlLmdvYi52ZS9kcGMwDQYJKoZIhvcNAQELBQADggIBACtZ6yKZu4SqT96QxtGGcSOeSwORR3C7wJJg7ODU523G0+1ng3dS1fLld6c2suNUvtm7CpsR72H0xpkzmfWvADmNg7+mvTV+LFwxNG9s2/NkAZiqlCxB3RWGymspThbASfzXg0gTB1GEMVKIu4YXx2sviiCtxQuPcD4quxtxj7mkoP3YldmvWb8lK5jpY5MvYB7Eqvh39YtsL+1+LrVPQA3uvFd359m21D+VJzog1eWuq2w1n8GhHVnchIHuTQfiSLaeS5UtQbHh6N5+LwUeaO6/u5BlOsju6rEYNxxik6SgMexxbJHmpHmJWhSnFFAFTKQAVzAswbVhltw+HoSvOULP5dAssSS830DD7X9jSr3hTxJkhpXzsOfIt+FTvZLm8wyWuevo5pLtp4EJFAv8lXrPj9Y0TzYS3F7RNHXGRoAvlQSMx4bEqCaJqD8Zm4G7UaRKhqsLEQ+xrmNTbSjq3TNWOByyrYDT13K9mmyZY+gAu0F2BbdbmRiKw7gSXFbPVgx96OLP7bx0R/vu0xdOIk9W/1DzLuY5poLWccret9W6aAjtmcz9opLLabid+Qqkpj5PkygqYWwHJgD/ll9ohri4zspV4KuxPX+Y1zMOWj3YeMLEYC/HYvBhkdI4sPaeVdtAgAUSM84dkpvRabP/v/GSCmE1P93+hvS84Bpxs2Km-----END CERTIFICATE-----# Issuer: CN=China Internet Network Information Center EV Certificates Root O=China Internet Network Information Center# Subject: CN=China Internet Network Information Center EV Certificates Root O=China Internet Network Information Center# Label: "China Internet Network Information Center EV Certificates Root"# Serial: 1218379777# MD5 
Fingerprint: 55:5d:63:00:97:bd:6a:97:f5:67:ab:4b:fb:6e:63:15# SHA1 Fingerprint: 4f:99:aa:93:fb:2b:d1:37:26:a1:99:4a:ce:7f:f0:05:f2:93:5d:1e# SHA256 Fingerprint: 1c:01:c6:f4:db:b2:fe:fc:22:55:8b:2b:ca:32:56:3f:49:84:4a:cf:c3:2b:7b:e4:b0:ff:59:9f:9e:8c:7a:f7-----BEGIN CERTIFICATE-----MIID9zCCAt+gAwIBAgIESJ8AATANBgkqhkiG9w0BAQUFADCBijELMAkGA1UEBhMCQ04xMjAwBgNVBAoMKUNoaW5hIEludGVybmV0IE5ldHdvcmsgSW5mb3JtYXRpb24gQ2VudGVyMUcwRQYDVQQDDD5DaGluYSBJbnRlcm5ldCBOZXR3b3JrIEluZm9ybWF0aW9uIENlbnRlciBFViBDZXJ0aWZpY2F0ZXMgUm9vdDAeFw0xMDA4MzEwNzExMjVaFw0zMDA4MzEwNzExMjVaMIGKMQswCQYDVQQGEwJDTjEyMDAGA1UECgwpQ2hpbmEgSW50ZXJuZXQgTmV0d29yayBJbmZvcm1hdGlvbiBDZW50ZXIxRzBFBgNVBAMMPkNoaW5hIEludGVybmV0IE5ldHdvcmsgSW5mb3JtYXRpb24gQ2VudGVyIEVWIENlcnRpZmljYXRlcyBSb290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAm35z7r07eKpkQ0H1UN+U8i6yjUqORlTSIRLIOTJCBumD1Z9S7eVnAztUwYyZmczpwA//DdmEEbK40ctb3B75aDFk4Zv6dOtouSCV98YPjUesWgbdYavi7NifFy2cyjw1l1VxzUOFsUcW9SxTgHbP0wBkvUCZ3czY28Sf1hNfQYOL+Q2HklY0bBoQCxfVWhyXWIQ8hBouXJE0bhlffxdpxWXvayHG1VA6v2G5BY3vbzQ6sm8UY78WO5upKv23KzhmBsUs4qpnHkWnjQRmQvaPK++IIGmPMowUc9orhpFjIpryp9vOiYurXccUwVswah+xt54ugQEC7c+WXmPbqOY4twIDAQABo2MwYTAfBgNVHSMEGDAWgBR8cks5x8DbYqVPm6oYNJKiyoOCWTAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUfHJLOcfA22KlT5uqGDSSosqDglkwDQYJKoZIhvcNAQEFBQADggEBACrDx0M3j92tpLIM7twUbY8opJhJywyA6vPtI2Z1fcXTIWd50XPFtQO3WKwMVC/GVhMPMdoG52U7HW8228gd+f2ABsqjPWYWqJ1MFn3AlUa1UeTiH9fqBk1jjZaM7+czV0I664zBechNdn3e9rG3geCg+aF4RhcaVpjwTj2rHO3sOdwHSPdj/gauwqRcalsyiMXHM4WsZkJHwlgkmeHlPuV1LI5D1l08eB6olYIpUNHRFrrvwb562bTYzB5MRuF3sTGrvSrIzo9uoV1/A3U05K2JRVRevq4opbs/eHnrc7MKDf2+yfdWrPa37S+bISnHOLaVxATywy39FCqQmbkHzJ8=-----END CERTIFICATE-----# Issuer: CN=Swisscom Root CA 2 O=Swisscom OU=Digital Certificate Services# Subject: CN=Swisscom Root CA 2 O=Swisscom OU=Digital Certificate Services# Label: "Swisscom Root CA 2"# Serial: 40698052477090394928831521023204026294# MD5 Fingerprint: 5b:04:69:ec:a5:83:94:63:18:a7:86:d0:e4:f2:6e:19# SHA1 Fingerprint: 
77:47:4f:c6:30:e4:0f:4c:47:64:3f:84:ba:b8:c6:95:4a:8a:41:ec# SHA256 Fingerprint: f0:9b:12:2c:71:14:f4:a0:9b:d4:ea:4f:4a:99:d5:58:b4:6e:4c:25:cd:81:14:0d:29:c0:56:13:91:4c:38:41-----BEGIN CERTIFICATE-----MIIF2TCCA8GgAwIBAgIQHp4o6Ejy5e/DfEoeWhhntjANBgkqhkiG9w0BAQsFADBkMQswCQYDVQQGEwJjaDERMA8GA1UEChMIU3dpc3Njb20xJTAjBgNVBAsTHERpZ2l0YWwgQ2VydGlmaWNhdGUgU2VydmljZXMxGzAZBgNVBAMTElN3aXNzY29tIFJvb3QgQ0EgMjAeFw0xMTA2MjQwODM4MTRaFw0zMTA2MjUwNzM4MTRaMGQxCzAJBgNVBAYTAmNoMREwDwYDVQQKEwhTd2lzc2NvbTElMCMGA1UECxMcRGlnaXRhbCBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczEbMBkGA1UEAxMSU3dpc3Njb20gUm9vdCBDQSAyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAlUJOhJ1R5tMJ6HJaI2nbeHCOFvErjw0DzpPMLgAIe6szjPTpQOYXTKueuEcUMncy3SgM3hhLX3af+Dk7/E6J2HzFZ++r0rk0X2s682Q2zsKwzxNoysjL67XiPS4h3+os1OD5cJZM/2pYmLcX5BtS5X4HAB1f2uY+lQS3aYg5oUFgJWFLlTloYhyxCwWJwDaCFCE/rtuh/bxvHGCGtlOUSbkrRsVPACu/obvLP+DHVxxX6NZp+MEkUp2IVd3Chy50I9AU/SpHWrumnf2U5NGKpV+GY3aFy6//SSj8gO1MedK75MDvAe5QQQg1I3ArqRa0jG6F6bYRzzHdUyYb3y1aSgJA/MTAtukxGggo5WDDH8SQjhBiYEQN7Aq+VRhxLKX0srwVYv8c474d2h5Xszx+zYIdkeNL6yxSNLCK/RJOlrDrcH+eOfdmQrGrrFLadkBXeyq96G4DsguAhYidDMfCd7Camlf0uPoTXGiTOmekl9AbmbeGMktg2M7v0Ax/lZ9vh0+Hio5fCHyqW/xavqGRn1V9TrALacywlKinh/LTSlDcX3KwFnUey7QYYpqwpzmqm59m2I2mbJYV4+by+PGDYmy7Velhk6M99bFXi08jsJvllGov34zflVEpYKELKeRcVVi3qPyZ7iVNTA6z00yPhOgpD/0QVAKFyPnlw4vP5w8CAwEAAaOBhjCBgzAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0hBBYwFDASBgdghXQBUwIBBgdghXQBUwIBMBIGA1UdEwEB/wQIMAYBAf8CAQcwHQYDVR0OBBYEFE0mICKJS9PVpAqhb97iEoHF8TwuMB8GA1UdIwQYMBaAFE0mICKJS9PVpAqhb97iEoHF8TwuMA0GCSqGSIb3DQEBCwUAA4ICAQAyCrKkG8t9voJXiblqf/P0wS4RfbgZPnm3qKhyN2abGu2sEzsOv2LwnN+ee6FTSA5BesogpxcbtnjsQJHzQq0Qw1zv/2BZf82Fo4s9SBwlAjxnffUy6S8w5X2lejjQ82YqZh6NM4OKb3xuqFp1mrjX2lhIREeoTPpMSQpKwhI3qEAMw8jh0FcNlzKVxzqfl9NX+Ave5XLzo9v/tdhZsnPdTSpxsrpJ9csc1fV5yJmz/MFMdOO0vSk3FQQoHt5FRnDsr7p4DooqzgB53MBfGWcsa0vvaGgLQ+OswWIJ76bdZWGgr4RVSJFSHMYlkSrQwSIjYVmvRRGFHQEkNI/Ps/8XciATwoCqISxxOQ7Qj1zB09GOInJGTB2Wrk9xseEFKZZZ9LuedT3PDTcNYtsmjGOpI99nBjx8Oto0QuFmtEYE3saWmA9LSHokMnWRn6z3aOkquVVlzl1h0ydw2Df+n7mvoC5Wt6NlU
e07qxS/TFED6F+KBZvuim6c779o+sjaC+NCydAXFJy3SuCvkychVSa1ZC+N8f+mQAWFBVzKBxlcCxMoTFh/wqXvRdpg065lYZ1Tg3TCrvJcwhbtkj6EPnNgiLx29CzP0H1907he0ZESEOnN3col49XtmS++dYFLJPlFRpTJKSFTnCZFqhMX5OfNeOI5wSsSnqaeG8XmDtkx2Q==-----END CERTIFICATE-----# Issuer: CN=Swisscom Root EV CA 2 O=Swisscom OU=Digital Certificate Services# Subject: CN=Swisscom Root EV CA 2 O=Swisscom OU=Digital Certificate Services# Label: "Swisscom Root EV CA 2"# Serial: 322973295377129385374608406479535262296# MD5 Fingerprint: 7b:30:34:9f:dd:0a:4b:6b:35:ca:31:51:28:5d:ae:ec# SHA1 Fingerprint: e7:a1:90:29:d3:d5:52:dc:0d:0f:c6:92:d3:ea:88:0d:15:2e:1a:6b# SHA256 Fingerprint: d9:5f:ea:3c:a4:ee:dc:e7:4c:d7:6e:75:fc:6d:1f:f6:2c:44:1f:0f:a8:bc:77:f0:34:b1:9e:5d:b2:58:01:5d-----BEGIN CERTIFICATE-----MIIF4DCCA8igAwIBAgIRAPL6ZOJ0Y9ON/RAdBB92ylgwDQYJKoZIhvcNAQELBQAwZzELMAkGA1UEBhMCY2gxETAPBgNVBAoTCFN3aXNzY29tMSUwIwYDVQQLExxEaWdpdGFsIENlcnRpZmljYXRlIFNlcnZpY2VzMR4wHAYDVQQDExVTd2lzc2NvbSBSb290IEVWIENBIDIwHhcNMTEwNjI0MDk0NTA4WhcNMzEwNjI1MDg0NTA4WjBnMQswCQYDVQQGEwJjaDERMA8GA1UEChMIU3dpc3Njb20xJTAjBgNVBAsTHERpZ2l0YWwgQ2VydGlmaWNhdGUgU2VydmljZXMxHjAcBgNVBAMTFVN3aXNzY29tIFJvb3QgRVYgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMT3HS9X6lds93BdY7BxUglgRCgzo3pOCvrY6myLURYaVa5UJsTMRQdBTxB5f3HSek4/OE6zAMaVylvNwSqD1ycfMQ4jFrclyxy0uYAyXhqdk/HoPGAsp15XGVhRXrwsVgu42O+LgrQ8uMIkqBPHoCE2G3pXKSinLr9xJZDzRINpUKTk4RtiGZQJo/PDvO/0vezbE53PnUgJUmfANykRHvvSEaeFGHR55E+FFOtSN+KxRdjMDUN/rhPSays/p8LiqG12W0OfvrSdsyaGOx9/5fLoZigWJdBLlzin5M8J0TbDC77aO0RYjb7xnglrPvMyxyuHxuxenPaHZa0zKcQvidm5y8kDnftslFGXEBuGCxobP/YCfnvUxVFkKJ3106yDgYjTdLRZncHrYTNaRdHLOdAGalNgHa/2+2m8atwBz735j9m9W8E6X47aD0upm50qKGsaCnw8qyIL5XctcfaCNYGu+HuB5ur+rPQam3Rc6I8k9l2dRsQs0h4rIWqDJ2dVSqTjyDKXZpBy2uPUZC5f46Fq9mDU5zXNysRojddxyNMkM3OxbPlq4SjbX8Y96L5V5jcb7STZDxmPX2MYWFCBUWVv8p9+agTnNCRxunZLWB4ZvRVgRaoMEkABnRDixzgHcgplwLa7JSnaFp6LNYth7eVxV4O1PHGf40+/fh6Bn0GXAgMBAAGjgYYwgYMwDgYDVR0PAQH/BAQDAgGGMB0GA1UdIQQWMBQwEgYHYIV0AVMCAgYHYIV0AVMCAjASBgNVHRMBAf8ECDAGAQH/AgEDMB0GA1UdDgQWBBRF2aWBbj2ITY1
x0kbBbkUe88SAnTAfBgNVHSMEGDAWgBRF2aWBbj2ITY1x0kbBbkUe88SAnTANBgkqhkiG9w0BAQsFAAOCAgEAlDpzBp9SSzBc1P6xXCX5145v9Ydkn+0UjrgEjihLj6p7jjm02Vj2e6E1CqGdivdj5eu9OYLU43otb98TPLr+flaYC/NUn81ETm484T4VvwYmneTwkLbUwp4wLh/vx3rEUMfqe9pQy3omywC0Wqu1kx+AiYQElY2NfwmTv9SoqORjbdlk5LgpWgi/UOGED1V7XwgiG/W9mR4U9s70WBCCswo9GcG/W6uqmdjyMb3lOGbcWAXH7WMaLgqXfIeTK7KK4/HsGOV1timH59yLGn602MnTihdsfSlEvoqq9X46Lmgxk7lq2prg2+kupYTNHAq4Sgj5nPFhJpiTt3tm7JFe3VE/23MPrQRYCd0EApUKPtN236YQHoA96M2kZNEzx5LH4k5E4wnJTsJdhw4Snr8PyQUQ3nqjsTzyP6WqJ3mtMX0f/fwZacXduT98zca0wjAefm6S139hdlqP65VNvBFuIXxZN5nQBrz5Bm0yFqXZaajh3DyAHmBR3NdUIR7KYndP+tiPsys6DXhyyWhBWkdKwqPrGtcKqzwyVcgKEZzfdNbwQBUdyLmPtTbFr/giuMod89a2GQ+fYWVq6nTIfI/DT11lgh/ZDYnadXL77/FHZxOzyNEZiCcmmpl5fx7kLD977vHeTYuWl8PVP3wbI+2ksx0WckNLIOFZfsLorSa/ovc=-----END CERTIFICATE-----# Issuer: CN=CA Disig Root R1 O=Disig a.s.# Subject: CN=CA Disig Root R1 O=Disig a.s.# Label: "CA Disig Root R1"# Serial: 14052245610670616104# MD5 Fingerprint: be:ec:11:93:9a:f5:69:21:bc:d7:c1:c0:67:89:cc:2a# SHA1 Fingerprint: 8e:1c:74:f8:a6:20:b9:e5:8a:f4:61:fa:ec:2b:47:56:51:1a:52:c6# SHA256 Fingerprint: f9:6f:23:f4:c3:e7:9c:07:7a:46:98:8d:5a:f5:90:06:76:a0:f0:39:cb:64:5d:d1:75:49:b2:16:c8:24:40:ce-----BEGIN 
CERTIFICATE-----MIIFaTCCA1GgAwIBAgIJAMMDmu5QkG4oMA0GCSqGSIb3DQEBBQUAMFIxCzAJBgNVBAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMuMRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIxMB4XDTEyMDcxOTA5MDY1NloXDTQyMDcxOTA5MDY1NlowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmExEzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjEwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCqw3j33Jijp1pedxiy3QRkD2P9m5YJgNXoqqXinCaUOuiZc4yd39ffg/N4T0Dhf9Kn0uXKE5Pn7cZ3Xza1lK/oOI7bm+V8u8yN63Vz4STN5qctGS7Y1oprFOsIYgrY3LMATcMjfF9DCCMyEtztDK3AfQ+lekLZWnDZv6fXARz2m6uOt0qGeKAeVjGu74IKgEH3G8muqzIm1Cxr7X1r5OJeIgpFy4QxTaz+29FHuvlglzmxZcfe+5nkCiKxLU3lSCZpq+Kq8/v8kiky6bM+TR8noc2OuRf7JT7JbvN32g0S9l3HuzYQ1VTW8+DiR0jm3hTaYVKvJrT1cU/J19IG32PK/yHoWQbgCNWEFVP3Q+V8xaCJmGtzxmjOZd69fwX3se72V6FglcXM6pM6vpmumwKjrckWtc7dXpl4fho5frLABaTAgqWjR56M6ly2vGfb5ipN0gTco65F97yLnByn1tUD3AjLLhbKXEAz6GfDLuemROoRRRw1ZS0eRWEkG4IupZ0zXWX4Qfkuy5Q/H6MMMSRE7cderVC6xkGbrPAXZcD4XW9boAo0PO7X6oifmPmvTiT6l7Jkdtqr9O3jw2Dv1fkCyC2fg69naQanMVXVz0tv/wQFx1isXxYb5dKj6zHbHzMVTdDypVP1y+E9Tmgt2BLdqvLmTZtJ5cUoobqwWsagtQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUiQq0OJMa5qvum5EY+fU8PjXQ04IwDQYJKoZIhvcNAQEFBQADggIBADKL9p1Kyb4U5YysOMo6CdQbzoaz3evUuii+Eq5FLAR0rBNRxVgYZk2C2tXck8An4b58n1KeElb21Zyp9HWc+jcSjxyT7Ff+Bw+r1RL3D65hXlaASfX8MPWbTx9BLxyE04nH4toCdu0Jz2zBuByDHBb6lM19oMgY0sidbvW9adRtPTXoHqJPYNcHKfyyo6SdbhWSVhlMCrDpfNIZTUJG7L399ldb3Zh+pE3McgODWF3vkzpBemOqfDqo9ayk0d2iLbYq/J8BjuIQscTK5GfbVSUZP/3oNn6z4eGBrxEWi1CXYBmCAMBrTXO40RMHPuq2MU/wQppt4hF05ZSsjYSVPCGvxdpHyN85YmLLW1AL14FABZyb7bq2ix4Eb5YgOe2kfSnbSM6C3NQCjR0EMVrHS/BsYVLXtFHCgWzN4funodKSds+xDzdYpPJScWc/DIh4gInByLUfkmO+p3qKViwaqKactV2zY9ATIKHrkWzQjX2v3wvkF7mGnjixlAxYjOBVqjtjbZqJYLhkKpLGN/R+Q0O3c+gB53+XD9fyexn9GtePyfqFa3qdnom2piiZk4hA9z7NUaPK6u95RyG1/jLix8NRb76AdPCkwzryT+lf3xkK8jsTQ6wxpLPn6/wY1gGp8yqPNg7rtLG8t0zJa7+h89n07eLw4+1knj0vllJPgFOL-----END CERTIFICATE-----# Issuer: CN=CA Disig Root R2 O=Disig a.s.# Subject: CN=CA Disig Root R2 O=Disig a.s.# Label: "CA Disig 
Root R2"# Serial: 10572350602393338211# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03-----BEGIN CERTIFICATE-----MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNVBAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMuMRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQyMDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmExEzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3OeNcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNHPWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3Ix2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbeQTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfRyyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrOQG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJQfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUDi/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORsnLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZIhvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFMtCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqfGopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkblvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka+elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1balTFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0inSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18DrG5gPcFw0s
orMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3OszMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8xL4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL-----END CERTIFICATE-----# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV# Label: "ACCVRAIZ1"# Serial: 6828503384748696800# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17# SHA256 Fingerprint: 9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13-----BEGIN CERTIFICATE-----MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UEAwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQswCQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQBgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUNDVjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCbqau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoYHtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWoG2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpAlHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhrIA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eHk6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/474KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMOm3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpacXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPluUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYIKwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmlsZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsGAQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeTVfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIGCCsGAQUFBwICMIIBFB6CARAAQQB1AHQAb
wByAGkAZABhAGQAIABkAGUAIABDAGUAcgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEAQwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQAcgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAAQwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUAczAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2MuaHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRtaW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1UdDwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEFBQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdpD70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gUJyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+mAM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepDvV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlmstn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5hI6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szAh1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xFd3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2HpPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7-----END CERTIFICATE-----# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA# Label: "TWCA Global Root CA"# Serial: 3262# MD5 Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b-----BEGIN 
CERTIFICATE-----MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcxEjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMTVFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsTB1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfChMBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbHzIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCilaLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYPoA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQABDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcEqYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsnLhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WFH6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNoRI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TWnsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5jwa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWzaGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmyKwbQBM0=-----END CERTIFICATE-----# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera# Label: "TeliaSonera Root CA v1"# Serial: 
199041966741090107964904287217786801558# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89-----BEGIN CERTIFICATE-----MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAwNzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJvb3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYDVQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3FVRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV17CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+XZ75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkmdtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHeOh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMusDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fsslESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQarMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbldxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1TjTQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBedY2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqIOylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReWt88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcnHL/EVlP6
Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVxSK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY=-----END CERTIFICATE-----# Issuer: CN=E-Tugra Certification Authority O=E-Tuğra EBG Bilişim Teknolojileri ve Hizmetleri A.Ş. OU=E-Tugra Sertifikasyon Merkezi# Subject: CN=E-Tugra Certification Authority O=E-Tuğra EBG Bilişim Teknolojileri ve Hizmetleri A.Ş. OU=E-Tugra Sertifikasyon Merkezi# Label: "E-Tugra Certification Authority"# Serial: 7667447206703254355# MD5 Fingerprint: b8:a1:03:63:b0:bd:21:71:70:8a:6f:13:3a:bb:79:49# SHA1 Fingerprint: 51:c6:e7:08:49:06:6e:f3:92:d4:5c:a0:0d:6d:a3:62:8f:c3:52:39# SHA256 Fingerprint: b0:bf:d5:2b:b0:d7:d9:bd:92:bf:5d:4d:c1:3d:a2:55:c0:2c:54:2f:37:83:65:ea:89:39:11:f5:5e:55:f2:3c-----BEGIN CERTIFICATE-----MIIGSzCCBDOgAwIBAgIIamg+nFGby1MwDQYJKoZIhvcNAQELBQAwgbIxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+BgNVBAoMN0UtVHXEn3JhIEVCRyBCaWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhpem1ldGxlcmkgQS7Fni4xJjAkBgNVBAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBNZXJrZXppMSgwJgYDVQQDDB9FLVR1Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTEzMDMwNTEyMDk0OFoXDTIzMDMwMzEyMDk0OFowgbIxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+BgNVBAoMN0UtVHXEn3JhIEVCRyBCaWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhpem1ldGxlcmkgQS7Fni4xJjAkBgNVBAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBNZXJrZXppMSgwJgYDVQQDDB9FLVR1Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA4vU/kwVRHoViVF56C/UYB4Oufq9899SKa6VjQzm5S/fDxmSJPZQuVIBSOTkHS0vdhQd2h8y/L5VMzH2nPbxHD5hw+IyFHnSOkm0bQNGZDbt1bsipa5rAhDGvykPL6ys06I+XawGb1Q5KCKpbknSFQ9OArqGIW66z6l7LFpp3RMih9lRozt6Plyu6W0ACDGQXwLWTzeHxE2bODHnv0ZEoq1+gElIwcxmOj+GMB6LDu0rw6h8VqO4lzKRG+Bsi77MOQ7osJLjFLFzUHPhdZL3Dk14opz8n8Y4e0ypQBaNV2cvnOVPAmJ6MVGKLJrD3fY185MaeZkJVgkfnsliNZvcHfC425lAcP9tDJMW/hkd5s3kc91r0E+xs+D/iWR+V7kI+ua2oMoVJl0b+SzGPWsutdEcf6ZG33ygEIqDUD13ieU/qbIWGvaimzuT6w+Gzrt48Ue7LE3wBf4QOXVGUnhMMti6lTPk5cDZvlsouDERVxcr6XQKj39ZkjFqzAQqptQpHF//vkUAqjqFGOjGY5RH8zLtJVor8udBhmm9lbObDyz51Sf6Pp+KJxWfXnUYTTjF2OySznhFlhqt/7x3U+LznrFpct1pHXFXOVbQicV
tbC/DP3KBhZOqp12gKY6fgDT+gr9Oq0n7vUaDmUStVkhUXU8u3Zg5mTPj5dUyQ5xJwx0UCAwEAAaNjMGEwHQYDVR0OBBYEFC7j27JJ0JxUeVz6Jyr+zE7S6E5UMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAULuPbsknQnFR5XPonKv7MTtLoTlQwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAFNzr0TbdF4kV1JI+2d1LoHNgQk2Xz8lkGpD4eKexd0dCrfOAKkEh47U6YA5n+KGCRHTAduGN8qOY1tfrTYXbm1gdLymmasoR6d5NFFxWfJNCYExL/u6Au/U5Mh/jOXKqYGwXgAEZKgoClM4so3O0409/lPun++1ndYYRP0lSWE2ETPo+Aab6TR7U1Q9Jauz1c77NCR807VRMGsAnb/WP2OogKmW9+4c4bU2pEZiNRCHu8W1Ki/QY3OEBhj0qWuJA3+GbHeJAAFS6LrVE1Uweoa2iu+U48BybNCAVwzDk/dr2l02cmAYamU9JgO3xDf1WKvJUawSg5TB9D0pH0clmKuVb8P7Sd2nCcdlqMQ1DujjByTd//SffGqWfZbawCEeI6FiWnWAjLb1NBnEg4R2gz0dfHj9R0IdTDBZB6/86WiLEVKV0jq9BgoRJP3vQXzTLlyb/IQ639Lo7xr+L0mPoSHyDYwKcMhcWQ9DstliaxLL5Mq+ux0orJ23gTDx4JnW2PAJ8C2sH6H3p6CcRK5ogql5+Ji/03X186zjhZhkuvcQu02PJwT58yE+Owp1fl2tpDy4Q08ijE6m30Ku/Ba3ba+367hTzSU8JNvnHhRdH9I2cNE3X7z2VnIp2usAnRCf8dNL/+I5c30jn6PQ0GC7TbO6Orb1wdtn7os4I07QZcJA==-----END CERTIFICATE-----# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center# Label: "T-TeleSec GlobalRoot Class 2"# Serial: 1# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52-----BEGIN 
CERTIFICATE-----MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUdAqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiCFoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6IavqjnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZwI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhyNsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPACuvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVwIEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlPBSeOE6Fuwg==-----END CERTIFICATE-----# Issuer: CN=Atos TrustedRoot 2011 O=Atos# Subject: CN=Atos TrustedRoot 2011 O=Atos# Label: "Atos TrustedRoot 2011"# Serial: 6643877497813316402# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74-----BEGIN 
CERTIFICATE-----MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UEAwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMMFUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMCREUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMpNb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rMVD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0Lcp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQieowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgGA1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8jvZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kPDpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pcmaHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2Dlmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLvKrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed-----END CERTIFICATE-----# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited# Label: "QuoVadis Root CA 1 G3"# Serial: 687049649626669250736271037606554624078720034195# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74-----BEGIN 
CERTIFICATE-----MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQELBQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAcBgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtVwedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWerNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF34168Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXpUhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+oabw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/GKubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSthfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KOTk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOtzCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQADggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOCMTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUNqXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwvb2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo28hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/kNSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNjZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhpq1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFtnh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD-----END CERTIFICATE-----# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited# Label: 
"QuoVadis Root CA 2 G3"# Serial: 390156079458959257446133169266079962026824725800# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36# SHA256 Fingerprint: 8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40-----BEGIN CERTIFICATE-----MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQELBQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAcBgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFfqq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMWn4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ymc5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0jIaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKqIcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43ehvNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALGcC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQADggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RCroijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0GaW/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4nlv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE+V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHVcsaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtddbINWQeFFSM51vHfqSYP1kjHs6Yi9T
M3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNgKCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeMHVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M-----END CERTIFICATE-----# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited# Label: "QuoVadis Root CA 3 G3"# Serial: 268090761170461462463995952157327242137089239581# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46-----BEGIN CERTIFICATE-----MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQELBQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAcBgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNuFoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXRU7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+cra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERRORFHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/kA9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzweyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634RylsSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBpVzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0QA4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQADggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3pxKGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusn
IFUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5WvvoxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFgu/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HNPlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0-----END CERTIFICATE-----# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com# Label: "DigiCert Assured ID Root G2"# Serial: 15385348160840213938643033620894905419# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85-----BEGIN 
CERTIFICATE-----MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBlMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSAn61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4HteccbiJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9HpEgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lAbx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6YuYjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPIQW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4GnilmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCvON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwoIhNzbM8m9Yop5w==-----END CERTIFICATE-----# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com# Label: "DigiCert Assured ID Root G3"# Serial: 15459312981008553731928384953135426796# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2-----BEGIN 
CERTIFICATE-----MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJfZn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17QRSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQDAwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlYJjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv6pZjamVFkpUBtA==-----END CERTIFICATE-----# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com# Label: "DigiCert Global Root G2"# Serial: 4293743540046975378534879503202253541# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f-----BEGIN 
CERTIFICATE-----MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBhMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQq2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5WztCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQvIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NGFdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ918rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTepLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTflMrY=-----END CERTIFICATE-----# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com# Label: "DigiCert Global Root G3"# Serial: 7089244469030293291760083333884364146# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0-----BEGIN 
CERTIFICATE-----MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FGfp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPOZ9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIxAK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8sycX-----END CERTIFICATE-----# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com# Label: "DigiCert Trusted Root G4"# Serial: 7451500558977370777930084869016614236# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88-----BEGIN 
CERTIFICATE-----MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBiMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3yithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1Ifxp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDVySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiODCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQjdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCiEhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADMfRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QYuKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXKchYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQADggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd+SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWcfFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqasjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9NcCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mIr/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCmgKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+-----END CERTIFICATE-----# Issuer: CN=Certification Authority of WoSign 
O=WoSign CA Limited# Subject: CN=Certification Authority of WoSign O=WoSign CA Limited# Label: "WoSign"# Serial: 125491772294754854453622855443212256657# MD5 Fingerprint: a1:f2:f9:b5:d2:c8:7a:74:b8:f3:05:f1:d7:e1:84:8d# SHA1 Fingerprint: b9:42:94:bf:91:ea:8f:b6:4b:e6:10:97:c7:fb:00:13:59:b6:76:cb# SHA256 Fingerprint: 4b:22:d5:a6:ae:c9:9f:3c:db:79:aa:5e:c0:68:38:47:9c:d5:ec:ba:71:64:f7:f2:2d:c1:d6:5f:63:d8:57:08-----BEGIN CERTIFICATE-----MIIFdjCCA16gAwIBAgIQXmjWEXGUY1BWAGjzPsnFkTANBgkqhkiG9w0BAQUFADBVMQswCQYDVQQGEwJDTjEaMBgGA1UEChMRV29TaWduIENBIExpbWl0ZWQxKjAoBgNVBAMTIUNlcnRpZmljYXRpb24gQXV0aG9yaXR5IG9mIFdvU2lnbjAeFw0wOTA4MDgwMTAwMDFaFw0zOTA4MDgwMTAwMDFaMFUxCzAJBgNVBAYTAkNOMRowGAYDVQQKExFXb1NpZ24gQ0EgTGltaXRlZDEqMCgGA1UEAxMhQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgb2YgV29TaWduMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAvcqNrLiRFVaXe2tcesLea9mhsMMQI/qnobLMMfo+2aYpbxY94Gv4uEBf2zmoAHqLoE1UfcIiePyOCbiohdfMlZdLdNiefvAA5A6JrkkoRBoQmTIPJYhTpA2zDxIIFgsDcSccf+Hb0v1naMQFXQoOXXDX2JegvFNBmpGN9J42Znp+VsGQX+axaCA2pIwkLCxHC1l2ZjC1vt7tj/id07sBMOby8w7gLJKA84X5KIq0VC6a7fd2/BVoFutKbOsuEo/Uz/4Mx1wdC34FMr5esAkqQtXJTpCzWQ27en7N1QhatH/YHGkR+ScPewavVIMYe+HdVHpRaG53/Ma/UkpmRqGyZxq7o093oL5d//xWC0Nyd5DKnvnyOfUNqfTq1+ezEC8wQjchzDBwyYaYD8xYTYO7feUapTeNtqwylwA6Y3EkHp43xP901DfA4v6IRmAR3Qg/UDaruHqklWJqbrDKaiFaafPz+x1wOZXzp26mgYmhiMU7ccqjUu6Du/2gd/Tkb+dC221KmYo0SLwX3OSACCK28jHAPwQ+658geda4BmRkAjHXqc1S+4RFaQkAKtxVi8QGRkvASh0JWzko/amrzgD5LkhLJuYwTKVYyrREgk/nkR4zw7CT/xH8gdLKH3Ep3XZPkiWvHYG3Dy+MwwbMLyejSuQOmbp8HkUff6oZRZb9/D0CAwEAAaNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFOFmzw7R8bNLtwYgFP6HEtX2/vs+MA0GCSqGSIb3DQEBBQUAA4ICAQCoy3JAsnbBfnv8rWTjMnvMPLZdRtP1LOJwXcgu2AZ9mNELIaCJWSQBnfmvCX0KI4I01fx8cpm5o9dU9OpScA7F9dY74ToJMuYhOZO9sxXqT2r09Ys/L3yNWC7F4TmgPsc9SnOeQHrAK2GpZ8nzJLmzbVUsWh2eJXLOC62qx1ViC777Y7NhRCOjy+EaDveaBk3e1CNOIZZbOVtXHS9dCF4Jef98l7VNg64N1uajeeAz0JmWAjCnPv/So0M/BVoG6kQC2nz4SNAzqfkHx5Xh9T71XXG68pWpdIhhWeO/yloTunK0jF02h+mmxTwTv97QRCbut+wucPrXnbes5cVAWubXbHssw1abR80LzvobtCHXt2a49CUwi1w
NuepnsvRtrtWhnk/Yn+knArAdBtaP4/tIEp9/EaEQPkxROpaw0RPxx9gmrjrKkcRpnd8BKWRRb2jaFOwIQZeQjdCygPLPwj2/kWjFgGcexGATVdVhmVd8upUPYUk6ynW8yQqTP2cOEvIo4jEbwFcW3wh8GcF+Dx+FHgo2fFt+J7x6v+Db9NpSvd4MVHAxkUOVyLzwPt0JfjBkUO1/AaQzZ01oT74V77D2AhGiGxMlOtzCWfHjXEa7ZywCRuoeSKbmW9m1vFGikpbbqsY3Iqb+zCB0oy2pLmvLwIIRIbWTee5Ehr7XHuQe+w==-----END CERTIFICATE-----# Issuer: CN=CA 沃通根证书 O=WoSign CA Limited# Subject: CN=CA 沃通根证书 O=WoSign CA Limited# Label: "WoSign China"# Serial: 106921963437422998931660691310149453965# MD5 Fingerprint: 78:83:5b:52:16:76:c4:24:3b:83:78:e8:ac:da:9a:93# SHA1 Fingerprint: 16:32:47:8d:89:f9:21:3a:92:00:85:63:f5:a4:a7:d3:12:40:8a:d6# SHA256 Fingerprint: d6:f0:34:bd:94:aa:23:3f:02:97:ec:a4:24:5b:28:39:73:e4:47:aa:59:0f:31:0c:77:f4:8f:df:83:11:22:54-----BEGIN CERTIFICATE-----MIIFWDCCA0CgAwIBAgIQUHBrzdgT/BtOOzNy0hFIjTANBgkqhkiG9w0BAQsFADBGMQswCQYDVQQGEwJDTjEaMBgGA1UEChMRV29TaWduIENBIExpbWl0ZWQxGzAZBgNVBAMMEkNBIOayg+mAmuagueivgeS5pjAeFw0wOTA4MDgwMTAwMDFaFw0zOTA4MDgwMTAwMDFaMEYxCzAJBgNVBAYTAkNOMRowGAYDVQQKExFXb1NpZ24gQ0EgTGltaXRlZDEbMBkGA1UEAwwSQ0Eg5rKD6YCa5qC56K+B5LmmMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA0EkhHiX8h8EqwqzbdoYGTufQdDTc7WU1/FDWiD+k8H/rD195L4mx/bxjWDeTmzj4t1up+thxx7S8gJeNbEvxUNUqKaqoGXqW5pWOdO2XCld19AXbbQs5uQF/qvbW2mzmBeCkTVL829B0txGMe41P/4eDrv8FAxNXUDf+jJZSEExfv5RxadmWPgxDT74wwJ85dE8GRV2j1lY5aAfMh09Qd5Nx2UQIsYo06Yms25tO4dnkUkWMLhQfkWsZHWgpLFbE4h4TV2TwYeO5Ed+w4VegG63XX9Gv2ystP9Bojg/qnw+LNVgbExz03jWhCl3W6t8Sb8D7aQdGctyB9gQjF+BNdeFyb7Ao65vh4YOhn0pdr8yb+gIgthhid5E7o9Vlrdx8kHccREGkSovrlXLp9glk3Kgtn3R46MGiCWOc76DbT52VqyBPt7D3h1ymoOQ3OMdc4zUPLK2jgKLsLl3Az+2LBcLmc272idX10kaO6m1jGx6KyX2m+Jzr5dVjhU1zZmkR/sgO9MHHZklTfuQZa/HpelmjbX7FF+Ynxu8b22/8DU0GAbQOXDBGVWCvOGU6yke6rCzMRh+yRpY/8+0mBe53oWprfi1tWFxK1I5nuPHa1UaKJ/kR8slC/k7e3x9cxKSGhxYzoacXGKUN5AXlK8IrC6KVkLn9YDxOiT7nnO4fuwECAwEAAaNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFOBNv9ybQV0T6GTwp+kVpOGBwboxMA0GCSqGSIb3DQEBCwUAA4ICAQBqinA4WbbaixjIvirTthnVZil6Xc1bL3McJk6jfW+rtylNpumlEYOnOXOvEESS5iVdT2H6yA
a+Tkvv/vMx/sZ8cApBWNromUuWyXi8mHwCKe0JgOYKOoICKuLJL8hWGSbueBwj/feTZU7n85iYr83d2Z5AiDEoOqsuC7CsDCT6eiaY8xJhEPRdF/d+4niXVOKM6Cm6jBAyvd0zaziGfjk9DgNyp115j0WKWa5bIW4xRtVZjc8VX90xJc/bYNaBRHIpAlf2ltTW/+op2znFuCyKGo3Oy+dCMYYFaA6eFN0AkLppRQjbbpCBhqcqBT/mhDn4t/lXX0ykeVoQDF7Va/81XwVRHmyjdanPUIPTfPRm94KNPQx96N97qA4bLJyuQHCH2u2nFoJavjVsIE4iYdm8UXrNemHcSxH5/mc0zy4EZmFcV5cjjPOGG0jfKq+nwf/Yjj4Du9gqsPoUJbJRa4ZDhS4HIxaAjUz7tGM7zMN07RujHv41D198HRaG9Q7DlfEvr10lO1Hm13ZBONFLAzkopR6RctR9q5czxNM+4Gm2KHmgCY0c0f9BckgG/Jou5yD5m6Leie2uPAmvylezkolwQOQvT8Jwg0DXJCxr5wkf09XHwQj02w47HAcLQxGEIYbpgNR12KvxAmLBsX5VYc8T1yaw15zLKYs4SgsOkI26oQ==-----END CERTIFICATE-----# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited# Label: "COMODO RSA Certification Authority"# Serial: 101909084537582093308941363524873193117# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34-----BEGIN 
CERTIFICATE-----MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8Xpz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEfZd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z+pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7wqP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZahSL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVICu9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abfFobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiqcrxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4EFgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvlwFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2IntznaFxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZCuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiKboHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmckejkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yLS0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWbQOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB
NVOFBkpdn627G190-----END CERTIFICATE-----# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network# Label: "USERTrust RSA Certification Authority"# Serial: 2645093764781058787591871645665788717# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2-----BEGIN CERTIFICATE-----MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkYtJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmTYo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97lc6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4eeUB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeEHg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAdBgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPFUp/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KOVWKR82kV2LyI48SqC/
3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcRiQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYzeSf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZXHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRBVXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aBL6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfGjjxDah2nGN59PRbxYvnKkKj9-----END CERTIFICATE-----# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network# Label: "USERTrust ECC Certification Authority"# Serial: 123013823720199481456569720443997572134# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0# SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a-----BEGIN 
CERTIFICATE-----MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqfloI+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinngo4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0GA1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMBzzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbWRNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg=-----END CERTIFICATE-----# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4# Label: "GlobalSign ECC Root CA - R4"# Serial: 14367148294922964480859022125800977897474# MD5 Fingerprint: 20:f0:27:68:d1:7e:a0:9d:0e:e6:2a:ca:df:5c:89:8e# SHA1 Fingerprint: 69:69:56:2e:40:80:f4:24:a1:e7:19:9f:14:ba:f3:ee:58:ab:6a:bb# SHA256 Fingerprint: be:c9:49:11:c2:95:56:76:db:6c:0a:55:09:86:d7:6e:3b:a0:05:66:7c:44:2c:97:62:b4:fb:b7:73:de:22:8c-----BEGIN 
CERTIFICATE-----MIIB4TCCAYegAwIBAgIRKjikHJYKBN5CsiilC+g0mAIwCgYIKoZIzj0EAwIwUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI0MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoXDTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI0MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEuMZ5049sJQ6fLjkZHAOkrprlOQcJFspjsbmG+IpXwVfOQvpzofdlQv8ewQCybnMO/8ch5RikqtlxP6jUuc6MHaNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFFSwe61FuOJAf/sKbvu+M8k8o4TVMAoGCCqGSM49BAMCA0gAMEUCIQDckqGgE6bPA7DmxCGXkPoUVy0D7O48027KqGx2vKLeuwIgJ6iFJzWbVsaj8kfSt24bAgAXqmemFZHe+pTsewv4n4Q=-----END CERTIFICATE-----# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5# Label: "GlobalSign ECC Root CA - R5"# Serial: 32785792099990507226680698011560947931244# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24-----BEGIN CERTIFICATE-----MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoXDTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8kehOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYIKoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnOxwy8p2Fp8fc74SrL+SvzZpA3-----END CERTIFICATE-----# Issuer: CN=Staat der Nederlanden Root CA - 
G3 O=Staat der Nederlanden# Subject: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden# Label: "Staat der Nederlanden Root CA - G3"# Serial: 10003001# MD5 Fingerprint: 0b:46:67:07:db:10:2f:19:8c:35:50:60:d1:0b:f4:37# SHA1 Fingerprint: d8:eb:6b:41:51:92:59:e0:f3:e7:85:00:c0:3d:b6:88:97:c9:ee:fc# SHA256 Fingerprint: 3c:4f:b0:b9:5a:b8:b3:00:32:f4:32:b8:6f:53:5f:e1:72:c1:85:d0:fd:39:86:58:37:cf:36:18:7f:a6:f4:28-----BEGIN CERTIFICATE-----MIIFdDCCA1ygAwIBAgIEAJiiOTANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJOTDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFhdCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEczMB4XDTEzMTExNDExMjg0MloXDTI4MTExMzIzMDAwMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRlciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJvb3QgQ0EgLSBHMzCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAL4yolQPcPssXFnrbMSkUeiFKrPMSjTysF/zDsccPVMeiAho2G89rcKezIJnByeHaHE6n3WWIkYFsO2tx1ueKt6c/DrGlaf1F2cY5y9JCAxcz+bMNO14+1Cx3Gsy8KL+tjzk7FqXxz8ecAgwoNzFs21v0IJyEavSgWhZghe3eJJg+szeP4TrjTgzkApyI/o1zCZxMdFyKJLZWyNtZrVtB0LrpjPOktvA9mxjeM3KTj215VKb8b475lRgsGYeCasH/lSJEULR9yS6YHgamPfJEf0WwTUaVHXvQ9Plrk7O53vDxk5hUUurmkVLoR9BvUhTFXFkC4az5S6+zqQbwSmEorXLCCN2QyIkHxcE1G6cxvx/K2Ya7Irl1s9N9WMJtxU51nus6+N86U78dULI7ViVDAZCopz35HCz33JvWjdAidiFpNfxC95DGdRKWCyMijmev4SH8RY7Ngzp07TKbBlBUgmhHbBqv4LvcFEhMtwFdozL92TkA1CvjJFnq8Xy7ljY3r735zHPbMk7ccHViLVlvMDoFxcHErVc0qsgk7TmgoNwNsXNo42ti+yjwUOH5kPiNL6VizXtBznaqB16nzaeErAMZRKQFWDZJkBE41ZgpRDUajz9QdwOWke275dhdU/Z/seyHdTtXUmzqWrLZoQT1Vyg3N9udwbRcXXIV2+vD3dbAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRUrfrHkleuyjWcLhL75LpdINyUVzANBgkqhkiG9w0BAQsFAAOCAgEAMJmdBTLIXg47mAE6iqTnB/d6+Oea31BDU5cqPco8R5gu4RV78ZLzYdqQJRZlwJ9UXQ4DO1t3ApyEtg2YXzTdO2PCwyiBwpwpLiniyMMB8jPqKqrMCQj3ZWfGzd/TtiunvczRDnBfuCPRy5FOCvTIeuXZYzbB1N/8Ipf3YF3qKS9Ysr1YvY2WTxB1v0h7PVGHoTx0IsL8B3+A3MSs/mrBcDCw6Y5p4ixpgZQJut3+TcCDjJRYwEYgr5wfAvg1VUkvRtTA8KCWAg8zxXHzniN9lLf9OtMJgwYh/WA9rjLA0u6NpvDntIJ8CsxwyXmA+P5M9zWEGYox+wrZ13+b8KKaa8MFSu1BYBQw0aoRQm7TIwIEC8
Zl3d1Sd9qBa7Ko+gE4uZbqKmxnl4mUnrzhVNXkanjvSr0rmj1AfsbAddJu+2gw7OyLnflJNZoaLNmzlTnVHpL3prllL+U9bTpITAjc5CgSKL59NVzq4BZ+Extq1z7XnvwtdbLBFNUjA9tbbws+eC8N3jONFrdI54OagQ97wUNNVQQXOEpR1VmiiXTTn74eS9fGbbeIJG9gkaSChVtWQbzQRKtqE77RLFi3EjNYsjdj3BP1lB0/QFH1T/U67cjF68IeHRaVesd+QnGTbksVtzDfqu1XhUisHWrdOWnk4Xl4vs4Fv6EM94B7IWcnMFk=-----END CERTIFICATE-----# Issuer: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden# Subject: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden# Label: "Staat der Nederlanden EV Root CA"# Serial: 10000013# MD5 Fingerprint: fc:06:af:7b:e8:1a:f1:9a:b4:e8:d2:70:1f:c0:f5:ba# SHA1 Fingerprint: 76:e2:7e:c1:4f:db:82:c1:c0:a6:75:b5:05:be:3d:29:b4:ed:db:bb# SHA256 Fingerprint: 4d:24:91:41:4c:fe:95:67:46:ec:4c:ef:a6:cf:6f:72:e2:8a:13:29:43:2f:9d:8a:90:7a:c4:cb:5d:ad:c1:5a-----BEGIN CERTIFICATE-----MIIFcDCCA1igAwIBAgIEAJiWjTANBgkqhkiG9w0BAQsFADBYMQswCQYDVQQGEwJOTDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSkwJwYDVQQDDCBTdGFhdCBkZXIgTmVkZXJsYW5kZW4gRVYgUm9vdCBDQTAeFw0xMDEyMDgxMTE5MjlaFw0yMjEyMDgxMTEwMjhaMFgxCzAJBgNVBAYTAk5MMR4wHAYDVQQKDBVTdGFhdCBkZXIgTmVkZXJsYW5kZW4xKTAnBgNVBAMMIFN0YWF0IGRlciBOZWRlcmxhbmRlbiBFViBSb290IENBMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA48d+ifkkSzrSM4M1LGns3Amk41GoJSt5uAg94JG6hIXGhaTK5skuU6TJJB79VWZxXSzFYGgEt9nCUiY4iKTWO0Cmws0/zZiTs1QUWJZV1VD+hq2kY39ch/aO5ieSZxeSAgMs3NZmdO3dZ//BYY1jTw+bbRcwJu+r0h8QoPnFfxZpgQNH7R5ojXKhTbImxrpsX23Wr9GxE46prfNeaXUmGD5BKyF/7otdBwadQ8QpCiv8Kj6GyzyDOvnJDdrFmeK8eEEzduG/L13lpJhQDBXd4Pqcfzho0LKmeqfRMb1+ilgnQ7O6M5HTp5gVXJrm0w912fxBmJc+qiXbj5IusHsMX/FjqTf5m3VpTCgmJdrV8hJwRVXj33NeN/UhbJCONVrJ0yPr08C+eKxCKFhmpUZtcALXEPlLVPxdhkqHz3/KRawRWrUgUY0viEeXOcDPusBCAUCZSCELa6fS/ZbV0b5GnUngC6agIk440ME8MLxwjyx1zNDFjFE7PZQIZCZhfbnDZY8UnCHQqv0XcgOPvZuM5l5Tnrmd74K74bzickFbIZTTRTeU0d8JOV3nI6qaHcptqAqGhYqCvkIH1vI4gnPah1vlPNOePqc7nvQDs/nxfRN0Av+7oeX6AHkcpmZBiFxgV6YuCcS6/ZrPpx9Aw7vMWgpVSzs4dlG4Y4uElBbmVvMCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFP6rAJCYniT8qcwaivsnuL8wbqg7MA0GCSqGSIb3DQEB
CwUAA4ICAQDPdyxuVr5Os7aEAJSrR8kN0nbHhp8dB9O2tLsIeK9p0gtJ3jPFrK3CiAJ9Brc1AsFgyb/E6JTe1NOpEyVa/m6irn0F3H3zbPB+po3u2dfOWBfoqSmuc0iH55vKbimhZF8ZE/euBhD/UcabTVUlT5OZEAFTdfETzsemQUHSv4ilf0X8rLiltTMMgsT7B/Zq5SWEXwbKwYY5EdtYzXc7LMJMD16a4/CrPmEbUCTCwPTxGfARKbalGAKb12NMcIxHowNDXLldRqANb/9Zjr7dn3LDWyvfjFvO5QxGbJKyCqNMVEIYFRIYvdr8unRu/8G2oGTYqV9Vrp9canaW2HNnh/tNf1zuacpzEPuKqf2evTY4SUmH9A4U8OmHuD+nT3pajnnUk+S7aFKErGzp85hwVXIy+TSrK0m1zSBi5Dp6Z2Orltxtrpfs/J92VoguZs9btsmksNcFuuEnL5O7Jiqik7Ab846+HUCjuTaPPoIaGl6I6lD4WeKDRikL40Rc4ZW2aZCaFG+XroHPaO+Zmr615+F/+PoTRxZMzG0IQOeLeG9QgkRQP2YGiqtDhFZKDyAthg710tvSeopLzaXoTvFeJiUBWSOgftL2fiFX1ye8FVdMpEbB4IMeDExNH08GGeL5qPQ6gqGyeUN51q1veieQA6TqJIc/2b3Z6fJfUEkc7uzXLg==-----END CERTIFICATE-----# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust# Label: "IdenTrust Commercial Root CA 1"# Serial: 13298821034946342390520003877796839426# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae-----BEGIN 
CERTIFICATE-----MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQwMTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU+ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gpS0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORiT0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCLvYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjKVsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZKdHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHTc+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hvl7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5NiGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQADggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwtLRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3+wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRKW2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pTAwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLql1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZmUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H-----END CERTIFICATE-----# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust# 
Label: "IdenTrust Public Sector Root CA 1"# Serial: 13298821034946342390521976156843933698# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f-----BEGIN CERTIFICATE-----MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcNMzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGyRBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlSbdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vwEUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9VGxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsVWaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gDW/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcNAQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qjt2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHVDRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8GlwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwWmhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4DfWN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5+bl53B/N66+rDt0b20Xke
ucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJtshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhAGaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c-----END CERTIFICATE-----# Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only# Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only# Label: "Entrust Root Certification Authority - G2"# Serial: 1246989352# MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2# SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4# SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39-----BEGIN CERTIFICATE-----MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcyNTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/TRU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWNcCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hWwcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI
2fkBJmqzANBgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZRkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4RnAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmHVHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g==-----END CERTIFICATE-----# Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only# Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only# Label: "Entrust Root Certification Authority - EC1"# Serial: 51543124481930649114116133369# MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc# SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47# SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5-----BEGIN 
CERTIFICATE-----MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMqRW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEyMTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAiA2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBtByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlHBz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVCR98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nXhTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G-----END CERTIFICATE-----# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority# Label: "CFCA EV ROOT"# Serial: 407555286# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd-----BEGIN 
CERTIFICATE-----MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkxMjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpLTIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRzEpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgthxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvPa931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqotaK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNgTnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfVPKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hvcWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNALtbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIBACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObTej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdLjOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBSESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qyP5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9dCi77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+ZAAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su-----END CERTIFICATE-----# Issuer: CN=TÜRKTRUST Elektronik Sertifika Hizmet 
Sağlayıcısı H5 O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş.# Subject: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı H5 O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş.# Label: "TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı H5"# Serial: 156233699172481# MD5 Fingerprint: da:70:8e:f0:22:df:93:26:f6:5f:9f:d3:15:06:52:4e# SHA1 Fingerprint: c4:18:f6:4d:46:d1:df:00:3d:27:30:13:72:43:a9:12:11:c6:75:fb# SHA256 Fingerprint: 49:35:1b:90:34:44:c1:85:cc:dc:5c:69:3d:24:d8:55:5c:b2:08:d6:a8:14:13:07:69:9f:4a:f0:63:19:9d:78-----BEGIN CERTIFICATE-----MIIEJzCCAw+gAwIBAgIHAI4X/iQggTANBgkqhkiG9w0BAQsFADCBsTELMAkGA1UEBhMCVFIxDzANBgNVBAcMBkFua2FyYTFNMEsGA1UECgxEVMOcUktUUlVTVCBCaWxnaSDEsGxldGnFn2ltIHZlIEJpbGnFn2ltIEfDvHZlbmxpxJ9pIEhpem1ldGxlcmkgQS7Fni4xQjBABgNVBAMMOVTDnFJLVFJVU1QgRWxla3Ryb25payBTZXJ0aWZpa2EgSGl6bWV0IFNhxJ9sYXnEsWPEsXPEsSBINTAeFw0xMzA0MzAwODA3MDFaFw0yMzA0MjgwODA3MDFaMIGxMQswCQYDVQQGEwJUUjEPMA0GA1UEBwwGQW5rYXJhMU0wSwYDVQQKDERUw5xSS1RSVVNUIEJpbGdpIMSwbGV0acWfaW0gdmUgQmlsacWfaW0gR8O8dmVubGnEn2kgSGl6bWV0bGVyaSBBLsWeLjFCMEAGA1UEAww5VMOcUktUUlVTVCBFbGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sxc8SxIEg1MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEApCUZ4WWe60ghUEoI5RHwWrom/4NZzkQqL/7hzmAD/I0Dpe3/a6i6zDQGn1k19uwsu537jVJp45wnEFPzpALFp/kRGml1bsMdi9GYjZOHp3GXDSHHmflS0yxjXVW86B8BSLlg/kJK9siArs1mep5Fimh34khon6La8eHBEJ/rPCmBp+EyCNSgBbGM+42WAA4+Jd9ThiI7/PS98wl+d+yG6w8z5UNP9FR1bSmZLmZaQ9/LXMrI5Tjxfjs1nQ/0xVqhzPMggCTTV+wVunUlm+hkS7M0hO8EuPbJbKoCPrZV4jI3X/xml1/N1p7HIL9Nxqw/dV8c7TKcfGkAaZHjIxhT6QIDAQABo0IwQDAdBgNVHQ4EFgQUVpkHHtOsDGlktAxQR95DLL4gwPswDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAJ5FdnsXSDLyOIspve6WSk6BGLFRRyDN0GSxDsnZAdkJzsiZ3GglE9Rc8qPoBP5yCccLqh0lVX6Wmle3usURehnmp349hQ71+S4pL+f5bFgWV1Al9j4uPqrtd3GqqpmWRgqujuwqURawXs3qZwQcWDD1YIq9pr1N5Za0/EKJAWv2cMhQOQwt1WbZyNKzMrcbGW3LM/nfpeYVhDfwwvJllpKQd/Ct9JDpEXjXk4nAPQu6KfTomZ1yju2dL+6SfaHx/126M2CFYv4HAqGEVka+lgqaE9chTLd8B59OTj+RdPsnnRHM3eaxynFNExc5JsUpISuTKWqW+qtB4Uu
2NQvAmxU=-----END CERTIFICATE-----# Issuer: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı H6 O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş.# Subject: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı H6 O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş.# Label: "TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı H6"# Serial: 138134509972618# MD5 Fingerprint: f8:c5:ee:2a:6b:be:95:8d:08:f7:25:4a:ea:71:3e:46# SHA1 Fingerprint: 8a:5c:8c:ee:a5:03:e6:05:56:ba:d8:1b:d4:f6:c9:b0:ed:e5:2f:e0# SHA256 Fingerprint: 8d:e7:86:55:e1:be:7f:78:47:80:0b:93:f6:94:d2:1d:36:8c:c0:6e:03:3e:7f:ab:04:bb:5e:b9:9d:a6:b7:00-----BEGIN CERTIFICATE-----MIIEJjCCAw6gAwIBAgIGfaHyZeyKMA0GCSqGSIb3DQEBCwUAMIGxMQswCQYDVQQGEwJUUjEPMA0GA1UEBwwGQW5rYXJhMU0wSwYDVQQKDERUw5xSS1RSVVNUIEJpbGdpIMSwbGV0acWfaW0gdmUgQmlsacWfaW0gR8O8dmVubGnEn2kgSGl6bWV0bGVyaSBBLsWeLjFCMEAGA1UEAww5VMOcUktUUlVTVCBFbGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sxc8SxIEg2MB4XDTEzMTIxODA5MDQxMFoXDTIzMTIxNjA5MDQxMFowgbExCzAJBgNVBAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExTTBLBgNVBAoMRFTDnFJLVFJVU1QgQmlsZ2kgxLBsZXRpxZ9pbSB2ZSBCaWxpxZ9pbSBHw7x2ZW5sacSfaSBIaXptZXRsZXJpIEEuxZ4uMUIwQAYDVQQDDDlUw5xSS1RSVVNUIEVsZWt0cm9uaWsgU2VydGlmaWthIEhpem1ldCBTYcSfbGF5xLFjxLFzxLEgSDYwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCdsGjW6L0UlqMACprx9MfMkU1xeHe59yEmFXNRFpQJRwXiM/VomjX/3EsvMsew7eKC5W/a2uqsxgbPJQ1BgfbBOCK9+bGlprMBvD9QFyv26WZV1DOzXPhDIHiTVRZwGTLmiddk671IUP320EEDwnS3/faAz1vFq6TWlRKb55cTMgPp1KtDWxbtMyJkKbbSk60vbNg9tvYdDjTu0n2pVQ8g9P0pu5FbHH3GQjhtQiht1AH7zYiXSX6484P4tZgvsycLSF5W506jM7NE1qXyGJTtHB6plVxiSvgNZ1GpryHV+DKdeboaX+UEVU0TRv/yz3THGmNtwx8XEsMeED5gCLMxAgMBAAGjQjBAMB0GA1UdDgQWBBTdVRcT9qzoSCHK77Wv0QAy7Z6MtTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAQEAb1gNl0OqFlQ+v6nfkkU/hQu7VtMMUszIv3ZnXuaqs6fvuay0EBQNdH49ba3RfdCaqaXKGDsCQC4qnFAUi/5XfldcEQlLNkVS9z2sFP1E34uXI9TDwe7UU5X+LEr+DXCqu4svLcsyo4LyVN/Y8t3XSHLuSqMplsNEzm61kod2pLv0kmzOLBQJZo6NrRa1xxsJYTvjIKIDgI6tflEATseWhvtDmHd9KMeP2Cpu54Rvl0EpABZeTeIT6lnAY2
c6RPuY/ATTMHKm9ocJV612ph1jmv3XZch4gyt1O6VbuA1df74jrlZVlFjvH4GMKrLN5ptjnhi85WsGtAuYSyher4hYyw==-----END CERTIFICATE-----# Issuer: CN=Certinomis - Root CA O=Certinomis OU=0002 433998903# Subject: CN=Certinomis - Root CA O=Certinomis OU=0002 433998903# Label: "Certinomis - Root CA"# Serial: 1# MD5 Fingerprint: 14:0a:fd:8d:a8:28:b5:38:69:db:56:7e:61:22:03:3f# SHA1 Fingerprint: 9d:70:bb:01:a5:a4:a0:18:11:2e:f7:1c:01:b9:32:c5:34:e7:88:a8# SHA256 Fingerprint: 2a:99:f5:bc:11:74:b7:3c:bb:1d:62:08:84:e0:1c:34:e5:1c:cb:39:78:da:12:5f:0e:33:26:88:83:bf:41:58-----BEGIN CERTIFICATE-----MIIFkjCCA3qgAwIBAgIBATANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJGUjETMBEGA1UEChMKQ2VydGlub21pczEXMBUGA1UECxMOMDAwMiA0MzM5OTg5MDMxHTAbBgNVBAMTFENlcnRpbm9taXMgLSBSb290IENBMB4XDTEzMTAyMTA5MTcxOFoXDTMzMTAyMTA5MTcxOFowWjELMAkGA1UEBhMCRlIxEzARBgNVBAoTCkNlcnRpbm9taXMxFzAVBgNVBAsTDjAwMDIgNDMzOTk4OTAzMR0wGwYDVQQDExRDZXJ0aW5vbWlzIC0gUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANTMCQosP5L2fxSeC5yaah1AMGT9qt8OHgZbn1CF6s2Nq0Nn3rD6foCWnoR4kkjW4znuzuRZWJflLieY6pOod5tK8O90gC3rMB+12ceAnGInkYjwSond3IjmFPnVAy//ldu9n+ws+hQVWZUKxkd8aRi5pwP5ynapz8dvtF4F/u7BUrJ1Mofs7SlmO/NKFoL21prbcpjp3vDFTKWrteoB4owuZH9kb/2jJZOLyKIOSY008B/sWEUuNKqEUL3nskoTuLAPrjhdsKkb5nPJWqHZZkCqqU2mNAKthH6yI8H7KsZn9DS2sJVqM09xRLWtwHkziOC/7aOgFLScCbAK42C++PhmiM1b8XcF4LVzbsF9Ri6OSyemzTUK/eVNfaoqoynHWmgE6OXWk6RiwsXm9E/G+Z8ajYJJGYrKWUM66A0ywfRMEwNvbqY/kXPLynNvEiCL7sCCeN5LLsJJwx3tFvYk9CcbXFcx3FXuqB5vbKziRcxXV4p1VxngtViZSTYxPDMBbRZKzbgqg4SGm/lg0h9tkQPTYKbVPZrdd5A9NaSfD171UkRpucC63M9933zZxKyGIjK8e2uR73r4F2iw4lNVYC2vPsKD2NkJK/DAZNuHi5HMkesE/Xa0lZrmFAYb1TQdvtj/dBxThZngWVJKYe2InmtJiUZ+IFrZ50rlau7SZRFDAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTvkUz1pcMw6C8I6tNxIqSSaHh02TAfBgNVHSMEGDAWgBTvkUz1pcMw6C8I6tNxIqSSaHh02TANBgkqhkiG9w0BAQsFAAOCAgEAfj1U2iJdGlg+O1QnurrMyOMaauo++RLrVl89UM7g6kgmJs95Vn6RHJk/0KGRHCwPT5iVWVO90CLYiF2cN/z7ZMF4jIuaYAnq1fohX9B0ZedQxb8uuQsLrbWwF6YSjNRieOpWauwK0kDDPAUwPk2Ut59KA9N9J0u2/kTO+hkzGm2kQtHdzMjI1xZSg081lLMSVX3l4
kLr5JyTCcBMWwerx20RoFAXlCOotQqSD7J6wWAsOMwaplv/8gzjqh8c3LigkyfeY+N/IZ865Z764BNqdeuWXGKRlI5nU7aJ+BIJy29SWwNyhlCVCNSNh4YVH5Uk2KRvms6knZtt0rJ2BobGVgjF6wnaNsIbW0G+YSrjcOa4pvi2WsS9Iff/ql+hbHY5ZtbqTFXhADObE5hjyW/QASAJN1LnDE8+zbz1X5YnpyACleAu6AdBBR8Vbtaw5BngDwKTACdyxYvRVB9dSsNAl35VpnzBMwQUAR1JIGkLGZOdblgi90AMRgwjY/M50n92Uaf0yKHxDHYiI0ZSKS3io0EHVmmY0gUJvGnHWmHNj4FgFU2A3ZDifcRQ8ow7bkrHxuaAKzyBvBGAFhAn1/DNP3nMcyrDflOR1m749fPH0FFNjkulW+YZFzvWgQncItzujrnEj1PhZ7szuIgVRs/taTX/dQ1G885x4cVrhkIGuUE=-----END CERTIFICATE-----# Issuer: CN=Entrust.net Secure Server Certification Authority O=Entrust.net OU=www.entrust.net/CPS incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited# Subject: CN=Entrust.net Secure Server Certification Authority O=Entrust.net OU=www.entrust.net/CPS incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited# Label: "Entrust.net Secure Server CA"# Serial: 927650371# MD5 Fingerprint: df:f2:80:73:cc:f1:e6:61:73:fc:f5:42:e9:c5:7c:ee# SHA1 Fingerprint: 99:a6:9b:e6:1a:fe:88:6b:4d:2b:82:00:7c:b8:54:fc:31:7e:15:39# SHA256 Fingerprint: 62:f2:40:27:8c:56:4c:4d:d8:bf:7d:9d:4f:6f:36:6e:a8:94:d2:2f:5f:34:d9:89:a9:83:ac:ec:2f:ff:ed:50-----BEGIN 
CERTIFICATE-----MIIE2DCCBEGgAwIBAgIEN0rSQzANBgkqhkiG9w0BAQUFADCBwzELMAkGA1UEBhMCVVMxFDASBgNVBAoTC0VudHJ1c3QubmV0MTswOQYDVQQLEzJ3d3cuZW50cnVzdC5uZXQvQ1BTIGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxpYWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDE6MDgGA1UEAxMxRW50cnVzdC5uZXQgU2VjdXJlIFNlcnZlciBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw05OTA1MjUxNjA5NDBaFw0xOTA1MjUxNjM5NDBaMIHDMQswCQYDVQQGEwJVUzEUMBIGA1UEChMLRW50cnVzdC5uZXQxOzA5BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5jb3JwLiBieSByZWYuIChsaW1pdHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBFbnRydXN0Lm5ldCBMaW1pdGVkMTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUgU2VydmVyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGdMA0GCSqGSIb3DQEBAQUAA4GLADCBhwKBgQDNKIM0VBuJ8w+vN5Ex/68xYMmo6LIQaO2f55M28Qpku0f1BBc/I0dNxScZgSYMVHINiC3ZH5oSn7yzcdOAGT9HZnuMNSjSuQrfJNqc1lB5gXpa0zf3wkrYKZImZNHkmGw6AIr1NJtl+O3jEP/9uElY3KDegjlrgbEWGWG5VLbmQwIBA6OCAdcwggHTMBEGCWCGSAGG+EIBAQQEAwIABzCCARkGA1UdHwSCARAwggEMMIHeoIHboIHYpIHVMIHSMQswCQYDVQQGEwJVUzEUMBIGA1UEChMLRW50cnVzdC5uZXQxOzA5BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5jb3JwLiBieSByZWYuIChsaW1pdHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBFbnRydXN0Lm5ldCBMaW1pdGVkMTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUgU2VydmVyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMCmgJ6AlhiNodHRwOi8vd3d3LmVudHJ1c3QubmV0L0NSTC9uZXQxLmNybDArBgNVHRAEJDAigA8xOTk5MDUyNTE2MDk0MFqBDzIwMTkwNTI1MTYwOTQwWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAU8BdiE1U9s/8KAGv7UISX8+1i0BowHQYDVR0OBBYEFPAXYhNVPbP/CgBr+1CEl/PtYtAaMAwGA1UdEwQFMAMBAf8wGQYJKoZIhvZ9B0EABAwwChsEVjQuMAMCBJAwDQYJKoZIhvcNAQEFBQADgYEAkNwwAvpkdMKnCqV8IY00F6j7Rw7/JXyNEwr75Ji174z4xRAN95K+8cPV1ZVqBLssziY2ZcgxxufuP+NXdYR6Ee9GTxj005i7qIcyunL2POI9n9cd2cNgQ4xYDiKWL2KjLB+6rQXvqzJ4h6BUcxm1XAX5Uj5tLUUL9wqT6u0G+bI=-----END CERTIFICATE-----# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 2 Policy Validation Authority# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. 
OU=ValiCert Class 2 Policy Validation Authority# Label: "ValiCert Class 2 VA"# Serial: 1# MD5 Fingerprint: a9:23:75:9b:ba:49:36:6e:31:c2:db:f2:e7:66:ba:87# SHA1 Fingerprint: 31:7a:2a:d0:7f:2b:33:5e:f5:a1:c3:4e:4b:57:e8:b7:d8:f1:fc:a6# SHA256 Fingerprint: 58:d0:17:27:9c:d4:dc:63:ab:dd:b1:96:a6:c9:90:6c:30:c4:e0:87:83:ea:e8:c1:60:99:54:d6:93:55:59:6b-----BEGIN CERTIFICATE-----MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENsYXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMTk1NFoXDTE5MDYyNjAwMTk1NFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENsYXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDOOnHK5avIWZJV16vYdA757tn2VUdZZUcOBVXc65g2PFxTXdMwzzjsvUGJ7SVCCSRrCl6zfN1SLUzm1NZ9WlmpZdRJEy0kTRxQb7XBhVQ7/nHk01xC+YDgkRoKWzk2Z/M/VXwbP7RfZHM047QSv4dk+NoS/zcnwbNDu+97bi5p9wIDAQABMA0GCSqGSIb3DQEBBQUAA4GBADt/UG9vUJSZSWI4OB9L+KXIPqeCgfYrx+jFzug6EILLGACOTb2oWH+heQC1u+mNr0HZDzTuIYEZoDJJKPTEjlbVUjP9UNV+mWwD5MlM/Mtsq2azSiGM5bUMMj4QssxsodyamEwCW/POuZ6lcg5Ktz885hZo+L7tdEy8W9ViH0Pd-----END CERTIFICATE-----# Issuer: CN=NetLock Expressz (Class C) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok# Subject: CN=NetLock Expressz (Class C) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. 
OU=Tanusitvanykiadok# Label: "NetLock Express (Class C) Root"# Serial: 104# MD5 Fingerprint: 4f:eb:f1:f0:70:c2:80:63:5d:58:9f:da:12:3c:a9:c4# SHA1 Fingerprint: e3:92:51:2f:0a:cf:f5:05:df:f6:de:06:7f:75:37:e1:65:ea:57:4b# SHA256 Fingerprint: 0b:5e:ed:4e:84:64:03:cf:55:e0:65:84:84:40:ed:2a:82:75:8b:f5:b9:aa:1f:25:3d:46:13:cf:a0:80:ff:3f-----BEGIN CERTIFICATE-----MIIFTzCCBLigAwIBAgIBaDANBgkqhkiG9w0BAQQFADCBmzELMAkGA1UEBhMCSFUxETAPBgNVBAcTCEJ1ZGFwZXN0MScwJQYDVQQKEx5OZXRMb2NrIEhhbG96YXRiaXp0b25zYWdpIEtmdC4xGjAYBgNVBAsTEVRhbnVzaXR2YW55a2lhZG9rMTQwMgYDVQQDEytOZXRMb2NrIEV4cHJlc3N6IChDbGFzcyBDKSBUYW51c2l0dmFueWtpYWRvMB4XDTk5MDIyNTE0MDgxMVoXDTE5MDIyMDE0MDgxMVowgZsxCzAJBgNVBAYTAkhVMREwDwYDVQQHEwhCdWRhcGVzdDEnMCUGA1UEChMeTmV0TG9jayBIYWxvemF0Yml6dG9uc2FnaSBLZnQuMRowGAYDVQQLExFUYW51c2l0dmFueWtpYWRvazE0MDIGA1UEAxMrTmV0TG9jayBFeHByZXNzeiAoQ2xhc3MgQykgVGFudXNpdHZhbnlraWFkbzCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA6+ywbGGKIyWvYCDj2Z/8kwvbXY2wobNAOoLO/XXgeDIDhlqGlZHtU/qdQPzm6N3ZW3oDvV3zOwzDUXmbrVWg6dADEK8KuhRC2VImESLH0iDMgqSaqf64gXadarfSNnU+sYYJ9m5tfk63euyucYT2BDMIJTLrdKwWRMbkQJMdf60CAwEAAaOCAp8wggKbMBIGA1UdEwEB/wQIMAYBAf8CAQQwDgYDVR0PAQH/BAQDAgAGMBEGCWCGSAGG+EIBAQQEAwIABzCCAmAGCWCGSAGG+EIBDQSCAlEWggJNRklHWUVMRU0hIEV6ZW4gdGFudXNpdHZhbnkgYSBOZXRMb2NrIEtmdC4gQWx0YWxhbm9zIFN6b2xnYWx0YXRhc2kgRmVsdGV0ZWxlaWJlbiBsZWlydCBlbGphcmFzb2sgYWxhcGphbiBrZXN6dWx0LiBBIGhpdGVsZXNpdGVzIGZvbHlhbWF0YXQgYSBOZXRMb2NrIEtmdC4gdGVybWVrZmVsZWxvc3NlZy1iaXp0b3NpdGFzYSB2ZWRpLiBBIGRpZ2l0YWxpcyBhbGFpcmFzIGVsZm9nYWRhc2FuYWsgZmVsdGV0ZWxlIGF6IGVsb2lydCBlbGxlbm9yemVzaSBlbGphcmFzIG1lZ3RldGVsZS4gQXogZWxqYXJhcyBsZWlyYXNhIG1lZ3RhbGFsaGF0byBhIE5ldExvY2sgS2Z0LiBJbnRlcm5ldCBob25sYXBqYW4gYSBodHRwczovL3d3dy5uZXRsb2NrLm5ldC9kb2NzIGNpbWVuIHZhZ3kga2VyaGV0byBheiBlbGxlbm9yemVzQG5ldGxvY2submV0IGUtbWFpbCBjaW1lbi4gSU1QT1JUQU5UISBUaGUgaXNzdWFuY2UgYW5kIHRoZSB1c2Ugb2YgdGhpcyBjZXJ0aWZpY2F0ZSBpcyBzdWJqZWN0IHRvIHRoZSBOZXRMb2NrIENQUyBhdmFpbGFibGUgYXQgaHR0cHM6Ly93d3cubmV0bG9jay5uZXQvZG9jcyBvciBieSBlLW1haWwgYXQgY3BzQG5ldGxvY2submV0LjANBgkqhkiG9w0BA
QQFAAOBgQAQrX/XDDKACtiG8XmYta3UzbM2xJZIwVzNmtkFLp++UOv0JhQQLdRmF/iewSf98e3ke0ugbLWrmldwpu2gpO0u9f38vf5NNwgMvOOWgyL1SRt/Syu0VMGAfJlOHdCM7tCs5ZL6dVb+ZKATj7i4Fp1hBWeAyNDYpQcCNJgEjTME1A==-----END CERTIFICATE-----# Issuer: CN=NetLock Uzleti (Class B) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok# Subject: CN=NetLock Uzleti (Class B) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok# Label: "NetLock Business (Class B) Root"# Serial: 105# MD5 Fingerprint: 39:16:aa:b9:6a:41:e1:14:69:df:9e:6c:3b:72:dc:b6# SHA1 Fingerprint: 87:9f:4b:ee:05:df:98:58:3b:e3:60:d6:33:e7:0d:3f:fe:98:71:af# SHA256 Fingerprint: 39:df:7b:68:2b:7b:93:8f:84:71:54:81:cc:de:8d:60:d8:f2:2e:c5:98:87:7d:0a:aa:c1:2b:59:18:2b:03:12-----BEGIN CERTIFICATE-----MIIFSzCCBLSgAwIBAgIBaTANBgkqhkiG9w0BAQQFADCBmTELMAkGA1UEBhMCSFUxETAPBgNVBAcTCEJ1ZGFwZXN0MScwJQYDVQQKEx5OZXRMb2NrIEhhbG96YXRiaXp0b25zYWdpIEtmdC4xGjAYBgNVBAsTEVRhbnVzaXR2YW55a2lhZG9rMTIwMAYDVQQDEylOZXRMb2NrIFV6bGV0aSAoQ2xhc3MgQikgVGFudXNpdHZhbnlraWFkbzAeFw05OTAyMjUxNDEwMjJaFw0xOTAyMjAxNDEwMjJaMIGZMQswCQYDVQQGEwJIVTERMA8GA1UEBxMIQnVkYXBlc3QxJzAlBgNVBAoTHk5ldExvY2sgSGFsb3phdGJpenRvbnNhZ2kgS2Z0LjEaMBgGA1UECxMRVGFudXNpdHZhbnlraWFkb2sxMjAwBgNVBAMTKU5ldExvY2sgVXpsZXRpIChDbGFzcyBCKSBUYW51c2l0dmFueWtpYWRvMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCx6gTsIKAjwo84YM/HRrPVG/77uZmeBNwcf4xKgZjupNTKihe5In+DCnVMm8Bp2GQ5o+2So/1bXHQawEfKOml2mrriRBf8TKPV/riXiK+IA4kfpPIEPsgHC+b5sy96YhQJRhTKZPWLgLViqNhr1nGTLbO/CVRY7QbrqHvcQ7GhaQIDAQABo4ICnzCCApswEgYDVR0TAQH/BAgwBgEB/wIBBDAOBgNVHQ8BAf8EBAMCAAYwEQYJYIZIAYb4QgEBBAQDAgAHMIICYAYJYIZIAYb4QgENBIICURaCAk1GSUdZRUxFTSEgRXplbiB0YW51c2l0dmFueSBhIE5ldExvY2sgS2Z0LiBBbHRhbGFub3MgU3pvbGdhbHRhdGFzaSBGZWx0ZXRlbGVpYmVuIGxlaXJ0IGVsamFyYXNvayBhbGFwamFuIGtlc3p1bHQuIEEgaGl0ZWxlc2l0ZXMgZm9seWFtYXRhdCBhIE5ldExvY2sgS2Z0LiB0ZXJtZWtmZWxlbG9zc2VnLWJpenRvc2l0YXNhIHZlZGkuIEEgZGlnaXRhbGlzIGFsYWlyYXMgZWxmb2dhZGFzYW5hayBmZWx0ZXRlbGUgYXogZWxvaXJ0IGVsbGVub3J6ZXNpIGVsamFyYXMgbWVndGV0ZWxlLiBBeiBlbGphcmFzIGxlaXJhc2EgbWVndGFsYWx
oYXRvIGEgTmV0TG9jayBLZnQuIEludGVybmV0IGhvbmxhcGphbiBhIGh0dHBzOi8vd3d3Lm5ldGxvY2submV0L2RvY3MgY2ltZW4gdmFneSBrZXJoZXRvIGF6IGVsbGVub3J6ZXNAbmV0bG9jay5uZXQgZS1tYWlsIGNpbWVuLiBJTVBPUlRBTlQhIFRoZSBpc3N1YW5jZSBhbmQgdGhlIHVzZSBvZiB0aGlzIGNlcnRpZmljYXRlIGlzIHN1YmplY3QgdG8gdGhlIE5ldExvY2sgQ1BTIGF2YWlsYWJsZSBhdCBodHRwczovL3d3dy5uZXRsb2NrLm5ldC9kb2NzIG9yIGJ5IGUtbWFpbCBhdCBjcHNAbmV0bG9jay5uZXQuMA0GCSqGSIb3DQEBBAUAA4GBAATbrowXr/gOkDFOzT4JwG06sPgzTEdM43WIEJessDgVkcYplswhwG08pXTP2IKlOcNl40JwuyKQ433bNXbhoLXan3BukxowOR0w2y7jfLKRstE3Kfq51hdcR0/jHTjrn9V7lagonhVK0dHQKwCXoOKSNitjrFgBazMpUIaD8QFI-----END CERTIFICATE-----# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 3 Policy Validation Authority# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 3 Policy Validation Authority# Label: "RSA Root Certificate 1"# Serial: 1# MD5 Fingerprint: a2:6f:53:b7:ee:40:db:4a:68:e7:fa:18:d9:10:4b:72# SHA1 Fingerprint: 69:bd:8c:f4:9c:d3:00:fb:59:2e:17:93:ca:55:6a:f3:ec:aa:35:fb# SHA256 Fingerprint: bc:23:f9:8a:31:3c:b9:2d:e3:bb:fc:3a:5a:9f:44:61:ac:39:49:4c:4a:e1:5a:9e:9d:f1:31:e9:9b:73:01:9a-----BEGIN 
CERTIFICATE-----MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENsYXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMjIzM1oXDTE5MDYyNjAwMjIzM1owgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENsYXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDjmFGWHOjVsQaBalfDcnWTq8+epvzzFlLWLU2fNUSoLgRNB0mKOCn1dzfnt6td3zZxFJmP3MKS8edgkpfs2Ejcv8ECIMYkpChMMFp2bbFc893enhBxoYjHW5tBbcqwuI4V7q0zK89HBFx1cQqYJJgpp0lZpd34t0NiYfPT4tBVPwIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFa7AliEZwgs3x/be0kz9dNnnfS0ChCzycUs4pJqcXgn8nCDQtM+z6lU9PHYkhaM0QTLS6vJn0WuPIqpsHEzXcjFV9+vqDWzf4mH6eglkrh/hXqu1rweN1gqZ8mRzyqBPu3GOd/APhmcGcwTTYJBtYze4D1gCCAPRX5ron+jjBXu-----END CERTIFICATE-----# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 1 Policy Validation Authority# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. 
OU=ValiCert Class 1 Policy Validation Authority# Label: "ValiCert Class 1 VA"# Serial: 1# MD5 Fingerprint: 65:58:ab:15:ad:57:6c:1e:a8:a7:b5:69:ac:bf:ff:eb# SHA1 Fingerprint: e5:df:74:3c:b6:01:c4:9b:98:43:dc:ab:8c:e8:6a:81:10:9f:e4:8e# SHA256 Fingerprint: f4:c1:49:55:1a:30:13:a3:5b:c7:bf:fe:17:a7:f3:44:9b:c1:ab:5b:5a:0a:e7:4b:06:c2:3b:90:00:4c:01:04-----BEGIN CERTIFICATE-----MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENsYXNzIDEgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNTIyMjM0OFoXDTE5MDYyNTIyMjM0OFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENsYXNzIDEgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDYWYJ6ibiWuqYvaG9YLqdUHAZu9OqNSLwxlBfw8068srg1knaw0KWlAdcAAxIiGQj4/xEjm84H9b9pGib+TunRf50sQB1ZaG6m+FiwnRqP0z/x3BkGgagO4DrdyFNFCQbmD3DD+kCmDuJWBQ8YTfwggtFzVXSNdnKgHZ0dwN0/cQIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFBoPUn0LBwGlN+VYH+Wexf+T3GtZMjdd9LvWVXoP+iOBSoh8gfStadS/pyxtuJbdxdA6nLWI8sogTLDAHkY7FkXicnGah5xyf23dKUlRWnFSKsZ4UWKJWsZ7uW7EvV/96aNUcPwnXS3qT6gpf+2SQMT2iLM7XGCK5nPOrf1LXLI-----END CERTIFICATE-----# Issuer: CN=Equifax Secure eBusiness CA-1 O=Equifax Secure Inc.# Subject: CN=Equifax Secure eBusiness CA-1 O=Equifax Secure Inc.# Label: "Equifax Secure eBusiness CA 1"# Serial: 4# MD5 Fingerprint: 64:9c:ef:2e:44:fc:c6:8f:52:07:d0:51:73:8f:cb:3d# SHA1 Fingerprint: da:40:18:8b:91:89:a3:ed:ee:ae:da:97:fe:2f:9d:f5:b7:d1:8a:41# SHA256 Fingerprint: cf:56:ff:46:a4:a1:86:10:9d:d9:65:84:b5:ee:b5:8a:51:0c:42:75:b0:e5:f9:4f:40:bb:ae:86:5e:19:f6:73-----BEGIN 
CERTIFICATE-----MIICgjCCAeugAwIBAgIBBDANBgkqhkiG9w0BAQQFADBTMQswCQYDVQQGEwJVUzEcMBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEmMCQGA1UEAxMdRXF1aWZheCBTZWN1cmUgZUJ1c2luZXNzIENBLTEwHhcNOTkwNjIxMDQwMDAwWhcNMjAwNjIxMDQwMDAwWjBTMQswCQYDVQQGEwJVUzEcMBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEmMCQGA1UEAxMdRXF1aWZheCBTZWN1cmUgZUJ1c2luZXNzIENBLTEwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAM4vGbwXt3fek6lfWg0XTzQaDJj0ItlZ1MRoRvC0NcWFAyDGr0WlIVFFQesWWDYyb+JQYmT5/VGcqiTZ9J2DKocKIdMSODRsjQBuWqDZQu4aIZX5UkxVWsUPOE9G+m34LjXWHXzr4vCwdYDIqROsvojvOm6rXyo4YgKwEnv+j6YDAgMBAAGjZjBkMBEGCWCGSAGG+EIBAQQEAwIABzAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFEp4MlIR21kWNl7fwRQ2QGpHfEyhMB0GA1UdDgQWBBRKeDJSEdtZFjZe38EUNkBqR3xMoTANBgkqhkiG9w0BAQQFAAOBgQB1W6ibAxHm6VZMzfmpTMANmvPMZWnmJXbMWbfWVMMdzZmsGd20hdXgPfxiIKeES1hl8eL5lSE/9dR+WB5Hh1Q+WKG1tfgq73HnvMP2sUlG4tega+VWeponmHxGYhTnyfxuAxJ5gDgdSIKN/Bf+KpYrtWKmpj29f5JZzVoqgrI3eQ==-----END CERTIFICATE-----# Issuer: CN=Equifax Secure Global eBusiness CA-1 O=Equifax Secure Inc.# Subject: CN=Equifax Secure Global eBusiness CA-1 O=Equifax Secure Inc.# Label: "Equifax Secure Global eBusiness CA"# Serial: 1# MD5 Fingerprint: 8f:5d:77:06:27:c4:98:3c:5b:93:78:e7:d7:7d:9b:cc# SHA1 Fingerprint: 7e:78:4a:10:1c:82:65:cc:2d:e1:f1:6d:47:b4:40:ca:d9:0a:19:45# SHA256 Fingerprint: 5f:0b:62:ea:b5:e3:53:ea:65:21:65:16:58:fb:b6:53:59:f4:43:28:0a:4a:fb:d1:04:d7:7d:10:f9:f0:4c:07-----BEGIN 
CERTIFICATE-----MIICkDCCAfmgAwIBAgIBATANBgkqhkiG9w0BAQQFADBaMQswCQYDVQQGEwJVUzEcMBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEtMCsGA1UEAxMkRXF1aWZheCBTZWN1cmUgR2xvYmFsIGVCdXNpbmVzcyBDQS0xMB4XDTk5MDYyMTA0MDAwMFoXDTIwMDYyMTA0MDAwMFowWjELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0VxdWlmYXggU2VjdXJlIEluYy4xLTArBgNVBAMTJEVxdWlmYXggU2VjdXJlIEdsb2JhbCBlQnVzaW5lc3MgQ0EtMTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAuucXkAJlsTRVPEnCUdXfp9E3j9HngXNBUmCbnaEXJnitx7HoJpQytd4zjTov2/KaelpzmKNc6fuKcxtc58O/gGzNqfTWK8D3+ZmqY6KxRwIP1ORROhI8bIpaVIRw28HFkM9yRcuoWcDNM50/o5brhTMhHD4ePmBudpxnhcXIw2ECAwEAAaNmMGQwEQYJYIZIAYb4QgEBBAQDAgAHMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUvqigdHJQa0S3ySPY+6j/s1draGwwHQYDVR0OBBYEFL6ooHRyUGtEt8kj2Puo/7NXa2hsMA0GCSqGSIb3DQEBBAUAA4GBADDiAVGqx+pf2rnQZQ8w1j7aDRRJbpGTJxQx78T3LUX47Me/okENI7SS+RkAZ70Br83gcfxaz2TE4JaY0KNA4gGK7ycH8WUBikQtBmV1UsCGECAhX2xrD2yuCRyv8qIYNMR1pHMc8Y3c7635s3a0kr/clRAevsvIO1qEYBlWlKlV-----END CERTIFICATE-----# Issuer: CN=Thawte Premium Server CA O=Thawte Consulting cc OU=Certification Services Division# Subject: CN=Thawte Premium Server CA O=Thawte Consulting cc OU=Certification Services Division# Label: "Thawte Premium Server CA"# Serial: 1# MD5 Fingerprint: 06:9f:69:79:16:66:90:02:1b:8c:8c:a2:c3:07:6f:3a# SHA1 Fingerprint: 62:7f:8d:78:27:65:63:99:d2:7d:7f:90:44:c9:fe:b3:f3:3e:fa:9a# SHA256 Fingerprint: ab:70:36:36:5c:71:54:aa:29:c2:c2:9f:5d:41:91:16:3b:16:2a:22:25:01:13:57:d5:6d:07:ff:a7:bc:1f:72-----BEGIN 
CERTIFICATE-----MIIDJzCCApCgAwIBAgIBATANBgkqhkiG9w0BAQQFADCBzjELMAkGA1UEBhMCWkExFTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYDVQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjEhMB8GA1UEAxMYVGhhd3RlIFByZW1pdW0gU2VydmVyIENBMSgwJgYJKoZIhvcNAQkBFhlwcmVtaXVtLXNlcnZlckB0aGF3dGUuY29tMB4XDTk2MDgwMTAwMDAwMFoXDTIwMTIzMTIzNTk1OVowgc4xCzAJBgNVBAYTAlpBMRUwEwYDVQQIEwxXZXN0ZXJuIENhcGUxEjAQBgNVBAcTCUNhcGUgVG93bjEdMBsGA1UEChMUVGhhd3RlIENvbnN1bHRpbmcgY2MxKDAmBgNVBAsTH0NlcnRpZmljYXRpb24gU2VydmljZXMgRGl2aXNpb24xITAfBgNVBAMTGFRoYXd0ZSBQcmVtaXVtIFNlcnZlciBDQTEoMCYGCSqGSIb3DQEJARYZcHJlbWl1bS1zZXJ2ZXJAdGhhd3RlLmNvbTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA0jY2aovXwlue2oFBYo847kkEVdbQ7xwblRZH7xhINTpS9CtqBo87L+pW46+GjZ4X9560ZXUCTe/LCaIhUdib0GfQug2SBhRz1JPLlyoAnFxODLz6FVL88kRu2hFKbgifLy3j+ao6hnO2RlNYyIkFvYMRuHM/qgeN9EJN50CdHDcCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQQFAAOBgQAmSCwWwlj66BZ0DKqqX1Q/8tfJeGBeXm43YyJ3Nn6yF8Q0ufUIhfzJATj/Tb7yFkJD57taRvvBxhEf8UqwKEbJw8RCfbz6q1lu1bdRiBHjpIUZa4JMpAwSremkrj/xw0llmozFyD4lt5SZu5IycQfwhl7tUCemDaYj+bvLpgcUQg==-----END CERTIFICATE-----# Issuer: CN=Thawte Server CA O=Thawte Consulting cc OU=Certification Services Division# Subject: CN=Thawte Server CA O=Thawte Consulting cc OU=Certification Services Division# Label: "Thawte Server CA"# Serial: 1# MD5 Fingerprint: c5:70:c4:a2:ed:53:78:0c:c8:10:53:81:64:cb:d0:1d# SHA1 Fingerprint: 23:e5:94:94:51:95:f2:41:48:03:b4:d5:64:d2:a3:a3:f5:d8:8b:8c# SHA256 Fingerprint: b4:41:0b:73:e2:e6:ea:ca:47:fb:c4:2f:8f:a4:01:8a:f4:38:1d:c5:4c:fa:a8:44:50:46:1e:ed:09:45:4d:e9-----BEGIN 
CERTIFICATE-----MIIDEzCCAnygAwIBAgIBATANBgkqhkiG9w0BAQQFADCBxDELMAkGA1UEBhMCWkExFTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYDVQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEmMCQGCSqGSIb3DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wHhcNOTYwODAxMDAwMDAwWhcNMjAxMjMxMjM1OTU5WjCBxDELMAkGA1UEBhMCWkExFTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYDVQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEmMCQGCSqGSIb3DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBANOkUG7I/1Zr5s9dtuoMaHVHoqrC2oQl/Kj0R1HahbUgdJSGHg91yekIYfUGbTBuFRkC6VLAYttNmZ7iagxEOM3+vuNkCXDF/rFrKbYvScg71CcEJRCXL+eQbcAoQpnXTEPew/UhbVSfXcNY4cDk2VuwuNy0e982OsK1ZiIS1ocNAgMBAAGjEzARMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEEBQADgYEAB/pMaVz7lcxG7oWDTSEwjsrZqG9JGubaUeNgcGyEYRGhGshIPllDfU+VPaGLtwtimHp1it2ITk6eQNuozDJ0uW8NxuOzRAvZim+aKZuZGCg70eNAKJpaPNW15yAbi8qkq43pUdniTCxZqdq5snUb9kLy78fyGPmJvKP/iiMucEc=-----END CERTIFICATE-----# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority# Subject: O=VeriSign, Inc. 
OU=Class 3 Public Primary Certification Authority# Label: "Verisign Class 3 Public Primary Certification Authority"# Serial: 149843929435818692848040365716851702463# MD5 Fingerprint: 10:fc:63:5d:f6:26:3e:0d:f3:25:be:5f:79:cd:67:67# SHA1 Fingerprint: 74:2c:31:92:e6:07:e4:24:eb:45:49:54:2b:e1:bb:c5:3e:61:74:e2# SHA256 Fingerprint: e7:68:56:34:ef:ac:f6:9a:ce:93:9a:6b:25:5b:7b:4f:ab:ef:42:93:5b:50:a2:65:ac:b5:cb:60:27:e4:4e:70-----BEGIN CERTIFICATE-----MIICPDCCAaUCEHC65B0Q2Sk0tjjKewPMur8wDQYJKoZIhvcNAQECBQAwXzELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2MDEyOTAwMDAwMFoXDTI4MDgwMTIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDJXFme8huKARS0EN8EQNvjV69qRUCPhAwL0TPZ2RHP7gJYHyX3KqhEBarsAx94f56TuZoAqiN91qyFomNFx3InzPRMxnVx0jnvT0Lwdd8KkMaOIG+YD/isI19wKTakyYbnsZogy1Olhec9vn2a/iRFM9x2Fe0PonFkTGUugWhFpwIDAQABMA0GCSqGSIb3DQEBAgUAA4GBALtMEivPLCYATxQT3ab7/AoRhIzzKBxnki98tsX63/Dolbwdj2wsqFHMc9ikwFPwTtYmwHYBV4GSXiHx0bH/59AhWM1pF+NEHJwZRDmJXNycAA9WjQKZ7aKQRUzkuxCkPfAyAw7xzvjoyVGM5mKf5p/AfbdynMk2OmufTqj/ZA1k-----END CERTIFICATE-----# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority# Subject: O=VeriSign, Inc. 
OU=Class 3 Public Primary Certification Authority# Label: "Verisign Class 3 Public Primary Certification Authority"# Serial: 80507572722862485515306429940691309246# MD5 Fingerprint: ef:5a:f1:33:ef:f1:cd:bb:51:02:ee:12:14:4b:96:c4# SHA1 Fingerprint: a1:db:63:93:91:6f:17:e4:18:55:09:40:04:15:c7:02:40:b0:ae:6b# SHA256 Fingerprint: a4:b6:b3:99:6f:c2:f3:06:b3:fd:86:81:bd:63:41:3d:8c:50:09:cc:4f:a3:29:c2:cc:f0:e2:fa:1b:14:03:05-----BEGIN CERTIFICATE-----MIICPDCCAaUCEDyRMcsf9tAbDpq40ES/Er4wDQYJKoZIhvcNAQEFBQAwXzELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2MDEyOTAwMDAwMFoXDTI4MDgwMjIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDJXFme8huKARS0EN8EQNvjV69qRUCPhAwL0TPZ2RHP7gJYHyX3KqhEBarsAx94f56TuZoAqiN91qyFomNFx3InzPRMxnVx0jnvT0Lwdd8KkMaOIG+YD/isI19wKTakyYbnsZogy1Olhec9vn2a/iRFM9x2Fe0PonFkTGUugWhFpwIDAQABMA0GCSqGSIb3DQEBBQUAA4GBABByUqkFFBkyCEHwxWsKzH4PIRnN5GfcX6kb5sroc50i2JhucwNhkcV8sEVAbkSdjbCxlnRhLQ2pRdKkkirWmnWXbj9T/UWZYB2oK0z5XqcJ2HUw19JlYD1n1khVdWk/kfVIC0dpImmClr7JyDiGSnoscxlIaU5rfGW/D/xwzoiQ-----END CERTIFICATE-----# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority - G2/(c) 1998 VeriSign, Inc. - For authorized use only/VeriSign Trust Network# Subject: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority - G2/(c) 1998 VeriSign, Inc. 
- For authorized use only/VeriSign Trust Network# Label: "Verisign Class 3 Public Primary Certification Authority - G2"# Serial: 167285380242319648451154478808036881606# MD5 Fingerprint: a2:33:9b:4c:74:78:73:d4:6c:e7:c1:f3:8d:cb:5c:e9# SHA1 Fingerprint: 85:37:1c:a6:e5:50:14:3d:ce:28:03:47:1b:de:3a:09:e8:f8:77:0f# SHA256 Fingerprint: 83:ce:3c:12:29:68:8a:59:3d:48:5f:81:97:3c:0f:91:95:43:1e:da:37:cc:5e:36:43:0e:79:c7:a8:88:63:8b-----BEGIN CERTIFICATE-----MIIDAjCCAmsCEH3Z/gfPqB63EHln+6eJNMYwDQYJKoZIhvcNAQEFBQAwgcExCzAJBgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xhc3MgMyBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMTowOAYDVQQLEzEoYykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMB4XDTk4MDUxODAwMDAwMFoXDTI4MDgwMTIzNTk1OVowgcExCzAJBgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xhc3MgMyBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMTowOAYDVQQLEzEoYykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDMXtERXVxp0KvTuWpMmR9ZmDCOFoUgRm1HP9SFIIThbbP4pO0M8RcPO/mn+SXXwc+EY/J8Y8+iR/LGWzOOZEAEaMGAuWQcRXfH2G71lSk8UOg013gfqLptQ5GVj0VXXn7F+8qkBOvqlzdUMG+7AUcyM83cV5tkaWH4mx0ciU9cZwIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFFNzb5cy5gZnBWyATl4Lk0PZ3BwmcYQWpSkU01UbSuvDV1Ai2TT1+7eVmGSX6bEHRBhNtMsJzzoKQm5EWR0zLVznxxIqbxhAe7iF6YM40AIOw7n60RzKprxaZLvcRTDOaxxp5EJb+RxBrO6WVcmeQD2+A2iMzAo1KpYoJ2daZH9-----END CERTIFICATE-----# Issuer: CN=GTE CyberTrust Global Root O=GTE Corporation OU=GTE CyberTrust Solutions, Inc.# Subject: CN=GTE CyberTrust Global Root O=GTE Corporation OU=GTE CyberTrust Solutions, Inc.# Label: "GTE CyberTrust Global Root"# Serial: 421# MD5 Fingerprint: ca:3d:d3:68:f1:03:5c:d0:32:fa:b8:2b:59:e8:5a:db# SHA1 Fingerprint: 97:81:79:50:d8:1c:96:70:cc:34:d8:09:cf:79:44:31:36:7e:f4:74# SHA256 Fingerprint: 
a5:31:25:18:8d:21:10:aa:96:4b:02:c7:b7:c6:da:32:03:17:08:94:e5:fb:71:ff:fb:66:67:d5:e6:81:0a:36-----BEGIN CERTIFICATE-----MIICWjCCAcMCAgGlMA0GCSqGSIb3DQEBBAUAMHUxCzAJBgNVBAYTAlVTMRgwFgYDVQQKEw9HVEUgQ29ycG9yYXRpb24xJzAlBgNVBAsTHkdURSBDeWJlclRydXN0IFNvbHV0aW9ucywgSW5jLjEjMCEGA1UEAxMaR1RFIEN5YmVyVHJ1c3QgR2xvYmFsIFJvb3QwHhcNOTgwODEzMDAyOTAwWhcNMTgwODEzMjM1OTAwWjB1MQswCQYDVQQGEwJVUzEYMBYGA1UEChMPR1RFIENvcnBvcmF0aW9uMScwJQYDVQQLEx5HVEUgQ3liZXJUcnVzdCBTb2x1dGlvbnMsIEluYy4xIzAhBgNVBAMTGkdURSBDeWJlclRydXN0IEdsb2JhbCBSb290MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCVD6C28FCc6HrHiM3dFw4usJTQGz0O9pTAipTHBsiQl8i4ZBp6fmw8U+E3KHNgf7KXUwefU/ltWJTSr41tiGeA5u2ylc9yMcqlHHK6XALnZELn+aks1joNrI1CqiQBOeacPwGFVw1Yh0X404Wqk2kmhXBIgD8SFcd5tB8FLztimQIDAQABMA0GCSqGSIb3DQEBBAUAA4GBAG3rGwnpXtlR22ciYaQqPEh346B8pt5zohQDhT37qw4wxYMWM4ETCJ57NE7fQMh017l93PR2VX2bY1QY6fDq81yx2YtCHrnAlU66+tXifPVoYb+O7AWXX1uw16OFNMQkpw0PlZPvy5TYnh+dXIVtx6quTx8itc2VrbqnzPmrC3p/-----END CERTIFICATE-----
# -*- coding: utf-8 -*-"""requests.auth~~~~~~~~~~~~~This module contains the authentication handlers for Requests."""import osimport reimport timeimport hashlibimport threadingfrom base64 import b64encodefrom .compat import urlparse, strfrom .cookies import extract_cookies_to_jarfrom .utils import parse_dict_header, to_native_stringfrom .status_codes import codesCONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded'CONTENT_TYPE_MULTI_PART = 'multipart/form-data'def _basic_auth_str(username, password):"""Returns a Basic Auth string."""authstr = 'Basic ' + to_native_string(b64encode(('%s:%s' % (username, password)).encode('latin1')).strip())return authstrclass AuthBase(object):"""Base class that all auth implementations derive from"""def __call__(self, r):raise NotImplementedError('Auth hooks must be callable.')class HTTPBasicAuth(AuthBase):"""Attaches HTTP Basic Authentication to the given Request object."""def __init__(self, username, password):self.username = usernameself.password = passworddef __eq__(self, other):return all([self.username == getattr(other, 'username', None),self.password == getattr(other, 'password', None)])def __ne__(self, other):return not self == otherdef __call__(self, r):r.headers['Authorization'] = _basic_auth_str(self.username, self.password)return rclass HTTPProxyAuth(HTTPBasicAuth):"""Attaches HTTP Proxy Authentication to a given Request object."""def __call__(self, r):r.headers['Proxy-Authorization'] = _basic_auth_str(self.username, self.password)return rclass HTTPDigestAuth(AuthBase):"""Attaches HTTP Digest Authentication to the given Request object."""def __init__(self, username, password):self.username = usernameself.password = password# Keep state in per-thread local storageself._thread_local = threading.local()def init_per_thread_state(self):# Ensure state is initialized just once per-threadif not hasattr(self._thread_local, 'init'):self._thread_local.init = Trueself._thread_local.last_nonce = 
''self._thread_local.nonce_count = 0self._thread_local.chal = {}self._thread_local.pos = Noneself._thread_local.num_401_calls = Nonedef build_digest_header(self, method, url):""":rtype: str"""realm = self._thread_local.chal['realm']nonce = self._thread_local.chal['nonce']qop = self._thread_local.chal.get('qop')algorithm = self._thread_local.chal.get('algorithm')opaque = self._thread_local.chal.get('opaque')hash_utf8 = Noneif algorithm is None:_algorithm = 'MD5'else:_algorithm = algorithm.upper()# lambdas assume digest modules are imported at the top levelif _algorithm == 'MD5' or _algorithm == 'MD5-SESS':def md5_utf8(x):if isinstance(x, str):x = x.encode('utf-8')return hashlib.md5(x).hexdigest()hash_utf8 = md5_utf8elif _algorithm == 'SHA':def sha_utf8(x):if isinstance(x, str):x = x.encode('utf-8')return hashlib.sha1(x).hexdigest()hash_utf8 = sha_utf8KD = lambda s, d: hash_utf8("%s:%s" % (s, d))if hash_utf8 is None:return None# XXX not implemented yetentdig = Nonep_parsed = urlparse(url)#: path is request-uri defined in RFC 2616 which should not be emptypath = p_parsed.path or "/"if p_parsed.query:path += '?' 
+ p_parsed.queryA1 = '%s:%s:%s' % (self.username, realm, self.password)A2 = '%s:%s' % (method, path)HA1 = hash_utf8(A1)HA2 = hash_utf8(A2)if nonce == self._thread_local.last_nonce:self._thread_local.nonce_count += 1else:self._thread_local.nonce_count = 1ncvalue = '%08x' % self._thread_local.nonce_counts = str(self._thread_local.nonce_count).encode('utf-8')s += nonce.encode('utf-8')s += time.ctime().encode('utf-8')s += os.urandom(8)cnonce = (hashlib.sha1(s).hexdigest()[:16])if _algorithm == 'MD5-SESS':HA1 = hash_utf8('%s:%s:%s' % (HA1, nonce, cnonce))if not qop:respdig = KD(HA1, "%s:%s" % (nonce, HA2))elif qop == 'auth' or 'auth' in qop.split(','):noncebit = "%s:%s:%s:%s:%s" % (nonce, ncvalue, cnonce, 'auth', HA2)respdig = KD(HA1, noncebit)else:# XXX handle auth-int.return Noneself._thread_local.last_nonce = nonce# XXX should the partial digests be encoded too?base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \'response="%s"' % (self.username, realm, nonce, path, respdig)if opaque:base += ', opaque="%s"' % opaqueif algorithm:base += ', algorithm="%s"' % algorithmif entdig:base += ', digest="%s"' % entdigif qop:base += ', qop="auth", nc=%s, cnonce="%s"' % (ncvalue, cnonce)return 'Digest %s' % (base)def handle_redirect(self, r, **kwargs):"""Reset num_401_calls counter on redirects."""if r.is_redirect:self._thread_local.num_401_calls = 1def handle_401(self, r, **kwargs):"""Takes the given response and tries digest-auth, if needed.:rtype: requests.Response"""if self._thread_local.pos is not None:# Rewind the file position indicator of the body to where# it was to resend the request.r.request.body.seek(self._thread_local.pos)s_auth = r.headers.get('www-authenticate', '')if 'digest' in s_auth.lower() and self._thread_local.num_401_calls < 2:self._thread_local.num_401_calls += 1pat = re.compile(r'digest ', flags=re.IGNORECASE)self._thread_local.chal = parse_dict_header(pat.sub('', s_auth, count=1))# Consume content and release the original connection# to allow our 
new request to reuse the same one.r.contentr.close()prep = r.request.copy()extract_cookies_to_jar(prep._cookies, r.request, r.raw)prep.prepare_cookies(prep._cookies)prep.headers['Authorization'] = self.build_digest_header(prep.method, prep.url)_r = r.connection.send(prep, **kwargs)_r.history.append(r)_r.request = prepreturn _rself._thread_local.num_401_calls = 1return rdef __call__(self, r):# Initialize per-thread state, if neededself.init_per_thread_state()# If we have a saved nonce, skip the 401if self._thread_local.last_nonce:r.headers['Authorization'] = self.build_digest_header(r.method, r.url)try:self._thread_local.pos = r.body.tell()except AttributeError:# In the case of HTTPDigestAuth being reused and the body of# the previous request was a file-like object, pos has the# file position of the previous body. Ensure it's set to# None.self._thread_local.pos = Noner.register_hook('response', self.handle_401)r.register_hook('response', self.handle_redirect)self._thread_local.num_401_calls = 1return rdef __eq__(self, other):return all([self.username == getattr(other, 'username', None),self.password == getattr(other, 'password', None)])def __ne__(self, other):return not self == other
# -*- coding: utf-8 -*-"""requests.api~~~~~~~~~~~~This module implements the Requests API.:copyright: (c) 2012 by Kenneth Reitz.:license: Apache2, see LICENSE for more details."""from . import sessionsdef request(method, url, **kwargs):"""Constructs and sends a :class:`Request <Request>`.:param method: method for the new :class:`Request` object.:param url: URL for the new :class:`Request` object.:param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`.:param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.:param json: (optional) json data to send in the body of the :class:`Request`.:param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`.:param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`.:param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload.``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple ``('filename', fileobj, 'content_type')``or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content-type'`` is a stringdefining the content type of the given file and ``custom_headers`` a dict-like object containing additional headersto add for the file.:param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth.:param timeout: (optional) How long to wait for the server to send databefore giving up, as a float, or a :ref:`(connect timeout, readtimeout) <timeouts>` tuple.:type timeout: float or tuple:param allow_redirects: (optional) Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.:type allow_redirects: bool:param proxies: (optional) Dictionary mapping protocol to the URL of the proxy.:param verify: (optional) whether the SSL cert will be verified. A CA_BUNDLE path can also be provided. 
Defaults to ``True``.:param stream: (optional) if ``False``, the response content will be immediately downloaded.:param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair.:return: :class:`Response <Response>` object:rtype: requests.ResponseUsage::>>> import requests>>> req = requests.request('GET', 'http://httpbin.org/get')<Response [200]>"""# By using the 'with' statement we are sure the session is closed, thus we# avoid leaving sockets open which can trigger a ResourceWarning in some# cases, and look like a memory leak in others.with sessions.Session() as session:return session.request(method=method, url=url, **kwargs)def get(url, params=None, **kwargs):"""Sends a GET request.:param url: URL for the new :class:`Request` object.:param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`.:param \*\*kwargs: Optional arguments that ``request`` takes.:return: :class:`Response <Response>` object:rtype: requests.Response"""kwargs.setdefault('allow_redirects', True)return request('get', url, params=params, **kwargs)def options(url, **kwargs):"""Sends a OPTIONS request.:param url: URL for the new :class:`Request` object.:param \*\*kwargs: Optional arguments that ``request`` takes.:return: :class:`Response <Response>` object:rtype: requests.Response"""kwargs.setdefault('allow_redirects', True)return request('options', url, **kwargs)def head(url, **kwargs):"""Sends a HEAD request.:param url: URL for the new :class:`Request` object.:param \*\*kwargs: Optional arguments that ``request`` takes.:return: :class:`Response <Response>` object:rtype: requests.Response"""kwargs.setdefault('allow_redirects', False)return request('head', url, **kwargs)def post(url, data=None, json=None, **kwargs):"""Sends a POST request.:param url: URL for the new :class:`Request` object.:param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.:param json: (optional) 
json data to send in the body of the :class:`Request`.:param \*\*kwargs: Optional arguments that ``request`` takes.:return: :class:`Response <Response>` object:rtype: requests.Response"""return request('post', url, data=data, json=json, **kwargs)def put(url, data=None, **kwargs):"""Sends a PUT request.:param url: URL for the new :class:`Request` object.:param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.:param \*\*kwargs: Optional arguments that ``request`` takes.:return: :class:`Response <Response>` object:rtype: requests.Response"""return request('put', url, data=data, **kwargs)def patch(url, data=None, **kwargs):"""Sends a PATCH request.:param url: URL for the new :class:`Request` object.:param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.:param \*\*kwargs: Optional arguments that ``request`` takes.:return: :class:`Response <Response>` object:rtype: requests.Response"""return request('patch', url, data=data, **kwargs)def delete(url, **kwargs):"""Sends a DELETE request.:param url: URL for the new :class:`Request` object.:param \*\*kwargs: Optional arguments that ``request`` takes.:return: :class:`Response <Response>` object:rtype: requests.Response"""return request('delete', url, **kwargs)
# -*- coding: utf-8 -*-"""requests.adapters~~~~~~~~~~~~~~~~~This module contains the transport adapters that Requests uses to defineand maintain connections."""import os.pathimport socketfrom .models import Responsefrom .packages.urllib3.poolmanager import PoolManager, proxy_from_urlfrom .packages.urllib3.response import HTTPResponsefrom .packages.urllib3.util import Timeout as TimeoutSaucefrom .packages.urllib3.util.retry import Retryfrom .compat import urlparse, basestringfrom .utils import (DEFAULT_CA_BUNDLE_PATH, get_encoding_from_headers,prepend_scheme_if_needed, get_auth_from_url, urldefragauth,select_proxy, to_native_string)from .structures import CaseInsensitiveDictfrom .packages.urllib3.exceptions import ClosedPoolErrorfrom .packages.urllib3.exceptions import ConnectTimeoutErrorfrom .packages.urllib3.exceptions import HTTPError as _HTTPErrorfrom .packages.urllib3.exceptions import MaxRetryErrorfrom .packages.urllib3.exceptions import NewConnectionErrorfrom .packages.urllib3.exceptions import ProxyError as _ProxyErrorfrom .packages.urllib3.exceptions import ProtocolErrorfrom .packages.urllib3.exceptions import ReadTimeoutErrorfrom .packages.urllib3.exceptions import SSLError as _SSLErrorfrom .packages.urllib3.exceptions import ResponseErrorfrom .cookies import extract_cookies_to_jarfrom .exceptions import (ConnectionError, ConnectTimeout, ReadTimeout, SSLError,ProxyError, RetryError, InvalidSchema)from .auth import _basic_auth_strtry:from .packages.urllib3.contrib.socks import SOCKSProxyManagerexcept ImportError:def SOCKSProxyManager(*args, **kwargs):raise InvalidSchema("Missing dependencies for SOCKS support.")DEFAULT_POOLBLOCK = FalseDEFAULT_POOLSIZE = 10DEFAULT_RETRIES = 0DEFAULT_POOL_TIMEOUT = Noneclass BaseAdapter(object):"""The Base Transport Adapter"""def __init__(self):super(BaseAdapter, self).__init__()def send(self, request, stream=False, timeout=None, verify=True,cert=None, proxies=None):"""Sends PreparedRequest object. 
Returns Response object.:param request: The :class:`PreparedRequest <PreparedRequest>` being sent.:param stream: (optional) Whether to stream the request content.:param timeout: (optional) How long to wait for the server to senddata before giving up, as a float, or a :ref:`(connect timeout,read timeout) <timeouts>` tuple.:type timeout: float or tuple:param verify: (optional) Whether to verify SSL certificates.:param cert: (optional) Any user-provided SSL certificate to be trusted.:param proxies: (optional) The proxies dictionary to apply to the request."""raise NotImplementedErrordef close(self):"""Cleans up adapter specific items."""raise NotImplementedErrorclass HTTPAdapter(BaseAdapter):"""The built-in HTTP Adapter for urllib3.Provides a general-case interface for Requests sessions to contact HTTP andHTTPS urls by implementing the Transport Adapter interface. This class willusually be created by the :class:`Session <Session>` class under thecovers.:param pool_connections: The number of urllib3 connection pools to cache.:param pool_maxsize: The maximum number of connections to save in the pool.:param max_retries: The maximum number of retries each connectionshould attempt. Note, this applies only to failed DNS lookups, socketconnections and connection timeouts, never to requests where data hasmade it to the server. By default, Requests does not retry failedconnections. 
If you need granular control over the conditions underwhich we retry a request, import urllib3's ``Retry`` class and passthat instead.:param pool_block: Whether the connection pool should block for connections.Usage::>>> import requests>>> s = requests.Session()>>> a = requests.adapters.HTTPAdapter(max_retries=3)>>> s.mount('http://', a)"""__attrs__ = ['max_retries', 'config', '_pool_connections', '_pool_maxsize','_pool_block']def __init__(self, pool_connections=DEFAULT_POOLSIZE,pool_maxsize=DEFAULT_POOLSIZE, max_retries=DEFAULT_RETRIES,pool_block=DEFAULT_POOLBLOCK):if max_retries == DEFAULT_RETRIES:self.max_retries = Retry(0, read=False)else:self.max_retries = Retry.from_int(max_retries)self.config = {}self.proxy_manager = {}super(HTTPAdapter, self).__init__()self._pool_connections = pool_connectionsself._pool_maxsize = pool_maxsizeself._pool_block = pool_blockself.init_poolmanager(pool_connections, pool_maxsize, block=pool_block)def __getstate__(self):return dict((attr, getattr(self, attr, None)) for attr inself.__attrs__)def __setstate__(self, state):# Can't handle by adding 'proxy_manager' to self.__attrs__ because# self.poolmanager uses a lambda function, which isn't pickleable.self.proxy_manager = {}self.config = {}for attr, value in state.items():setattr(self, attr, value)self.init_poolmanager(self._pool_connections, self._pool_maxsize,block=self._pool_block)def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs):"""Initializes a urllib3 PoolManager.This method should not be called from user code, and is onlyexposed for use when subclassing the:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.:param connections: The number of urllib3 connection pools to cache.:param maxsize: The maximum number of connections to save in the pool.:param block: Block when no free connections are available.:param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager."""# save these values for picklingself._pool_connections = 
connectionsself._pool_maxsize = maxsizeself._pool_block = blockself.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize,block=block, strict=True, **pool_kwargs)def proxy_manager_for(self, proxy, **proxy_kwargs):"""Return urllib3 ProxyManager for the given proxy.This method should not be called from user code, and is onlyexposed for use when subclassing the:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.:param proxy: The proxy to return a urllib3 ProxyManager for.:param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.:returns: ProxyManager:rtype: requests.packages.urllib3.ProxyManager"""if proxy in self.proxy_manager:manager = self.proxy_manager[proxy]elif proxy.lower().startswith('socks'):username, password = get_auth_from_url(proxy)manager = self.proxy_manager[proxy] = SOCKSProxyManager(proxy,username=username,password=password,num_pools=self._pool_connections,maxsize=self._pool_maxsize,block=self._pool_block,**proxy_kwargs)else:proxy_headers = self.proxy_headers(proxy)manager = self.proxy_manager[proxy] = proxy_from_url(proxy,proxy_headers=proxy_headers,num_pools=self._pool_connections,maxsize=self._pool_maxsize,block=self._pool_block,**proxy_kwargs)return managerdef cert_verify(self, conn, url, verify, cert):"""Verify a SSL certificate. 
This method should not be called from usercode, and is only exposed for use when subclassing the:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.:param conn: The urllib3 connection object associated with the cert.:param url: The requested URL.:param verify: Whether we should actually verify the certificate.:param cert: The SSL certificate to verify."""if url.lower().startswith('https') and verify:cert_loc = None# Allow self-specified cert location.if verify is not True:cert_loc = verifyif not cert_loc:cert_loc = DEFAULT_CA_BUNDLE_PATHif not cert_loc:raise Exception("Could not find a suitable SSL CA certificate bundle.")conn.cert_reqs = 'CERT_REQUIRED'if not os.path.isdir(cert_loc):conn.ca_certs = cert_locelse:conn.ca_cert_dir = cert_locelse:conn.cert_reqs = 'CERT_NONE'conn.ca_certs = Noneconn.ca_cert_dir = Noneif cert:if not isinstance(cert, basestring):conn.cert_file = cert[0]conn.key_file = cert[1]else:conn.cert_file = certdef build_response(self, req, resp):"""Builds a :class:`Response <requests.Response>` object from a urllib3response. 
This should not be called from user code, and is only exposedfor use when subclassing the:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`:param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response.:param resp: The urllib3 response object.:rtype: requests.Response"""response = Response()# Fallback to None if there's no status_code, for whatever reason.response.status_code = getattr(resp, 'status', None)# Make headers case-insensitive.response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {}))# Set encoding.response.encoding = get_encoding_from_headers(response.headers)response.raw = respresponse.reason = response.raw.reasonif isinstance(req.url, bytes):response.url = req.url.decode('utf-8')else:response.url = req.url# Add new cookies from the server.extract_cookies_to_jar(response.cookies, req, resp)# Give the Response some context.response.request = reqresponse.connection = selfreturn responsedef get_connection(self, url, proxies=None):"""Returns a urllib3 connection for the given URL. 
This should not becalled from user code, and is only exposed for use when subclassing the:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.:param url: The URL to connect to.:param proxies: (optional) A Requests-style dictionary of proxies used on this request.:rtype: requests.packages.urllib3.ConnectionPool"""proxy = select_proxy(url, proxies)if proxy:proxy = prepend_scheme_if_needed(proxy, 'http')proxy_manager = self.proxy_manager_for(proxy)conn = proxy_manager.connection_from_url(url)else:# Only scheme should be lower caseparsed = urlparse(url)url = parsed.geturl()conn = self.poolmanager.connection_from_url(url)return conndef close(self):"""Disposes of any internal state.Currently, this closes the PoolManager and any active ProxyManager,which closes any pooled connections."""self.poolmanager.clear()for proxy in self.proxy_manager.values():proxy.clear()def request_url(self, request, proxies):"""Obtain the url to use when making the final request.If the message is being sent through a HTTP proxy, the full URL has tobe used. Otherwise, we should only use the path portion of the URL.This should not be called from user code, and is only exposed for usewhen subclassing the:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.:param request: The :class:`PreparedRequest <PreparedRequest>` being sent.:param proxies: A dictionary of schemes or schemes and hosts to proxy URLs.:rtype: str"""proxy = select_proxy(request.url, proxies)scheme = urlparse(request.url).schemeis_proxied_http_request = (proxy and scheme != 'https')using_socks_proxy = Falseif proxy:proxy_scheme = urlparse(proxy).scheme.lower()using_socks_proxy = proxy_scheme.startswith('socks')url = request.path_urlif is_proxied_http_request and not using_socks_proxy:url = urldefragauth(request.url)return urldef add_headers(self, request, **kwargs):"""Add any headers needed by the connection. 
As of v2.0 this doesnothing by default, but is left for overriding by users that subclassthe :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.This should not be called from user code, and is only exposed for usewhen subclassing the:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.:param request: The :class:`PreparedRequest <PreparedRequest>` to add headers to.:param kwargs: The keyword arguments from the call to send()."""passdef proxy_headers(self, proxy):"""Returns a dictionary of the headers to add to any request sentthrough a proxy. This works with urllib3 magic to ensure that they arecorrectly sent to the proxy, rather than in a tunnelled request ifCONNECT is being used.This should not be called from user code, and is only exposed for usewhen subclassing the:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.:param proxies: The url of the proxy being used for this request.:rtype: dict"""headers = {}username, password = get_auth_from_url(proxy)if username and password:headers['Proxy-Authorization'] = _basic_auth_str(username,password)return headersdef send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):"""Sends PreparedRequest object. 
Returns Response object.:param request: The :class:`PreparedRequest <PreparedRequest>` being sent.:param stream: (optional) Whether to stream the request content.:param timeout: (optional) How long to wait for the server to senddata before giving up, as a float, or a :ref:`(connect timeout,read timeout) <timeouts>` tuple.:type timeout: float or tuple:param verify: (optional) Whether to verify SSL certificates.:param cert: (optional) Any user-provided SSL certificate to be trusted.:param proxies: (optional) The proxies dictionary to apply to the request.:rtype: requests.Response"""conn = self.get_connection(request.url, proxies)self.cert_verify(conn, request.url, verify, cert)url = self.request_url(request, proxies)self.add_headers(request)chunked = not (request.body is None or 'Content-Length' in request.headers)if isinstance(timeout, tuple):try:connect, read = timeouttimeout = TimeoutSauce(connect=connect, read=read)except ValueError as e:# this may raise a string formatting error.err = ("Invalid timeout {0}. 
Pass a (connect, read) ""timeout tuple, or a single float to set ""both timeouts to the same value".format(timeout))raise ValueError(err)else:timeout = TimeoutSauce(connect=timeout, read=timeout)try:if not chunked:resp = conn.urlopen(method=request.method,url=url,body=request.body,headers=request.headers,redirect=False,assert_same_host=False,preload_content=False,decode_content=False,retries=self.max_retries,timeout=timeout)# Send the request.else:if hasattr(conn, 'proxy_pool'):conn = conn.proxy_poollow_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT)try:low_conn.putrequest(request.method,url,skip_accept_encoding=True)for header, value in request.headers.items():low_conn.putheader(header, value)low_conn.endheaders()for i in request.body:low_conn.send(hex(len(i))[2:].encode('utf-8'))low_conn.send(b'\r\n')low_conn.send(i)low_conn.send(b'\r\n')low_conn.send(b'0\r\n\r\n')# Receive the response from the servertry:# For Python 2.7+ versions, use buffering of HTTP# responsesr = low_conn.getresponse(buffering=True)except TypeError:# For compatibility with Python 2.6 versions and backr = low_conn.getresponse()resp = HTTPResponse.from_httplib(r,pool=conn,connection=low_conn,preload_content=False,decode_content=False)except:# If we hit any problems here, clean up the connection.# Then, reraise so that we can handle the actual exception.low_conn.close()raiseexcept (ProtocolError, socket.error) as err:raise ConnectionError(err, request=request)except MaxRetryError as e:if isinstance(e.reason, ConnectTimeoutError):# TODO: Remove this in 3.0.0: see #2811if not isinstance(e.reason, NewConnectionError):raise ConnectTimeout(e, request=request)if isinstance(e.reason, ResponseError):raise RetryError(e, request=request)if isinstance(e.reason, _ProxyError):raise ProxyError(e, request=request)raise ConnectionError(e, request=request)except ClosedPoolError as e:raise ConnectionError(e, request=request)except _ProxyError as e:raise ProxyError(e)except (_SSLError, _HTTPError) as e:if 
isinstance(e, _SSLError):raise SSLError(e, request=request)elif isinstance(e, ReadTimeoutError):raise ReadTimeout(e, request=request)else:raisereturn self.build_response(request, resp)
# -*- coding: utf-8 -*-

#   __
#  /__)  _  _     _   _ _/   _
# / (   (- (/ (/ (- _)  /  _)
#      /

"""
Requests HTTP library
~~~~~~~~~~~~~~~~~~~~~

Requests is an HTTP library, written in Python, for human beings. Basic GET
usage:

   >>> import requests
   >>> r = requests.get('https://www.python.org')
   >>> r.status_code
   200
   >>> 'Python is a programming language' in r.content
   True

... or POST:

   >>> payload = dict(key1='value1', key2='value2')
   >>> r = requests.post('http://httpbin.org/post', data=payload)
   >>> print(r.text)
   {
     ...
     "form": {
       "key2": "value2",
       "key1": "value1"
     },
     ...
   }

The other HTTP methods are supported - see `requests.api`. Full documentation
is at <http://python-requests.org>.

:copyright: (c) 2016 by Kenneth Reitz.
:license: Apache 2.0, see LICENSE for more details.
"""

# Package metadata.
__title__ = 'requests'
__version__ = '2.11.1'
__build__ = 0x021101
__author__ = 'Kenneth Reitz'
__license__ = 'Apache 2.0'
__copyright__ = 'Copyright 2016 Kenneth Reitz'

# Attempt to enable urllib3's SNI support, if possible
# Note: Patched by pip to prevent using the PyOpenSSL module. On Windows this
# prevents upgrading cryptography.
# try:
#     from .packages.urllib3.contrib import pyopenssl
#     pyopenssl.inject_into_urllib3()
# except ImportError:
#     pass

import warnings

# urllib3's DependencyWarnings should be silenced.
from .packages.urllib3.exceptions import DependencyWarning
warnings.simplefilter('ignore', DependencyWarning)

# Public API re-exports.
from . import utils
from .models import Request, Response, PreparedRequest
from .api import request, get, head, post, patch, put, delete, options
from .sessions import session, Session
from .status_codes import codes
from .exceptions import (
    RequestException, Timeout, URLRequired,
    TooManyRedirects, HTTPError, ConnectionError,
    FileModeWarning, ConnectTimeout, ReadTimeout
)

# Set default logging handler to avoid "No handler found" warnings.
import logging
try:  # Python 2.7+
    from logging import NullHandler
except ImportError:
    # Minimal no-op handler backport for Python < 2.7.
    class NullHandler(logging.Handler):
        def emit(self, record):
            pass

logging.getLogger(__name__).addHandler(NullHandler())

# FileModeWarnings go off per the default.
warnings.simplefilter('default', FileModeWarning, append=True)
"""Re-vendor helper: wipe and re-install pip's vendored packages.

Usage: re-vendor.py [clean|vendor]

``clean`` removes every vendored package directory (and the single-file
``six`` module) that sits next to this script; ``vendor`` reinstalls
everything listed in ``vendor.txt`` into the same directory.
"""
import os
import sys
import glob
import shutil

# Directory containing this script; all vendoring happens in place here.
here = os.path.abspath(os.path.dirname(__file__))


def usage():
    """Print the command synopsis and exit with a failure status (1)."""
    print("Usage: re-vendor.py [clean|vendor]")
    sys.exit(1)


def clean():
    """Delete all vendored package directories and the ``six`` module.

    Safe to run repeatedly: a missing ``six.py`` (e.g. after a previous
    clean) is not an error.
    """
    for fn in os.listdir(here):
        dirname = os.path.join(here, fn)
        if os.path.isdir(dirname):
            shutil.rmtree(dirname)
    # six is a single file, not a package.  Guard the unlink so a second
    # consecutive clean() does not raise OSError.
    six_py = os.path.join(here, 'six.py')
    if os.path.exists(six_py):
        os.unlink(six_py)


def vendor():
    """Install everything from vendor.txt into this directory, then drop
    the ``*.egg-info`` metadata directories pip leaves behind.
    """
    # Imported lazily so that `clean` and `usage` still work on a machine
    # where pip itself is not importable.
    import pip
    pip.main(['install', '-t', here, '-r', os.path.join(here, 'vendor.txt')])
    # Anchor the glob at `here` (like every other path in this script)
    # rather than the current working directory, so the script behaves the
    # same regardless of where it is invoked from.
    for dirname in glob.glob(os.path.join(here, '*.egg-info')):
        shutil.rmtree(dirname)


if __name__ == '__main__':
    if len(sys.argv) != 2:
        usage()
    if sys.argv[1] == 'clean':
        clean()
    elif sys.argv[1] == 'vendor':
        vendor()
    else:
        usage()
# module pyparsing.py## Copyright (c) 2003-2016 Paul T. McGuire## Permission is hereby granted, free of charge, to any person obtaining# a copy of this software and associated documentation files (the# "Software"), to deal in the Software without restriction, including# without limitation the rights to use, copy, modify, merge, publish,# distribute, sublicense, and/or sell copies of the Software, and to# permit persons to whom the Software is furnished to do so, subject to# the following conditions:## The above copyright notice and this permission notice shall be# included in all copies or substantial portions of the Software.## THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.#__doc__ = \"""pyparsing module - Classes and methods to define and execute parsing grammarsThe pyparsing module is an alternative approach to creating and executing simple grammars,vs. the traditional lex/yacc approach, or the use of regular expressions. With pyparsing, youdon't need to learn a new syntax for defining grammars or matching expressions - the parsing moduleprovides a library of classes that you use to construct the grammar directly in Python.Here is a program to parse "Hello, World!" 
(or any greeting of the formC{"<salutation>, <addressee>!"}), built up using L{Word}, L{Literal}, and L{And} elements(L{'+'<ParserElement.__add__>} operator gives L{And} expressions, strings are auto-converted toL{Literal} expressions)::from pyparsing import Word, alphas# define grammar of a greetinggreet = Word(alphas) + "," + Word(alphas) + "!"hello = "Hello, World!"print (hello, "->", greet.parseString(hello))The program outputs the following::Hello, World! -> ['Hello', ',', 'World', '!']The Python representation of the grammar is quite readable, owing to the self-explanatoryclass names, and the use of '+', '|' and '^' operators.The L{ParseResults} object returned from L{ParserElement.parseString<ParserElement.parseString>} can be accessed as a nested list, a dictionary, or anobject with named attributes.The pyparsing module handles some of the problems that are typically vexing when writing text parsers:- extra or missing whitespace (the above program will also handle "Hello,World!", "Hello , World !", etc.)- quoted strings- embedded comments"""__version__ = "2.1.10"__versionTime__ = "07 Oct 2016 01:31 UTC"__author__ = "Paul McGuire <ptmcg@users.sourceforge.net>"import stringfrom weakref import ref as wkrefimport copyimport sysimport warningsimport reimport sre_constantsimport collectionsimport pprintimport tracebackimport typesfrom datetime import datetimetry:from _thread import RLockexcept ImportError:from threading import RLocktry:from collections import OrderedDict as _OrderedDictexcept ImportError:try:from ordereddict import OrderedDict as _OrderedDictexcept ImportError:_OrderedDict = None#~ sys.stderr.write( "testing pyparsing module, version %s, %s\n" % (__version__,__versionTime__ ) )__all__ = ['And', 'CaselessKeyword', 'CaselessLiteral', 'CharsNotIn', 'Combine', 'Dict', 'Each', 'Empty','FollowedBy', 'Forward', 'GoToColumn', 'Group', 'Keyword', 'LineEnd', 'LineStart', 'Literal','MatchFirst', 'NoMatch', 'NotAny', 'OneOrMore', 'OnlyOnce', 'Optional', 
'Or','ParseBaseException', 'ParseElementEnhance', 'ParseException', 'ParseExpression', 'ParseFatalException','ParseResults', 'ParseSyntaxException', 'ParserElement', 'QuotedString', 'RecursiveGrammarException','Regex', 'SkipTo', 'StringEnd', 'StringStart', 'Suppress', 'Token', 'TokenConverter','White', 'Word', 'WordEnd', 'WordStart', 'ZeroOrMore','alphanums', 'alphas', 'alphas8bit', 'anyCloseTag', 'anyOpenTag', 'cStyleComment', 'col','commaSeparatedList', 'commonHTMLEntity', 'countedArray', 'cppStyleComment', 'dblQuotedString','dblSlashComment', 'delimitedList', 'dictOf', 'downcaseTokens', 'empty', 'hexnums','htmlComment', 'javaStyleComment', 'line', 'lineEnd', 'lineStart', 'lineno','makeHTMLTags', 'makeXMLTags', 'matchOnlyAtCol', 'matchPreviousExpr', 'matchPreviousLiteral','nestedExpr', 'nullDebugAction', 'nums', 'oneOf', 'opAssoc', 'operatorPrecedence', 'printables','punc8bit', 'pythonStyleComment', 'quotedString', 'removeQuotes', 'replaceHTMLEntity','replaceWith', 'restOfLine', 'sglQuotedString', 'srange', 'stringEnd','stringStart', 'traceParseAction', 'unicodeString', 'upcaseTokens', 'withAttribute','indentedBlock', 'originalTextFor', 'ungroup', 'infixNotation','locatedExpr', 'withClass','CloseMatch', 'tokenMap', 'pyparsing_common',]system_version = tuple(sys.version_info)[:3]PY_3 = system_version[0] == 3if PY_3:_MAX_INT = sys.maxsizebasestring = strunichr = chr_ustr = str# build list of single arg builtins, that can be used as parse actionssingleArgBuiltins = [sum, len, sorted, reversed, list, tuple, set, any, all, min, max]else:_MAX_INT = sys.maxintrange = xrangedef _ustr(obj):"""Drop-in replacement for str(obj) that tries to be Unicode friendly. It first triesstr(obj). If that fails with a UnicodeEncodeError, then it tries unicode(obj). Itthen < returns the unicode object | encodes it with the default encoding | ... 
>."""if isinstance(obj,unicode):return objtry:# If this works, then _ustr(obj) has the same behaviour as str(obj), so# it won't break any existing code.return str(obj)except UnicodeEncodeError:# Else encode itret = unicode(obj).encode(sys.getdefaultencoding(), 'xmlcharrefreplace')xmlcharref = Regex('&#\d+;')xmlcharref.setParseAction(lambda t: '\\u' + hex(int(t[0][2:-1]))[2:])return xmlcharref.transformString(ret)# build list of single arg builtins, tolerant of Python version, that can be used as parse actionssingleArgBuiltins = []import __builtin__for fname in "sum len sorted reversed list tuple set any all min max".split():try:singleArgBuiltins.append(getattr(__builtin__,fname))except AttributeError:continue_generatorType = type((y for y in range(1)))def _xml_escape(data):"""Escape &, <, >, ", ', etc. in a string of data."""# ampersand must be replaced firstfrom_symbols = '&><"\''to_symbols = ('&'+s+';' for s in "amp gt lt quot apos".split())for from_,to_ in zip(from_symbols, to_symbols):data = data.replace(from_, to_)return dataclass _Constants(object):passalphas = string.ascii_uppercase + string.ascii_lowercasenums = "0123456789"hexnums = nums + "ABCDEFabcdef"alphanums = alphas + nums_bslash = chr(92)printables = "".join(c for c in string.printable if c not in string.whitespace)class ParseBaseException(Exception):"""base exception class for all parsing runtime exceptions"""# Performance tuning: we construct a *lot* of these, so keep this# constructor as small and fast as possibledef __init__( self, pstr, loc=0, msg=None, elem=None ):self.loc = locif msg is None:self.msg = pstrself.pstr = ""else:self.msg = msgself.pstr = pstrself.parserElement = elemself.args = (pstr, loc, msg)@classmethoddef _from_exception(cls, pe):"""internal factory method to simplify creating one type of ParseExceptionfrom another - avoids having __init__ signature conflicts among subclasses"""return cls(pe.pstr, pe.loc, pe.msg, pe.parserElement)def __getattr__( self, aname ):"""supported 
attributes by name are:- lineno - returns the line number of the exception text- col - returns the column number of the exception text- line - returns the line containing the exception text"""if( aname == "lineno" ):return lineno( self.loc, self.pstr )elif( aname in ("col", "column") ):return col( self.loc, self.pstr )elif( aname == "line" ):return line( self.loc, self.pstr )else:raise AttributeError(aname)def __str__( self ):return "%s (at char %d), (line:%d, col:%d)" % \( self.msg, self.loc, self.lineno, self.column )def __repr__( self ):return _ustr(self)def markInputline( self, markerString = ">!<" ):"""Extracts the exception line from the input string, and marksthe location of the exception with a special symbol."""line_str = self.lineline_column = self.column - 1if markerString:line_str = "".join((line_str[:line_column],markerString, line_str[line_column:]))return line_str.strip()def __dir__(self):return "lineno col line".split() + dir(type(self))class ParseException(ParseBaseException):"""Exception thrown when parse expressions don't match class;supported attributes by name are:- lineno - returns the line number of the exception text- col - returns the column number of the exception text- line - returns the line containing the exception textExample::try:Word(nums).setName("integer").parseString("ABC")except ParseException as pe:print(pe)print("column: {}".format(pe.col))prints::Expected integer (at char 0), (line:1, col:1)column: 1"""passclass ParseFatalException(ParseBaseException):"""user-throwable exception thrown when inconsistent parse contentis found; stops all parsing immediately"""passclass ParseSyntaxException(ParseFatalException):"""just like L{ParseFatalException}, but thrown internally when anL{ErrorStop<And._ErrorStop>} ('-' operator) indicates that parsing is to stopimmediately because an unbacktrackable syntax error has been found"""pass#~ class ReparseException(ParseBaseException):#~ """Experimental class - parse actions can raise this 
exception to cause#~ pyparsing to reparse the input string:#~ - with a modified input string, and/or#~ - with a modified start location#~ Set the values of the ReparseException in the constructor, and raise the#~ exception in a parse action to cause pyparsing to use the new string/location.#~ Setting the values as None causes no change to be made.#~ """#~ def __init_( self, newstring, restartLoc ):#~ self.newParseText = newstring#~ self.reparseLoc = restartLocclass RecursiveGrammarException(Exception):"""exception thrown by L{ParserElement.validate} if the grammar could be improperly recursive"""def __init__( self, parseElementList ):self.parseElementTrace = parseElementListdef __str__( self ):return "RecursiveGrammarException: %s" % self.parseElementTraceclass _ParseResultsWithOffset(object):def __init__(self,p1,p2):self.tup = (p1,p2)def __getitem__(self,i):return self.tup[i]def __repr__(self):return repr(self.tup[0])def setOffset(self,i):self.tup = (self.tup[0],i)class ParseResults(object):"""Structured parse results, to provide multiple means of access to the parsed data:- as a list (C{len(results)})- by list index (C{results[0], results[1]}, etc.)- by attribute (C{results.<resultsName>} - see L{ParserElement.setResultsName})Example::integer = Word(nums)date_str = (integer.setResultsName("year") + '/'+ integer.setResultsName("month") + '/'+ integer.setResultsName("day"))# equivalent form:# date_str = integer("year") + '/' + integer("month") + '/' + integer("day")# parseString returns a ParseResults objectresult = date_str.parseString("1999/12/31")def test(s, fn=repr):print("%s -> %s" % (s, fn(eval(s))))test("list(result)")test("result[0]")test("result['month']")test("result.day")test("'month' in result")test("'minutes' in result")test("result.dump()", str)prints::list(result) -> ['1999', '/', '12', '/', '31']result[0] -> '1999'result['month'] -> '12'result.day -> '31''month' in result -> True'minutes' in result -> Falseresult.dump() -> ['1999', '/', '12', '/', 
'31']- day: 31- month: 12- year: 1999"""def __new__(cls, toklist=None, name=None, asList=True, modal=True ):if isinstance(toklist, cls):return toklistretobj = object.__new__(cls)retobj.__doinit = Truereturn retobj# Performance tuning: we construct a *lot* of these, so keep this# constructor as small and fast as possibledef __init__( self, toklist=None, name=None, asList=True, modal=True, isinstance=isinstance ):if self.__doinit:self.__doinit = Falseself.__name = Noneself.__parent = Noneself.__accumNames = {}self.__asList = asListself.__modal = modalif toklist is None:toklist = []if isinstance(toklist, list):self.__toklist = toklist[:]elif isinstance(toklist, _generatorType):self.__toklist = list(toklist)else:self.__toklist = [toklist]self.__tokdict = dict()if name is not None and name:if not modal:self.__accumNames[name] = 0if isinstance(name,int):name = _ustr(name) # will always return a str, but use _ustr for consistencyself.__name = nameif not (isinstance(toklist, (type(None), basestring, list)) and toklist in (None,'',[])):if isinstance(toklist,basestring):toklist = [ toklist ]if asList:if isinstance(toklist,ParseResults):self[name] = _ParseResultsWithOffset(toklist.copy(),0)else:self[name] = _ParseResultsWithOffset(ParseResults(toklist[0]),0)self[name].__name = nameelse:try:self[name] = toklist[0]except (KeyError,TypeError,IndexError):self[name] = toklistdef __getitem__( self, i ):if isinstance( i, (int,slice) ):return self.__toklist[i]else:if i not in self.__accumNames:return self.__tokdict[i][-1][0]else:return ParseResults([ v[0] for v in self.__tokdict[i] ])def __setitem__( self, k, v, isinstance=isinstance ):if isinstance(v,_ParseResultsWithOffset):self.__tokdict[k] = self.__tokdict.get(k,list()) + [v]sub = v[0]elif isinstance(k,(int,slice)):self.__toklist[k] = vsub = velse:self.__tokdict[k] = self.__tokdict.get(k,list()) + [_ParseResultsWithOffset(v,0)]sub = vif isinstance(sub,ParseResults):sub.__parent = wkref(self)def __delitem__( self, i ):if 
isinstance(i,(int,slice)):mylen = len( self.__toklist )del self.__toklist[i]# convert int to sliceif isinstance(i, int):if i < 0:i += myleni = slice(i, i+1)# get removed indicesremoved = list(range(*i.indices(mylen)))removed.reverse()# fixup indices in token dictionaryfor name,occurrences in self.__tokdict.items():for j in removed:for k, (value, position) in enumerate(occurrences):occurrences[k] = _ParseResultsWithOffset(value, position - (position > j))else:del self.__tokdict[i]def __contains__( self, k ):return k in self.__tokdictdef __len__( self ): return len( self.__toklist )def __bool__(self): return ( not not self.__toklist )__nonzero__ = __bool__def __iter__( self ): return iter( self.__toklist )def __reversed__( self ): return iter( self.__toklist[::-1] )def _iterkeys( self ):if hasattr(self.__tokdict, "iterkeys"):return self.__tokdict.iterkeys()else:return iter(self.__tokdict)def _itervalues( self ):return (self[k] for k in self._iterkeys())def _iteritems( self ):return ((k, self[k]) for k in self._iterkeys())if PY_3:keys = _iterkeys"""Returns an iterator of all named result keys (Python 3.x only)."""values = _itervalues"""Returns an iterator of all named result values (Python 3.x only)."""items = _iteritems"""Returns an iterator of all named result key-value tuples (Python 3.x only)."""else:iterkeys = _iterkeys"""Returns an iterator of all named result keys (Python 2.x only)."""itervalues = _itervalues"""Returns an iterator of all named result values (Python 2.x only)."""iteritems = _iteritems"""Returns an iterator of all named result key-value tuples (Python 2.x only)."""def keys( self ):"""Returns all named result keys (as a list in Python 2.x, as an iterator in Python 3.x)."""return list(self.iterkeys())def values( self ):"""Returns all named result values (as a list in Python 2.x, as an iterator in Python 3.x)."""return list(self.itervalues())def items( self ):"""Returns all named result key-values (as a list of tuples in Python 2.x, as an iterator 
in Python 3.x)."""return list(self.iteritems())def haskeys( self ):"""Since keys() returns an iterator, this method is helpful in bypassingcode that looks for the existence of any defined results names."""return bool(self.__tokdict)def pop( self, *args, **kwargs):"""Removes and returns item at specified index (default=C{last}).Supports both C{list} and C{dict} semantics for C{pop()}. If passed noargument or an integer argument, it will use C{list} semanticsand pop tokens from the list of parsed tokens. If passed anon-integer argument (most likely a string), it will use C{dict}semantics and pop the corresponding value from any definedresults names. A second default return value argument issupported, just as in C{dict.pop()}.Example::def remove_first(tokens):tokens.pop(0)print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321']print(OneOrMore(Word(nums)).addParseAction(remove_first).parseString("0 123 321")) # -> ['123', '321']label = Word(alphas)patt = label("LABEL") + OneOrMore(Word(nums))print(patt.parseString("AAB 123 321").dump())# Use pop() in a parse action to remove named result (note that corresponding value is not# removed from list form of results)def remove_LABEL(tokens):tokens.pop("LABEL")return tokenspatt.addParseAction(remove_LABEL)print(patt.parseString("AAB 123 321").dump())prints::['AAB', '123', '321']- LABEL: AAB['AAB', '123', '321']"""if not args:args = [-1]for k,v in kwargs.items():if k == 'default':args = (args[0], v)else:raise TypeError("pop() got an unexpected keyword argument '%s'" % k)if (isinstance(args[0], int) orlen(args) == 1 orargs[0] in self):index = args[0]ret = self[index]del self[index]return retelse:defaultvalue = args[1]return defaultvaluedef get(self, key, defaultValue=None):"""Returns named result matching the given key, or if there is nosuch name, then returns the given C{defaultValue} or C{None} if noC{defaultValue} is specified.Similar to C{dict.get()}.Example::integer = Word(nums)date_str = 
integer("year") + '/' + integer("month") + '/' + integer("day")result = date_str.parseString("1999/12/31")print(result.get("year")) # -> '1999'print(result.get("hour", "not specified")) # -> 'not specified'print(result.get("hour")) # -> None"""if key in self:return self[key]else:return defaultValuedef insert( self, index, insStr ):"""Inserts new element at location index in the list of parsed tokens.Similar to C{list.insert()}.Example::print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321']# use a parse action to insert the parse location in the front of the parsed resultsdef insert_locn(locn, tokens):tokens.insert(0, locn)print(OneOrMore(Word(nums)).addParseAction(insert_locn).parseString("0 123 321")) # -> [0, '0', '123', '321']"""self.__toklist.insert(index, insStr)# fixup indices in token dictionaryfor name,occurrences in self.__tokdict.items():for k, (value, position) in enumerate(occurrences):occurrences[k] = _ParseResultsWithOffset(value, position + (position > index))def append( self, item ):"""Add single element to end of ParseResults list of elements.Example::print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321']# use a parse action to compute the sum of the parsed integers, and add it to the enddef append_sum(tokens):tokens.append(sum(map(int, tokens)))print(OneOrMore(Word(nums)).addParseAction(append_sum).parseString("0 123 321")) # -> ['0', '123', '321', 444]"""self.__toklist.append(item)def extend( self, itemseq ):"""Add sequence of elements to end of ParseResults list of elements.Example::patt = OneOrMore(Word(alphas))# use a parse action to append the reverse of the matched strings, to make a palindromedef make_palindrome(tokens):tokens.extend(reversed([t[::-1] for t in tokens]))return ''.join(tokens)print(patt.addParseAction(make_palindrome).parseString("lskdj sdlkjf lksd")) # -> 'lskdjsdlkjflksddsklfjkldsjdksl'"""if isinstance(itemseq, ParseResults):self += 
itemseqelse:self.__toklist.extend(itemseq)def clear( self ):"""Clear all elements and results names."""del self.__toklist[:]self.__tokdict.clear()def __getattr__( self, name ):try:return self[name]except KeyError:return ""if name in self.__tokdict:if name not in self.__accumNames:return self.__tokdict[name][-1][0]else:return ParseResults([ v[0] for v in self.__tokdict[name] ])else:return ""def __add__( self, other ):ret = self.copy()ret += otherreturn retdef __iadd__( self, other ):if other.__tokdict:offset = len(self.__toklist)addoffset = lambda a: offset if a<0 else a+offsetotheritems = other.__tokdict.items()otherdictitems = [(k, _ParseResultsWithOffset(v[0],addoffset(v[1])) )for (k,vlist) in otheritems for v in vlist]for k,v in otherdictitems:self[k] = vif isinstance(v[0],ParseResults):v[0].__parent = wkref(self)self.__toklist += other.__toklistself.__accumNames.update( other.__accumNames )return selfdef __radd__(self, other):if isinstance(other,int) and other == 0:# useful for merging many ParseResults using sum() builtinreturn self.copy()else:# this may raise a TypeError - so be itreturn other + selfdef __repr__( self ):return "(%s, %s)" % ( repr( self.__toklist ), repr( self.__tokdict ) )def __str__( self ):return '[' + ', '.join(_ustr(i) if isinstance(i, ParseResults) else repr(i) for i in self.__toklist) + ']'def _asStringList( self, sep='' ):out = []for item in self.__toklist:if out and sep:out.append(sep)if isinstance( item, ParseResults ):out += item._asStringList()else:out.append( _ustr(item) )return outdef asList( self ):"""Returns the parse results as a nested list of matching tokens, all converted to strings.Example::patt = OneOrMore(Word(alphas))result = patt.parseString("sldkj lsdkj sldkj")# even though the result prints in string-like form, it is actually a pyparsing ParseResultsprint(type(result), result) # -> <class 'pyparsing.ParseResults'> ['sldkj', 'lsdkj', 'sldkj']# Use asList() to create an actual listresult_list = 
result.asList()print(type(result_list), result_list) # -> <class 'list'> ['sldkj', 'lsdkj', 'sldkj']"""return [res.asList() if isinstance(res,ParseResults) else res for res in self.__toklist]def asDict( self ):"""Returns the named parse results as a nested dictionary.Example::integer = Word(nums)date_str = integer("year") + '/' + integer("month") + '/' + integer("day")result = date_str.parseString('12/31/1999')print(type(result), repr(result)) # -> <class 'pyparsing.ParseResults'> (['12', '/', '31', '/', '1999'], {'day': [('1999', 4)], 'year': [('12', 0)], 'month': [('31', 2)]})result_dict = result.asDict()print(type(result_dict), repr(result_dict)) # -> <class 'dict'> {'day': '1999', 'year': '12', 'month': '31'}# even though a ParseResults supports dict-like access, sometime you just need to have a dictimport jsonprint(json.dumps(result)) # -> Exception: TypeError: ... is not JSON serializableprint(json.dumps(result.asDict())) # -> {"month": "31", "day": "1999", "year": "12"}"""if PY_3:item_fn = self.itemselse:item_fn = self.iteritemsdef toItem(obj):if isinstance(obj, ParseResults):if obj.haskeys():return obj.asDict()else:return [toItem(v) for v in obj]else:return objreturn dict((k,toItem(v)) for k,v in item_fn())def copy( self ):"""Returns a new copy of a C{ParseResults} object."""ret = ParseResults( self.__toklist )ret.__tokdict = self.__tokdict.copy()ret.__parent = self.__parentret.__accumNames.update( self.__accumNames )ret.__name = self.__namereturn retdef asXML( self, doctag=None, namedItemsOnly=False, indent="", formatted=True ):"""(Deprecated) Returns the parse results as XML. 
Tags are created for tokens and lists that have defined results names."""nl = "\n"out = []namedItems = dict((v[1],k) for (k,vlist) in self.__tokdict.items()for v in vlist)nextLevelIndent = indent + " "# collapse out indents if formatting is not desiredif not formatted:indent = ""nextLevelIndent = ""nl = ""selfTag = Noneif doctag is not None:selfTag = doctagelse:if self.__name:selfTag = self.__nameif not selfTag:if namedItemsOnly:return ""else:selfTag = "ITEM"out += [ nl, indent, "<", selfTag, ">" ]for i,res in enumerate(self.__toklist):if isinstance(res,ParseResults):if i in namedItems:out += [ res.asXML(namedItems[i],namedItemsOnly and doctag is None,nextLevelIndent,formatted)]else:out += [ res.asXML(None,namedItemsOnly and doctag is None,nextLevelIndent,formatted)]else:# individual token, see if there is a name for itresTag = Noneif i in namedItems:resTag = namedItems[i]if not resTag:if namedItemsOnly:continueelse:resTag = "ITEM"xmlBodyText = _xml_escape(_ustr(res))out += [ nl, nextLevelIndent, "<", resTag, ">",xmlBodyText,"</", resTag, ">" ]out += [ nl, indent, "</", selfTag, ">" ]return "".join(out)def __lookup(self,sub):for k,vlist in self.__tokdict.items():for v,loc in vlist:if sub is v:return kreturn Nonedef getName(self):"""Returns the results name for this token expression. 
Useful when severaldifferent expressions might match at a particular location.Example::integer = Word(nums)ssn_expr = Regex(r"\d\d\d-\d\d-\d\d\d\d")house_number_expr = Suppress('#') + Word(nums, alphanums)user_data = (Group(house_number_expr)("house_number")| Group(ssn_expr)("ssn")| Group(integer)("age"))user_info = OneOrMore(user_data)result = user_info.parseString("22 111-22-3333 #221B")for item in result:print(item.getName(), ':', item[0])prints::age : 22ssn : 111-22-3333house_number : 221B"""if self.__name:return self.__nameelif self.__parent:par = self.__parent()if par:return par.__lookup(self)else:return Noneelif (len(self) == 1 andlen(self.__tokdict) == 1 andnext(iter(self.__tokdict.values()))[0][1] in (0,-1)):return next(iter(self.__tokdict.keys()))else:return Nonedef dump(self, indent='', depth=0, full=True):"""Diagnostic method for listing out the contents of a C{ParseResults}.Accepts an optional C{indent} argument so that this string can be embeddedin a nested display of other data.Example::integer = Word(nums)date_str = integer("year") + '/' + integer("month") + '/' + integer("day")result = date_str.parseString('12/31/1999')print(result.dump())prints::['12', '/', '31', '/', '1999']- day: 1999- month: 31- year: 12"""out = []NL = '\n'out.append( indent+_ustr(self.asList()) )if full:if self.haskeys():items = sorted((str(k), v) for k,v in self.items())for k,v in items:if out:out.append(NL)out.append( "%s%s- %s: " % (indent,(' '*depth), k) )if isinstance(v,ParseResults):if v:out.append( v.dump(indent,depth+1) )else:out.append(_ustr(v))else:out.append(repr(v))elif any(isinstance(vv,ParseResults) for vv in self):v = selffor i,vv in enumerate(v):if isinstance(vv,ParseResults):out.append("\n%s%s[%d]:\n%s%s%s" % (indent,(' '*(depth)),i,indent,(' '*(depth+1)),vv.dump(indent,depth+1) ))else:out.append("\n%s%s[%d]:\n%s%s%s" % (indent,(' '*(depth)),i,indent,(' '*(depth+1)),_ustr(vv)))return "".join(out)def pprint(self, *args, **kwargs):"""Pretty-printer for parsed 
results as a list, using the C{pprint} module.Accepts additional positional or keyword args as defined for theC{pprint.pprint} method. (U{http://docs.python.org/3/library/pprint.html#pprint.pprint})Example::ident = Word(alphas, alphanums)num = Word(nums)func = Forward()term = ident | num | Group('(' + func + ')')func <<= ident + Group(Optional(delimitedList(term)))result = func.parseString("fna a,b,(fnb c,d,200),100")result.pprint(width=40)prints::['fna',['a','b',['(', 'fnb', ['c', 'd', '200'], ')'],'100']]"""pprint.pprint(self.asList(), *args, **kwargs)# add support for pickle protocoldef __getstate__(self):return ( self.__toklist,( self.__tokdict.copy(),self.__parent is not None and self.__parent() or None,self.__accumNames,self.__name ) )def __setstate__(self,state):self.__toklist = state[0](self.__tokdict,par,inAccumNames,self.__name) = state[1]self.__accumNames = {}self.__accumNames.update(inAccumNames)if par is not None:self.__parent = wkref(par)else:self.__parent = Nonedef __getnewargs__(self):return self.__toklist, self.__name, self.__asList, self.__modaldef __dir__(self):return (dir(type(self)) + list(self.keys()))collections.MutableMapping.register(ParseResults)def col (loc,strg):"""Returns current column within a string, counting newlines as line separators.The first column is number 1.Note: the default parsing behavior is to expand tabs in the input stringbefore starting the parsing process. 
See L{I{ParserElement.parseString}<ParserElement.parseString>} for more informationon parsing strings containing C{<TAB>}s, and suggested methods to maintain aconsistent view of the parsed string, the parse location, and line and columnpositions within the parsed string."""s = strgreturn 1 if 0<loc<len(s) and s[loc-1] == '\n' else loc - s.rfind("\n", 0, loc)def lineno(loc,strg):"""Returns current line number within a string, counting newlines as line separators.The first line is number 1.Note: the default parsing behavior is to expand tabs in the input stringbefore starting the parsing process. See L{I{ParserElement.parseString}<ParserElement.parseString>} for more informationon parsing strings containing C{<TAB>}s, and suggested methods to maintain aconsistent view of the parsed string, the parse location, and line and columnpositions within the parsed string."""return strg.count("\n",0,loc) + 1def line( loc, strg ):"""Returns the line of text containing loc within a string, counting newlines as line separators."""lastCR = strg.rfind("\n", 0, loc)nextCR = strg.find("\n", loc)if nextCR >= 0:return strg[lastCR+1:nextCR]else:return strg[lastCR+1:]def _defaultStartDebugAction( instring, loc, expr ):print (("Match " + _ustr(expr) + " at loc " + _ustr(loc) + "(%d,%d)" % ( lineno(loc,instring), col(loc,instring) )))def _defaultSuccessDebugAction( instring, startloc, endloc, expr, toks ):print ("Matched " + _ustr(expr) + " -> " + str(toks.asList()))def _defaultExceptionDebugAction( instring, loc, expr, exc ):print ("Exception raised:" + _ustr(exc))def nullDebugAction(*args):"""'Do-nothing' debug action, to suppress debugging output during parsing."""pass# Only works on Python 3.x - nonlocal is toxic to Python 2 installs#~ 'decorator to trim function calls to match the arity of the target'#~ def _trim_arity(func, maxargs=3):#~ if func in singleArgBuiltins:#~ return lambda s,l,t: func(t)#~ limit = 0#~ foundArity = False#~ def wrapper(*args):#~ nonlocal limit,foundArity#~ 
while 1:#~ try:#~ ret = func(*args[limit:])#~ foundArity = True#~ return ret#~ except TypeError:#~ if limit == maxargs or foundArity:#~ raise#~ limit += 1#~ continue#~ return wrapper# this version is Python 2.x-3.x cross-compatible'decorator to trim function calls to match the arity of the target'def _trim_arity(func, maxargs=2):if func in singleArgBuiltins:return lambda s,l,t: func(t)limit = [0]foundArity = [False]# traceback return data structure changed in Py3.5 - normalize back to plain tuplesif system_version[:2] >= (3,5):def extract_stack(limit=0):# special handling for Python 3.5.0 - extra deep call stack by 1offset = -3 if system_version == (3,5,0) else -2frame_summary = traceback.extract_stack(limit=-offset+limit-1)[offset]return [(frame_summary.filename, frame_summary.lineno)]def extract_tb(tb, limit=0):frames = traceback.extract_tb(tb, limit=limit)frame_summary = frames[-1]return [(frame_summary.filename, frame_summary.lineno)]else:extract_stack = traceback.extract_stackextract_tb = traceback.extract_tb# synthesize what would be returned by traceback.extract_stack at the call to# user's parse action 'func', so that we don't incur call penalty at parse timeLINE_DIFF = 6# IF ANY CODE CHANGES, EVEN JUST COMMENTS OR BLANK LINES, BETWEEN THE NEXT LINE AND# THE CALL TO FUNC INSIDE WRAPPER, LINE_DIFF MUST BE MODIFIED!!!!this_line = extract_stack(limit=2)[-1]pa_call_line_synth = (this_line[0], this_line[1]+LINE_DIFF)def wrapper(*args):while 1:try:ret = func(*args[limit[0]:])foundArity[0] = Truereturn retexcept TypeError:# re-raise TypeErrors if they did not come from our arity testingif foundArity[0]:raiseelse:try:tb = sys.exc_info()[-1]if not extract_tb(tb, limit=2)[-1][:2] == pa_call_line_synth:raisefinally:del tbif limit[0] <= maxargs:limit[0] += 1continueraise# copy func name to wrapper for sensible debug outputfunc_name = "<parse action>"try:func_name = getattr(func, '__name__',getattr(func, '__class__').__name__)except Exception:func_name = 
class ParserElement(object):
    """Abstract base level parser element class."""
    # default set of characters skipped as insignificant whitespace
    DEFAULT_WHITE_CHARS = " \n\t\r"
    # when True, parse exceptions keep the full internal stack trace
    verbose_stacktrace = False

    @staticmethod
    def setDefaultWhitespaceChars(chars):
        r"""Overrides the default whitespace chars.

        Example::
            # default whitespace chars are space, <TAB> and newline
            OneOrMore(Word(alphas)).parseString("abc def\nghi jkl")  # -> ['abc', 'def', 'ghi', 'jkl']
            # change to just treat newline as significant
            ParserElement.setDefaultWhitespaceChars(" \t")
            OneOrMore(Word(alphas)).parseString("abc def\nghi jkl")  # -> ['abc', 'def']
        """
        ParserElement.DEFAULT_WHITE_CHARS = chars

    @staticmethod
    def inlineLiteralsUsing(cls):
        """Set class to be used for inclusion of string literals into a parser.

        Example::
            # change to Suppress
            ParserElement.inlineLiteralsUsing(Suppress)
            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
            date_str.parseString("1999/12/31")  # -> ['1999', '12', '31']
        """
        ParserElement._literalStringClass = cls

    def __init__(self, savelist=False):
        self.parseAction = []
        self.failAction = None
        #~ self.name = "<unknown>"  # don't define self.name, let subclasses try/except upcall
        self.strRepr = None
        self.resultsName = None
        self.saveAsList = savelist
        self.skipWhitespace = True
        self.whiteChars = ParserElement.DEFAULT_WHITE_CHARS
        self.copyDefaultWhiteChars = True
        self.mayReturnEmpty = False  # used when checking for left-recursion
        self.keepTabs = False
        self.ignoreExprs = []
        self.debug = False
        self.streamlined = False
        self.mayIndexError = True  # used to optimize exception handling for subclasses that don't advance parse index
        self.errmsg = ""
        self.modalResults = True  # used to mark results names as modal (report only last) or cumulative (list all)
        self.debugActions = (None, None, None)  # custom debug actions
        self.re = None
        self.callPreparse = True  # used to avoid redundant calls to preParse
        self.callDuringTry = False

    def copy(self):
        """Make a copy of this C{ParserElement}.  Useful for defining different
        parse actions for the same parsing pattern, using copies of the
        original parse element.  Equivalent form of C{expr.copy()} is just
        C{expr()}.
        """
        dup = copy.copy(self)
        # give the copy its own mutable action/ignore lists
        dup.parseAction = self.parseAction[:]
        dup.ignoreExprs = self.ignoreExprs[:]
        if self.copyDefaultWhiteChars:
            dup.whiteChars = ParserElement.DEFAULT_WHITE_CHARS
        return dup

    def setName(self, name):
        """Define name for this expression, makes debugging and exception
        messages clearer.  Returns self for chaining.
        """
        self.name = name
        self.errmsg = "Expected " + self.name
        # keep any cached exception's message in sync with the new name
        if hasattr(self, "exception"):
            self.exception.msg = self.errmsg
        return self
def setResultsName(self, name, listAllMatches=False):
    """Define name for referencing matching tokens as a nested attribute
    of the returned parse results.

    NOTE: this returns a *copy* of the original C{ParserElement} object;
    this is so that the client can define a basic element, such as an
    integer, and reference it in multiple places with different names.
    A trailing C{'*'} on the name is shorthand for C{listAllMatches=True}.
    """
    renamed = self.copy()
    if name.endswith("*"):
        name = name[:-1]
        listAllMatches = True
    renamed.resultsName = name
    renamed.modalResults = not listAllMatches
    return renamed

def setBreak(self, breakFlag=True):
    """Method to invoke the Python pdb debugger when this element is about
    to be parsed.  Set C{breakFlag} to True to enable, False to disable.
    """
    if breakFlag:
        _parseMethod = self._parse
        def breaker(instring, loc, doActions=True, callPreParse=True):
            import pdb
            pdb.set_trace()
            return _parseMethod(instring, loc, doActions, callPreParse)
        # remember the original so a later setBreak(False) can restore it
        breaker._originalParseMethod = _parseMethod
        self._parse = breaker
    else:
        if hasattr(self._parse, "_originalParseMethod"):
            self._parse = self._parse._originalParseMethod
    return self

def setParseAction(self, *fns, **kwargs):
    """Define action to perform when successfully matching parse element
    definition.  Parse action fn is a callable with 0-3 arguments, called as
    C{fn(s,loc,toks)}, C{fn(loc,toks)}, C{fn(toks)}, or just C{fn()}.  If the
    functions in fns modify the tokens, they can return them as the return
    value from fn, and the modified list of tokens will replace the original.

    Optional keyword arguments:
     - callDuringTry = (default=C{False}) indicate if parse action should be
       run during lookaheads and alternate testing
    """
    self.parseAction = list(map(_trim_arity, list(fns)))
    self.callDuringTry = kwargs.get("callDuringTry", False)
    return self
def addParseAction(self, *fns, **kwargs):
    """Add parse action to expression's list of parse actions.  See
    L{I{setParseAction}<setParseAction>} for call signatures.
    """
    self.parseAction += list(map(_trim_arity, list(fns)))
    self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False)
    return self

def addCondition(self, *fns, **kwargs):
    """Add a boolean predicate function to expression's list of parse actions.
    See L{I{setParseAction}<setParseAction>} for function call signatures.
    Unlike C{setParseAction}, functions passed to C{addCondition} need to
    return boolean success/fail of the condition.

    Optional keyword arguments:
     - message = define a custom message to be used in the raised exception
     - fatal = if True, will raise ParseFatalException to stop parsing
       immediately; otherwise will raise ParseException
    """
    msg = kwargs.get("message", "failed user-defined condition")
    exc_type = ParseFatalException if kwargs.get("fatal", False) else ParseException
    for fn in fns:
        # BUGFIX: bind fn as a default argument.  The original closed over the
        # loop variable, so when several conditions were passed every generated
        # pa() evaluated only the LAST fn (late-binding closure).
        def pa(s, l, t, fn=fn):
            if not bool(_trim_arity(fn)(s, l, t)):
                raise exc_type(s, l, msg)
        self.parseAction.append(pa)
    self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False)
    return self

def setFailAction(self, fn):
    """Define action to perform if parsing fails at this expression.
    Fail action fn is a callable function that takes the arguments
    C{fn(s,loc,expr,err)} where:
     - s = string being parsed
     - loc = location where expression match was attempted and failed
     - expr = the parse expression that failed
     - err = the exception thrown
    The function returns no value.  It may throw C{L{ParseFatalException}}
    if it is desired to stop parsing immediately.
    """
    self.failAction = fn
    return self
It may throw C{L{ParseFatalException}}if it is desired to stop parsing immediately."""self.failAction = fnreturn selfdef _skipIgnorables( self, instring, loc ):exprsFound = Truewhile exprsFound:exprsFound = Falsefor e in self.ignoreExprs:try:while 1:loc,dummy = e._parse( instring, loc )exprsFound = Trueexcept ParseException:passreturn locdef preParse( self, instring, loc ):if self.ignoreExprs:loc = self._skipIgnorables( instring, loc )if self.skipWhitespace:wt = self.whiteCharsinstrlen = len(instring)while loc < instrlen and instring[loc] in wt:loc += 1return locdef parseImpl( self, instring, loc, doActions=True ):return loc, []def postParse( self, instring, loc, tokenlist ):return tokenlist#~ @profiledef _parseNoCache( self, instring, loc, doActions=True, callPreParse=True ):debugging = ( self.debug ) #and doActions )if debugging or self.failAction:#~ print ("Match",self,"at loc",loc,"(%d,%d)" % ( lineno(loc,instring), col(loc,instring) ))if (self.debugActions[0] ):self.debugActions[0]( instring, loc, self )if callPreParse and self.callPreparse:preloc = self.preParse( instring, loc )else:preloc = loctokensStart = preloctry:try:loc,tokens = self.parseImpl( instring, preloc, doActions )except IndexError:raise ParseException( instring, len(instring), self.errmsg, self )except ParseBaseException as err:#~ print ("Exception raised:", err)if self.debugActions[2]:self.debugActions[2]( instring, tokensStart, self, err )if self.failAction:self.failAction( instring, tokensStart, self, err )raiseelse:if callPreParse and self.callPreparse:preloc = self.preParse( instring, loc )else:preloc = loctokensStart = prelocif self.mayIndexError or loc >= len(instring):try:loc,tokens = self.parseImpl( instring, preloc, doActions )except IndexError:raise ParseException( instring, len(instring), self.errmsg, self )else:loc,tokens = self.parseImpl( instring, preloc, doActions )tokens = self.postParse( instring, loc, tokens )retTokens = ParseResults( tokens, self.resultsName, 
def tryParse(self, instring, loc):
    # probe a match without running parse actions; fatal errors are demoted
    try:
        return self._parse(instring, loc, doActions=False)[0]
    except ParseFatalException:
        raise ParseException(instring, loc, self.errmsg, self)

def canParseNext(self, instring, loc):
    # boolean form of tryParse
    try:
        self.tryParse(instring, loc)
    except (ParseException, IndexError):
        return False
    else:
        return True

class _UnboundedCache(object):
    # memoizing cache with no size bound, for packrat parsing
    def __init__(self):
        cache = {}
        self.not_in_cache = not_in_cache = object()

        def get(self, key):
            return cache.get(key, not_in_cache)

        def set(self, key, value):
            cache[key] = value

        def clear(self):
            cache.clear()

        self.get = types.MethodType(get, self)
        self.set = types.MethodType(set, self)
        self.clear = types.MethodType(clear, self)

if _OrderedDict is not None:
    class _FifoCache(object):
        # bounded FIFO cache backed by OrderedDict
        def __init__(self, size):
            self.not_in_cache = not_in_cache = object()
            cache = _OrderedDict()

            def get(self, key):
                return cache.get(key, not_in_cache)

            def set(self, key, value):
                cache[key] = value
                if len(cache) > size:
                    cache.popitem(False)  # evict oldest entry

            def clear(self):
                cache.clear()

            self.get = types.MethodType(get, self)
            self.set = types.MethodType(set, self)
            self.clear = types.MethodType(clear, self)
else:
    class _FifoCache(object):
        # bounded FIFO cache backed by dict + deque of insertion order
        def __init__(self, size):
            self.not_in_cache = not_in_cache = object()
            cache = {}
            key_fifo = collections.deque([], size)

            def get(self, key):
                return cache.get(key, not_in_cache)

            def set(self, key, value):
                cache[key] = value
                if len(cache) > size:
                    cache.pop(key_fifo.popleft(), None)
                key_fifo.append(key)

            def clear(self):
                cache.clear()
                key_fifo.clear()

            self.get = types.MethodType(get, self)
            self.set = types.MethodType(set, self)
            self.clear = types.MethodType(clear, self)

# argument cache for optimizing repeated calls when backtracking through recursive expressions
packrat_cache = {}  # this is set later by enabledPackrat(); this is here so that resetCache() doesn't fail
packrat_cache_lock = RLock()
packrat_cache_stats = [0, 0]

# this method gets repeatedly called during backtracking with the same arguments -
# we can cache these arguments and save ourselves the trouble of re-parsing the contained expression
def _parseCache(self, instring, loc, doActions=True, callPreParse=True):
    HIT, MISS = 0, 1
    lookup = (self, instring, loc, callPreParse, doActions)
    with ParserElement.packrat_cache_lock:
        cache = ParserElement.packrat_cache
        value = cache.get(lookup)
        if value is cache.not_in_cache:
            ParserElement.packrat_cache_stats[MISS] += 1
            try:
                value = self._parseNoCache(instring, loc, doActions, callPreParse)
            except ParseBaseException as pe:
                # cache a copy of the exception, without the traceback
                cache.set(lookup, pe.__class__(*pe.args))
                raise
            else:
                cache.set(lookup, (value[0], value[1].copy()))
                return value
        else:
            ParserElement.packrat_cache_stats[HIT] += 1
            if isinstance(value, Exception):
                raise value
            return (value[0], value[1].copy())

_parse = _parseNoCache

@staticmethod
def resetCache():
    ParserElement.packrat_cache.clear()
    ParserElement.packrat_cache_stats[:] = [0] * len(ParserElement.packrat_cache_stats)

_packratEnabled = False

@staticmethod
def enablePackrat(cache_size_limit=128):
    """Enables "packrat" parsing, which adds memoizing to the parsing logic.
    Repeated parse attempts at the same string location (which happens often
    in many complex grammars) can immediately return a cached value, instead
    of re-executing parsing/validating code.  Memoizing is done of both valid
    results and parsing exceptions.

    Parameters:
     - cache_size_limit - (default=C{128}) - if an integer value is provided
       will limit the size of the packrat cache; if None is passed, then the
       cache size will be unbounded; if 0 is passed, the cache will be
       effectively disabled.

    This speedup may break existing programs that use parse actions that have
    side-effects.  For this reason, packrat parsing is disabled when you
    first import pyparsing.  To activate the packrat feature, your program
    must call the class method C{ParserElement.enablePackrat()}.  If your
    program uses C{psyco} to "compile as you go", you must call
    C{enablePackrat} before calling C{psyco.full()}.  If you do not do this,
    Python will crash.  For best results, call C{enablePackrat()}
    immediately after importing pyparsing.
    """
    if not ParserElement._packratEnabled:
        ParserElement._packratEnabled = True
        if cache_size_limit is None:
            ParserElement.packrat_cache = ParserElement._UnboundedCache()
        else:
            ParserElement.packrat_cache = ParserElement._FifoCache(cache_size_limit)
        ParserElement._parse = ParserElement._parseCache
def parseString(self, instring, parseAll=False):
    """Execute the parse expression with the given string.  This is the main
    interface to the client code, once the complete expression has been built.

    If you want the grammar to require that the entire input string be
    successfully parsed, then set C{parseAll} to True (equivalent to ending
    the grammar with C{L{StringEnd()}}).

    Note: C{parseString} implicitly calls C{expandtabs()} on the input
    string, in order to report proper column numbers in parse actions.  See
    L{I{parseWithTabs}<parseWithTabs>} if your grammar must match C{<TAB>}
    characters.
    """
    ParserElement.resetCache()
    if not self.streamlined:
        self.streamline()
        #~ self.saveAsList = True
    for e in self.ignoreExprs:
        e.streamline()
    if not self.keepTabs:
        instring = instring.expandtabs()
    try:
        loc, tokens = self._parse(instring, 0)
        if parseAll:
            loc = self.preParse(instring, loc)
            se = Empty() + StringEnd()
            se._parse(instring, loc)
    except ParseBaseException as exc:
        if ParserElement.verbose_stacktrace:
            raise
        else:
            # catch and re-raise exception from here, clears out pyparsing internal stack trace
            raise exc
    else:
        return tokens

def scanString(self, instring, maxMatches=_MAX_INT, overlap=False):
    """Scan the input string for expression matches.  Each match will return
    the matching tokens, start location, and end location.  May be called
    with optional C{maxMatches} argument, to clip scanning after 'n' matches
    are found.  If C{overlap} is specified, then overlapping matches will be
    reported.

    Note that the start and end locations are reported relative to the
    string being parsed.  See L{I{parseString}<parseString>} for more
    information on parsing strings with embedded tabs.
    """
    if not self.streamlined:
        self.streamline()
    for e in self.ignoreExprs:
        e.streamline()
    if not self.keepTabs:
        instring = _ustr(instring).expandtabs()
    instrlen = len(instring)
    loc = 0
    preparseFn = self.preParse
    parseFn = self._parse
    ParserElement.resetCache()
    matches = 0
    try:
        while loc <= instrlen and matches < maxMatches:
            try:
                preloc = preparseFn(instring, loc)
                nextLoc, tokens = parseFn(instring, preloc, callPreParse=False)
            except ParseException:
                loc = preloc + 1
            else:
                if nextLoc > loc:
                    matches += 1
                    yield tokens, preloc, nextLoc
                    if overlap:
                        # NOTE(review): lower-case 'nextloc' here vs 'nextLoc'
                        # assigned below is preserved from the original -
                        # verify the intended variable before changing.
                        nextloc = preparseFn(instring, loc)
                        if nextloc > loc:
                            loc = nextLoc
                        else:
                            loc += 1
                    else:
                        loc = nextLoc
                else:
                    loc = preloc + 1
    except ParseBaseException as exc:
        if ParserElement.verbose_stacktrace:
            raise
        else:
            # catch and re-raise exception from here, clears out pyparsing internal stack trace
            raise exc
def transformString(self, instring):
    """Extension to C{L{scanString}}, to modify matching text with modified
    tokens that may be returned from a parse action.  To use
    C{transformString}, define a grammar and attach a parse action to it
    that modifies the returned token list.  Invoking C{transformString()} on
    a target string will then scan for matches, and replace the matched text
    patterns according to the logic in the parse action.
    C{transformString()} returns the resulting transformed string.
    """
    pieces = []
    lastE = 0
    # force preservation of <TAB>s, to minimize unwanted transformation of string, and to
    # keep string locs straight between transformString and scanString
    self.keepTabs = True
    try:
        for t, s, e in self.scanString(instring):
            pieces.append(instring[lastE:s])
            if t:
                if isinstance(t, ParseResults):
                    pieces += t.asList()
                elif isinstance(t, list):
                    pieces += t
                else:
                    pieces.append(t)
            lastE = e
        pieces.append(instring[lastE:])
        pieces = [o for o in pieces if o]
        return "".join(map(_ustr, _flatten(pieces)))
    except ParseBaseException as exc:
        if ParserElement.verbose_stacktrace:
            raise
        else:
            # catch and re-raise exception from here, clears out pyparsing internal stack trace
            raise exc

def searchString(self, instring, maxMatches=_MAX_INT):
    """Another extension to C{L{scanString}}, simplifying the access to the
    tokens found to match the given parse expression.  May be called with
    optional C{maxMatches} argument, to clip searching after 'n' matches are
    found.
    """
    try:
        return ParseResults([t for t, s, e in self.scanString(instring, maxMatches)])
    except ParseBaseException as exc:
        if ParserElement.verbose_stacktrace:
            raise
        else:
            # catch and re-raise exception from here, clears out pyparsing internal stack trace
            raise exc
def split(self, instring, maxsplit=_MAX_INT, includeSeparators=False):
    """Generator method to split a string using the given expression as a
    separator.  May be called with optional C{maxsplit} argument, to limit
    the number of splits; and the optional C{includeSeparators} argument
    (default=C{False}), if the separating matching text should be included
    in the split results.
    """
    splits = 0
    last = 0
    for t, s, e in self.scanString(instring, maxMatches=maxsplit):
        yield instring[last:s]
        if includeSeparators:
            yield t[0]
        last = e
    yield instring[last:]

def __add__(self, other):
    """Implementation of + operator - returns C{L{And}}.  Adding strings to
    a ParserElement converts them to L{Literal}s by default.
    """
    if isinstance(other, basestring):
        other = ParserElement._literalStringClass(other)
    if not isinstance(other, ParserElement):
        warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
                      SyntaxWarning, stacklevel=2)
        return None
    return And([self, other])

def __radd__(self, other):
    """Implementation of + operator when left operand is not a C{L{ParserElement}}"""
    if isinstance(other, basestring):
        other = ParserElement._literalStringClass(other)
    if not isinstance(other, ParserElement):
        warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
                      SyntaxWarning, stacklevel=2)
        return None
    return other + self

def __sub__(self, other):
    """Implementation of - operator, returns C{L{And}} with error stop"""
    if isinstance(other, basestring):
        other = ParserElement._literalStringClass(other)
    if not isinstance(other, ParserElement):
        warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
                      SyntaxWarning, stacklevel=2)
        return None
    return And([self, And._ErrorStop(), other])

def __rsub__(self, other):
    """Implementation of - operator when left operand is not a C{L{ParserElement}}"""
    if isinstance(other, basestring):
        other = ParserElement._literalStringClass(other)
    if not isinstance(other, ParserElement):
        warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
                      SyntaxWarning, stacklevel=2)
        return None
    return other - self

def __mul__(self, other):
    """Implementation of * operator, allows use of C{expr * 3} in place of
    C{expr + expr + expr}.  Expressions may also be multiplied by a
    2-integer tuple, similar to C{{min,max}} multipliers in regular
    expressions.  Tuples may also include C{None} as in:
     - C{expr*(n,None)} or C{expr*(n,)} is equivalent
       to C{expr*n + L{ZeroOrMore}(expr)} (read as "at least n instances")
     - C{expr*(None,n)} is equivalent to C{expr*(0,n)} (read as "0 to n instances")
     - C{expr*(None,None)} is equivalent to C{L{ZeroOrMore}(expr)}
     - C{expr*(1,None)} is equivalent to C{L{OneOrMore}(expr)}

    Note that C{expr*(None,n)} does not enforce a maximum number of expr
    occurrences; if that behavior is desired, write C{expr*(None,n) + ~expr}.

    Raises TypeError for unsupported multiplier types, and ValueError for
    negative / zero multipliers.
    """
    if isinstance(other, int):
        minElements, optElements = other, 0
    elif isinstance(other, tuple):
        other = (other + (None, None))[:2]
        if other[0] is None:
            other = (0, other[1])
        if isinstance(other[0], int) and other[1] is None:
            if other[0] == 0:
                return ZeroOrMore(self)
            if other[0] == 1:
                return OneOrMore(self)
            else:
                return self * other[0] + ZeroOrMore(self)
        elif isinstance(other[0], int) and isinstance(other[1], int):
            minElements, optElements = other
            optElements -= minElements
        else:
            # BUGFIX: original passed the format string and values as separate
            # TypeError args (comma, not %), so the message was never interpolated.
            raise TypeError("cannot multiply 'ParserElement' and ('%s','%s') objects"
                            % (type(other[0]), type(other[1])))
    else:
        # BUGFIX: same comma-instead-of-% defect as above.
        raise TypeError("cannot multiply 'ParserElement' and '%s' objects" % type(other))

    if minElements < 0:
        raise ValueError("cannot multiply ParserElement by negative value")
    if optElements < 0:
        raise ValueError("second tuple value must be greater or equal to first tuple value")
    if minElements == optElements == 0:
        raise ValueError("cannot multiply ParserElement by 0 or (0,0)")

    if optElements:
        def makeOptionalList(n):
            # nest n Optionals: Optional(expr + Optional(expr + ...))
            if n > 1:
                return Optional(self + makeOptionalList(n - 1))
            else:
                return Optional(self)
        if minElements:
            if minElements == 1:
                ret = self + makeOptionalList(optElements)
            else:
                ret = And([self] * minElements) + makeOptionalList(optElements)
        else:
            ret = makeOptionalList(optElements)
    else:
        if minElements == 1:
            ret = self
        else:
            ret = And([self] * minElements)
    return ret
Tuplesmay also include C{None} as in:- C{expr*(n,None)} or C{expr*(n,)} is equivalentto C{expr*n + L{ZeroOrMore}(expr)}(read as "at least n instances of C{expr}")- C{expr*(None,n)} is equivalent to C{expr*(0,n)}(read as "0 to n instances of C{expr}")- C{expr*(None,None)} is equivalent to C{L{ZeroOrMore}(expr)}- C{expr*(1,None)} is equivalent to C{L{OneOrMore}(expr)}Note that C{expr*(None,n)} does not raise an exception ifmore than n exprs exist in the input stream; that is,C{expr*(None,n)} does not enforce a maximum number of exproccurrences. If this behavior is desired, then writeC{expr*(None,n) + ~expr}"""if isinstance(other,int):minElements, optElements = other,0elif isinstance(other,tuple):other = (other + (None, None))[:2]if other[0] is None:other = (0, other[1])if isinstance(other[0],int) and other[1] is None:if other[0] == 0:return ZeroOrMore(self)if other[0] == 1:return OneOrMore(self)else:return self*other[0] + ZeroOrMore(self)elif isinstance(other[0],int) and isinstance(other[1],int):minElements, optElements = otheroptElements -= minElementselse:raise TypeError("cannot multiply 'ParserElement' and ('%s','%s') objects", type(other[0]),type(other[1]))else:raise TypeError("cannot multiply 'ParserElement' and '%s' objects", type(other))if minElements < 0:raise ValueError("cannot multiply ParserElement by negative value")if optElements < 0:raise ValueError("second tuple value must be greater or equal to first tuple value")if minElements == optElements == 0:raise ValueError("cannot multiply ParserElement by 0 or (0,0)")if (optElements):def makeOptionalList(n):if n>1:return Optional(self + makeOptionalList(n-1))else:return Optional(self)if minElements:if minElements == 1:ret = self + makeOptionalList(optElements)else:ret = And([self]*minElements) + makeOptionalList(optElements)else:ret = makeOptionalList(optElements)else:if minElements == 1:ret = selfelse:ret = And([self]*minElements)return retdef __rmul__(self, other):return self.__mul__(other)def __or__(self, 
other ):"""Implementation of | operator - returns C{L{MatchFirst}}"""if isinstance( other, basestring ):other = ParserElement._literalStringClass( other )if not isinstance( other, ParserElement ):warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),SyntaxWarning, stacklevel=2)return Nonereturn MatchFirst( [ self, other ] )def __ror__(self, other ):"""Implementation of | operator when left operand is not a C{L{ParserElement}}"""if isinstance( other, basestring ):other = ParserElement._literalStringClass( other )if not isinstance( other, ParserElement ):warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),SyntaxWarning, stacklevel=2)return Nonereturn other | selfdef __xor__(self, other ):"""Implementation of ^ operator - returns C{L{Or}}"""if isinstance( other, basestring ):other = ParserElement._literalStringClass( other )if not isinstance( other, ParserElement ):warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),SyntaxWarning, stacklevel=2)return Nonereturn Or( [ self, other ] )def __rxor__(self, other ):"""Implementation of ^ operator when left operand is not a C{L{ParserElement}}"""if isinstance( other, basestring ):other = ParserElement._literalStringClass( other )if not isinstance( other, ParserElement ):warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),SyntaxWarning, stacklevel=2)return Nonereturn other ^ selfdef __and__(self, other ):"""Implementation of & operator - returns C{L{Each}}"""if isinstance( other, basestring ):other = ParserElement._literalStringClass( other )if not isinstance( other, ParserElement ):warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),SyntaxWarning, stacklevel=2)return Nonereturn Each( [ self, other ] )def __rand__(self, other ):"""Implementation of & operator when left operand is not a C{L{ParserElement}}"""if isinstance( other, basestring ):other = 
def __invert__(self):
    """Implementation of ~ operator - returns C{L{NotAny}}"""
    return NotAny(self)

def __call__(self, name=None):
    """Shortcut for C{L{setResultsName}}, with C{listAllMatches=False}.
    If C{name} is given with a trailing C{'*'} character, then
    C{listAllMatches} will be passed as C{True}.  If C{name} is omitted,
    same as calling C{L{copy}}.
    """
    if name is not None:
        return self.setResultsName(name)
    return self.copy()

def suppress(self):
    """Suppresses the output of this C{ParserElement}; useful to keep
    punctuation from cluttering up returned output.
    """
    return Suppress(self)

def leaveWhitespace(self):
    """Disables the skipping of whitespace before matching the characters in
    the C{ParserElement}'s defined pattern.  This is normally only used
    internally by the pyparsing module, but may be needed in some
    whitespace-sensitive grammars.
    """
    self.skipWhitespace = False
    return self

def setWhitespaceChars(self, chars):
    """Overrides the default whitespace chars"""
    self.skipWhitespace = True
    self.whiteChars = chars
    self.copyDefaultWhiteChars = False
    return self

def parseWithTabs(self):
    """Overrides default behavior to expand C{<TAB>}s to spaces before
    parsing the input string.  Must be called before C{parseString} when the
    input grammar contains elements that match C{<TAB>} characters.
    """
    self.keepTabs = True
    return self

def ignore(self, other):
    """Define expression to be ignored (e.g., comments) while doing pattern
    matching; may be called repeatedly, to define multiple comment or other
    ignorable patterns.
    """
    if isinstance(other, basestring):
        other = Suppress(other)
    if isinstance(other, Suppress):
        # avoid registering the same suppressed expression twice
        if other not in self.ignoreExprs:
            self.ignoreExprs.append(other)
    else:
        self.ignoreExprs.append(Suppress(other.copy()))
    return self

def setDebugActions(self, startAction, successAction, exceptionAction):
    """Enable display of debugging messages while doing pattern matching.
    Any action left as None falls back to the module default.
    """
    self.debugActions = (startAction or _defaultStartDebugAction,
                         successAction or _defaultSuccessDebugAction,
                         exceptionAction or _defaultExceptionDebugAction)
    self.debug = True
    return self

def setDebug(self, flag=True):
    """Enable display of debugging messages while doing pattern matching.
    Set C{flag} to True to enable, False to disable.

    The output shown is that produced by the default debug actions - custom
    debug actions can be specified using L{setDebugActions}.  Also note the
    use of L{setName} to assign a human-readable name to the expression,
    which makes debugging and exception messages easier to understand.
    """
    if flag:
        self.setDebugActions(_defaultStartDebugAction, _defaultSuccessDebugAction, _defaultExceptionDebugAction)
    else:
        self.debug = False
    return self
def __str__(self):
    return self.name

def __repr__(self):
    return _ustr(self)

def streamline(self):
    self.streamlined = True
    self.strRepr = None
    return self

def checkRecursion(self, parseElementList):
    pass

def validate(self, validateTrace=None):
    """Check defined expressions for valid structure, check for infinite
    recursive definitions.

    FIX: the trace argument formerly used a mutable default value ([]),
    which is shared across calls; a None sentinel is backward-compatible
    and avoids that pitfall.
    """
    self.checkRecursion([])

def parseFile(self, file_or_filename, parseAll=False):
    """Execute the parse expression on the given file or filename.
    If a filename is specified (instead of a file object),
    the entire file is opened, read, and closed before parsing.
    """
    try:
        file_contents = file_or_filename.read()
    except AttributeError:
        # not a file-like object; treat it as a path
        with open(file_or_filename, "r") as f:
            file_contents = f.read()
    try:
        return self.parseString(file_contents, parseAll)
    except ParseBaseException as exc:
        if ParserElement.verbose_stacktrace:
            raise
        else:
            # catch and re-raise exception from here, clears out pyparsing internal stack trace
            raise exc

def __eq__(self, other):
    """Equality: identity or identical instance state for two ParserElements;
    a string argument is tested by attempting a full parse (see L{matches}).
    """
    if isinstance(other, ParserElement):
        return self is other or vars(self) == vars(other)
    elif isinstance(other, basestring):
        return self.matches(other)
    # FIX: the original wrote "super(ParserElement,self)==other", which
    # compares the super() proxy object itself (always unequal for distinct
    # objects) instead of delegating to the inherited __eq__.
    return super(ParserElement, self).__eq__(other)
def __ne__(self, other):
    return not (self == other)

def __hash__(self):
    return hash(id(self))

def __req__(self, other):
    return self == other

def __rne__(self, other):
    return not (self == other)

def matches(self, testString, parseAll=True):
    """Method for quick testing of a parser against a test string.  Good for
    simple inline microtests of sub expressions while building up larger parser.

    Parameters:
     - testString - to test against this expression for a match
     - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests

    Example::
        expr = Word(nums)
        assert expr.matches("100")
    """
    try:
        self.parseString(_ustr(testString), parseAll=parseAll)
    except ParseBaseException:
        return False
    return True

def runTests(self, tests, parseAll=True, comment='#', fullDump=True, printResults=True, failureTests=False):
    """Execute the parse expression on a series of test strings, showing each
    test, the parsed results or where the parse failed.  Quick and easy way to
    run a parse expression against a list of sample strings.

    Parameters:
     - tests - a list of separate test strings, or a multiline string of test strings
     - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests
     - comment - (default=C{'#'}) - expression for indicating embedded comments in the test
          string; pass None to disable comment filtering
     - fullDump - (default=C{True}) - dump results as list followed by results names in nested outline;
          if False, only dump nested list
     - printResults - (default=C{True}) prints test output to stdout
     - failureTests - (default=C{False}) indicates if these tests are expected to fail parsing

    Returns: a (success, results) tuple, where success indicates that all tests succeeded
    (or failed if C{failureTests} is True), and the results contain a list of lines of each
    test's output.

    Each test string must be on a single line. If you want to test a string that spans
    multiple lines, create a test like this::

        expr.runTest(r"this is a test\\n of strings that spans \\n 3 lines")

    (Note that this is a raw string literal, you must include the leading 'r'.)
    """
    # normalize a multiline string into a list of stripped test lines
    if isinstance(tests, basestring):
        tests = list(map(str.strip, tests.rstrip().splitlines()))
    if isinstance(comment, basestring):
        comment = Literal(comment)

    allResults = []
    comments = []
    success = True
    for test in tests:
        # collect comment lines (and blank lines following them) for the
        # header of the next real test
        if comment is not None and comment.matches(test, False) or comments and not test:
            comments.append(test)
            continue
        if not test:
            continue

        out = ['\n'.join(comments), test]
        comments = []
        try:
            # allow literal "\n" sequences in a single-line test string
            test = test.replace(r'\n', '\n')
            result = self.parseString(test, parseAll=parseAll)
            out.append(result.dump(full=fullDump))
            success = success and not failureTests
        except ParseBaseException as err:
            fatal = "(FATAL)" if isinstance(err, ParseFatalException) else ""
            if '\n' in test:
                out.append(line(err.loc, test))
                out.append(' ' * (col(err.loc, test) - 1) + '^' + fatal)
            else:
                out.append(' ' * err.loc + '^' + fatal)
            out.append("FAIL: " + str(err))
            success = success and failureTests
            result = err
        except Exception as exc:
            out.append("FAIL-EXCEPTION: " + str(exc))
            success = success and failureTests
            result = exc

        if printResults:
            if fullDump:
                out.append('')
            print('\n'.join(out))

        allResults.append((test, result))

    return success, allResults
class Token(ParserElement):
    """Abstract C{ParserElement} subclass, for defining atomic matching patterns."""
    def __init__(self):
        super(Token, self).__init__(savelist=False)


class Empty(Token):
    """An empty token, will always match."""
    def __init__(self):
        super(Empty, self).__init__()
        self.name = "Empty"
        self.mayReturnEmpty = True
        self.mayIndexError = False


class NoMatch(Token):
    """A token that will never match."""
    def __init__(self):
        super(NoMatch, self).__init__()
        self.name = "NoMatch"
        self.mayReturnEmpty = True
        self.mayIndexError = False
        self.errmsg = "Unmatchable token"

    def parseImpl(self, instring, loc, doActions=True):
        raise ParseException(instring, loc, self.errmsg, self)


class Literal(Token):
    """Token to exactly match a specified string.

    Example::
        Literal('blah').parseString('blah')  # -> ['blah']
        Literal('blah').parseString('blahfooblah')  # -> ['blah']
        Literal('blah').parseString('bla')  # -> Exception: Expected "blah"

    For case-insensitive matching, use L{CaselessLiteral}.  For keyword
    matching (force word break before and after the matched string),
    use L{Keyword} or L{CaselessKeyword}.
    """
    def __init__(self, matchString):
        super(Literal, self).__init__()
        self.match = matchString
        self.matchLen = len(matchString)
        try:
            self.firstMatchChar = matchString[0]
        except IndexError:
            warnings.warn("null string passed to Literal; use Empty() instead",
                          SyntaxWarning, stacklevel=2)
            # degenerate case: behave like Empty from here on
            self.__class__ = Empty
        self.name = '"%s"' % _ustr(self.match)
        self.errmsg = "Expected " + self.name
        self.mayReturnEmpty = False
        self.mayIndexError = False

    # Performance tuning: this routine gets called a *lot*
    # if this is a single character match string and the first character matches,
    # short-circuit as quickly as possible, and avoid calling startswith
    def parseImpl(self, instring, loc, doActions=True):
        if (instring[loc] == self.firstMatchChar
                and (self.matchLen == 1 or instring.startswith(self.match, loc))):
            return loc + self.matchLen, self.match
        raise ParseException(instring, loc, self.errmsg, self)
_L = Literal
ParserElement._literalStringClass = Literal


class Keyword(Token):
    """Token to exactly match a specified string as a keyword, that is, it must be
    immediately followed by a non-keyword character.  Compare with C{L{Literal}}:

     - C{Literal("if")} will match the leading C{'if'} in C{'ifAndOnlyIf'}.
     - C{Keyword("if")} will not; it will only match the leading C{'if'} in
       C{'if x=1'}, or C{'if(y==2)'}

    Accepts two optional constructor arguments in addition to the keyword string:
     - C{identChars} is a string of characters that would be valid identifier
       characters, defaulting to all alphanumerics + "_" and "$"
     - C{caseless} allows case-insensitive matching, default is C{False}.

    Example::
        Keyword("start").parseString("start")  # -> ['start']
        Keyword("start").parseString("starting")  # -> Exception

    For case-insensitive matching, use L{CaselessKeyword}.
    """
    DEFAULT_KEYWORD_CHARS = alphanums + "_$"

    def __init__(self, matchString, identChars=None, caseless=False):
        super(Keyword, self).__init__()
        if identChars is None:
            identChars = Keyword.DEFAULT_KEYWORD_CHARS
        self.match = matchString
        self.matchLen = len(matchString)
        try:
            self.firstMatchChar = matchString[0]
        except IndexError:
            warnings.warn("null string passed to Keyword; use Empty() instead",
                          SyntaxWarning, stacklevel=2)
        self.name = '"%s"' % self.match
        self.errmsg = "Expected " + self.name
        self.mayReturnEmpty = False
        self.mayIndexError = False
        self.caseless = caseless
        if caseless:
            # compare uppercased text against uppercased keyword/identChars
            self.caselessmatch = matchString.upper()
            identChars = identChars.upper()
        self.identChars = set(identChars)

    def parseImpl(self, instring, loc, doActions=True):
        if self.caseless:
            if ((instring[loc:loc + self.matchLen].upper() == self.caselessmatch)
                    and (loc >= len(instring) - self.matchLen
                         or instring[loc + self.matchLen].upper() not in self.identChars)
                    and (loc == 0 or instring[loc - 1].upper() not in self.identChars)):
                return loc + self.matchLen, self.match
        else:
            if (instring[loc] == self.firstMatchChar
                    and (self.matchLen == 1 or instring.startswith(self.match, loc))
                    and (loc >= len(instring) - self.matchLen
                         or instring[loc + self.matchLen] not in self.identChars)
                    and (loc == 0 or instring[loc - 1] not in self.identChars)):
                return loc + self.matchLen, self.match
        raise ParseException(instring, loc, self.errmsg, self)

    def copy(self):
        c = super(Keyword, self).copy()
        # copies revert to the module-default keyword characters
        c.identChars = Keyword.DEFAULT_KEYWORD_CHARS
        return c

    @staticmethod
    def setDefaultKeywordChars(chars):
        """Overrides the default Keyword chars"""
        Keyword.DEFAULT_KEYWORD_CHARS = chars
class CaselessLiteral(Literal):
    """Token to match a specified string, ignoring case of letters.
    Note: the matched results will always be in the case of the given
    match string, NOT the case of the input text.

    Example::
        OneOrMore(CaselessLiteral("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD', 'CMD']

    (Contrast with example for L{CaselessKeyword}.)
    """
    def __init__(self, matchString):
        super(CaselessLiteral, self).__init__(matchString.upper())
        # Preserve the defining literal.
        self.returnString = matchString
        self.name = "'%s'" % self.returnString
        self.errmsg = "Expected " + self.name

    def parseImpl(self, instring, loc, doActions=True):
        if instring[loc:loc + self.matchLen].upper() == self.match:
            return loc + self.matchLen, self.returnString
        raise ParseException(instring, loc, self.errmsg, self)


class CaselessKeyword(Keyword):
    """Caseless version of L{Keyword}.

    Example::
        OneOrMore(CaselessKeyword("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD']

    (Contrast with example for L{CaselessLiteral}.)
    """
    def __init__(self, matchString, identChars=None):
        super(CaselessKeyword, self).__init__(matchString, identChars, caseless=True)

    def parseImpl(self, instring, loc, doActions=True):
        if ((instring[loc:loc + self.matchLen].upper() == self.caselessmatch)
                and (loc >= len(instring) - self.matchLen
                     or instring[loc + self.matchLen].upper() not in self.identChars)):
            return loc + self.matchLen, self.match
        raise ParseException(instring, loc, self.errmsg, self)


class CloseMatch(Token):
    """A variation on L{Literal} which matches "close" matches, that is,
    strings with at most 'n' mismatching characters.  C{CloseMatch} takes parameters:

     - C{match_string} - string to be matched
     - C{maxMismatches} - (C{default=1}) maximum number of mismatches allowed to count as a match

    The results from a successful parse will contain the matched text from the
    input string and the following named results:

     - C{mismatches} - a list of the positions within the match_string where mismatches were found
     - C{original} - the original match_string used to compare against the input string

    If C{mismatches} is an empty list, then the match was an exact match.

    Example::
        patt = CloseMatch("ATCATCGAATGGA")
        patt.parseString("ATCATCGAAXGGA") # -> (['ATCATCGAAXGGA'], {'mismatches': [[9]], 'original': ['ATCATCGAATGGA']})
        patt.parseString("ATCAXCGAAXGGA") # -> Exception: Expected 'ATCATCGAATGGA' (with up to 1 mismatches) (at char 0), (line:1, col:1)

        # close match allowing up to 2 mismatches
        patt = CloseMatch("ATCATCGAATGGA", maxMismatches=2)
        patt.parseString("ATCAXCGAAXGGA") # -> (['ATCAXCGAAXGGA'], {'mismatches': [[4, 9]], 'original': ['ATCATCGAATGGA']})
    """
    def __init__(self, match_string, maxMismatches=1):
        super(CloseMatch, self).__init__()
        self.name = match_string
        self.match_string = match_string
        self.maxMismatches = maxMismatches
        self.errmsg = "Expected %r (with up to %d mismatches)" % (self.match_string, self.maxMismatches)
        self.mayIndexError = False
        self.mayReturnEmpty = False

    def parseImpl(self, instring, loc, doActions=True):
        start = loc
        instrlen = len(instring)
        maxloc = start + len(self.match_string)

        if maxloc <= instrlen:
            match_string = self.match_string
            match_stringloc = 0
            mismatches = []
            maxMismatches = self.maxMismatches

            for match_stringloc, (src, mat) in enumerate(zip(instring[loc:maxloc], match_string)):
                if src != mat:
                    mismatches.append(match_stringloc)
                    if len(mismatches) > maxMismatches:
                        break
            else:
                # ran the whole comparison without exceeding the mismatch budget
                loc = match_stringloc + 1
                results = ParseResults([instring[start:loc]])
                results['original'] = match_string
                results['mismatches'] = mismatches
                return loc, results

        raise ParseException(instring, loc, self.errmsg, self)
class Word(Token):
    """Token for matching words composed of allowed character sets.

    Defined with a string containing all allowed initial characters, an
    optional string containing allowed body characters (if omitted, defaults
    to the initial character set), and an optional minimum, maximum, and/or
    exact length.  The default value for C{min} is 1 (a minimum value < 1 is
    not valid); the default values for C{max} and C{exact} are 0, meaning no
    maximum or exact length restriction.  An optional C{excludeChars}
    parameter can list characters to remove from the allowed sets; useful to
    define a word of all printables except for one or two characters.

    L{srange} is useful for defining custom character set strings, using
    range notation from regular expression character sets.

    A common mistake is to use C{Word} to match a specific literal string, as
    in C{Word("Address")} — C{Word} uses its string argument to define
    I{sets} of matchable characters.  To match an exact literal string, use
    L{Literal} or L{Keyword}.

    pyparsing includes helper strings for building Words: L{alphas}, L{nums},
    L{alphanums}, L{hexnums}, L{alphas8bit}, L{punc8bit}, L{printables}.

    Example::
        # a word composed of digits
        integer = Word(nums)  # equivalent to Word("0123456789") or Word(srange("0-9"))

        # a word with a leading capital, and zero or more lowercase
        capital_word = Word(alphas.upper(), alphas.lower())

        # hostnames are alphanumeric, with leading alpha, and '-'
        hostname = Word(alphas, alphanums+'-')

        # any string of non-whitespace characters, except for ','
        csv_value = Word(printables, excludeChars=",")
    """
    def __init__(self, initChars, bodyChars=None, min=1, max=0, exact=0,
                 asKeyword=False, excludeChars=None):
        super(Word, self).__init__()
        if excludeChars:
            initChars = ''.join(c for c in initChars if c not in excludeChars)
            if bodyChars:
                bodyChars = ''.join(c for c in bodyChars if c not in excludeChars)
        self.initCharsOrig = initChars
        self.initChars = set(initChars)
        if bodyChars:
            self.bodyCharsOrig = bodyChars
            self.bodyChars = set(bodyChars)
        else:
            # body defaults to the initial character set
            self.bodyCharsOrig = initChars
            self.bodyChars = set(initChars)

        self.maxSpecified = max > 0

        if min < 1:
            raise ValueError("cannot specify a minimum length < 1; use Optional(Word()) if zero-length word is permitted")

        self.minLen = min
        self.maxLen = max if max > 0 else _MAX_INT
        if exact > 0:
            self.maxLen = exact
            self.minLen = exact

        self.name = _ustr(self)
        self.errmsg = "Expected " + self.name
        self.mayIndexError = False
        self.asKeyword = asKeyword

        # fast path: for default lengths and space-free charsets, precompile
        # an equivalent regular expression
        if ' ' not in self.initCharsOrig + self.bodyCharsOrig and (min == 1 and max == 0 and exact == 0):
            if self.bodyCharsOrig == self.initCharsOrig:
                self.reString = "[%s]+" % _escapeRegexRangeChars(self.initCharsOrig)
            elif len(self.initCharsOrig) == 1:
                self.reString = "%s[%s]*" % (re.escape(self.initCharsOrig),
                                             _escapeRegexRangeChars(self.bodyCharsOrig),)
            else:
                self.reString = "[%s][%s]*" % (_escapeRegexRangeChars(self.initCharsOrig),
                                               _escapeRegexRangeChars(self.bodyCharsOrig),)
            if self.asKeyword:
                self.reString = r"\b" + self.reString + r"\b"
            try:
                self.re = re.compile(self.reString)
            except Exception:
                self.re = None

    def parseImpl(self, instring, loc, doActions=True):
        # regex fast path, when available
        if self.re:
            result = self.re.match(instring, loc)
            if not result:
                raise ParseException(instring, loc, self.errmsg, self)
            return result.end(), result.group()

        if not (instring[loc] in self.initChars):
            raise ParseException(instring, loc, self.errmsg, self)

        start = loc
        loc += 1
        instrlen = len(instring)
        bodychars = self.bodyChars
        maxloc = min(start + self.maxLen, instrlen)
        while loc < maxloc and instring[loc] in bodychars:
            loc += 1

        throwException = False
        if loc - start < self.minLen:
            throwException = True
        if self.maxSpecified and loc < instrlen and instring[loc] in bodychars:
            throwException = True
        if self.asKeyword:
            if (start > 0 and instring[start - 1] in bodychars) or \
               (loc < instrlen and instring[loc] in bodychars):
                throwException = True

        if throwException:
            raise ParseException(instring, loc, self.errmsg, self)

        return loc, instring[start:loc]

    def __str__(self):
        try:
            return super(Word, self).__str__()
        except Exception:
            pass

        if self.strRepr is None:
            def charsAsStr(s):
                # abbreviate long character sets in the repr
                return s[:4] + "..." if len(s) > 4 else s

            if self.initCharsOrig != self.bodyCharsOrig:
                self.strRepr = "W:(%s,%s)" % (charsAsStr(self.initCharsOrig), charsAsStr(self.bodyCharsOrig))
            else:
                self.strRepr = "W:(%s)" % charsAsStr(self.initCharsOrig)

        return self.strRepr
class Regex(Token):
    """Token for matching strings that match a given regular expression.
    Defined with a string specifying the regular expression in a form
    recognized by the inbuilt Python re module.  If the given regex contains
    named groups (defined using C{(?P<name>...)}), these will be preserved as
    named parse results.

    Example::
        realnum = Regex(r"[+-]?\d+\.\d*")
        date = Regex(r'(?P<year>\d{4})-(?P<month>\d\d?)-(?P<day>\d\d?)')
        # ref: http://stackoverflow.com/questions/267399/how-do-you-match-only-valid-roman-numerals-with-a-regular-expression
        roman = Regex(r"M{0,4}(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})")
    """
    compiledREtype = type(re.compile("[A-Z]"))

    def __init__(self, pattern, flags=0):
        """The parameters C{pattern} and C{flags} are passed to the
        C{re.compile()} function as-is.  See the Python C{re} module for an
        explanation of the acceptable patterns and flags."""
        super(Regex, self).__init__()

        if isinstance(pattern, basestring):
            if not pattern:
                warnings.warn("null string passed to Regex; use Empty() instead",
                              SyntaxWarning, stacklevel=2)
            self.pattern = pattern
            self.flags = flags
            try:
                self.re = re.compile(self.pattern, self.flags)
                self.reString = self.pattern
            except sre_constants.error:
                warnings.warn("invalid pattern (%s) passed to Regex" % pattern,
                              SyntaxWarning, stacklevel=2)
                raise
        elif isinstance(pattern, Regex.compiledREtype):
            # accept an already-compiled pattern object
            self.re = pattern
            self.pattern = \
            self.reString = str(pattern)
            self.flags = flags
        else:
            raise ValueError("Regex may only be constructed with a string or a compiled RE object")

        self.name = _ustr(self)
        self.errmsg = "Expected " + self.name
        self.mayIndexError = False
        self.mayReturnEmpty = True

    def parseImpl(self, instring, loc, doActions=True):
        result = self.re.match(instring, loc)
        if not result:
            raise ParseException(instring, loc, self.errmsg, self)

        loc = result.end()
        ret = ParseResults(result.group())
        named = result.groupdict()
        if named:
            # expose named groups as named parse results
            for k in named:
                ret[k] = named[k]
        return loc, ret

    def __str__(self):
        try:
            return super(Regex, self).__str__()
        except Exception:
            pass

        if self.strRepr is None:
            self.strRepr = "Re:(%s)" % repr(self.pattern)

        return self.strRepr
class QuotedString(Token):
    r"""Token for matching strings that are delimited by quoting characters.

    Defined with the following parameters:
        - quoteChar - string of one or more characters defining the quote delimiting string
        - escChar - character to escape quotes, typically backslash (default=C{None})
        - escQuote - special quote sequence to escape an embedded quote string (such as SQL's "" to escape an embedded ") (default=C{None})
        - multiline - boolean indicating whether quotes can span multiple lines (default=C{False})
        - unquoteResults - boolean indicating whether the matched text should be unquoted (default=C{True})
        - endQuoteChar - string of one or more characters defining the end of the quote delimited string (default=C{None} => same as quoteChar)
        - convertWhitespaceEscapes - convert escaped whitespace (C{'\t'}, C{'\n'}, etc.) to actual whitespace (default=C{True})

    Example::
        qs = QuotedString('"')
        print(qs.searchString('lsjdf "This is the quote" sldjf'))
        complex_qs = QuotedString('{{', endQuoteChar='}}')
        print(complex_qs.searchString('lsjdf {{This is the "quote"}} sldjf'))
        sql_qs = QuotedString('"', escQuote='""')
        print(sql_qs.searchString('lsjdf "This is the quote with ""embedded"" quotes" sldjf'))
    prints::
        [['This is the quote']]
        [['This is the "quote"']]
        [['This is the quote with "embedded" quotes']]
    """
    def __init__(self, quoteChar, escChar=None, escQuote=None, multiline=False,
                 unquoteResults=True, endQuoteChar=None, convertWhitespaceEscapes=True):
        super(QuotedString, self).__init__()

        # remove white space from quote chars - wont work anyway
        quoteChar = quoteChar.strip()
        if not quoteChar:
            warnings.warn("quoteChar cannot be the empty string", SyntaxWarning, stacklevel=2)
            raise SyntaxError()

        if endQuoteChar is None:
            endQuoteChar = quoteChar
        else:
            endQuoteChar = endQuoteChar.strip()
            if not endQuoteChar:
                warnings.warn("endQuoteChar cannot be the empty string", SyntaxWarning, stacklevel=2)
                raise SyntaxError()

        self.quoteChar = quoteChar
        self.quoteCharLen = len(quoteChar)
        self.firstQuoteChar = quoteChar[0]
        self.endQuoteChar = endQuoteChar
        self.endQuoteCharLen = len(endQuoteChar)
        self.escChar = escChar
        self.escQuote = escQuote
        self.unquoteResults = unquoteResults
        self.convertWhitespaceEscapes = convertWhitespaceEscapes

        # build a regex for the quoted body: open quote, then any run of
        # characters that are not the end-quote start (nor newline, unless
        # multiline) nor the escape char
        if multiline:
            self.flags = re.MULTILINE | re.DOTALL
            self.pattern = r'%s(?:[^%s%s]' % \
                (re.escape(self.quoteChar),
                 _escapeRegexRangeChars(self.endQuoteChar[0]),
                 (escChar is not None and _escapeRegexRangeChars(escChar) or ''))
        else:
            self.flags = 0
            self.pattern = r'%s(?:[^%s\n\r%s]' % \
                (re.escape(self.quoteChar),
                 _escapeRegexRangeChars(self.endQuoteChar[0]),
                 (escChar is not None and _escapeRegexRangeChars(escChar) or ''))
        if len(self.endQuoteChar) > 1:
            # allow partial prefixes of a multi-char end quote inside the body
            self.pattern += (
                '|(?:' + ')|(?:'.join("%s[^%s]" % (re.escape(self.endQuoteChar[:i]),
                                                   _escapeRegexRangeChars(self.endQuoteChar[i]))
                                      for i in range(len(self.endQuoteChar) - 1, 0, -1)) + ')')
        if escQuote:
            self.pattern += (r'|(?:%s)' % re.escape(escQuote))
        if escChar:
            self.pattern += (r'|(?:%s.)' % re.escape(escChar))
            self.escCharReplacePattern = re.escape(self.escChar) + "(.)"
        self.pattern += (r')*%s' % re.escape(self.endQuoteChar))

        try:
            self.re = re.compile(self.pattern, self.flags)
            self.reString = self.pattern
        except sre_constants.error:
            warnings.warn("invalid pattern (%s) passed to Regex" % self.pattern,
                          SyntaxWarning, stacklevel=2)
            raise

        self.name = _ustr(self)
        self.errmsg = "Expected " + self.name
        self.mayIndexError = False
        self.mayReturnEmpty = True

    def parseImpl(self, instring, loc, doActions=True):
        result = instring[loc] == self.firstQuoteChar and self.re.match(instring, loc) or None
        if not result:
            raise ParseException(instring, loc, self.errmsg, self)

        loc = result.end()
        ret = result.group()

        if self.unquoteResults:
            # strip off quotes
            ret = ret[self.quoteCharLen:-self.endQuoteCharLen]

            if isinstance(ret, basestring):
                # replace escaped whitespace
                if '\\' in ret and self.convertWhitespaceEscapes:
                    ws_map = {
                        r'\t': '\t',
                        r'\n': '\n',
                        r'\f': '\f',
                        r'\r': '\r',
                    }
                    for wslit, wschar in ws_map.items():
                        ret = ret.replace(wslit, wschar)

                # replace escaped characters
                if self.escChar:
                    # FIX: the replacement was written as "\g<1>" in a non-raw
                    # string; "\g" is an invalid string escape (DeprecationWarning,
                    # SyntaxError as of Python 3.12).  Use a raw string.
                    ret = re.sub(self.escCharReplacePattern, r"\g<1>", ret)

                # replace escaped quotes
                if self.escQuote:
                    ret = ret.replace(self.escQuote, self.endQuoteChar)

        return loc, ret

    def __str__(self):
        try:
            return super(QuotedString, self).__str__()
        except Exception:
            pass

        if self.strRepr is None:
            self.strRepr = "quoted string, starting with %s ending with %s" % (self.quoteChar, self.endQuoteChar)

        return self.strRepr
class CharsNotIn(Token):
    """Token for matching words composed of characters I{not} in a given set
    (will include whitespace in matched characters if not listed in the
    provided exclusion set - see example).

    Defined with a string containing all disallowed characters, and an
    optional minimum, maximum, and/or exact length.  The default value for
    C{min} is 1 (a minimum value < 1 is not valid); the default values for
    C{max} and C{exact} are 0, meaning no maximum or exact length restriction.

    Example::
        # define a comma-separated-value as anything that is not a ','
        csv_value = CharsNotIn(',')
        print(delimitedList(csv_value).parseString("dkls,lsdkjf,s12 34,@!#,213"))
    prints::
        ['dkls', 'lsdkjf', 's12 34', '@!#', '213']
    """
    def __init__(self, notChars, min=1, max=0, exact=0):
        super(CharsNotIn, self).__init__()
        self.skipWhitespace = False
        self.notChars = notChars

        if min < 1:
            raise ValueError("cannot specify a minimum length < 1; use Optional(CharsNotIn()) if zero-length char group is permitted")

        self.minLen = min
        self.maxLen = max if max > 0 else _MAX_INT
        if exact > 0:
            self.maxLen = exact
            self.minLen = exact

        self.name = _ustr(self)
        self.errmsg = "Expected " + self.name
        self.mayReturnEmpty = (self.minLen == 0)
        self.mayIndexError = False

    def parseImpl(self, instring, loc, doActions=True):
        if instring[loc] in self.notChars:
            raise ParseException(instring, loc, self.errmsg, self)

        start = loc
        loc += 1
        notchars = self.notChars
        maxlen = min(start + self.maxLen, len(instring))
        while loc < maxlen and instring[loc] not in notchars:
            loc += 1

        if loc - start < self.minLen:
            raise ParseException(instring, loc, self.errmsg, self)

        return loc, instring[start:loc]

    def __str__(self):
        try:
            return super(CharsNotIn, self).__str__()
        except Exception:
            pass

        if self.strRepr is None:
            if len(self.notChars) > 4:
                self.strRepr = "!W:(%s...)" % self.notChars[:4]
            else:
                self.strRepr = "!W:(%s)" % self.notChars

        return self.strRepr
class White(Token):
    """Special matching class for matching whitespace.  Normally, whitespace
    is ignored by pyparsing grammars.  This class is included when some
    whitespace structures are significant.  Define with a string containing
    the whitespace characters to be matched; default is C{" \\t\\r\\n"}.
    Also takes optional C{min}, C{max}, and C{exact} arguments, as defined
    for the C{L{Word}} class.
    """
    whiteStrs = {
        " " : "<SPC>",
        "\t": "<TAB>",
        "\n": "<LF>",
        "\r": "<CR>",
        "\f": "<FF>",
        }

    def __init__(self, ws=" \t\r\n", min=1, max=0, exact=0):
        super(White, self).__init__()
        self.matchWhite = ws
        # characters matched here must not also be skipped as whitespace
        self.setWhitespaceChars("".join(c for c in self.whiteChars if c not in self.matchWhite))
        self.name = ("".join(White.whiteStrs[c] for c in self.matchWhite))
        self.mayReturnEmpty = True
        self.errmsg = "Expected " + self.name

        self.minLen = min
        self.maxLen = max if max > 0 else _MAX_INT
        if exact > 0:
            self.maxLen = exact
            self.minLen = exact

    def parseImpl(self, instring, loc, doActions=True):
        if not (instring[loc] in self.matchWhite):
            raise ParseException(instring, loc, self.errmsg, self)
        start = loc
        loc += 1
        maxloc = min(start + self.maxLen, len(instring))
        while loc < maxloc and instring[loc] in self.matchWhite:
            loc += 1

        if loc - start < self.minLen:
            raise ParseException(instring, loc, self.errmsg, self)

        return loc, instring[start:loc]


class _PositionToken(Token):
    # base class for zero-width positional matchers (line/word/string edges)
    def __init__(self):
        super(_PositionToken, self).__init__()
        self.name = self.__class__.__name__
        self.mayReturnEmpty = True
        self.mayIndexError = False


class GoToColumn(_PositionToken):
    """Token to advance to a specific column of input text; useful for
    tabular report scraping."""
    def __init__(self, colno):
        super(GoToColumn, self).__init__()
        self.col = colno

    def preParse(self, instring, loc):
        if col(loc, instring) != self.col:
            instrlen = len(instring)
            if self.ignoreExprs:
                loc = self._skipIgnorables(instring, loc)
            # consume whitespace up to (but not past) the target column
            while loc < instrlen and instring[loc].isspace() and col(loc, instring) != self.col:
                loc += 1
        return loc

    def parseImpl(self, instring, loc, doActions=True):
        thiscol = col(loc, instring)
        if thiscol > self.col:
            raise ParseException(instring, loc, "Text not in expected column", self)
        newloc = loc + self.col - thiscol
        ret = instring[loc:newloc]
        return newloc, ret
class LineStart(_PositionToken):
    """Matches if current position is at the beginning of a line within the
    parse string.

    Example::
        test = '''\
        AAA this line
        AAA and this line
          AAA but not this one
        B AAA and definitely not this one
        '''

        for t in (LineStart() + 'AAA' + restOfLine).searchString(test):
            print(t)
    Prints::
        ['AAA', ' this line']
        ['AAA', ' and this line']
    """
    def __init__(self):
        super(LineStart, self).__init__()
        self.errmsg = "Expected start of line"

    def parseImpl(self, instring, loc, doActions=True):
        if col(loc, instring) == 1:
            return loc, []
        raise ParseException(instring, loc, self.errmsg, self)


class LineEnd(_PositionToken):
    """Matches if current position is at the end of a line within the parse string"""
    def __init__(self):
        super(LineEnd, self).__init__()
        # newline itself must not be skipped as whitespace
        self.setWhitespaceChars(ParserElement.DEFAULT_WHITE_CHARS.replace("\n", ""))
        self.errmsg = "Expected end of line"

    def parseImpl(self, instring, loc, doActions=True):
        if loc < len(instring):
            if instring[loc] == "\n":
                return loc + 1, "\n"
            raise ParseException(instring, loc, self.errmsg, self)
        elif loc == len(instring):
            return loc + 1, []
        else:
            raise ParseException(instring, loc, self.errmsg, self)


class StringStart(_PositionToken):
    """Matches if current position is at the beginning of the parse string"""
    def __init__(self):
        super(StringStart, self).__init__()
        self.errmsg = "Expected start of text"

    def parseImpl(self, instring, loc, doActions=True):
        if loc != 0:
            # see if entire string up to here is just whitespace and ignoreables
            if loc != self.preParse(instring, 0):
                raise ParseException(instring, loc, self.errmsg, self)
        return loc, []
class StringEnd(_PositionToken):
    """Matches if current position is at the end of the parse string"""
    def __init__(self):
        super(StringEnd, self).__init__()
        self.errmsg = "Expected end of text"

    def parseImpl(self, instring, loc, doActions=True):
        if loc < len(instring):
            raise ParseException(instring, loc, self.errmsg, self)
        elif loc == len(instring):
            return loc + 1, []
        elif loc > len(instring):
            return loc, []
        else:
            raise ParseException(instring, loc, self.errmsg, self)


class WordStart(_PositionToken):
    """Matches if the current position is at the beginning of a Word, and is
    not preceded by any character in a given set of C{wordChars}
    (default=C{printables}).  To emulate the C{\b} behavior of regular
    expressions, use C{WordStart(alphanums)}.  C{WordStart} will also match
    at the beginning of the string being parsed, or at the beginning of a line.
    """
    def __init__(self, wordChars=printables):
        super(WordStart, self).__init__()
        self.wordChars = set(wordChars)
        self.errmsg = "Not at the start of a word"

    def parseImpl(self, instring, loc, doActions=True):
        if loc != 0:
            if (instring[loc - 1] in self.wordChars
                    or instring[loc] not in self.wordChars):
                raise ParseException(instring, loc, self.errmsg, self)
        return loc, []


class WordEnd(_PositionToken):
    """Matches if the current position is at the end of a Word, and is not
    followed by any character in a given set of C{wordChars}
    (default=C{printables}).  To emulate the C{\b} behavior of regular
    expressions, use C{WordEnd(alphanums)}.  C{WordEnd} will also match at
    the end of the string being parsed, or at the end of a line.
    """
    def __init__(self, wordChars=printables):
        super(WordEnd, self).__init__()
        self.wordChars = set(wordChars)
        self.skipWhitespace = False
        self.errmsg = "Not at the end of a word"

    def parseImpl(self, instring, loc, doActions=True):
        instrlen = len(instring)
        if instrlen > 0 and loc < instrlen:
            if (instring[loc] in self.wordChars
                    or instring[loc - 1] not in self.wordChars):
                raise ParseException(instring, loc, self.errmsg, self)
        return loc, []
C{WordEnd} will also match at the end ofthe string being parsed, or at the end of a line."""def __init__(self, wordChars = printables):super(WordEnd,self).__init__()self.wordChars = set(wordChars)self.skipWhitespace = Falseself.errmsg = "Not at the end of a word"def parseImpl(self, instring, loc, doActions=True ):instrlen = len(instring)if instrlen>0 and loc<instrlen:if (instring[loc] in self.wordChars orinstring[loc-1] not in self.wordChars):raise ParseException(instring, loc, self.errmsg, self)return loc, []class ParseExpression(ParserElement):"""Abstract subclass of ParserElement, for combining and post-processing parsed tokens."""def __init__( self, exprs, savelist = False ):super(ParseExpression,self).__init__(savelist)if isinstance( exprs, _generatorType ):exprs = list(exprs)if isinstance( exprs, basestring ):self.exprs = [ ParserElement._literalStringClass( exprs ) ]elif isinstance( exprs, collections.Iterable ):exprs = list(exprs)# if sequence of strings provided, wrap with Literalif all(isinstance(expr, basestring) for expr in exprs):exprs = map(ParserElement._literalStringClass, exprs)self.exprs = list(exprs)else:try:self.exprs = list( exprs )except TypeError:self.exprs = [ exprs ]self.callPreparse = Falsedef __getitem__( self, i ):return self.exprs[i]def append( self, other ):self.exprs.append( other )self.strRepr = Nonereturn selfdef leaveWhitespace( self ):"""Extends C{leaveWhitespace} defined in base class, and also invokes C{leaveWhitespace} onall contained expressions."""self.skipWhitespace = Falseself.exprs = [ e.copy() for e in self.exprs ]for e in self.exprs:e.leaveWhitespace()return selfdef ignore( self, other ):if isinstance( other, Suppress ):if other not in self.ignoreExprs:super( ParseExpression, self).ignore( other )for e in self.exprs:e.ignore( self.ignoreExprs[-1] )else:super( ParseExpression, self).ignore( other )for e in self.exprs:e.ignore( self.ignoreExprs[-1] )return selfdef __str__( self ):try:return 
super(ParseExpression,self).__str__()except Exception:passif self.strRepr is None:self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.exprs) )return self.strReprdef streamline( self ):super(ParseExpression,self).streamline()for e in self.exprs:e.streamline()# collapse nested And's of the form And( And( And( a,b), c), d) to And( a,b,c,d )# but only if there are no parse actions or resultsNames on the nested And's# (likewise for Or's and MatchFirst's)if ( len(self.exprs) == 2 ):other = self.exprs[0]if ( isinstance( other, self.__class__ ) andnot(other.parseAction) andother.resultsName is None andnot other.debug ):self.exprs = other.exprs[:] + [ self.exprs[1] ]self.strRepr = Noneself.mayReturnEmpty |= other.mayReturnEmptyself.mayIndexError |= other.mayIndexErrorother = self.exprs[-1]if ( isinstance( other, self.__class__ ) andnot(other.parseAction) andother.resultsName is None andnot other.debug ):self.exprs = self.exprs[:-1] + other.exprs[:]self.strRepr = Noneself.mayReturnEmpty |= other.mayReturnEmptyself.mayIndexError |= other.mayIndexErrorself.errmsg = "Expected " + _ustr(self)return selfdef setResultsName( self, name, listAllMatches=False ):ret = super(ParseExpression,self).setResultsName(name,listAllMatches)return retdef validate( self, validateTrace=[] ):tmp = validateTrace[:]+[self]for e in self.exprs:e.validate(tmp)self.checkRecursion( [] )def copy(self):ret = super(ParseExpression,self).copy()ret.exprs = [e.copy() for e in self.exprs]return retclass And(ParseExpression):"""Requires all given C{ParseExpression}s to be found in the given order.Expressions may be separated by whitespace.May be constructed using the C{'+'} operator.May also be constructed using the C{'-'} operator, which will suppress backtracking.Example::integer = Word(nums)name_expr = OneOrMore(Word(alphas))expr = And([integer("id"),name_expr("name"),integer("age")])# more easily written as:expr = integer("id") + name_expr("name") + integer("age")"""class _ErrorStop(Empty):def 
__init__(self, *args, **kwargs):super(And._ErrorStop,self).__init__(*args, **kwargs)self.name = '-'self.leaveWhitespace()def __init__( self, exprs, savelist = True ):super(And,self).__init__(exprs, savelist)self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs)self.setWhitespaceChars( self.exprs[0].whiteChars )self.skipWhitespace = self.exprs[0].skipWhitespaceself.callPreparse = Truedef parseImpl( self, instring, loc, doActions=True ):# pass False as last arg to _parse for first element, since we already# pre-parsed the string as part of our And pre-parsingloc, resultlist = self.exprs[0]._parse( instring, loc, doActions, callPreParse=False )errorStop = Falsefor e in self.exprs[1:]:if isinstance(e, And._ErrorStop):errorStop = Truecontinueif errorStop:try:loc, exprtokens = e._parse( instring, loc, doActions )except ParseSyntaxException:raiseexcept ParseBaseException as pe:pe.__traceback__ = Noneraise ParseSyntaxException._from_exception(pe)except IndexError:raise ParseSyntaxException(instring, len(instring), self.errmsg, self)else:loc, exprtokens = e._parse( instring, loc, doActions )if exprtokens or exprtokens.haskeys():resultlist += exprtokensreturn loc, resultlistdef __iadd__(self, other ):if isinstance( other, basestring ):other = ParserElement._literalStringClass( other )return self.append( other ) #And( [ self, other ] )def checkRecursion( self, parseElementList ):subRecCheckList = parseElementList[:] + [ self ]for e in self.exprs:e.checkRecursion( subRecCheckList )if not e.mayReturnEmpty:breakdef __str__( self ):if hasattr(self,"name"):return self.nameif self.strRepr is None:self.strRepr = "{" + " ".join(_ustr(e) for e in self.exprs) + "}"return self.strReprclass Or(ParseExpression):"""Requires that at least one C{ParseExpression} is found.If two expressions match, the expression that matches the longest string will be used.May be constructed using the C{'^'} operator.Example::# construct Or using '^' operatornumber = Word(nums) ^ Combine(Word(nums) + 
'.' + Word(nums))print(number.searchString("123 3.1416 789"))prints::[['123'], ['3.1416'], ['789']]"""def __init__( self, exprs, savelist = False ):super(Or,self).__init__(exprs, savelist)if self.exprs:self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs)else:self.mayReturnEmpty = Truedef parseImpl( self, instring, loc, doActions=True ):maxExcLoc = -1maxException = Nonematches = []for e in self.exprs:try:loc2 = e.tryParse( instring, loc )except ParseException as err:err.__traceback__ = Noneif err.loc > maxExcLoc:maxException = errmaxExcLoc = err.locexcept IndexError:if len(instring) > maxExcLoc:maxException = ParseException(instring,len(instring),e.errmsg,self)maxExcLoc = len(instring)else:# save match among all matches, to retry longest to shortestmatches.append((loc2, e))if matches:matches.sort(key=lambda x: -x[0])for _,e in matches:try:return e._parse( instring, loc, doActions )except ParseException as err:err.__traceback__ = Noneif err.loc > maxExcLoc:maxException = errmaxExcLoc = err.locif maxException is not None:maxException.msg = self.errmsgraise maxExceptionelse:raise ParseException(instring, loc, "no defined alternatives to match", self)def __ixor__(self, other ):if isinstance( other, basestring ):other = ParserElement._literalStringClass( other )return self.append( other ) #Or( [ self, other ] )def __str__( self ):if hasattr(self,"name"):return self.nameif self.strRepr is None:self.strRepr = "{" + " ^ ".join(_ustr(e) for e in self.exprs) + "}"return self.strReprdef checkRecursion( self, parseElementList ):subRecCheckList = parseElementList[:] + [ self ]for e in self.exprs:e.checkRecursion( subRecCheckList )class MatchFirst(ParseExpression):"""Requires that at least one C{ParseExpression} is found.If two expressions match, the first one listed is the one that will match.May be constructed using the C{'|'} operator.Example::# construct MatchFirst using '|' operator# watch the order of expressions to matchnumber = Word(nums) | Combine(Word(nums) + 
'.' + Word(nums))print(number.searchString("123 3.1416 789")) # Fail! -> [['123'], ['3'], ['1416'], ['789']]# put more selective expression firstnumber = Combine(Word(nums) + '.' + Word(nums)) | Word(nums)print(number.searchString("123 3.1416 789")) # Better -> [['123'], ['3.1416'], ['789']]"""def __init__( self, exprs, savelist = False ):super(MatchFirst,self).__init__(exprs, savelist)if self.exprs:self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs)else:self.mayReturnEmpty = Truedef parseImpl( self, instring, loc, doActions=True ):maxExcLoc = -1maxException = Nonefor e in self.exprs:try:ret = e._parse( instring, loc, doActions )return retexcept ParseException as err:if err.loc > maxExcLoc:maxException = errmaxExcLoc = err.locexcept IndexError:if len(instring) > maxExcLoc:maxException = ParseException(instring,len(instring),e.errmsg,self)maxExcLoc = len(instring)# only got here if no expression matched, raise exception for match that made it the furthestelse:if maxException is not None:maxException.msg = self.errmsgraise maxExceptionelse:raise ParseException(instring, loc, "no defined alternatives to match", self)def __ior__(self, other ):if isinstance( other, basestring ):other = ParserElement._literalStringClass( other )return self.append( other ) #MatchFirst( [ self, other ] )def __str__( self ):if hasattr(self,"name"):return self.nameif self.strRepr is None:self.strRepr = "{" + " | ".join(_ustr(e) for e in self.exprs) + "}"return self.strReprdef checkRecursion( self, parseElementList ):subRecCheckList = parseElementList[:] + [ self ]for e in self.exprs:e.checkRecursion( subRecCheckList )class Each(ParseExpression):"""Requires all given C{ParseExpression}s to be found, but in any order.Expressions may be separated by whitespace.May be constructed using the C{'&'} operator.Example::color = oneOf("RED ORANGE YELLOW GREEN BLUE PURPLE BLACK WHITE BROWN")shape_type = oneOf("SQUARE CIRCLE TRIANGLE STAR HEXAGON OCTAGON")integer = Word(nums)shape_attr = 
"shape:" + shape_type("shape")posn_attr = "posn:" + Group(integer("x") + ',' + integer("y"))("posn")color_attr = "color:" + color("color")size_attr = "size:" + integer("size")# use Each (using operator '&') to accept attributes in any order# (shape and posn are required, color and size are optional)shape_spec = shape_attr & posn_attr & Optional(color_attr) & Optional(size_attr)shape_spec.runTests('''shape: SQUARE color: BLACK posn: 100, 120shape: CIRCLE size: 50 color: BLUE posn: 50,80color:GREEN size:20 shape:TRIANGLE posn:20,40''')prints::shape: SQUARE color: BLACK posn: 100, 120['shape:', 'SQUARE', 'color:', 'BLACK', 'posn:', ['100', ',', '120']]- color: BLACK- posn: ['100', ',', '120']- x: 100- y: 120- shape: SQUAREshape: CIRCLE size: 50 color: BLUE posn: 50,80['shape:', 'CIRCLE', 'size:', '50', 'color:', 'BLUE', 'posn:', ['50', ',', '80']]- color: BLUE- posn: ['50', ',', '80']- x: 50- y: 80- shape: CIRCLE- size: 50color: GREEN size: 20 shape: TRIANGLE posn: 20,40['color:', 'GREEN', 'size:', '20', 'shape:', 'TRIANGLE', 'posn:', ['20', ',', '40']]- color: GREEN- posn: ['20', ',', '40']- x: 20- y: 40- shape: TRIANGLE- size: 20"""def __init__( self, exprs, savelist = True ):super(Each,self).__init__(exprs, savelist)self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs)self.skipWhitespace = Trueself.initExprGroups = Truedef parseImpl( self, instring, loc, doActions=True ):if self.initExprGroups:self.opt1map = dict((id(e.expr),e) for e in self.exprs if isinstance(e,Optional))opt1 = [ e.expr for e in self.exprs if isinstance(e,Optional) ]opt2 = [ e for e in self.exprs if e.mayReturnEmpty and not isinstance(e,Optional)]self.optionals = opt1 + opt2self.multioptionals = [ e.expr for e in self.exprs if isinstance(e,ZeroOrMore) ]self.multirequired = [ e.expr for e in self.exprs if isinstance(e,OneOrMore) ]self.required = [ e for e in self.exprs if not isinstance(e,(Optional,ZeroOrMore,OneOrMore)) ]self.required += self.multirequiredself.initExprGroups = 
FalsetmpLoc = loctmpReqd = self.required[:]tmpOpt = self.optionals[:]matchOrder = []keepMatching = Truewhile keepMatching:tmpExprs = tmpReqd + tmpOpt + self.multioptionals + self.multirequiredfailed = []for e in tmpExprs:try:tmpLoc = e.tryParse( instring, tmpLoc )except ParseException:failed.append(e)else:matchOrder.append(self.opt1map.get(id(e),e))if e in tmpReqd:tmpReqd.remove(e)elif e in tmpOpt:tmpOpt.remove(e)if len(failed) == len(tmpExprs):keepMatching = Falseif tmpReqd:missing = ", ".join(_ustr(e) for e in tmpReqd)raise ParseException(instring,loc,"Missing one or more required elements (%s)" % missing )# add any unmatched Optionals, in case they have default values definedmatchOrder += [e for e in self.exprs if isinstance(e,Optional) and e.expr in tmpOpt]resultlist = []for e in matchOrder:loc,results = e._parse(instring,loc,doActions)resultlist.append(results)finalResults = sum(resultlist, ParseResults([]))return loc, finalResultsdef __str__( self ):if hasattr(self,"name"):return self.nameif self.strRepr is None:self.strRepr = "{" + " & ".join(_ustr(e) for e in self.exprs) + "}"return self.strReprdef checkRecursion( self, parseElementList ):subRecCheckList = parseElementList[:] + [ self ]for e in self.exprs:e.checkRecursion( subRecCheckList )class ParseElementEnhance(ParserElement):"""Abstract subclass of C{ParserElement}, for combining and post-processing parsed tokens."""def __init__( self, expr, savelist=False ):super(ParseElementEnhance,self).__init__(savelist)if isinstance( expr, basestring ):if issubclass(ParserElement._literalStringClass, Token):expr = ParserElement._literalStringClass(expr)else:expr = ParserElement._literalStringClass(Literal(expr))self.expr = exprself.strRepr = Noneif expr is not None:self.mayIndexError = expr.mayIndexErrorself.mayReturnEmpty = expr.mayReturnEmptyself.setWhitespaceChars( expr.whiteChars )self.skipWhitespace = expr.skipWhitespaceself.saveAsList = expr.saveAsListself.callPreparse = 
expr.callPreparseself.ignoreExprs.extend(expr.ignoreExprs)def parseImpl( self, instring, loc, doActions=True ):if self.expr is not None:return self.expr._parse( instring, loc, doActions, callPreParse=False )else:raise ParseException("",loc,self.errmsg,self)def leaveWhitespace( self ):self.skipWhitespace = Falseself.expr = self.expr.copy()if self.expr is not None:self.expr.leaveWhitespace()return selfdef ignore( self, other ):if isinstance( other, Suppress ):if other not in self.ignoreExprs:super( ParseElementEnhance, self).ignore( other )if self.expr is not None:self.expr.ignore( self.ignoreExprs[-1] )else:super( ParseElementEnhance, self).ignore( other )if self.expr is not None:self.expr.ignore( self.ignoreExprs[-1] )return selfdef streamline( self ):super(ParseElementEnhance,self).streamline()if self.expr is not None:self.expr.streamline()return selfdef checkRecursion( self, parseElementList ):if self in parseElementList:raise RecursiveGrammarException( parseElementList+[self] )subRecCheckList = parseElementList[:] + [ self ]if self.expr is not None:self.expr.checkRecursion( subRecCheckList )def validate( self, validateTrace=[] ):tmp = validateTrace[:]+[self]if self.expr is not None:self.expr.validate(tmp)self.checkRecursion( [] )def __str__( self ):try:return super(ParseElementEnhance,self).__str__()except Exception:passif self.strRepr is None and self.expr is not None:self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.expr) )return self.strReprclass FollowedBy(ParseElementEnhance):"""Lookahead matching of the given parse expression. C{FollowedBy}does I{not} advance the parsing position within the input string, it onlyverifies that the specified parse expression matches at the currentposition. 
C{FollowedBy} always returns a null token list.Example::# use FollowedBy to match a label only if it is followed by a ':'data_word = Word(alphas)label = data_word + FollowedBy(':')attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))OneOrMore(attr_expr).parseString("shape: SQUARE color: BLACK posn: upper left").pprint()prints::[['shape', 'SQUARE'], ['color', 'BLACK'], ['posn', 'upper left']]"""def __init__( self, expr ):super(FollowedBy,self).__init__(expr)self.mayReturnEmpty = Truedef parseImpl( self, instring, loc, doActions=True ):self.expr.tryParse( instring, loc )return loc, []class NotAny(ParseElementEnhance):"""Lookahead to disallow matching with the given parse expression. C{NotAny}does I{not} advance the parsing position within the input string, it onlyverifies that the specified parse expression does I{not} match at the currentposition. Also, C{NotAny} does I{not} skip over leading whitespace. C{NotAny}always returns a null token list. 
May be constructed using the '~' operator.Example::"""def __init__( self, expr ):super(NotAny,self).__init__(expr)#~ self.leaveWhitespace()self.skipWhitespace = False # do NOT use self.leaveWhitespace(), don't want to propagate to exprsself.mayReturnEmpty = Trueself.errmsg = "Found unwanted token, "+_ustr(self.expr)def parseImpl( self, instring, loc, doActions=True ):if self.expr.canParseNext(instring, loc):raise ParseException(instring, loc, self.errmsg, self)return loc, []def __str__( self ):if hasattr(self,"name"):return self.nameif self.strRepr is None:self.strRepr = "~{" + _ustr(self.expr) + "}"return self.strReprclass _MultipleMatch(ParseElementEnhance):def __init__( self, expr, stopOn=None):super(_MultipleMatch, self).__init__(expr)self.saveAsList = Trueender = stopOnif isinstance(ender, basestring):ender = ParserElement._literalStringClass(ender)self.not_ender = ~ender if ender is not None else Nonedef parseImpl( self, instring, loc, doActions=True ):self_expr_parse = self.expr._parseself_skip_ignorables = self._skipIgnorablescheck_ender = self.not_ender is not Noneif check_ender:try_not_ender = self.not_ender.tryParse# must be at least one (but first see if we are the stopOn sentinel;# if so, fail)if check_ender:try_not_ender(instring, loc)loc, tokens = self_expr_parse( instring, loc, doActions, callPreParse=False )try:hasIgnoreExprs = (not not self.ignoreExprs)while 1:if check_ender:try_not_ender(instring, loc)if hasIgnoreExprs:preloc = self_skip_ignorables( instring, loc )else:preloc = locloc, tmptokens = self_expr_parse( instring, preloc, doActions )if tmptokens or tmptokens.haskeys():tokens += tmptokensexcept (ParseException,IndexError):passreturn loc, tokensclass OneOrMore(_MultipleMatch):"""Repetition of one or more of the given expression.Parameters:- expr - expression that must match one or more times- stopOn - (default=C{None}) - expression for a terminating sentinel(only required if the sentinel would ordinarily match the 
repetitionexpression)Example::data_word = Word(alphas)label = data_word + FollowedBy(':')attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join))text = "shape: SQUARE posn: upper left color: BLACK"OneOrMore(attr_expr).parseString(text).pprint() # Fail! read 'color' as data instead of next label -> [['shape', 'SQUARE color']]# use stopOn attribute for OneOrMore to avoid reading label string as part of the dataattr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))OneOrMore(attr_expr).parseString(text).pprint() # Better -> [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'BLACK']]# could also be written as(attr_expr * (1,)).parseString(text).pprint()"""def __str__( self ):if hasattr(self,"name"):return self.nameif self.strRepr is None:self.strRepr = "{" + _ustr(self.expr) + "}..."return self.strReprclass ZeroOrMore(_MultipleMatch):"""Optional repetition of zero or more of the given expression.Parameters:- expr - expression that must match zero or more times- stopOn - (default=C{None}) - expression for a terminating sentinel(only required if the sentinel would ordinarily match the repetitionexpression)Example: similar to L{OneOrMore}"""def __init__( self, expr, stopOn=None):super(ZeroOrMore,self).__init__(expr, stopOn=stopOn)self.mayReturnEmpty = Truedef parseImpl( self, instring, loc, doActions=True ):try:return super(ZeroOrMore, self).parseImpl(instring, loc, doActions)except (ParseException,IndexError):return loc, []def __str__( self ):if hasattr(self,"name"):return self.nameif self.strRepr is None:self.strRepr = "[" + _ustr(self.expr) + "]..."return self.strReprclass _NullToken(object):def __bool__(self):return False__nonzero__ = __bool__def __str__(self):return ""_optionalNotMatched = _NullToken()class Optional(ParseElementEnhance):"""Optional matching of the given expression.Parameters:- expr - expression that must match zero or more times- default (optional) - value to be 
returned if the optional expression is not found.Example::# US postal code can be a 5-digit zip, plus optional 4-digit qualifierzip = Combine(Word(nums, exact=5) + Optional('-' + Word(nums, exact=4)))zip.runTests('''# traditional ZIP code12345# ZIP+4 form12101-0001# invalid ZIP98765-''')prints::# traditional ZIP code12345['12345']# ZIP+4 form12101-0001['12101-0001']# invalid ZIP98765-^FAIL: Expected end of text (at char 5), (line:1, col:6)"""def __init__( self, expr, default=_optionalNotMatched ):super(Optional,self).__init__( expr, savelist=False )self.saveAsList = self.expr.saveAsListself.defaultValue = defaultself.mayReturnEmpty = Truedef parseImpl( self, instring, loc, doActions=True ):try:loc, tokens = self.expr._parse( instring, loc, doActions, callPreParse=False )except (ParseException,IndexError):if self.defaultValue is not _optionalNotMatched:if self.expr.resultsName:tokens = ParseResults([ self.defaultValue ])tokens[self.expr.resultsName] = self.defaultValueelse:tokens = [ self.defaultValue ]else:tokens = []return loc, tokensdef __str__( self ):if hasattr(self,"name"):return self.nameif self.strRepr is None:self.strRepr = "[" + _ustr(self.expr) + "]"return self.strReprclass SkipTo(ParseElementEnhance):"""Token for skipping over all undefined text until the matched expression is found.Parameters:- expr - target expression marking the end of the data to be skipped- include - (default=C{False}) if True, the target expression is also parsed(the skipped text and target expression are returned as a 2-element list).- ignore - (default=C{None}) used to define grammars (typically quoted strings andcomments) that might contain false matches to the target expression- failOn - (default=C{None}) define expressions that are not allowed to beincluded in the skipped test; if found before the target expression is found,the SkipTo is not a matchExample::report = '''Outstanding Issues Report - 1 Jan 2000# | Severity | Description | Days 
Open-----+----------+-------------------------------------------+-----------101 | Critical | Intermittent system crash | 694 | Cosmetic | Spelling error on Login ('log|n') | 1479 | Minor | System slow when running too many reports | 47'''integer = Word(nums)SEP = Suppress('|')# use SkipTo to simply match everything up until the next SEP# - ignore quoted strings, so that a '|' character inside a quoted string does not match# - parse action will call token.strip() for each matched token, i.e., the description bodystring_data = SkipTo(SEP, ignore=quotedString)string_data.setParseAction(tokenMap(str.strip))ticket_expr = (integer("issue_num") + SEP+ string_data("sev") + SEP+ string_data("desc") + SEP+ integer("days_open"))for tkt in ticket_expr.searchString(report):print tkt.dump()prints::['101', 'Critical', 'Intermittent system crash', '6']- days_open: 6- desc: Intermittent system crash- issue_num: 101- sev: Critical['94', 'Cosmetic', "Spelling error on Login ('log|n')", '14']- days_open: 14- desc: Spelling error on Login ('log|n')- issue_num: 94- sev: Cosmetic['79', 'Minor', 'System slow when running too many reports', '47']- days_open: 47- desc: System slow when running too many reports- issue_num: 79- sev: Minor"""def __init__( self, other, include=False, ignore=None, failOn=None ):super( SkipTo, self ).__init__( other )self.ignoreExpr = ignoreself.mayReturnEmpty = Trueself.mayIndexError = Falseself.includeMatch = includeself.asList = Falseif isinstance(failOn, basestring):self.failOn = ParserElement._literalStringClass(failOn)else:self.failOn = failOnself.errmsg = "No match found for "+_ustr(self.expr)def parseImpl( self, instring, loc, doActions=True ):startloc = locinstrlen = len(instring)expr = self.exprexpr_parse = self.expr._parseself_failOn_canParseNext = self.failOn.canParseNext if self.failOn is not None else Noneself_ignoreExpr_tryParse = self.ignoreExpr.tryParse if self.ignoreExpr is not None else Nonetmploc = locwhile tmploc <= instrlen:if 
self_failOn_canParseNext is not None:# break if failOn expression matchesif self_failOn_canParseNext(instring, tmploc):breakif self_ignoreExpr_tryParse is not None:# advance past ignore expressionswhile 1:try:tmploc = self_ignoreExpr_tryParse(instring, tmploc)except ParseBaseException:breaktry:expr_parse(instring, tmploc, doActions=False, callPreParse=False)except (ParseException, IndexError):# no match, advance loc in stringtmploc += 1else:# matched skipto expr, donebreakelse:# ran off the end of the input string without matching skipto expr, failraise ParseException(instring, loc, self.errmsg, self)# build up return valuesloc = tmplocskiptext = instring[startloc:loc]skipresult = ParseResults(skiptext)if self.includeMatch:loc, mat = expr_parse(instring,loc,doActions,callPreParse=False)skipresult += matreturn loc, skipresultclass Forward(ParseElementEnhance):"""Forward declaration of an expression to be defined later -used for recursive grammars, such as algebraic infix notation.When the expression is known, it is assigned to the C{Forward} variable using the '<<' operator.Note: take care when assigning to C{Forward} not to overlook precedence of operators.Specifically, '|' has a lower precedence than '<<', so that::fwdExpr << a | b | cwill actually be evaluated as::(fwdExpr << a) | b | cthereby leaving b and c out as parseable alternatives. 
It is recommended that youexplicitly group the values inserted into the C{Forward}::fwdExpr << (a | b | c)Converting to use the '<<=' operator instead will avoid this problem.See L{ParseResults.pprint} for an example of a recursive parser created usingC{Forward}."""def __init__( self, other=None ):super(Forward,self).__init__( other, savelist=False )def __lshift__( self, other ):if isinstance( other, basestring ):other = ParserElement._literalStringClass(other)self.expr = otherself.strRepr = Noneself.mayIndexError = self.expr.mayIndexErrorself.mayReturnEmpty = self.expr.mayReturnEmptyself.setWhitespaceChars( self.expr.whiteChars )self.skipWhitespace = self.expr.skipWhitespaceself.saveAsList = self.expr.saveAsListself.ignoreExprs.extend(self.expr.ignoreExprs)return selfdef __ilshift__(self, other):return self << otherdef leaveWhitespace( self ):self.skipWhitespace = Falsereturn selfdef streamline( self ):if not self.streamlined:self.streamlined = Trueif self.expr is not None:self.expr.streamline()return selfdef validate( self, validateTrace=[] ):if self not in validateTrace:tmp = validateTrace[:]+[self]if self.expr is not None:self.expr.validate(tmp)self.checkRecursion([])def __str__( self ):if hasattr(self,"name"):return self.namereturn self.__class__.__name__ + ": ..."# stubbed out for now - creates awful memory and perf issuesself._revertClass = self.__class__self.__class__ = _ForwardNoRecursetry:if self.expr is not None:retString = _ustr(self.expr)else:retString = "None"finally:self.__class__ = self._revertClassreturn self.__class__.__name__ + ": " + retStringdef copy(self):if self.expr is not None:return super(Forward,self).copy()else:ret = Forward()ret <<= selfreturn retclass _ForwardNoRecurse(Forward):def __str__( self ):return "..."class TokenConverter(ParseElementEnhance):"""Abstract subclass of C{ParseExpression}, for converting parsed results."""def __init__( self, expr, savelist=False ):super(TokenConverter,self).__init__( expr )#, savelist 
)self.saveAsList = Falseclass Combine(TokenConverter):"""Converter to concatenate all matching tokens to a single string.By default, the matching patterns must also be contiguous in the input string;this can be disabled by specifying C{'adjacent=False'} in the constructor.Example::real = Word(nums) + '.' + Word(nums)print(real.parseString('3.1416')) # -> ['3', '.', '1416']# will also erroneously match the followingprint(real.parseString('3. 1416')) # -> ['3', '.', '1416']real = Combine(Word(nums) + '.' + Word(nums))print(real.parseString('3.1416')) # -> ['3.1416']# no match when there are internal spacesprint(real.parseString('3. 1416')) # -> Exception: Expected W:(0123...)"""def __init__( self, expr, joinString="", adjacent=True ):super(Combine,self).__init__( expr )# suppress whitespace-stripping in contained parse expressions, but re-enable it on the Combine itselfif adjacent:self.leaveWhitespace()self.adjacent = adjacentself.skipWhitespace = Trueself.joinString = joinStringself.callPreparse = Truedef ignore( self, other ):if self.adjacent:ParserElement.ignore(self, other)else:super( Combine, self).ignore( other )return selfdef postParse( self, instring, loc, tokenlist ):retToks = tokenlist.copy()del retToks[:]retToks += ParseResults([ "".join(tokenlist._asStringList(self.joinString)) ], modal=self.modalResults)if self.resultsName and retToks.haskeys():return [ retToks ]else:return retToksclass Group(TokenConverter):"""Converter to return the matched tokens as a list - useful for returning tokens of C{L{ZeroOrMore}} and C{L{OneOrMore}} expressions.Example::ident = Word(alphas)num = Word(nums)term = ident | numfunc = ident + Optional(delimitedList(term))print(func.parseString("fn a,b,100")) # -> ['fn', 'a', 'b', '100']func = ident + Group(Optional(delimitedList(term)))print(func.parseString("fn a,b,100")) # -> ['fn', ['a', 'b', '100']]"""def __init__( self, expr ):super(Group,self).__init__( expr )self.saveAsList = Truedef postParse( self, instring, loc, 
tokenlist ):return [ tokenlist ]class Dict(TokenConverter):"""Converter to return a repetitive expression as a list, but also as a dictionary.Each element can also be referenced using the first token in the expression as its key.Useful for tabular report scraping when the first column can be used as a item key.Example::data_word = Word(alphas)label = data_word + FollowedBy(':')attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join))text = "shape: SQUARE posn: upper left color: light blue texture: burlap"attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))# print attributes as plain groupsprint(OneOrMore(attr_expr).parseString(text).dump())# instead of OneOrMore(expr), parse using Dict(OneOrMore(Group(expr))) - Dict will auto-assign namesresult = Dict(OneOrMore(Group(attr_expr))).parseString(text)print(result.dump())# access named fields as dict entries, or output as dictprint(result['shape'])print(result.asDict())prints::['shape', 'SQUARE', 'posn', 'upper left', 'color', 'light blue', 'texture', 'burlap'][['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']]- color: light blue- posn: upper left- shape: SQUARE- texture: burlapSQUARE{'color': 'light blue', 'posn': 'upper left', 'texture': 'burlap', 'shape': 'SQUARE'}See more examples at L{ParseResults} of accessing fields by results name."""def __init__( self, expr ):super(Dict,self).__init__( expr )self.saveAsList = Truedef postParse( self, instring, loc, tokenlist ):for i,tok in enumerate(tokenlist):if len(tok) == 0:continueikey = tok[0]if isinstance(ikey,int):ikey = _ustr(tok[0]).strip()if len(tok)==1:tokenlist[ikey] = _ParseResultsWithOffset("",i)elif len(tok)==2 and not isinstance(tok[1],ParseResults):tokenlist[ikey] = _ParseResultsWithOffset(tok[1],i)else:dictvalue = tok.copy() #ParseResults(i)del dictvalue[0]if len(dictvalue)!= 1 or (isinstance(dictvalue,ParseResults) and 
dictvalue.haskeys()):tokenlist[ikey] = _ParseResultsWithOffset(dictvalue,i)else:tokenlist[ikey] = _ParseResultsWithOffset(dictvalue[0],i)if self.resultsName:return [ tokenlist ]else:return tokenlistclass Suppress(TokenConverter):"""Converter for ignoring the results of a parsed expression.Example::source = "a, b, c,d"wd = Word(alphas)wd_list1 = wd + ZeroOrMore(',' + wd)print(wd_list1.parseString(source))# often, delimiters that are useful during parsing are just in the# way afterward - use Suppress to keep them out of the parsed outputwd_list2 = wd + ZeroOrMore(Suppress(',') + wd)print(wd_list2.parseString(source))prints::['a', ',', 'b', ',', 'c', ',', 'd']['a', 'b', 'c', 'd'](See also L{delimitedList}.)"""def postParse( self, instring, loc, tokenlist ):return []def suppress( self ):return selfclass OnlyOnce(object):"""Wrapper for parse actions, to ensure they are only called once."""def __init__(self, methodCall):self.callable = _trim_arity(methodCall)self.called = Falsedef __call__(self,s,l,t):if not self.called:results = self.callable(s,l,t)self.called = Truereturn resultsraise ParseException(s,l,"")def reset(self):self.called = Falsedef traceParseAction(f):"""Decorator for debugging parse actions.When the parse action is called, this decorator will print C{">> entering I{method-name}(line:I{current_source_line}, I{parse_location}, I{matched_tokens})".}When the parse action completes, the decorator will print C{"<<"} followed by the returned value, or any exception that the parse action raised.Example::wd = Word(alphas)@traceParseActiondef remove_duplicate_chars(tokens):return ''.join(sorted(set(''.join(tokens)))wds = OneOrMore(wd).setParseAction(remove_duplicate_chars)print(wds.parseString("slkdjs sld sldd sdlf sdljf"))prints::>>entering remove_duplicate_chars(line: 'slkdjs sld sldd sdlf sdljf', 0, (['slkdjs', 'sld', 'sldd', 'sdlf', 'sdljf'], {}))<<leaving remove_duplicate_chars (ret: 'dfjkls')['dfjkls']"""f = _trim_arity(f)def z(*paArgs):thisFunc = 
f.__name__s,l,t = paArgs[-3:]if len(paArgs)>3:thisFunc = paArgs[0].__class__.__name__ + '.' + thisFuncsys.stderr.write( ">>entering %s(line: '%s', %d, %r)\n" % (thisFunc,line(l,s),l,t) )try:ret = f(*paArgs)except Exception as exc:sys.stderr.write( "<<leaving %s (exception: %s)\n" % (thisFunc,exc) )raisesys.stderr.write( "<<leaving %s (ret: %r)\n" % (thisFunc,ret) )return rettry:z.__name__ = f.__name__except AttributeError:passreturn z## global helpers#def delimitedList( expr, delim=",", combine=False ):"""Helper to define a delimited list of expressions - the delimiter defaults to ','.By default, the list elements and delimiters can have intervening whitespace, andcomments, but this can be overridden by passing C{combine=True} in the constructor.If C{combine} is set to C{True}, the matching tokens are returned as a single tokenstring, with the delimiters included; otherwise, the matching tokens are returnedas a list of tokens, with the delimiters suppressed.Example::delimitedList(Word(alphas)).parseString("aa,bb,cc") # -> ['aa', 'bb', 'cc']delimitedList(Word(hexnums), delim=':', combine=True).parseString("AA:BB:CC:DD:EE") # -> ['AA:BB:CC:DD:EE']"""dlName = _ustr(expr)+" ["+_ustr(delim)+" "+_ustr(expr)+"]..."if combine:return Combine( expr + ZeroOrMore( delim + expr ) ).setName(dlName)else:return ( expr + ZeroOrMore( Suppress( delim ) + expr ) ).setName(dlName)def countedArray( expr, intExpr=None ):"""Helper to define a counted list of expressions.This helper defines a pattern of the form::integer expr expr expr...where the leading integer tells how many expr expressions follow.The matched tokens returns the array of expr tokens as a list - the leading count token is suppressed.If C{intExpr} is specified, it should be a pyparsing expression that produces an integer value.Example::countedArray(Word(alphas)).parseString('2 ab cd ef') # -> ['ab', 'cd']# in this parser, the leading integer value is given in binary,# '10' indicating that 2 values are in the 
arraybinaryConstant = Word('01').setParseAction(lambda t: int(t[0], 2))countedArray(Word(alphas), intExpr=binaryConstant).parseString('10 ab cd ef') # -> ['ab', 'cd']"""arrayExpr = Forward()def countFieldParseAction(s,l,t):n = t[0]arrayExpr << (n and Group(And([expr]*n)) or Group(empty))return []if intExpr is None:intExpr = Word(nums).setParseAction(lambda t:int(t[0]))else:intExpr = intExpr.copy()intExpr.setName("arrayLen")intExpr.addParseAction(countFieldParseAction, callDuringTry=True)return ( intExpr + arrayExpr ).setName('(len) ' + _ustr(expr) + '...')def _flatten(L):ret = []for i in L:if isinstance(i,list):ret.extend(_flatten(i))else:ret.append(i)return retdef matchPreviousLiteral(expr):"""Helper to define an expression that is indirectly defined fromthe tokens matched in a previous expression, that is, it looksfor a 'repeat' of a previous expression. For example::first = Word(nums)second = matchPreviousLiteral(first)matchExpr = first + ":" + secondwill match C{"1:1"}, but not C{"1:2"}. Because this matches aprevious literal, will also match the leading C{"1:1"} in C{"1:10"}.If this is not desired, use C{matchPreviousExpr}.Do I{not} use with packrat parsing enabled."""rep = Forward()def copyTokenToRepeater(s,l,t):if t:if len(t) == 1:rep << t[0]else:# flatten t tokenstflat = _flatten(t.asList())rep << And(Literal(tt) for tt in tflat)else:rep << Empty()expr.addParseAction(copyTokenToRepeater, callDuringTry=True)rep.setName('(prev) ' + _ustr(expr))return repdef matchPreviousExpr(expr):"""Helper to define an expression that is indirectly defined fromthe tokens matched in a previous expression, that is, it looksfor a 'repeat' of a previous expression. For example::first = Word(nums)second = matchPreviousExpr(first)matchExpr = first + ":" + secondwill match C{"1:1"}, but not C{"1:2"}. 
Because this matches byexpressions, will I{not} match the leading C{"1:1"} in C{"1:10"};the expressions are evaluated first, and then compared, soC{"1"} is compared with C{"10"}.Do I{not} use with packrat parsing enabled."""rep = Forward()e2 = expr.copy()rep <<= e2def copyTokenToRepeater(s,l,t):matchTokens = _flatten(t.asList())def mustMatchTheseTokens(s,l,t):theseTokens = _flatten(t.asList())if theseTokens != matchTokens:raise ParseException("",0,"")rep.setParseAction( mustMatchTheseTokens, callDuringTry=True )expr.addParseAction(copyTokenToRepeater, callDuringTry=True)rep.setName('(prev) ' + _ustr(expr))return repdef _escapeRegexRangeChars(s):#~ escape these chars: ^-]for c in r"\^-]":s = s.replace(c,_bslash+c)s = s.replace("\n",r"\n")s = s.replace("\t",r"\t")return _ustr(s)def oneOf( strs, caseless=False, useRegex=True ):"""Helper to quickly define a set of alternative Literals, and makes sure to dolongest-first testing when there is a conflict, regardless of the input order,but returns a C{L{MatchFirst}} for best performance.Parameters:- strs - a string of space-delimited literals, or a collection of string literals- caseless - (default=C{False}) - treat all literals as caseless- useRegex - (default=C{True}) - as an optimization, will generate a Regexobject; otherwise, will generate a C{MatchFirst} object (if C{caseless=True}, orif creating a C{Regex} raises an exception)Example::comp_oper = oneOf("< = > <= >= !=")var = Word(alphas)number = Word(nums)term = var | numbercomparison_expr = term + comp_oper + termprint(comparison_expr.searchString("B = 12 AA=23 B<=AA AA>12"))prints::[['B', '=', '12'], ['AA', '=', '23'], ['B', '<=', 'AA'], ['AA', '>', '12']]"""if caseless:isequal = ( lambda a,b: a.upper() == b.upper() )masks = ( lambda a,b: b.upper().startswith(a.upper()) )parseElementClass = CaselessLiteralelse:isequal = ( lambda a,b: a == b )masks = ( lambda a,b: b.startswith(a) )parseElementClass = Literalsymbols = []if isinstance(strs,basestring):symbols = 
strs.split()elif isinstance(strs, collections.Iterable):symbols = list(strs)else:warnings.warn("Invalid argument to oneOf, expected string or iterable",SyntaxWarning, stacklevel=2)if not symbols:return NoMatch()i = 0while i < len(symbols)-1:cur = symbols[i]for j,other in enumerate(symbols[i+1:]):if ( isequal(other, cur) ):del symbols[i+j+1]breakelif ( masks(cur, other) ):del symbols[i+j+1]symbols.insert(i,other)cur = otherbreakelse:i += 1if not caseless and useRegex:#~ print (strs,"->", "|".join( [ _escapeRegexChars(sym) for sym in symbols] ))try:if len(symbols)==len("".join(symbols)):return Regex( "[%s]" % "".join(_escapeRegexRangeChars(sym) for sym in symbols) ).setName(' | '.join(symbols))else:return Regex( "|".join(re.escape(sym) for sym in symbols) ).setName(' | '.join(symbols))except Exception:warnings.warn("Exception creating Regex for oneOf, building MatchFirst",SyntaxWarning, stacklevel=2)# last resort, just use MatchFirstreturn MatchFirst(parseElementClass(sym) for sym in symbols).setName(' | '.join(symbols))def dictOf( key, value ):"""Helper to easily and clearly define a dictionary by specifying the respective patternsfor the key and value. Takes care of defining the C{L{Dict}}, C{L{ZeroOrMore}}, and C{L{Group}} tokensin the proper order. The key pattern can include delimiting markers or punctuation,as long as they are suppressed, thereby leaving the significant key text. 
The valuepattern can include named results, so that the C{Dict} results can include named tokenfields.Example::text = "shape: SQUARE posn: upper left color: light blue texture: burlap"attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))print(OneOrMore(attr_expr).parseString(text).dump())attr_label = labelattr_value = Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)# similar to Dict, but simpler call formatresult = dictOf(attr_label, attr_value).parseString(text)print(result.dump())print(result['shape'])print(result.shape) # object attribute access works tooprint(result.asDict())prints::[['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']]- color: light blue- posn: upper left- shape: SQUARE- texture: burlapSQUARESQUARE{'color': 'light blue', 'shape': 'SQUARE', 'posn': 'upper left', 'texture': 'burlap'}"""return Dict( ZeroOrMore( Group ( key + value ) ) )def originalTextFor(expr, asString=True):"""Helper to return the original, untokenized text for a given expression. Useful torestore the parsed fields of an HTML start tag into the raw tag text itself, or torevert separate tokens with intervening whitespace back to the original matchinginput text. By default, returns astring containing the original parsed text.If the optional C{asString} argument is passed as C{False}, then the return value is aC{L{ParseResults}} containing any results names that were originally matched, and asingle token containing the original matched text from the input string. 
So ifthe expression passed to C{L{originalTextFor}} contains expressions with definedresults names, you must set C{asString} to C{False} if you want to preserve thoseresults name values.Example::src = "this is test <b> bold <i>text</i> </b> normal text "for tag in ("b","i"):opener,closer = makeHTMLTags(tag)patt = originalTextFor(opener + SkipTo(closer) + closer)print(patt.searchString(src)[0])prints::['<b> bold <i>text</i> </b>']['<i>text</i>']"""locMarker = Empty().setParseAction(lambda s,loc,t: loc)endlocMarker = locMarker.copy()endlocMarker.callPreparse = FalsematchExpr = locMarker("_original_start") + expr + endlocMarker("_original_end")if asString:extractText = lambda s,l,t: s[t._original_start:t._original_end]else:def extractText(s,l,t):t[:] = [s[t.pop('_original_start'):t.pop('_original_end')]]matchExpr.setParseAction(extractText)matchExpr.ignoreExprs = expr.ignoreExprsreturn matchExprdef ungroup(expr):"""Helper to undo pyparsing's default grouping of And expressions, evenif all but one are non-empty."""return TokenConverter(expr).setParseAction(lambda t:t[0])def locatedExpr(expr):"""Helper to decorate a returned token with its starting and ending locations in the input string.This helper adds the following results names:- locn_start = location where matched expression begins- locn_end = location where matched expression ends- value = the actual parsed resultsBe careful if the input text contains C{<TAB>} characters, you may want to callC{L{ParserElement.parseWithTabs}}Example::wd = Word(alphas)for match in locatedExpr(wd).searchString("ljsdf123lksdjjf123lkkjj1222"):print(match)prints::[[0, 'ljsdf', 5]][[8, 'lksdjjf', 15]][[18, 'lkkjj', 23]]"""locator = Empty().setParseAction(lambda s,l,t: l)return Group(locator("locn_start") + expr("value") + locator.copy().leaveWhitespace()("locn_end"))# convenience constants for positional expressionsempty = Empty().setName("empty")lineStart = LineStart().setName("lineStart")lineEnd = 
LineEnd().setName("lineEnd")stringStart = StringStart().setName("stringStart")stringEnd = StringEnd().setName("stringEnd")_escapedPunc = Word( _bslash, r"\[]-*.$+^?()~ ", exact=2 ).setParseAction(lambda s,l,t:t[0][1])_escapedHexChar = Regex(r"\\0?[xX][0-9a-fA-F]+").setParseAction(lambda s,l,t:unichr(int(t[0].lstrip(r'\0x'),16)))_escapedOctChar = Regex(r"\\0[0-7]+").setParseAction(lambda s,l,t:unichr(int(t[0][1:],8)))_singleChar = _escapedPunc | _escapedHexChar | _escapedOctChar | Word(printables, excludeChars=r'\]', exact=1) | Regex(r"\w", re.UNICODE)_charRange = Group(_singleChar + Suppress("-") + _singleChar)_reBracketExpr = Literal("[") + Optional("^").setResultsName("negate") + Group( OneOrMore( _charRange | _singleChar ) ).setResultsName("body") + "]"def srange(s):r"""Helper to easily define string ranges for use in Word construction. Borrowssyntax from regexp '[]' string range definitions::srange("[0-9]") -> "0123456789"srange("[a-z]") -> "abcdefghijklmnopqrstuvwxyz"srange("[a-z$_]") -> "abcdefghijklmnopqrstuvwxyz$_"The input string must be enclosed in []'s, and the returned string is the expandedcharacter set joined into a single string.The values enclosed in the []'s may be:- a single character- an escaped character with a leading backslash (such as C{\-} or C{\]})- an escaped hex character with a leading C{'\x'} (C{\x21}, which is a C{'!'} character)(C{\0x##} is also supported for backwards compatibility)- an escaped octal character with a leading C{'\0'} (C{\041}, which is a C{'!'} character)- a range of any of the above, separated by a dash (C{'a-z'}, etc.)- any combination of the above (C{'aeiouy'}, C{'a-zA-Z0-9_$'}, etc.)"""_expanded = lambda p: p if not isinstance(p,ParseResults) else ''.join(unichr(c) for c in range(ord(p[0]),ord(p[1])+1))try:return "".join(_expanded(part) for part in _reBracketExpr.parseString(s).body)except Exception:return ""def matchOnlyAtCol(n):"""Helper method for defining parse actions that require matching at a specificcolumn 
in the input text."""def verifyCol(strg,locn,toks):if col(locn,strg) != n:raise ParseException(strg,locn,"matched token not at column %d" % n)return verifyColdef replaceWith(replStr):"""Helper method for common parse actions that simply return a literal value. Especiallyuseful when used with C{L{transformString<ParserElement.transformString>}()}.Example::num = Word(nums).setParseAction(lambda toks: int(toks[0]))na = oneOf("N/A NA").setParseAction(replaceWith(math.nan))term = na | numOneOrMore(term).parseString("324 234 N/A 234") # -> [324, 234, nan, 234]"""return lambda s,l,t: [replStr]def removeQuotes(s,l,t):"""Helper parse action for removing quotation marks from parsed quoted strings.Example::# by default, quotation marks are included in parsed resultsquotedString.parseString("'Now is the Winter of our Discontent'") # -> ["'Now is the Winter of our Discontent'"]# use removeQuotes to strip quotation marks from parsed resultsquotedString.setParseAction(removeQuotes)quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["Now is the Winter of our Discontent"]"""return t[0][1:-1]def tokenMap(func, *args):"""Helper to define a parse action by mapping a function to all elements of a ParseResults list.If any additionalargs are passed, they are forwarded to the given function as additional arguments afterthe token, as in C{hex_integer = Word(hexnums).setParseAction(tokenMap(int, 16))}, which will convert theparsed data to an integer using base 16.Example (compare the last to example in L{ParserElement.transformString}::hex_ints = OneOrMore(Word(hexnums)).setParseAction(tokenMap(int, 16))hex_ints.runTests('''00 11 22 aa FF 0a 0d 1a''')upperword = Word(alphas).setParseAction(tokenMap(str.upper))OneOrMore(upperword).runTests('''my kingdom for a horse''')wd = Word(alphas).setParseAction(tokenMap(str.title))OneOrMore(wd).setParseAction(' '.join).runTests('''now is the winter of our discontent made glorious summer by this sun of york''')prints::00 11 22 aa FF 
0a 0d 1a[0, 17, 34, 170, 255, 10, 13, 26]my kingdom for a horse['MY', 'KINGDOM', 'FOR', 'A', 'HORSE']now is the winter of our discontent made glorious summer by this sun of york['Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York']"""def pa(s,l,t):return [func(tokn, *args) for tokn in t]try:func_name = getattr(func, '__name__',getattr(func, '__class__').__name__)except Exception:func_name = str(func)pa.__name__ = func_namereturn paupcaseTokens = tokenMap(lambda t: _ustr(t).upper())"""(Deprecated) Helper parse action to convert tokens to upper case. Deprecated in favor of L{pyparsing_common.upcaseTokens}"""downcaseTokens = tokenMap(lambda t: _ustr(t).lower())"""(Deprecated) Helper parse action to convert tokens to lower case. Deprecated in favor of L{pyparsing_common.downcaseTokens}"""def _makeTags(tagStr, xml):"""Internal helper to construct opening and closing tag expressions, given a tag name"""if isinstance(tagStr,basestring):resname = tagStrtagStr = Keyword(tagStr, caseless=not xml)else:resname = tagStr.nametagAttrName = Word(alphas,alphanums+"_-:")if (xml):tagAttrValue = dblQuotedString.copy().setParseAction( removeQuotes )openTag = Suppress("<") + tagStr("tag") + \Dict(ZeroOrMore(Group( tagAttrName + Suppress("=") + tagAttrValue ))) + \Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">")else:printablesLessRAbrack = "".join(c for c in printables if c not in ">")tagAttrValue = quotedString.copy().setParseAction( removeQuotes ) | Word(printablesLessRAbrack)openTag = Suppress("<") + tagStr("tag") + \Dict(ZeroOrMore(Group( tagAttrName.setParseAction(downcaseTokens) + \Optional( Suppress("=") + tagAttrValue ) ))) + \Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">")closeTag = Combine(_L("</") + tagStr + ">")openTag = openTag.setResultsName("start"+"".join(resname.replace(":"," ").title().split())).setName("<%s>" % 
resname)closeTag = closeTag.setResultsName("end"+"".join(resname.replace(":"," ").title().split())).setName("</%s>" % resname)openTag.tag = resnamecloseTag.tag = resnamereturn openTag, closeTagdef makeHTMLTags(tagStr):"""Helper to construct opening and closing tag expressions for HTML, given a tag name. Matchestags in either upper or lower case, attributes with namespaces and with quoted or unquoted values.Example::text = '<td>More info at the <a href="http://pyparsing.wikispaces.com">pyparsing</a> wiki page</td>'# makeHTMLTags returns pyparsing expressions for the opening and closing tags as a 2-tuplea,a_end = makeHTMLTags("A")link_expr = a + SkipTo(a_end)("link_text") + a_endfor link in link_expr.searchString(text):# attributes in the <A> tag (like "href" shown here) are also accessible as named resultsprint(link.link_text, '->', link.href)prints::pyparsing -> http://pyparsing.wikispaces.com"""return _makeTags( tagStr, False )def makeXMLTags(tagStr):"""Helper to construct opening and closing tag expressions for XML, given a tag name. Matchestags only in the given upper/lower case.Example: similar to L{makeHTMLTags}"""return _makeTags( tagStr, True )def withAttribute(*args,**attrDict):"""Helper to create a validating parse action to be used with start tags createdwith C{L{makeXMLTags}} or C{L{makeHTMLTags}}. Use C{withAttribute} to qualify a starting tagwith a required attribute value, to avoid false matches on common tags such asC{<TD>} or C{<DIV>}.Call C{withAttribute} with a series of attribute names and values. Specify the listof filter attributes names and values as:- keyword arguments, as in C{(align="right")}, or- as an explicit dict with C{**} operator, when an attribute name is also a Pythonreserved word, as in C{**{"class":"Customer", "align":"right"}}- a list of name-value tuples, as in ( ("ns1:class", "Customer"), ("ns2:align","right") )For attribute names with a namespace prefix, you must use the second form. 
Attributenames are matched insensitive to upper/lower case.If just testing for C{class} (with or without a namespace), use C{L{withClass}}.To verify that the attribute exists, but without specifying a value, passC{withAttribute.ANY_VALUE} as the value.Example::html = '''<div>Some text<div type="grid">1 4 0 1 0</div><div type="graph">1,3 2,3 1,1</div><div>this has no type</div></div>'''div,div_end = makeHTMLTags("div")# only match div tag having a type attribute with value "grid"div_grid = div().setParseAction(withAttribute(type="grid"))grid_expr = div_grid + SkipTo(div | div_end)("body")for grid_header in grid_expr.searchString(html):print(grid_header.body)# construct a match with any div tag having a type attribute, regardless of the valuediv_any_type = div().setParseAction(withAttribute(type=withAttribute.ANY_VALUE))div_expr = div_any_type + SkipTo(div | div_end)("body")for div_header in div_expr.searchString(html):print(div_header.body)prints::1 4 0 1 01 4 0 1 01,3 2,3 1,1"""if args:attrs = args[:]else:attrs = attrDict.items()attrs = [(k,v) for k,v in attrs]def pa(s,l,tokens):for attrName,attrValue in attrs:if attrName not in tokens:raise ParseException(s,l,"no matching attribute " + attrName)if attrValue != withAttribute.ANY_VALUE and tokens[attrName] != attrValue:raise ParseException(s,l,"attribute '%s' has value '%s', must be '%s'" %(attrName, tokens[attrName], attrValue))return pawithAttribute.ANY_VALUE = object()def withClass(classname, namespace=''):"""Simplified version of C{L{withAttribute}} when matching on a div class - madedifficult because C{class} is a reserved word in Python.Example::html = '''<div>Some text<div class="grid">1 4 0 1 0</div><div class="graph">1,3 2,3 1,1</div><div>this <div> has no class</div></div>'''div,div_end = makeHTMLTags("div")div_grid = div().setParseAction(withClass("grid"))grid_expr = div_grid + SkipTo(div | div_end)("body")for grid_header in grid_expr.searchString(html):print(grid_header.body)div_any_type = 
div().setParseAction(withClass(withAttribute.ANY_VALUE))div_expr = div_any_type + SkipTo(div | div_end)("body")for div_header in div_expr.searchString(html):print(div_header.body)prints::1 4 0 1 01 4 0 1 01,3 2,3 1,1"""classattr = "%s:class" % namespace if namespace else "class"return withAttribute(**{classattr : classname})opAssoc = _Constants()opAssoc.LEFT = object()opAssoc.RIGHT = object()def infixNotation( baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')') ):"""Helper method for constructing grammars of expressions made up ofoperators working in a precedence hierarchy. Operators may be unary orbinary, left- or right-associative. Parse actions can also be attachedto operator expressions. The generated parser will also recognize the useof parentheses to override operator precedences (see example below).Note: if you define a deep operator list, you may see performance issueswhen using infixNotation. See L{ParserElement.enablePackrat} for amechanism to potentially improve your parser performance.Parameters:- baseExpr - expression representing the most basic element for the nested- opList - list of tuples, one for each operator precedence level in theexpression grammar; each tuple is of the form(opExpr, numTerms, rightLeftAssoc, parseAction), where:- opExpr is the pyparsing expression for the operator;may also be a string, which will be converted to a Literal;if numTerms is 3, opExpr is a tuple of two expressions, for thetwo operators separating the 3 terms- numTerms is the number of terms for this operator (mustbe 1, 2, or 3)- rightLeftAssoc is the indicator whether the operator isright or left associative, using the pyparsing-definedconstants C{opAssoc.RIGHT} and C{opAssoc.LEFT}.- parseAction is the parse action to be associated withexpressions matching this operator expression (theparse action tuple member may be omitted)- lpar - expression for matching left-parentheses (default=C{Suppress('(')})- rpar - expression for matching right-parentheses 
(default=C{Suppress(')')})Example::# simple example of four-function arithmetic with ints and variable namesinteger = pyparsing_common.signed_integervarname = pyparsing_common.identifierarith_expr = infixNotation(integer | varname,[('-', 1, opAssoc.RIGHT),(oneOf('* /'), 2, opAssoc.LEFT),(oneOf('+ -'), 2, opAssoc.LEFT),])arith_expr.runTests('''5+3*6(5+3)*6-2--11''', fullDump=False)prints::5+3*6[[5, '+', [3, '*', 6]]](5+3)*6[[[5, '+', 3], '*', 6]]-2--11[[['-', 2], '-', ['-', 11]]]"""ret = Forward()lastExpr = baseExpr | ( lpar + ret + rpar )for i,operDef in enumerate(opList):opExpr,arity,rightLeftAssoc,pa = (operDef + (None,))[:4]termName = "%s term" % opExpr if arity < 3 else "%s%s term" % opExprif arity == 3:if opExpr is None or len(opExpr) != 2:raise ValueError("if numterms=3, opExpr must be a tuple or list of two expressions")opExpr1, opExpr2 = opExprthisExpr = Forward().setName(termName)if rightLeftAssoc == opAssoc.LEFT:if arity == 1:matchExpr = FollowedBy(lastExpr + opExpr) + Group( lastExpr + OneOrMore( opExpr ) )elif arity == 2:if opExpr is not None:matchExpr = FollowedBy(lastExpr + opExpr + lastExpr) + Group( lastExpr + OneOrMore( opExpr + lastExpr ) )else:matchExpr = FollowedBy(lastExpr+lastExpr) + Group( lastExpr + OneOrMore(lastExpr) )elif arity == 3:matchExpr = FollowedBy(lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr) + \Group( lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr )else:raise ValueError("operator must be unary (1), binary (2), or ternary (3)")elif rightLeftAssoc == opAssoc.RIGHT:if arity == 1:# try to avoid LR with this extra testif not isinstance(opExpr, Optional):opExpr = Optional(opExpr)matchExpr = FollowedBy(opExpr.expr + thisExpr) + Group( opExpr + thisExpr )elif arity == 2:if opExpr is not None:matchExpr = FollowedBy(lastExpr + opExpr + thisExpr) + Group( lastExpr + OneOrMore( opExpr + thisExpr ) )else:matchExpr = FollowedBy(lastExpr + thisExpr) + Group( lastExpr + OneOrMore( thisExpr ) )elif arity == 3:matchExpr = 
FollowedBy(lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr) + \Group( lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr )else:raise ValueError("operator must be unary (1), binary (2), or ternary (3)")else:raise ValueError("operator must indicate right or left associativity")if pa:matchExpr.setParseAction( pa )thisExpr <<= ( matchExpr.setName(termName) | lastExpr )lastExpr = thisExprret <<= lastExprreturn retoperatorPrecedence = infixNotation"""(Deprecated) Former name of C{L{infixNotation}}, will be dropped in a future release."""dblQuotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"').setName("string enclosed in double quotes")sglQuotedString = Combine(Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("string enclosed in single quotes")quotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"'|Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("quotedString using single or double quotes")unicodeString = Combine(_L('u') + quotedString.copy()).setName("unicode string literal")def nestedExpr(opener="(", closer=")", content=None, ignoreExpr=quotedString.copy()):"""Helper method for defining nested lists enclosed in opening and closingdelimiters ("(" and ")" are the default).Parameters:- opener - opening character for a nested list (default=C{"("}); can also be a pyparsing expression- closer - closing character for a nested list (default=C{")"}); can also be a pyparsing expression- content - expression for items within the nested lists (default=C{None})- ignoreExpr - expression for ignoring opening and closing delimiters (default=C{quotedString})If an expression is not provided for the content argument, the nestedexpression will capture all whitespace-delimited content between delimitersas a list of separate values.Use the C{ignoreExpr} argument to define expressions that may containopening or closing characters that should not be treated as 
openingor closing characters for nesting, such as quotedString or a commentexpression. Specify multiple expressions using an C{L{Or}} or C{L{MatchFirst}}.The default is L{quotedString}, but if no expressions are to be ignored,then pass C{None} for this argument.Example::data_type = oneOf("void int short long char float double")decl_data_type = Combine(data_type + Optional(Word('*')))ident = Word(alphas+'_', alphanums+'_')number = pyparsing_common.numberarg = Group(decl_data_type + ident)LPAR,RPAR = map(Suppress, "()")code_body = nestedExpr('{', '}', ignoreExpr=(quotedString | cStyleComment))c_function = (decl_data_type("type")+ ident("name")+ LPAR + Optional(delimitedList(arg), [])("args") + RPAR+ code_body("body"))c_function.ignore(cStyleComment)source_code = '''int is_odd(int x) {return (x%2);}int dec_to_hex(char hchar) {if (hchar >= '0' && hchar <= '9') {return (ord(hchar)-ord('0'));} else {return (10+ord(hchar)-ord('A'));}}'''for func in c_function.searchString(source_code):print("%(name)s (%(type)s) args: %(args)s" % func)prints::is_odd (int) args: [['int', 'x']]dec_to_hex (int) args: [['char', 'hchar']]"""if opener == closer:raise ValueError("opening and closing strings cannot be the same")if content is None:if isinstance(opener,basestring) and isinstance(closer,basestring):if len(opener) == 1 and len(closer)==1:if ignoreExpr is not None:content = (Combine(OneOrMore(~ignoreExpr +CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS,exact=1))).setParseAction(lambda t:t[0].strip()))else:content = (empty.copy()+CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS).setParseAction(lambda t:t[0].strip()))else:if ignoreExpr is not None:content = (Combine(OneOrMore(~ignoreExpr +~Literal(opener) + ~Literal(closer) +CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1))).setParseAction(lambda t:t[0].strip()))else:content = (Combine(OneOrMore(~Literal(opener) + ~Literal(closer) +CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1))).setParseAction(lambda 
t:t[0].strip()))else:raise ValueError("opening and closing arguments must be strings if no content expression is given")ret = Forward()if ignoreExpr is not None:ret <<= Group( Suppress(opener) + ZeroOrMore( ignoreExpr | ret | content ) + Suppress(closer) )else:ret <<= Group( Suppress(opener) + ZeroOrMore( ret | content ) + Suppress(closer) )ret.setName('nested %s%s expression' % (opener,closer))return retdef indentedBlock(blockStatementExpr, indentStack, indent=True):"""Helper method for defining space-delimited indentation blocks, such asthose used to define block statements in Python source code.Parameters:- blockStatementExpr - expression defining syntax of statement thatis repeated within the indented block- indentStack - list created by caller to manage indentation stack(multiple statementWithIndentedBlock expressions within a single grammarshould share a common indentStack)- indent - boolean indicating whether block must be indented beyond thethe current level; set to False for block of left-most statements(default=C{True})A valid block must contain at least one C{blockStatement}.Example::data = '''def A(z):A1B = 100G = A2A2A3Bdef BB(a,b,c):BB1def BBA():bba1bba2bba3CDdef spam(x,y):def eggs(z):pass'''indentStack = [1]stmt = Forward()identifier = Word(alphas, alphanums)funcDecl = ("def" + identifier + Group( "(" + Optional( delimitedList(identifier) ) + ")" ) + ":")func_body = indentedBlock(stmt, indentStack)funcDef = Group( funcDecl + func_body )rvalue = Forward()funcCall = Group(identifier + "(" + Optional(delimitedList(rvalue)) + ")")rvalue << (funcCall | identifier | Word(nums))assignment = Group(identifier + "=" + rvalue)stmt << ( funcDef | assignment | identifier )module_body = OneOrMore(stmt)parseTree = module_body.parseString(data)parseTree.pprint()prints::[['def','A',['(', 'z', ')'],':',[['A1'], [['B', '=', '100']], [['G', '=', 'A2']], ['A2'], ['A3']]],'B',['def','BB',['(', 'a', 'b', 'c', ')'],':',[['BB1'], [['def', 'BBA', ['(', ')'], ':', [['bba1'], 
['bba2'], ['bba3']]]]]],'C','D',['def','spam',['(', 'x', 'y', ')'],':',[[['def', 'eggs', ['(', 'z', ')'], ':', [['pass']]]]]]]"""def checkPeerIndent(s,l,t):if l >= len(s): returncurCol = col(l,s)if curCol != indentStack[-1]:if curCol > indentStack[-1]:raise ParseFatalException(s,l,"illegal nesting")raise ParseException(s,l,"not a peer entry")def checkSubIndent(s,l,t):curCol = col(l,s)if curCol > indentStack[-1]:indentStack.append( curCol )else:raise ParseException(s,l,"not a subentry")def checkUnindent(s,l,t):if l >= len(s): returncurCol = col(l,s)if not(indentStack and curCol < indentStack[-1] and curCol <= indentStack[-2]):raise ParseException(s,l,"not an unindent")indentStack.pop()NL = OneOrMore(LineEnd().setWhitespaceChars("\t ").suppress())INDENT = (Empty() + Empty().setParseAction(checkSubIndent)).setName('INDENT')PEER = Empty().setParseAction(checkPeerIndent).setName('')UNDENT = Empty().setParseAction(checkUnindent).setName('UNINDENT')if indent:smExpr = Group( Optional(NL) +#~ FollowedBy(blockStatementExpr) +INDENT + (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) + UNDENT)else:smExpr = Group( Optional(NL) +(OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) )blockStatementExpr.ignore(_bslash + LineEnd())return smExpr.setName('indented block')alphas8bit = srange(r"[\0xc0-\0xd6\0xd8-\0xf6\0xf8-\0xff]")punc8bit = srange(r"[\0xa1-\0xbf\0xd7\0xf7]")anyOpenTag,anyCloseTag = makeHTMLTags(Word(alphas,alphanums+"_:").setName('any tag'))_htmlEntityMap = dict(zip("gt lt amp nbsp quot apos".split(),'><& "\''))commonHTMLEntity = Regex('&(?P<entity>' + '|'.join(_htmlEntityMap.keys()) +");").setName("common HTML entity")def replaceHTMLEntity(t):"""Helper parser action to replace common HTML entities with their special characters"""return _htmlEntityMap.get(t.entity)# it's easy to get these comment structures wrong - they're very common, so may as well make them availablecStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/').setName("C 
style comment")"Comment of the form C{/* ... */}"htmlComment = Regex(r"<!--[\s\S]*?-->").setName("HTML comment")"Comment of the form C{<!-- ... -->}"restOfLine = Regex(r".*").leaveWhitespace().setName("rest of line")dblSlashComment = Regex(r"//(?:\\\n|[^\n])*").setName("// comment")"Comment of the form C{// ... (to end of line)}"cppStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/'| dblSlashComment).setName("C++ style comment")"Comment of either form C{L{cStyleComment}} or C{L{dblSlashComment}}"javaStyleComment = cppStyleComment"Same as C{L{cppStyleComment}}"pythonStyleComment = Regex(r"#.*").setName("Python style comment")"Comment of the form C{# ... (to end of line)}"_commasepitem = Combine(OneOrMore(Word(printables, excludeChars=',') +Optional( Word(" \t") +~Literal(",") + ~LineEnd() ) ) ).streamline().setName("commaItem")commaSeparatedList = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("commaSeparatedList")"""(Deprecated) Predefined expression of 1 or more printable words or quoted strings, separated by commas.This expression is deprecated in favor of L{pyparsing_common.comma_separated_list}."""# some other useful expressions - using lower-case class name since we are really using this as a namespaceclass pyparsing_common:"""Here are some common low-level expressions that may be useful in jump-starting parser development:- numeric forms (L{integers<integer>}, L{reals<real>}, L{scientific notation<sci_real>})- common L{programming identifiers<identifier>}- network addresses (L{MAC<mac_address>}, L{IPv4<ipv4_address>}, L{IPv6<ipv6_address>})- ISO8601 L{dates<iso8601_date>} and L{datetime<iso8601_datetime>}- L{UUID<uuid>}- L{comma-separated list<comma_separated_list>}Parse actions:- C{L{convertToInteger}}- C{L{convertToFloat}}- C{L{convertToDate}}- C{L{convertToDatetime}}- C{L{stripHTMLTags}}- C{L{upcaseTokens}}- C{L{downcaseTokens}}Example::pyparsing_common.number.runTests('''# any int or real number, returned as 
the appropriate type100-100+1003.141596.02e231e-12''')pyparsing_common.fnumber.runTests('''# any int or real number, returned as float100-100+1003.141596.02e231e-12''')pyparsing_common.hex_integer.runTests('''# hex numbers100FF''')pyparsing_common.fraction.runTests('''# fractions1/2-3/4''')pyparsing_common.mixed_integer.runTests('''# mixed fractions11/2-3/41-3/4''')import uuidpyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID))pyparsing_common.uuid.runTests('''# uuid12345678-1234-5678-1234-567812345678''')prints::# any int or real number, returned as the appropriate type100[100]-100[-100]+100[100]3.14159[3.14159]6.02e23[6.02e+23]1e-12[1e-12]# any int or real number, returned as float100[100.0]-100[-100.0]+100[100.0]3.14159[3.14159]6.02e23[6.02e+23]1e-12[1e-12]# hex numbers100[256]FF[255]# fractions1/2[0.5]-3/4[-0.75]# mixed fractions1[1]1/2[0.5]-3/4[-0.75]1-3/4[1.75]# uuid12345678-1234-5678-1234-567812345678[UUID('12345678-1234-5678-1234-567812345678')]"""convertToInteger = tokenMap(int)"""Parse action for converting parsed integers to Python int"""convertToFloat = tokenMap(float)"""Parse action for converting parsed numbers to Python float"""integer = Word(nums).setName("integer").setParseAction(convertToInteger)"""expression that parses an unsigned integer, returns an int"""hex_integer = Word(hexnums).setName("hex integer").setParseAction(tokenMap(int,16))"""expression that parses a hexadecimal integer, returns an int"""signed_integer = Regex(r'[+-]?\d+').setName("signed integer").setParseAction(convertToInteger)"""expression that parses an integer with optional leading sign, returns an int"""fraction = (signed_integer().setParseAction(convertToFloat) + '/' + signed_integer().setParseAction(convertToFloat)).setName("fraction")"""fractional expression of an integer divided by an integer, returns a float"""fraction.addParseAction(lambda t: t[0]/t[-1])mixed_integer = (fraction | signed_integer + Optional(Optional('-').suppress() + fraction)).setName("fraction 
or mixed integer-fraction")"""mixed integer of the form 'integer - fraction', with optional leading integer, returns float"""mixed_integer.addParseAction(sum)real = Regex(r'[+-]?\d+\.\d*').setName("real number").setParseAction(convertToFloat)"""expression that parses a floating point number and returns a float"""sci_real = Regex(r'[+-]?\d+([eE][+-]?\d+|\.\d*([eE][+-]?\d+)?)').setName("real number with scientific notation").setParseAction(convertToFloat)"""expression that parses a floating point number with optional scientific notation and returns a float"""# streamlining this expression makes the docs nicer-lookingnumber = (sci_real | real | signed_integer).streamline()"""any numeric expression, returns the corresponding Python type"""fnumber = Regex(r'[+-]?\d+\.?\d*([eE][+-]?\d+)?').setName("fnumber").setParseAction(convertToFloat)"""any int or real number, returned as float"""identifier = Word(alphas+'_', alphanums+'_').setName("identifier")"""typical code identifier (leading alpha or '_', followed by 0 or more alphas, nums, or '_')"""ipv4_address = Regex(r'(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})(\.(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})){3}').setName("IPv4 address")"IPv4 address (C{0.0.0.0 - 255.255.255.255})"_ipv6_part = Regex(r'[0-9a-fA-F]{1,4}').setName("hex_integer")_full_ipv6_address = (_ipv6_part + (':' + _ipv6_part)*7).setName("full IPv6 address")_short_ipv6_address = (Optional(_ipv6_part + (':' + _ipv6_part)*(0,6)) + "::" + Optional(_ipv6_part + (':' + _ipv6_part)*(0,6))).setName("short IPv6 address")_short_ipv6_address.addCondition(lambda t: sum(1 for tt in t if pyparsing_common._ipv6_part.matches(tt)) < 8)_mixed_ipv6_address = ("::ffff:" + ipv4_address).setName("mixed IPv6 address")ipv6_address = Combine((_full_ipv6_address | _mixed_ipv6_address | _short_ipv6_address).setName("IPv6 address")).setName("IPv6 address")"IPv6 address (long, short, or mixed form)"mac_address = Regex(r'[0-9a-fA-F]{2}([:.-])[0-9a-fA-F]{2}(?:\1[0-9a-fA-F]{2}){4}').setName("MAC 
address")"MAC address xx:xx:xx:xx:xx (may also have '-' or '.' delimiters)"@staticmethoddef convertToDate(fmt="%Y-%m-%d"):"""Helper to create a parse action for converting parsed date string to Python datetime.dateParams -- fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%d"})Example::date_expr = pyparsing_common.iso8601_date.copy()date_expr.setParseAction(pyparsing_common.convertToDate())print(date_expr.parseString("1999-12-31"))prints::[datetime.date(1999, 12, 31)]"""def cvt_fn(s,l,t):try:return datetime.strptime(t[0], fmt).date()except ValueError as ve:raise ParseException(s, l, str(ve))return cvt_fn@staticmethoddef convertToDatetime(fmt="%Y-%m-%dT%H:%M:%S.%f"):"""Helper to create a parse action for converting parsed datetime string to Python datetime.datetimeParams -- fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%dT%H:%M:%S.%f"})Example::dt_expr = pyparsing_common.iso8601_datetime.copy()dt_expr.setParseAction(pyparsing_common.convertToDatetime())print(dt_expr.parseString("1999-12-31T23:59:59.999"))prints::[datetime.datetime(1999, 12, 31, 23, 59, 59, 999000)]"""def cvt_fn(s,l,t):try:return datetime.strptime(t[0], fmt)except ValueError as ve:raise ParseException(s, l, str(ve))return cvt_fniso8601_date = Regex(r'(?P<year>\d{4})(?:-(?P<month>\d\d)(?:-(?P<day>\d\d))?)?').setName("ISO8601 date")"ISO8601 date (C{yyyy-mm-dd})"iso8601_datetime = Regex(r'(?P<year>\d{4})-(?P<month>\d\d)-(?P<day>\d\d)[T ](?P<hour>\d\d):(?P<minute>\d\d)(:(?P<second>\d\d(\.\d*)?)?)?(?P<tz>Z|[+-]\d\d:?\d\d)?').setName("ISO8601 datetime")"ISO8601 datetime (C{yyyy-mm-ddThh:mm:ss.s(Z|+-00:00)}) - trailing seconds, milliseconds, and timezone optional; accepts separating C{'T'} or C{' '}"uuid = Regex(r'[0-9a-fA-F]{8}(-[0-9a-fA-F]{4}){3}-[0-9a-fA-F]{12}').setName("UUID")"UUID (C{xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx})"_html_stripper = anyOpenTag.suppress() | anyCloseTag.suppress()@staticmethoddef stripHTMLTags(s, l, tokens):"""Parse action to remove HTML tags 
from web page HTML sourceExample::# strip HTML links from normal texttext = '<td>More info at the <a href="http://pyparsing.wikispaces.com">pyparsing</a> wiki page</td>'td,td_end = makeHTMLTags("TD")table_text = td + SkipTo(td_end).setParseAction(pyparsing_common.stripHTMLTags)("body") + td_endprint(table_text.parseString(text).body) # -> 'More info at the pyparsing wiki page'"""return pyparsing_common._html_stripper.transformString(tokens[0])_commasepitem = Combine(OneOrMore(~Literal(",") + ~LineEnd() + Word(printables, excludeChars=',')+ Optional( White(" \t") ) ) ).streamline().setName("commaItem")comma_separated_list = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("comma separated list")"""Predefined expression of 1 or more printable words or quoted strings, separated by commas."""upcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).upper()))"""Parse action to convert tokens to upper case."""downcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).lower()))"""Parse action to convert tokens to lower case."""if __name__ == "__main__":selectToken = CaselessLiteral("select")fromToken = CaselessLiteral("from")ident = Word(alphas, alphanums + "_$")columnName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens)columnNameList = Group(delimitedList(columnName)).setName("columns")columnSpec = ('*' | columnNameList)tableName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens)tableNameList = Group(delimitedList(tableName)).setName("tables")simpleSQL = selectToken("command") + columnSpec("columns") + fromToken + tableNameList("tables")# demo runTests method, including embedded comments in test stringsimpleSQL.runTests("""# '*' as column list and dotted table nameselect * from SYS.XYZZY# caseless match on "SELECT", and casts back to "select"SELECT * from XYZZY, ABC# list of column names, and mixed case SELECT keywordSelect AA,BB,CC from Sys.dual# multiple tablesSelect A, B, C from Sys.dual, 
Table2# invalid SELECT keyword - should failXelect A, B, C from Sys.dual# incomplete command - should failSelect# invalid column name - should failSelect ^^^ frox Sys.dual""")pyparsing_common.number.runTests("""100-100+1003.141596.02e231e-12""")# any int or real number, returned as floatpyparsing_common.fnumber.runTests("""100-100+1003.141596.02e231e-12""")pyparsing_common.hex_integer.runTests("""100FF""")import uuidpyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID))pyparsing_common.uuid.runTests("""12345678-1234-5678-1234-567812345678""")
# -*- coding: utf-8 -*-
# Copyright (c) 2012 Giorgos Verigakis <verigak@gmail.com>
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

from . import Infinite
from .helpers import WriteMixin


class Spinner(WriteMixin, Infinite):
    """Endless in-place spinner.

    Each call to ``update()`` rewrites the current terminal cell with the
    next glyph from ``phases``, cycling forever as ``index`` grows.
    """
    message = ''
    phases = ('-', '\\', '|', '/')
    hide_cursor = True

    def update(self):
        # Select the glyph for the current tick and redraw in place.
        phase = self.phases[self.index % len(self.phases)]
        self.write(phase)


class PieSpinner(Spinner):
    """Spinner drawn as a rotating quarter-circle."""
    phases = [u'◷', u'◶', u'◵', u'◴']


class MoonSpinner(Spinner):
    """Spinner drawn as moon-phase half circles."""
    phases = [u'◑', u'◒', u'◐', u'◓']


class LineSpinner(Spinner):
    """Spinner drawn as a horizontal bar bouncing vertically."""
    phases = [u'⎺', u'⎻', u'⎼', u'⎽', u'⎼', u'⎻']
# Copyright (c) 2012 Giorgos Verigakis <verigak@gmail.com>## Permission to use, copy, modify, and distribute this software for any# purpose with or without fee is hereby granted, provided that the above# copyright notice and this permission notice appear in all copies.## THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.from __future__ import print_functionHIDE_CURSOR = '\x1b[?25l'SHOW_CURSOR = '\x1b[?25h'class WriteMixin(object):hide_cursor = Falsedef __init__(self, message=None, **kwargs):super(WriteMixin, self).__init__(**kwargs)self._width = 0if message:self.message = messageif self.file.isatty():if self.hide_cursor:print(HIDE_CURSOR, end='', file=self.file)print(self.message, end='', file=self.file)self.file.flush()def write(self, s):if self.file.isatty():b = '\b' * self._widthc = s.ljust(self._width)print(b + c, end='', file=self.file)self._width = max(self._width, len(s))self.file.flush()def finish(self):if self.file.isatty() and self.hide_cursor:print(SHOW_CURSOR, end='', file=self.file)class WritelnMixin(object):hide_cursor = Falsedef __init__(self, message=None, **kwargs):super(WritelnMixin, self).__init__(**kwargs)if message:self.message = messageif self.file.isatty() and self.hide_cursor:print(HIDE_CURSOR, end='', file=self.file)def clearln(self):if self.file.isatty():print('\r\x1b[K', end='', file=self.file)def writeln(self, line):if self.file.isatty():self.clearln()print(line, end='', file=self.file)self.file.flush()def finish(self):if self.file.isatty():print(file=self.file)if self.hide_cursor:print(SHOW_CURSOR, 
end='', file=self.file)from signal import signal, SIGINTfrom sys import exitclass SigIntMixin(object):"""Registers a signal handler that calls finish on SIGINT"""def __init__(self, *args, **kwargs):super(SigIntMixin, self).__init__(*args, **kwargs)signal(SIGINT, self._sigint_handler)def _sigint_handler(self, signum, frame):self.finish()exit(0)
# -*- coding: utf-8 -*-
# Copyright (c) 2012 Giorgos Verigakis <verigak@gmail.com>
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

from . import Infinite, Progress
from .helpers import WriteMixin


class Counter(WriteMixin, Infinite):
    """Displays the ever-increasing ``index`` as a plain number."""
    message = ''
    hide_cursor = True

    def update(self):
        self.write(str(self.index))


class Countdown(WriteMixin, Progress):
    """Displays how many steps are left until ``max`` is reached."""
    hide_cursor = True

    def update(self):
        self.write(str(self.remaining))


class Stack(WriteMixin, Progress):
    """Displays progress as a single block character that fills upward."""
    phases = (u' ', u'▁', u'▂', u'▃', u'▄', u'▅', u'▆', u'▇', u'█')
    hide_cursor = True

    def update(self):
        # Map progress in [0, 1] onto a phase index, clamped to the last glyph.
        nphases = len(self.phases)
        idx = min(nphases - 1, int(self.progress * nphases))
        self.write(self.phases[idx])


class Pie(Stack):
    """Stack variant drawn as a filling pie chart."""
    phases = (u'○', u'◔', u'◑', u'◕', u'●')
# -*- coding: utf-8 -*-
# Copyright (c) 2012 Giorgos Verigakis <verigak@gmail.com>
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

from . import Progress
from .helpers import WritelnMixin


class Bar(WritelnMixin, Progress):
    """Classic textual progress bar: ``message |####    | suffix``.

    ``message`` and ``suffix`` are %-format templates evaluated against the
    instance itself (via ``Progress.__getitem__``).
    """
    width = 32
    message = ''
    suffix = '%(index)d/%(max)d'
    bar_prefix = ' |'
    bar_suffix = '| '
    empty_fill = ' '
    fill = '#'
    hide_cursor = True

    def update(self):
        filled_length = int(self.width * self.progress)
        empty_length = self.width - filled_length

        message = self.message % self
        bar = self.fill * filled_length
        empty = self.empty_fill * empty_length
        suffix = self.suffix % self
        line = ''.join([message, self.bar_prefix, bar, empty, self.bar_suffix,
                        suffix])
        self.writeln(line)


class ChargingBar(Bar):
    """Bar styled like a battery charge indicator."""
    suffix = '%(percent)d%%'
    bar_prefix = ' '
    bar_suffix = ' '
    empty_fill = u'∙'
    fill = u'█'


class FillingSquaresBar(ChargingBar):
    """Charging bar drawn with square glyphs."""
    empty_fill = u'▢'
    fill = u'▣'


class FillingCirclesBar(ChargingBar):
    """Charging bar drawn with circle glyphs."""
    empty_fill = u'◯'
    fill = u'◉'


class IncrementalBar(Bar):
    """Bar whose leading cell fills gradually through sub-cell phases."""
    phases = (u' ', u'▏', u'▎', u'▍', u'▌', u'▋', u'▊', u'▉', u'█')

    def update(self):
        nphases = len(self.phases)
        # Progress measured in sub-cell units (nphases steps per cell).
        expanded_length = int(nphases * self.width * self.progress)
        filled_length = int(self.width * self.progress)
        empty_length = self.width - filled_length
        # Which partial-fill glyph the leading cell should show.
        phase = expanded_length - (filled_length * nphases)

        message = self.message % self
        bar = self.phases[-1] * filled_length
        current = self.phases[phase] if phase > 0 else ''
        empty = self.empty_fill * max(0, empty_length - len(current))
        suffix = self.suffix % self
        line = ''.join([message, self.bar_prefix, bar, current, empty,
                        self.bar_suffix, suffix])
        self.writeln(line)


class ShadyBar(IncrementalBar):
    """Incremental bar drawn with shading-density glyphs."""
    phases = (u' ', u'░', u'▒', u'▓', u'█')
# Copyright (c) 2012 Giorgos Verigakis <verigak@gmail.com>## Permission to use, copy, modify, and distribute this software for any# purpose with or without fee is hereby granted, provided that the above# copyright notice and this permission notice appear in all copies.## THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.from __future__ import divisionfrom collections import dequefrom datetime import timedeltafrom math import ceilfrom sys import stderrfrom time import time__version__ = '1.2'class Infinite(object):file = stderrsma_window = 10def __init__(self, *args, **kwargs):self.index = 0self.start_ts = time()self._ts = self.start_tsself._dt = deque(maxlen=self.sma_window)for key, val in kwargs.items():setattr(self, key, val)def __getitem__(self, key):if key.startswith('_'):return Nonereturn getattr(self, key, None)@propertydef avg(self):return sum(self._dt) / len(self._dt) if self._dt else 0@propertydef elapsed(self):return int(time() - self.start_ts)@propertydef elapsed_td(self):return timedelta(seconds=self.elapsed)def update(self):passdef start(self):passdef finish(self):passdef next(self, n=1):if n > 0:now = time()dt = (now - self._ts) / nself._dt.append(dt)self._ts = nowself.index = self.index + nself.update()def iter(self, it):for x in it:yield xself.next()self.finish()class Progress(Infinite):def __init__(self, *args, **kwargs):super(Progress, self).__init__(*args, **kwargs)self.max = kwargs.get('max', 100)@propertydef eta(self):return int(ceil(self.avg * self.remaining))@propertydef eta_td(self):return 
timedelta(seconds=self.eta)@propertydef percent(self):return self.progress * 100@propertydef progress(self):return min(1, self.index / self.max)@propertydef remaining(self):return max(self.max - self.index, 0)def start(self):self.update()def goto(self, index):incr = index - self.indexself.next(incr)def iter(self, it):try:self.max = len(it)except TypeError:passfor x in it:yield xself.next()self.finish()
# coding: utf-8"""Package resource API--------------------A resource is a logical file contained within a package, or a logicalsubdirectory thereof. The package resource API expects resource namesto have their path parts separated with ``/``, *not* whatever the localpath separator is. Do not use os.path operations to manipulate resourcenames being passed into the API.The package resource API is designed to work with normal filesystem packages,.egg files, and unpacked .egg files. It can also work in a limited way with.zip files and with custom PEP 302 loaders that support the ``get_data()``method."""from __future__ import absolute_importimport sysimport osimport ioimport timeimport reimport typesimport zipfileimport zipimportimport warningsimport statimport functoolsimport pkgutilimport operatorimport platformimport collectionsimport plistlibimport email.parserimport tempfileimport textwrapimport itertoolsfrom pkgutil import get_importertry:import _impexcept ImportError:# Python 3.2 compatibilityimport imp as _impfrom pip._vendor import sixfrom pip._vendor.six.moves import urllib, map, filter# capture these to bypass sandboxingfrom os import utimetry:from os import mkdir, rename, unlinkWRITE_SUPPORT = Trueexcept ImportError:# no write support, probably under GAEWRITE_SUPPORT = Falsefrom os import open as os_openfrom os.path import isdir, splittry:import importlib.machinery as importlib_machinery# access attribute to force import under delayed import mechanisms.importlib_machinery.__name__except ImportError:importlib_machinery = Nonefrom pip._vendor import appdirsfrom pip._vendor import packaging__import__('pip._vendor.packaging.version')__import__('pip._vendor.packaging.specifiers')__import__('pip._vendor.packaging.requirements')__import__('pip._vendor.packaging.markers')if (3, 0) < sys.version_info < (3, 3):msg = ("Support for Python 3.0-3.2 has been dropped. 
Future versions ""will fail here.")warnings.warn(msg)# declare some globals that will be defined later to# satisfy the linters.require = Noneworking_set = Noneclass PEP440Warning(RuntimeWarning):"""Used when there is an issue with a version or specifier not complying withPEP 440."""class _SetuptoolsVersionMixin(object):def __hash__(self):return super(_SetuptoolsVersionMixin, self).__hash__()def __lt__(self, other):if isinstance(other, tuple):return tuple(self) < otherelse:return super(_SetuptoolsVersionMixin, self).__lt__(other)def __le__(self, other):if isinstance(other, tuple):return tuple(self) <= otherelse:return super(_SetuptoolsVersionMixin, self).__le__(other)def __eq__(self, other):if isinstance(other, tuple):return tuple(self) == otherelse:return super(_SetuptoolsVersionMixin, self).__eq__(other)def __ge__(self, other):if isinstance(other, tuple):return tuple(self) >= otherelse:return super(_SetuptoolsVersionMixin, self).__ge__(other)def __gt__(self, other):if isinstance(other, tuple):return tuple(self) > otherelse:return super(_SetuptoolsVersionMixin, self).__gt__(other)def __ne__(self, other):if isinstance(other, tuple):return tuple(self) != otherelse:return super(_SetuptoolsVersionMixin, self).__ne__(other)def __getitem__(self, key):return tuple(self)[key]def __iter__(self):component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE)replace = {'pre': 'c','preview': 'c','-': 'final-','rc': 'c','dev': '@',}.getdef _parse_version_parts(s):for part in component_re.split(s):part = replace(part, part)if not part or part == '.':continueif part[:1] in '0123456789':# pad for numeric comparisonyield part.zfill(8)else:yield '*' + part# ensure that alpha/beta/candidate are before finalyield '*final'def old_parse_version(s):parts = []for part in _parse_version_parts(s.lower()):if part.startswith('*'):# remove '-' before a prerelease tagif part < '*final':while parts and parts[-1] == '*final-':parts.pop()# remove trailing zeros from each series of numeric 
partswhile parts and parts[-1] == '00000000':parts.pop()parts.append(part)return tuple(parts)# Warn for use of this functionwarnings.warn("You have iterated over the result of ""pkg_resources.parse_version. This is a legacy behavior which is ""inconsistent with the new version class introduced in setuptools ""8.0. In most cases, conversion to a tuple is unnecessary. For ""comparison of versions, sort the Version instances directly. If ""you have another use case requiring the tuple, please file a ""bug with the setuptools project describing that need.",RuntimeWarning,stacklevel=1,)for part in old_parse_version(str(self)):yield partclass SetuptoolsVersion(_SetuptoolsVersionMixin, packaging.version.Version):passclass SetuptoolsLegacyVersion(_SetuptoolsVersionMixin,packaging.version.LegacyVersion):passdef parse_version(v):try:return SetuptoolsVersion(v)except packaging.version.InvalidVersion:return SetuptoolsLegacyVersion(v)_state_vars = {}def _declare_state(vartype, **kw):globals().update(kw)_state_vars.update(dict.fromkeys(kw, vartype))def __getstate__():state = {}g = globals()for k, v in _state_vars.items():state[k] = g['_sget_' + v](g[k])return statedef __setstate__(state):g = globals()for k, v in state.items():g['_sset_' + _state_vars[k]](k, g[k], v)return statedef _sget_dict(val):return val.copy()def _sset_dict(key, ob, state):ob.clear()ob.update(state)def _sget_object(val):return val.__getstate__()def _sset_object(key, ob, state):ob.__setstate__(state)_sget_none = _sset_none = lambda *args: Nonedef get_supported_platform():"""Return this platform's maximum compatible version.distutils.util.get_platform() normally reports the minimum versionof Mac OS X that would be required to *use* extensions produced bydistutils. But what we want when checking compatibility is to know theversion of Mac OS X that we are *running*. 
To allow usage of packages thatexplicitly require a newer version of Mac OS X, we must also know thecurrent version of the OS.If this condition occurs for any other platform with a version in itsplatform strings, this function should be extended accordingly."""plat = get_build_platform()m = macosVersionString.match(plat)if m is not None and sys.platform == "darwin":try:plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3))except ValueError:# not Mac OS Xpassreturn plat__all__ = [# Basic resource access and distribution/entry point discovery'require', 'run_script', 'get_provider', 'get_distribution','load_entry_point', 'get_entry_map', 'get_entry_info','iter_entry_points','resource_string', 'resource_stream', 'resource_filename','resource_listdir', 'resource_exists', 'resource_isdir',# Environmental control'declare_namespace', 'working_set', 'add_activation_listener','find_distributions', 'set_extraction_path', 'cleanup_resources','get_default_cache',# Primary implementation classes'Environment', 'WorkingSet', 'ResourceManager','Distribution', 'Requirement', 'EntryPoint',# Exceptions'ResolutionError', 'VersionConflict', 'DistributionNotFound','UnknownExtra', 'ExtractionError',# Warnings'PEP440Warning',# Parsing functions and string utilities'parse_requirements', 'parse_version', 'safe_name', 'safe_version','get_platform', 'compatible_platforms', 'yield_lines', 'split_sections','safe_extra', 'to_filename', 'invalid_marker', 'evaluate_marker',# filesystem utilities'ensure_directory', 'normalize_path',# Distribution "precedence" constants'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST',# "Provider" interfaces, implementations, and registration/lookup APIs'IMetadataProvider', 'IResourceProvider', 'FileMetadata','PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider','NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider','register_finder', 'register_namespace_handler', 
'register_loader_type','fixup_namespace_packages', 'get_importer',# Deprecated/backward compatibility only'run_main', 'AvailableDistributions',]class ResolutionError(Exception):"""Abstract base for dependency resolution errors"""def __repr__(self):return self.__class__.__name__ + repr(self.args)class VersionConflict(ResolutionError):"""An already-installed version conflicts with the requested version.Should be initialized with the installed Distribution and the requestedRequirement."""_template = "{self.dist} is installed but {self.req} is required"@propertydef dist(self):return self.args[0]@propertydef req(self):return self.args[1]def report(self):return self._template.format(**locals())def with_context(self, required_by):"""If required_by is non-empty, return a version of self that is aContextualVersionConflict."""if not required_by:return selfargs = self.args + (required_by,)return ContextualVersionConflict(*args)class ContextualVersionConflict(VersionConflict):"""A VersionConflict that accepts a third parameter, the set of therequirements that required the installed Distribution."""_template = VersionConflict._template + ' by {self.required_by}'@propertydef required_by(self):return self.args[2]class DistributionNotFound(ResolutionError):"""A requested distribution was not found"""_template = ("The '{self.req}' distribution was not found ""and is required by {self.requirers_str}")@propertydef req(self):return self.args[0]@propertydef requirers(self):return self.args[1]@propertydef requirers_str(self):if not self.requirers:return 'the application'return ', '.join(self.requirers)def report(self):return self._template.format(**locals())def __str__(self):return self.report()class UnknownExtra(ResolutionError):"""Distribution doesn't have an "extra feature" of the given name"""_provider_factories = {}PY_MAJOR = sys.version[:3]EGG_DIST = 3BINARY_DIST = 2SOURCE_DIST = 1CHECKOUT_DIST = 0DEVELOP_DIST = -1def register_loader_type(loader_type, provider_factory):"""Register 
`provider_factory` to make providers for `loader_type``loader_type` is the type or class of a PEP 302 ``module.__loader__``,and `provider_factory` is a function that, passed a *module* object,returns an ``IResourceProvider`` for that module."""_provider_factories[loader_type] = provider_factorydef get_provider(moduleOrReq):"""Return an IResourceProvider for the named module or requirement"""if isinstance(moduleOrReq, Requirement):return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]try:module = sys.modules[moduleOrReq]except KeyError:__import__(moduleOrReq)module = sys.modules[moduleOrReq]loader = getattr(module, '__loader__', None)return _find_adapter(_provider_factories, loader)(module)def _macosx_vers(_cache=[]):if not _cache:version = platform.mac_ver()[0]# fallback for MacPortsif version == '':plist = '/System/Library/CoreServices/SystemVersion.plist'if os.path.exists(plist):if hasattr(plistlib, 'readPlist'):plist_content = plistlib.readPlist(plist)if 'ProductVersion' in plist_content:version = plist_content['ProductVersion']_cache.append(version.split('.'))return _cache[0]def _macosx_arch(machine):return {'PowerPC': 'ppc', 'Power_Macintosh': 'ppc'}.get(machine, machine)def get_build_platform():"""Return this platform's string for platform-specific distributionsXXX Currently this is the same as ``distutils.util.get_platform()``, but itneeds some hacks for Linux and Mac OS X."""try:# Python 2.7 or >=3.2from sysconfig import get_platformexcept ImportError:from distutils.util import get_platformplat = get_platform()if sys.platform == "darwin" and not plat.startswith('macosx-'):try:version = _macosx_vers()machine = os.uname()[4].replace(" ", "_")return "macosx-%d.%d-%s" % (int(version[0]), int(version[1]),_macosx_arch(machine))except ValueError:# if someone is running a non-Mac darwin system, this will fall# through to the default implementationpassreturn platmacosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")darwinVersionString = 
re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")# XXX backward compatget_platform = get_build_platformdef compatible_platforms(provided, required):"""Can code for the `provided` platform run on the `required` platform?Returns true if either platform is ``None``, or the platforms are equal.XXX Needs compatibility checks for Linux and other unixy OSes."""if provided is None or required is None or provided == required:# easy casereturn True# Mac OS X special casesreqMac = macosVersionString.match(required)if reqMac:provMac = macosVersionString.match(provided)# is this a Mac package?if not provMac:# this is backwards compatibility for packages built before# setuptools 0.6. All packages built after this point will# use the new macosx designation.provDarwin = darwinVersionString.match(provided)if provDarwin:dversion = int(provDarwin.group(1))macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2))if dversion == 7 and macosversion >= "10.3" or \dversion == 8 and macosversion >= "10.4":return True# egg isn't macosx or legacy darwinreturn False# are they the same major version and machine type?if provMac.group(1) != reqMac.group(1) or \provMac.group(3) != reqMac.group(3):return False# is the required OS major update >= the provided one?if int(provMac.group(2)) > int(reqMac.group(2)):return Falsereturn True# XXX Linux and other platforms' special cases should go herereturn Falsedef run_script(dist_spec, script_name):"""Locate distribution `dist_spec` and run its `script_name` script"""ns = sys._getframe(1).f_globalsname = ns['__name__']ns.clear()ns['__name__'] = namerequire(dist_spec)[0].run_script(script_name, ns)# backward compatibilityrun_main = run_scriptdef get_distribution(dist):"""Return a current distribution object for a Requirement or string"""if isinstance(dist, six.string_types):dist = Requirement.parse(dist)if isinstance(dist, Requirement):dist = get_provider(dist)if not isinstance(dist, Distribution):raise TypeError("Expected string, Requirement, or 
Distribution", dist)return distdef load_entry_point(dist, group, name):"""Return `name` entry point of `group` for `dist` or raise ImportError"""return get_distribution(dist).load_entry_point(group, name)def get_entry_map(dist, group=None):"""Return the entry point map for `group`, or the full entry map"""return get_distribution(dist).get_entry_map(group)def get_entry_info(dist, group, name):"""Return the EntryPoint object for `group`+`name`, or ``None``"""return get_distribution(dist).get_entry_info(group, name)class IMetadataProvider:def has_metadata(name):"""Does the package's distribution contain the named metadata?"""def get_metadata(name):"""The named metadata resource as a string"""def get_metadata_lines(name):"""Yield named metadata resource as list of non-blank non-comment linesLeading and trailing whitespace is stripped from each line, and lineswith ``#`` as the first non-blank character are omitted."""def metadata_isdir(name):"""Is the named metadata a directory? (like ``os.path.isdir()``)"""def metadata_listdir(name):"""List of metadata names in the directory (like ``os.listdir()``)"""def run_script(script_name, namespace):"""Execute the named script in the supplied namespace dictionary"""class IResourceProvider(IMetadataProvider):"""An object that provides access to package resources"""def get_resource_filename(manager, resource_name):"""Return a true filesystem path for `resource_name``manager` must be an ``IResourceManager``"""def get_resource_stream(manager, resource_name):"""Return a readable file-like object for `resource_name``manager` must be an ``IResourceManager``"""def get_resource_string(manager, resource_name):"""Return a string containing the contents of `resource_name``manager` must be an ``IResourceManager``"""def has_resource(resource_name):"""Does the package contain the named resource?"""def resource_isdir(resource_name):"""Is the named resource a directory? 
(like ``os.path.isdir()``)"""def resource_listdir(resource_name):"""List of resource names in the directory (like ``os.listdir()``)"""class WorkingSet(object):"""A collection of active distributions on sys.path (or a similar list)"""def __init__(self, entries=None):"""Create working set from list of path entries (default=sys.path)"""self.entries = []self.entry_keys = {}self.by_key = {}self.callbacks = []if entries is None:entries = sys.pathfor entry in entries:self.add_entry(entry)@classmethoddef _build_master(cls):"""Prepare the master working set."""ws = cls()try:from __main__ import __requires__except ImportError:# The main program does not list any requirementsreturn ws# ensure the requirements are mettry:ws.require(__requires__)except VersionConflict:return cls._build_from_requirements(__requires__)return ws@classmethoddef _build_from_requirements(cls, req_spec):"""Build a working set from a requirement spec. Rewrites sys.path."""# try it without defaults already on sys.path# by starting with an empty pathws = cls([])reqs = parse_requirements(req_spec)dists = ws.resolve(reqs, Environment())for dist in dists:ws.add(dist)# add any missing entries from sys.pathfor entry in sys.path:if entry not in ws.entries:ws.add_entry(entry)# then copy back to sys.pathsys.path[:] = ws.entriesreturn wsdef add_entry(self, entry):"""Add a path item to ``.entries``, finding any distributions on it``find_distributions(entry, True)`` is used to find distributionscorresponding to the path entry, and they are added. 
`entry` isalways appended to ``.entries``, even if it is already present.(This is because ``sys.path`` can contain the same value more thanonce, and the ``.entries`` of the ``sys.path`` WorkingSet should alwaysequal ``sys.path``.)"""self.entry_keys.setdefault(entry, [])self.entries.append(entry)for dist in find_distributions(entry, True):self.add(dist, entry, False)def __contains__(self, dist):"""True if `dist` is the active distribution for its project"""return self.by_key.get(dist.key) == distdef find(self, req):"""Find a distribution matching requirement `req`If there is an active distribution for the requested project, thisreturns it as long as it meets the version requirement specified by`req`. But, if there is an active distribution for the project and itdoes *not* meet the `req` requirement, ``VersionConflict`` is raised.If there is no active distribution for the requested project, ``None``is returned."""dist = self.by_key.get(req.key)if dist is not None and dist not in req:# XXX add more inforaise VersionConflict(dist, req)return distdef iter_entry_points(self, group, name=None):"""Yield entry point objects from `group` matching `name`If `name` is None, yields all entry points in `group` from alldistributions in the working set, otherwise only ones matchingboth `group` and `name` are yielded (in distribution order)."""for dist in self:entries = dist.get_entry_map(group)if name is None:for ep in entries.values():yield epelif name in entries:yield entries[name]def run_script(self, requires, script_name):"""Locate distribution for `requires` and run `script_name` script"""ns = sys._getframe(1).f_globalsname = ns['__name__']ns.clear()ns['__name__'] = nameself.require(requires)[0].run_script(script_name, ns)def __iter__(self):"""Yield distributions for non-duplicate projects in the working setThe yield order is the order in which the items' path entries wereadded to the working set."""seen = {}for item in self.entries:if item not in self.entry_keys:# workaround 
a cache issuecontinuefor key in self.entry_keys[item]:if key not in seen:seen[key] = 1yield self.by_key[key]def add(self, dist, entry=None, insert=True, replace=False):"""Add `dist` to working set, associated with `entry`If `entry` is unspecified, it defaults to the ``.location`` of `dist`.On exit from this routine, `entry` is added to the end of the workingset's ``.entries`` (if it wasn't already present).`dist` is only added to the working set if it's for a project thatdoesn't already have a distribution in the set, unless `replace=True`.If it's added, any callbacks registered with the ``subscribe()`` methodwill be called."""if insert:dist.insert_on(self.entries, entry, replace=replace)if entry is None:entry = dist.locationkeys = self.entry_keys.setdefault(entry, [])keys2 = self.entry_keys.setdefault(dist.location, [])if not replace and dist.key in self.by_key:# ignore hidden distrosreturnself.by_key[dist.key] = distif dist.key not in keys:keys.append(dist.key)if dist.key not in keys2:keys2.append(dist.key)self._added_new(dist)def resolve(self, requirements, env=None, installer=None,replace_conflicting=False):"""List all distributions needed to (recursively) meet `requirements``requirements` must be a sequence of ``Requirement`` objects. `env`,if supplied, should be an ``Environment`` instance. Ifnot supplied, it defaults to all distributions available within anyentry or distribution in the working set. `installer`, if supplied,will be invoked with each requirement that cannot be met by analready-installed distribution; it should return a ``Distribution`` or``None``.Unless `replace_conflicting=True`, raises a VersionConflict exception ifany requirements are found on the path that have the correct name butthe wrong version. 
Otherwise, if an `installer` is supplied it will beinvoked to obtain the correct version of the requirement and activateit."""# set up the stackrequirements = list(requirements)[::-1]# set of processed requirementsprocessed = {}# key -> distbest = {}to_activate = []req_extras = _ReqExtras()# Mapping of requirement to set of distributions that required it;# useful for reporting info about conflicts.required_by = collections.defaultdict(set)while requirements:# process dependencies breadth-firstreq = requirements.pop(0)if req in processed:# Ignore cyclic or redundant dependenciescontinueif not req_extras.markers_pass(req):continuedist = best.get(req.key)if dist is None:# Find the best distribution and add it to the mapdist = self.by_key.get(req.key)if dist is None or (dist not in req and replace_conflicting):ws = selfif env is None:if dist is None:env = Environment(self.entries)else:# Use an empty environment and workingset to avoid# any further conflicts with the conflicting# distributionenv = Environment([])ws = WorkingSet([])dist = best[req.key] = env.best_match(req, ws, installer)if dist is None:requirers = required_by.get(req, None)raise DistributionNotFound(req, requirers)to_activate.append(dist)if dist not in req:# Oops, the "best" so far conflicts with a dependencydependent_req = required_by[req]raise VersionConflict(dist, req).with_context(dependent_req)# push the new requirements onto the stacknew_requirements = dist.requires(req.extras)[::-1]requirements.extend(new_requirements)# Register the new requirements needed by reqfor new_requirement in new_requirements:required_by[new_requirement].add(req.project_name)req_extras[new_requirement] = req.extrasprocessed[req] = True# return list of distros to activatereturn to_activatedef find_plugins(self, plugin_env, full_env=None, installer=None,fallback=True):"""Find all activatable distributions in `plugin_env`Example usage::distributions, errors = working_set.find_plugins(Environment(plugin_dirlist))# add 
plugins+libs to sys.pathmap(working_set.add, distributions)# display errorsprint('Could not load', errors)The `plugin_env` should be an ``Environment`` instance that containsonly distributions that are in the project's "plugin directory" ordirectories. The `full_env`, if supplied, should be an ``Environment``contains all currently-available distributions. If `full_env` is notsupplied, one is created automatically from the ``WorkingSet`` thismethod is called on, which will typically mean that every directory on``sys.path`` will be scanned for distributions.`installer` is a standard installer callback as used by the``resolve()`` method. The `fallback` flag indicates whether we shouldattempt to resolve older versions of a plugin if the newest versioncannot be resolved.This method returns a 2-tuple: (`distributions`, `error_info`), where`distributions` is a list of the distributions found in `plugin_env`that were loadable, along with any other distributions that are neededto resolve their dependencies. `error_info` is a dictionary mappingunloadable plugin distributions to an exception instance describing theerror that occurred. 
Usually this will be a ``DistributionNotFound`` or``VersionConflict`` instance."""plugin_projects = list(plugin_env)# scan project names in alphabetic orderplugin_projects.sort()error_info = {}distributions = {}if full_env is None:env = Environment(self.entries)env += plugin_envelse:env = full_env + plugin_envshadow_set = self.__class__([])# put all our entries in shadow_setlist(map(shadow_set.add, self))for project_name in plugin_projects:for dist in plugin_env[project_name]:req = [dist.as_requirement()]try:resolvees = shadow_set.resolve(req, env, installer)except ResolutionError as v:# save error infoerror_info[dist] = vif fallback:# try the next older version of projectcontinueelse:# give up on this project, keep goingbreakelse:list(map(shadow_set.add, resolvees))distributions.update(dict.fromkeys(resolvees))# success, no need to try any more versions of this projectbreakdistributions = list(distributions)distributions.sort()return distributions, error_infodef require(self, *requirements):"""Ensure that distributions matching `requirements` are activated`requirements` must be a string or a (possibly-nested) sequencethereof, specifying the distributions and versions required. 
Thereturn value is a sequence of the distributions that needed to beactivated to fulfill the requirements; all relevant distributions areincluded, even if they were already activated in this working set."""needed = self.resolve(parse_requirements(requirements))for dist in needed:self.add(dist)return neededdef subscribe(self, callback, existing=True):"""Invoke `callback` for all distributionsIf `existing=True` (default),call on all existing ones, as well."""if callback in self.callbacks:returnself.callbacks.append(callback)if not existing:returnfor dist in self:callback(dist)def _added_new(self, dist):for callback in self.callbacks:callback(dist)def __getstate__(self):return (self.entries[:], self.entry_keys.copy(), self.by_key.copy(),self.callbacks[:])def __setstate__(self, e_k_b_c):entries, keys, by_key, callbacks = e_k_b_cself.entries = entries[:]self.entry_keys = keys.copy()self.by_key = by_key.copy()self.callbacks = callbacks[:]class _ReqExtras(dict):"""Map each requirement to the extras that demanded it."""def markers_pass(self, req):"""Evaluate markers for req against each extra thatdemanded it.Return False if the req has a marker and failsevaluation. Otherwise, return True."""extra_evals = (req.marker.evaluate({'extra': extra})for extra in self.get(req, ()) + (None,))return not req.marker or any(extra_evals)class Environment(object):"""Searchable snapshot of distributions on a search path"""def __init__(self, search_path=None, platform=get_supported_platform(),python=PY_MAJOR):"""Snapshot distributions available on a search pathAny distributions found on `search_path` are added to the environment.`search_path` should be a sequence of ``sys.path`` items. If notsupplied, ``sys.path`` is used.`platform` is an optional string specifying the name of the platformthat platform-specific distributions must be compatible with. Ifunspecified, it defaults to the current platform. `python` is anoptional string naming the desired version of Python (e.g. 
``'3.3'``);it defaults to the current version.You may explicitly set `platform` (and/or `python`) to ``None`` if youwish to map *all* distributions, not just those compatible with therunning platform or Python version."""self._distmap = {}self.platform = platformself.python = pythonself.scan(search_path)def can_add(self, dist):"""Is distribution `dist` acceptable for this environment?The distribution must match the platform and python versionrequirements specified when this environment was created, or Falseis returned."""return (self.python is None or dist.py_version is Noneor dist.py_version == self.python) \and compatible_platforms(dist.platform, self.platform)def remove(self, dist):"""Remove `dist` from the environment"""self._distmap[dist.key].remove(dist)def scan(self, search_path=None):"""Scan `search_path` for distributions usable in this environmentAny distributions found are added to the environment.`search_path` should be a sequence of ``sys.path`` items. If notsupplied, ``sys.path`` is used. 
Only distributions conforming tothe platform/python version defined at initialization are added."""if search_path is None:search_path = sys.pathfor item in search_path:for dist in find_distributions(item):self.add(dist)def __getitem__(self, project_name):"""Return a newest-to-oldest list of distributions for `project_name`Uses case-insensitive `project_name` comparison, assuming all theproject's distributions use their project's name converted to alllowercase as their key."""distribution_key = project_name.lower()return self._distmap.get(distribution_key, [])def add(self, dist):"""Add `dist` if we ``can_add()`` it and it has not already been added"""if self.can_add(dist) and dist.has_version():dists = self._distmap.setdefault(dist.key, [])if dist not in dists:dists.append(dist)dists.sort(key=operator.attrgetter('hashcmp'), reverse=True)def best_match(self, req, working_set, installer=None):"""Find distribution best matching `req` and usable on `working_set`This calls the ``find(req)`` method of the `working_set` to see if asuitable distribution is already active. (This may raise``VersionConflict`` if an unsuitable version of the project is alreadyactive in the specified `working_set`.) If a suitable distributionisn't active, this method returns the newest distribution in theenvironment that meets the ``Requirement`` in `req`. If no suitabledistribution is found, and `installer` is supplied, then the result ofcalling the environment's ``obtain(req, installer)`` method will bereturned."""dist = working_set.find(req)if dist is not None:return distfor dist in self[req.key]:if dist in req:return dist# try to download/installreturn self.obtain(req, installer)def obtain(self, requirement, installer=None):"""Obtain a distribution matching `requirement` (e.g. via download)Obtain a distro that matches requirement (e.g. via download). 
In thebase ``Environment`` class, this routine just returns``installer(requirement)``, unless `installer` is None, in which caseNone is returned instead. This method is a hook that allows subclassesto attempt other ways of obtaining a distribution before falling backto the `installer` argument."""if installer is not None:return installer(requirement)def __iter__(self):"""Yield the unique project names of the available distributions"""for key in self._distmap.keys():if self[key]:yield keydef __iadd__(self, other):"""In-place addition of a distribution or environment"""if isinstance(other, Distribution):self.add(other)elif isinstance(other, Environment):for project in other:for dist in other[project]:self.add(dist)else:raise TypeError("Can't add %r to environment" % (other,))return selfdef __add__(self, other):"""Add an environment or distribution to an environment"""new = self.__class__([], platform=None, python=None)for env in self, other:new += envreturn new# XXX backward compatibilityAvailableDistributions = Environmentclass ExtractionError(RuntimeError):"""An error occurred extracting a resourceThe following attributes are available from instances of this exception:managerThe resource manager that raised this exceptioncache_pathThe base directory for resource extractionoriginal_errorThe exception instance that caused extraction to fail"""class ResourceManager:"""Manage resource extraction and packages"""extraction_path = Nonedef __init__(self):self.cached_files = {}def resource_exists(self, package_or_requirement, resource_name):"""Does the named resource exist?"""return get_provider(package_or_requirement).has_resource(resource_name)def resource_isdir(self, package_or_requirement, resource_name):"""Is the named resource an existing directory?"""return get_provider(package_or_requirement).resource_isdir(resource_name)def resource_filename(self, package_or_requirement, resource_name):"""Return a true filesystem path for specified resource"""return 
def get_cache_path(self, archive_name, names=()):
    """Return absolute location in cache for `archive_name` and `names`

    The parent directory of the resulting path will be created if it does
    not already exist.  `archive_name` should be the base filename of the
    enclosing egg (which may not be the name of the enclosing zipfile!),
    including its ".egg" extension.  `names`, if provided, should be a
    sequence of path name parts "under" the egg's extraction location.

    This method should only be called by resource providers that need to
    obtain an extraction location, and only for names they intend to
    extract, as it tracks the generated names for possible cleanup later.
    """
    extract_path = self.extraction_path or get_default_cache()
    target_path = os.path.join(extract_path, archive_name + '-tmp', *names)
    try:
        _bypass_ensure_directory(target_path)
    except Exception:
        # Report a user-friendly extraction error.  Note this was a bare
        # ``except:`` before, which also swallowed SystemExit and
        # KeyboardInterrupt; those now propagate normally.
        self.extraction_error()

    self._warn_unsafe_extraction_path(extract_path)

    # remember what we handed out so cleanup_resources() can remove it
    self.cached_files[target_path] = 1
    return target_path
They must NOT call it on resourcesthat are already in the filesystem.`tempname` is the current (temporary) name of the file, and `filename`is the name it will be renamed to by the caller after this routinereturns."""if os.name == 'posix':# Make the resource executablemode = ((os.stat(tempname).st_mode) | 0o555) & 0o7777os.chmod(tempname, mode)def set_extraction_path(self, path):"""Set the base path where resources will be extracted to, if needed.If you do not call this routine before any extractions take place, thepath defaults to the return value of ``get_default_cache()``. (Whichis based on the ``PYTHON_EGG_CACHE`` environment variable, with variousplatform-specific fallbacks. See that routine's documentation for moredetails.)Resources are extracted to subdirectories of this path based uponinformation given by the ``IResourceProvider``. You may set this to atemporary directory, but then you must call ``cleanup_resources()`` todelete the extracted files when done. There is no guarantee that``cleanup_resources()`` will be able to remove all extracted files.(Note: you may not change the extraction path for a given resourcemanager once resources have been extracted, unless you first call``cleanup_resources()``.)"""if self.cached_files:raise ValueError("Can't change extraction path, files already extracted")self.extraction_path = pathdef cleanup_resources(self, force=False):"""Delete all extracted resource files and directories, returning a listof the file and directory names that could not be successfully removed.This function does not have any concurrency protection, so it shouldgenerally only be called when the extraction path is a temporarydirectory exclusive to a single process. 
def get_default_cache():
    """
    Return the ``PYTHON_EGG_CACHE`` environment variable
    or a platform-relevant user cache dir for an app
    named "Python-Eggs".
    """
    env_cache = os.environ.get('PYTHON_EGG_CACHE')
    if env_cache:
        return env_cache
    return appdirs.user_cache_dir(appname='Python-Eggs')


def safe_name(name):
    """Convert an arbitrary string to a standard distribution name

    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
    """
    return re.sub('[^A-Za-z0-9.]+', '-', name)


def safe_version(version):
    """Convert an arbitrary string to a standard version string"""
    try:
        # prefer the canonical normalized form of a valid PEP 440 version
        return str(packaging.version.Version(version))
    except packaging.version.InvalidVersion:
        # legacy fallback: spaces become dots, remaining junk becomes '-'
        version = version.replace(' ', '.')
        return re.sub('[^A-Za-z0-9.]+', '-', version)


def safe_extra(extra):
    """Convert an arbitrary string to a standard 'extra' name

    Any runs of characters other than letters, digits, '.' and '-' are
    replaced with a single '_', and the result is always lowercased.
    """
    return re.sub('[^A-Za-z0-9.-]+', '_', extra).lower()


def to_filename(name):
    """Convert a project or version name to its filename-escaped form

    Any '-' characters are currently replaced with '_'.
    """
    return name.replace('-', '_')


def invalid_marker(text):
    """
    Validate text as a PEP 508 environment marker; return an exception
    if invalid or False otherwise.
    """
    try:
        evaluate_marker(text)
    except SyntaxError as e:
        # strip location info, which is meaningless for a marker string
        e.filename = None
        e.lineno = None
        return e
    return False


def evaluate_marker(text, extra=None):
    """
    Evaluate a PEP 508 environment marker.
    Return a boolean indicating the marker result in this environment.
    Raise SyntaxError if marker is invalid.

    This implementation uses the 'pyparsing' module.
    """
    try:
        marker = packaging.markers.Marker(text)
        return marker.evaluate()
    except packaging.markers.InvalidMarker as e:
        raise SyntaxError(e)
302 loaders"""egg_name = Noneegg_info = Noneloader = Nonedef __init__(self, module):self.loader = getattr(module, '__loader__', None)self.module_path = os.path.dirname(getattr(module, '__file__', ''))def get_resource_filename(self, manager, resource_name):return self._fn(self.module_path, resource_name)def get_resource_stream(self, manager, resource_name):return io.BytesIO(self.get_resource_string(manager, resource_name))def get_resource_string(self, manager, resource_name):return self._get(self._fn(self.module_path, resource_name))def has_resource(self, resource_name):return self._has(self._fn(self.module_path, resource_name))def has_metadata(self, name):return self.egg_info and self._has(self._fn(self.egg_info, name))def get_metadata(self, name):if not self.egg_info:return ""value = self._get(self._fn(self.egg_info, name))return value.decode('utf-8') if six.PY3 else valuedef get_metadata_lines(self, name):return yield_lines(self.get_metadata(name))def resource_isdir(self, resource_name):return self._isdir(self._fn(self.module_path, resource_name))def metadata_isdir(self, name):return self.egg_info and self._isdir(self._fn(self.egg_info, name))def resource_listdir(self, resource_name):return self._listdir(self._fn(self.module_path, resource_name))def metadata_listdir(self, name):if self.egg_info:return self._listdir(self._fn(self.egg_info, name))return []def run_script(self, script_name, namespace):script = 'scripts/' + script_nameif not self.has_metadata(script):raise ResolutionError("No script named %r" % script_name)script_text = self.get_metadata(script).replace('\r\n', '\n')script_text = script_text.replace('\r', '\n')script_filename = self._fn(self.egg_info, script)namespace['__file__'] = script_filenameif os.path.exists(script_filename):source = open(script_filename).read()code = compile(source, script_filename, 'exec')exec(code, namespace, namespace)else:from linecache import cachecache[script_filename] = (len(script_text), 0, script_text.split('\n'), 
script_filename)script_code = compile(script_text, script_filename, 'exec')exec(script_code, namespace, namespace)def _has(self, path):raise NotImplementedError("Can't perform this operation for unregistered loader type")def _isdir(self, path):raise NotImplementedError("Can't perform this operation for unregistered loader type")def _listdir(self, path):raise NotImplementedError("Can't perform this operation for unregistered loader type")def _fn(self, base, resource_name):if resource_name:return os.path.join(base, *resource_name.split('/'))return basedef _get(self, path):if hasattr(self.loader, 'get_data'):return self.loader.get_data(path)raise NotImplementedError("Can't perform this operation for loaders without 'get_data()'")register_loader_type(object, NullProvider)class EggProvider(NullProvider):"""Provider based on a virtual filesystem"""def __init__(self, module):NullProvider.__init__(self, module)self._setup_prefix()def _setup_prefix(self):# we assume here that our metadata may be nested inside a "basket"# of multiple eggs; that's why we use module_path instead of .archivepath = self.module_pathold = Nonewhile path != old:if _is_unpacked_egg(path):self.egg_name = os.path.basename(path)self.egg_info = os.path.join(path, 'EGG-INFO')self.egg_root = pathbreakold = pathpath, base = os.path.split(path)class DefaultProvider(EggProvider):"""Provides access to package resources in the filesystem"""def _has(self, path):return os.path.exists(path)def _isdir(self, path):return os.path.isdir(path)def _listdir(self, path):return os.listdir(path)def get_resource_stream(self, manager, resource_name):return open(self._fn(self.module_path, resource_name), 'rb')def _get(self, path):with open(path, 'rb') as stream:return stream.read()@classmethoddef _register(cls):loader_cls = getattr(importlib_machinery, 'SourceFileLoader',type(None))register_loader_type(loader_cls, cls)DefaultProvider._register()class EmptyProvider(NullProvider):"""Provider that returns nothing for all 
class ZipManifests(dict):
    """zip manifest builder"""

    @classmethod
    def build(cls, path):
        """
        Build a dictionary similar to the zipimport directory
        caches, except instead of tuples, store ZipInfo objects.

        Use a platform-specific path separator (os.sep) for the path keys
        for compatibility with pypy on Windows.
        """
        with ContextualZipFile(path) as zfile:
            return {
                name.replace('/', os.sep): zfile.getinfo(name)
                for name in zfile.namelist()
            }

    load = build


class MemoizedZipManifests(ZipManifests):
    """
    Memoized zipfile manifests.
    """
    manifest_mod = collections.namedtuple('manifest_mod', 'manifest mtime')

    def load(self, path):
        """
        Load a manifest at path or return a suitable manifest already loaded.
        """
        path = os.path.normpath(path)
        mtime = os.stat(path).st_mtime

        cached = self.get(path)
        if cached is None or cached.mtime != mtime:
            cached = self.manifest_mod(self.build(path), mtime)
            self[path] = cached

        return cached.manifest


class ContextualZipFile(zipfile.ZipFile):
    """
    Supplement ZipFile class to support context manager for Python 2.6
    """

    def __new__(cls, *args, **kwargs):
        """
        Construct a ZipFile or ContextualZipFile as appropriate
        """
        if hasattr(zipfile.ZipFile, '__exit__'):
            # the stdlib class is already a context manager; use it directly
            return zipfile.ZipFile(*args, **kwargs)
        return super(ContextualZipFile, cls).__new__(cls)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, tb):
        self.close()
AssertionError("%s is not a subpath of %s" % (fspath, self.zip_pre))def _parts(self, zip_path):# Convert a zipfile subpath into an egg-relative path part list.# pseudo-fs pathfspath = self.zip_pre + zip_pathif fspath.startswith(self.egg_root + os.sep):return fspath[len(self.egg_root) + 1:].split(os.sep)raise AssertionError("%s is not a subpath of %s" % (fspath, self.egg_root))@propertydef zipinfo(self):return self._zip_manifests.load(self.loader.archive)def get_resource_filename(self, manager, resource_name):if not self.egg_name:raise NotImplementedError("resource_filename() only supported for .egg, not .zip")# no need to lock for extraction, since we use temp nameszip_path = self._resource_to_zip(resource_name)eagers = self._get_eager_resources()if '/'.join(self._parts(zip_path)) in eagers:for name in eagers:self._extract_resource(manager, self._eager_to_zip(name))return self._extract_resource(manager, zip_path)@staticmethoddef _get_date_and_size(zip_stat):size = zip_stat.file_size# ymdhms+wday, yday, dstdate_time = zip_stat.date_time + (0, 0, -1)# 1980 offset already donetimestamp = time.mktime(date_time)return timestamp, sizedef _extract_resource(self, manager, zip_path):if zip_path in self._index():for name in self._index()[zip_path]:last = self._extract_resource(manager, os.path.join(zip_path, name))# return the extracted directory namereturn os.path.dirname(last)timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])if not WRITE_SUPPORT:raise IOError('"os.rename" and "os.unlink" are not supported ''on this platform')try:real_path = manager.get_cache_path(self.egg_name, self._parts(zip_path))if self._is_current(real_path, zip_path):return real_pathoutf, tmpnam = _mkstemp(".$extract", dir=os.path.dirname(real_path))os.write(outf, self.loader.get_data(zip_path))os.close(outf)utime(tmpnam, (timestamp, timestamp))manager.postprocess(tmpnam, real_path)try:rename(tmpnam, real_path)except os.error:if os.path.isfile(real_path):if self._is_current(real_path, 
zip_path):# the file became current since it was checked above,# so proceed.return real_path# Windows, del old file and retryelif os.name == 'nt':unlink(real_path)rename(tmpnam, real_path)return real_pathraiseexcept os.error:# report a user-friendly errormanager.extraction_error()return real_pathdef _is_current(self, file_path, zip_path):"""Return True if the file_path is current for this zip_path"""timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])if not os.path.isfile(file_path):return Falsestat = os.stat(file_path)if stat.st_size != size or stat.st_mtime != timestamp:return False# check that the contents matchzip_contents = self.loader.get_data(zip_path)with open(file_path, 'rb') as f:file_contents = f.read()return zip_contents == file_contentsdef _get_eager_resources(self):if self.eagers is None:eagers = []for name in ('native_libs.txt', 'eager_resources.txt'):if self.has_metadata(name):eagers.extend(self.get_metadata_lines(name))self.eagers = eagersreturn self.eagersdef _index(self):try:return self._dirindexexcept AttributeError:ind = {}for path in self.zipinfo:parts = path.split(os.sep)while parts:parent = os.sep.join(parts[:-1])if parent in ind:ind[parent].append(parts[-1])breakelse:ind[parent] = [parts.pop()]self._dirindex = indreturn inddef _has(self, fspath):zip_path = self._zipinfo_name(fspath)return zip_path in self.zipinfo or zip_path in self._index()def _isdir(self, fspath):return self._zipinfo_name(fspath) in self._index()def _listdir(self, fspath):return list(self._index().get(self._zipinfo_name(fspath), ()))def _eager_to_zip(self, resource_name):return self._zipinfo_name(self._fn(self.egg_root, resource_name))def _resource_to_zip(self, resource_name):return self._zipinfo_name(self._fn(self.module_path, resource_name))register_loader_type(zipimport.zipimporter, ZipProvider)class FileMetadata(EmptyProvider):"""Metadata handler for standalone PKG-INFO filesUsage::metadata = FileMetadata("/path/to/PKG-INFO")This provider rejects all data 
and metadata requests except for PKG-INFO,which is treated as existing, and will be the contents of the file atthe provided location."""def __init__(self, path):self.path = pathdef has_metadata(self, name):return name == 'PKG-INFO' and os.path.isfile(self.path)def get_metadata(self, name):if name != 'PKG-INFO':raise KeyError("No metadata except PKG-INFO is available")with io.open(self.path, encoding='utf-8', errors="replace") as f:metadata = f.read()self._warn_on_replacement(metadata)return metadatadef _warn_on_replacement(self, metadata):# Python 2.6 and 3.2 compat for: replacement_char = '�'replacement_char = b'\xef\xbf\xbd'.decode('utf-8')if replacement_char in metadata:tmpl = "{self.path} could not be properly decoded in UTF-8"msg = tmpl.format(**locals())warnings.warn(msg)def get_metadata_lines(self, name):return yield_lines(self.get_metadata(name))class PathMetadata(DefaultProvider):"""Metadata provider for egg directoriesUsage::# Development eggs:egg_info = "/path/to/PackageName.egg-info"base_dir = os.path.dirname(egg_info)metadata = PathMetadata(base_dir, egg_info)dist_name = os.path.splitext(os.path.basename(egg_info))[0]dist = Distribution(basedir, project_name=dist_name, metadata=metadata)# Unpacked egg directories:egg_path = "/path/to/PackageName-ver-pyver-etc.egg"metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO'))dist = Distribution.from_filename(egg_path, metadata=metadata)"""def __init__(self, path, egg_info):self.module_path = pathself.egg_info = egg_infoclass EggMetadata(ZipProvider):"""Metadata provider for .egg files"""def __init__(self, importer):"""Create a metadata provider from a zipimporter"""self.zip_pre = importer.archive + os.sepself.loader = importerif importer.prefix:self.module_path = os.path.join(importer.archive, importer.prefix)else:self.module_path = importer.archiveself._setup_prefix()_declare_state('dict', _distribution_finders={})def register_finder(importer_type, distribution_finder):"""Register 
`distribution_finder` to find distributions in sys.path items`importer_type` is the type or class of a PEP 302 "Importer" (sys.path itemhandler), and `distribution_finder` is a callable that, passed a pathitem and the importer instance, yields ``Distribution`` instances found onthat path item. See ``pkg_resources.find_on_path`` for an example."""_distribution_finders[importer_type] = distribution_finderdef find_distributions(path_item, only=False):"""Yield distributions accessible via `path_item`"""importer = get_importer(path_item)finder = _find_adapter(_distribution_finders, importer)return finder(importer, path_item, only)def find_eggs_in_zip(importer, path_item, only=False):"""Find eggs in zip files; possibly multiple nested eggs."""if importer.archive.endswith('.whl'):# wheels are not supported with this finder# they don't have PKG-INFO metadata, and won't ever contain eggsreturnmetadata = EggMetadata(importer)if metadata.has_metadata('PKG-INFO'):yield Distribution.from_filename(path_item, metadata=metadata)if only:# don't yield nested distrosreturnfor subitem in metadata.resource_listdir('/'):if _is_unpacked_egg(subitem):subpath = os.path.join(path_item, subitem)for dist in find_eggs_in_zip(zipimport.zipimporter(subpath), subpath):yield distregister_finder(zipimport.zipimporter, find_eggs_in_zip)def find_nothing(importer, path_item, only=False):return ()register_finder(object, find_nothing)def _by_version_descending(names):"""Given a list of filenames, return them in descending orderby version number.>>> names = 'bar', 'foo', 'Python-2.7.10.egg', 'Python-2.7.2.egg'>>> _by_version_descending(names)['Python-2.7.10.egg', 'Python-2.7.2.egg', 'foo', 'bar']>>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.egg'>>> _by_version_descending(names)['Setuptools-1.2.3.egg', 'Setuptools-1.2.3b1.egg']>>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.post1.egg'>>> _by_version_descending(names)['Setuptools-1.2.3.post1.egg', 'Setuptools-1.2.3b1.egg']"""def 
_by_version(name):"""Parse each component of the filename"""name, ext = os.path.splitext(name)parts = itertools.chain(name.split('-'), [ext])return [packaging.version.parse(part) for part in parts]return sorted(names, key=_by_version, reverse=True)def find_on_path(importer, path_item, only=False):"""Yield distributions accessible on a sys.path directory"""path_item = _normalize_cached(path_item)if os.path.isdir(path_item) and os.access(path_item, os.R_OK):if _is_unpacked_egg(path_item):yield Distribution.from_filename(path_item, metadata=PathMetadata(path_item, os.path.join(path_item, 'EGG-INFO')))else:# scan for .egg and .egg-info in directorypath_item_entries = _by_version_descending(os.listdir(path_item))for entry in path_item_entries:lower = entry.lower()if lower.endswith('.egg-info') or lower.endswith('.dist-info'):fullpath = os.path.join(path_item, entry)if os.path.isdir(fullpath):# egg-info directory, allow getting metadataif len(os.listdir(fullpath)) == 0:# Empty egg directory, skip.continuemetadata = PathMetadata(path_item, fullpath)else:metadata = FileMetadata(fullpath)yield Distribution.from_location(path_item, entry, metadata, precedence=DEVELOP_DIST)elif not only and _is_unpacked_egg(entry):dists = find_distributions(os.path.join(path_item, entry))for dist in dists:yield distelif not only and lower.endswith('.egg-link'):with open(os.path.join(path_item, entry)) as entry_file:entry_lines = entry_file.readlines()for line in entry_lines:if not line.strip():continuepath = os.path.join(path_item, line.rstrip())dists = find_distributions(path)for item in dists:yield itembreakregister_finder(pkgutil.ImpImporter, find_on_path)if hasattr(importlib_machinery, 'FileFinder'):register_finder(importlib_machinery.FileFinder, find_on_path)_declare_state('dict', _namespace_handlers={})_declare_state('dict', _namespace_packages={})def register_namespace_handler(importer_type, namespace_handler):"""Register `namespace_handler` to declare namespace packages`importer_type` 
is the type or class of a PEP 302 "Importer" (sys.path itemhandler), and `namespace_handler` is a callable like this::def namespace_handler(importer, path_entry, moduleName, module):# return a path_entry to use for child packagesNamespace handlers are only called if the importer object has alreadyagreed that it can handle the relevant path item, and they should onlyreturn a subpath if the module __path__ does not already contain anequivalent subpath. For an example namespace handler, see``pkg_resources.file_ns_handler``."""_namespace_handlers[importer_type] = namespace_handlerdef _handle_ns(packageName, path_item):"""Ensure that named package includes a subpath of path_item (if needed)"""importer = get_importer(path_item)if importer is None:return Noneloader = importer.find_module(packageName)if loader is None:return Nonemodule = sys.modules.get(packageName)if module is None:module = sys.modules[packageName] = types.ModuleType(packageName)module.__path__ = []_set_parent_ns(packageName)elif not hasattr(module, '__path__'):raise TypeError("Not a package:", packageName)handler = _find_adapter(_namespace_handlers, importer)subpath = handler(importer, path_item, packageName, module)if subpath is not None:path = module.__path__path.append(subpath)loader.load_module(packageName)_rebuild_mod_path(path, packageName, module)return subpathdef _rebuild_mod_path(orig_path, package_name, module):"""Rebuild module.__path__ ensuring that all entries are orderedcorresponding to their sys.path order"""sys_path = [_normalize_cached(p) for p in sys.path]def safe_sys_path_index(entry):"""Workaround for #520 and #513."""try:return sys_path.index(entry)except ValueError:return float('inf')def position_in_sys_path(path):"""Return the ordinal of the path based on its position in sys.path"""path_parts = path.split(os.sep)module_parts = package_name.count('.') + 1parts = path_parts[:-module_parts]return 
safe_sys_path_index(_normalize_cached(os.sep.join(parts)))orig_path.sort(key=position_in_sys_path)module.__path__[:] = [_normalize_cached(p) for p in orig_path]def declare_namespace(packageName):"""Declare that package 'packageName' is a namespace package"""_imp.acquire_lock()try:if packageName in _namespace_packages:returnpath, parent = sys.path, Noneif '.' in packageName:parent = '.'.join(packageName.split('.')[:-1])declare_namespace(parent)if parent not in _namespace_packages:__import__(parent)try:path = sys.modules[parent].__path__except AttributeError:raise TypeError("Not a package:", parent)# Track what packages are namespaces, so when new path items are added,# they can be updated_namespace_packages.setdefault(parent, []).append(packageName)_namespace_packages.setdefault(packageName, [])for path_item in path:# Ensure all the parent's path items are reflected in the child,# if they apply_handle_ns(packageName, path_item)finally:_imp.release_lock()def fixup_namespace_packages(path_item, parent=None):"""Ensure that previously-declared namespace packages include path_item"""_imp.acquire_lock()try:for package in _namespace_packages.get(parent, ()):subpath = _handle_ns(package, path_item)if subpath:fixup_namespace_packages(subpath, package)finally:_imp.release_lock()def file_ns_handler(importer, path_item, packageName, module):"""Compute an ns-package subpath for a filesystem or zipfile importer"""subpath = os.path.join(path_item, packageName.split('.')[-1])normalized = _normalize_cached(subpath)for item in module.__path__:if _normalize_cached(item) == normalized:breakelse:# Only return the path if it's not already therereturn subpathregister_namespace_handler(pkgutil.ImpImporter, file_ns_handler)register_namespace_handler(zipimport.zipimporter, file_ns_handler)if hasattr(importlib_machinery, 'FileFinder'):register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler)def null_ns_handler(importer, path_item, packageName, module):return 
Noneregister_namespace_handler(object, null_ns_handler)def normalize_path(filename):"""Normalize a file/dir name for comparison purposes"""return os.path.normcase(os.path.realpath(filename))def _normalize_cached(filename, _cache={}):try:return _cache[filename]except KeyError:_cache[filename] = result = normalize_path(filename)return resultdef _is_unpacked_egg(path):"""Determine if given path appears to be an unpacked egg."""return (path.lower().endswith('.egg'))def _set_parent_ns(packageName):parts = packageName.split('.')name = parts.pop()if parts:parent = '.'.join(parts)setattr(sys.modules[parent], name, sys.modules[packageName])def yield_lines(strs):"""Yield non-empty/non-comment lines of a string or sequence"""if isinstance(strs, six.string_types):for s in strs.splitlines():s = s.strip()# skip blank lines/commentsif s and not s.startswith('#'):yield selse:for ss in strs:for s in yield_lines(ss):yield sMODULE = re.compile(r"\w+(\.\w+)*$").matchEGG_NAME = re.compile(r"""(?P<name>[^-]+) (-(?P<ver>[^-]+) (-py(?P<pyver>[^-]+) (-(?P<plat>.+))?)?)?""",re.VERBOSE | re.IGNORECASE,).matchclass EntryPoint(object):"""Object representing an advertised importable object"""def __init__(self, name, module_name, attrs=(), extras=(), dist=None):if not MODULE(module_name):raise ValueError("Invalid module name", module_name)self.name = nameself.module_name = module_nameself.attrs = tuple(attrs)self.extras = Requirement.parse(("x[%s]" % ','.join(extras))).extrasself.dist = distdef __str__(self):s = "%s = %s" % (self.name, self.module_name)if self.attrs:s += ':' + '.'.join(self.attrs)if self.extras:s += ' [%s]' % ','.join(self.extras)return sdef __repr__(self):return "EntryPoint.parse(%r)" % str(self)def load(self, require=True, *args, **kwargs):"""Require packages for this EntryPoint, then resolve it."""if not require or args or kwargs:warnings.warn("Parameters to load are deprecated. 
Call .resolve and "".require separately.",DeprecationWarning,stacklevel=2,)if require:self.require(*args, **kwargs)return self.resolve()def resolve(self):"""Resolve the entry point from its module and attrs."""module = __import__(self.module_name, fromlist=['__name__'], level=0)try:return functools.reduce(getattr, self.attrs, module)except AttributeError as exc:raise ImportError(str(exc))def require(self, env=None, installer=None):if self.extras and not self.dist:raise UnknownExtra("Can't require() without a distribution", self)reqs = self.dist.requires(self.extras)items = working_set.resolve(reqs, env, installer)list(map(working_set.add, items))pattern = re.compile(r'\s*'r'(?P<name>.+?)\s*'r'=\s*'r'(?P<module>[\w.]+)\s*'r'(:\s*(?P<attr>[\w.]+))?\s*'r'(?P<extras>\[.*\])?\s*$')@classmethoddef parse(cls, src, dist=None):"""Parse a single entry point from string `src`Entry point syntax follows the form::name = some.module:some.attr [extra1, extra2]The entry name and module name are required, but the ``:attrs`` and``[extras]`` parts are optional"""m = cls.pattern.match(src)if not m:msg = "EntryPoint must be in 'name=module:attrs [extras]' format"raise ValueError(msg, src)res = m.groupdict()extras = cls._parse_extras(res['extras'])attrs = res['attr'].split('.') if res['attr'] else ()return cls(res['name'], res['module'], attrs, extras, dist)@classmethoddef _parse_extras(cls, extras_spec):if not extras_spec:return ()req = Requirement.parse('x' + extras_spec)if req.specs:raise ValueError()return req.extras@classmethoddef parse_group(cls, group, lines, dist=None):"""Parse an entry point group"""if not MODULE(group):raise ValueError("Invalid group name", group)this = {}for line in yield_lines(lines):ep = cls.parse(line, dist)if ep.name in this:raise ValueError("Duplicate entry point", group, ep.name)this[ep.name] = epreturn this@classmethoddef parse_map(cls, data, dist=None):"""Parse a map of entry point groups"""if isinstance(data, dict):data = data.items()else:data = 
split_sections(data)maps = {}for group, lines in data:if group is None:if not lines:continueraise ValueError("Entry points must be listed in groups")group = group.strip()if group in maps:raise ValueError("Duplicate group name", group)maps[group] = cls.parse_group(group, lines, dist)return mapsdef _remove_md5_fragment(location):if not location:return ''parsed = urllib.parse.urlparse(location)if parsed[-1].startswith('md5='):return urllib.parse.urlunparse(parsed[:-1] + ('',))return locationdef _version_from_file(lines):"""Given an iterable of lines from a Metadata file, returnthe value of the Version field, if present, or None otherwise."""is_version_line = lambda line: line.lower().startswith('version:')version_lines = filter(is_version_line, lines)line = next(iter(version_lines), '')_, _, value = line.partition(':')return safe_version(value.strip()) or Noneclass Distribution(object):"""Wrap an actual or potential sys.path entry w/metadata"""PKG_INFO = 'PKG-INFO'def __init__(self, location=None, metadata=None, project_name=None,version=None, py_version=PY_MAJOR, platform=None,precedence=EGG_DIST):self.project_name = safe_name(project_name or 'Unknown')if version is not None:self._version = safe_version(version)self.py_version = py_versionself.platform = platformself.location = locationself.precedence = precedenceself._provider = metadata or empty_provider@classmethoddef from_location(cls, location, basename, metadata=None, **kw):project_name, version, py_version, platform = [None] * 4basename, ext = os.path.splitext(basename)if ext.lower() in _distributionImpl:cls = _distributionImpl[ext.lower()]match = EGG_NAME(basename)if match:project_name, version, py_version, platform = match.group('name', 'ver', 'pyver', 'plat')return cls(location, metadata, project_name=project_name, version=version,py_version=py_version, platform=platform, **kw)._reload_version()def _reload_version(self):return self@propertydef hashcmp(self):return 
(self.parsed_version,self.precedence,self.key,_remove_md5_fragment(self.location),self.py_version or '',self.platform or '',)def __hash__(self):return hash(self.hashcmp)def __lt__(self, other):return self.hashcmp < other.hashcmpdef __le__(self, other):return self.hashcmp <= other.hashcmpdef __gt__(self, other):return self.hashcmp > other.hashcmpdef __ge__(self, other):return self.hashcmp >= other.hashcmpdef __eq__(self, other):if not isinstance(other, self.__class__):# It's not a Distribution, so they are not equalreturn Falsereturn self.hashcmp == other.hashcmpdef __ne__(self, other):return not self == other# These properties have to be lazy so that we don't have to load any# metadata until/unless it's actually needed. (i.e., some distributions# may not know their name or version without loading PKG-INFO)@propertydef key(self):try:return self._keyexcept AttributeError:self._key = key = self.project_name.lower()return key@propertydef parsed_version(self):if not hasattr(self, "_parsed_version"):self._parsed_version = parse_version(self.version)return self._parsed_versiondef _warn_legacy_version(self):LV = packaging.version.LegacyVersionis_legacy = isinstance(self._parsed_version, LV)if not is_legacy:return# While an empty version is technically a legacy version and# is not a valid PEP 440 version, it's also unlikely to# actually come from someone and instead it is more likely that# it comes from setuptools attempting to parse a filename and# including it in the list. So for that we'll gate this warning# on if the version is anything at all or not.if not self.version:returntmpl = textwrap.dedent("""'{project_name} ({version})' is being parsed as a legacy,non PEP 440,version. You may find odd behavior and sort order.In particular it will be sorted as less than 0.0. 
Itis recommended to migrate to PEP 440 compatibleversions.""").strip().replace('\n', ' ')warnings.warn(tmpl.format(**vars(self)), PEP440Warning)@propertydef version(self):try:return self._versionexcept AttributeError:version = _version_from_file(self._get_metadata(self.PKG_INFO))if version is None:tmpl = "Missing 'Version:' header and/or %s file"raise ValueError(tmpl % self.PKG_INFO, self)return version@propertydef _dep_map(self):try:return self.__dep_mapexcept AttributeError:dm = self.__dep_map = {None: []}for name in 'requires.txt', 'depends.txt':for extra, reqs in split_sections(self._get_metadata(name)):if extra:if ':' in extra:extra, marker = extra.split(':', 1)if invalid_marker(marker):# XXX warnreqs = []elif not evaluate_marker(marker):reqs = []extra = safe_extra(extra) or Nonedm.setdefault(extra, []).extend(parse_requirements(reqs))return dmdef requires(self, extras=()):"""List of Requirements needed for this distro if `extras` are used"""dm = self._dep_mapdeps = []deps.extend(dm.get(None, ()))for ext in extras:try:deps.extend(dm[safe_extra(ext)])except KeyError:raise UnknownExtra("%s has no such extra feature %r" % (self, ext))return depsdef _get_metadata(self, name):if self.has_metadata(name):for line in self.get_metadata_lines(name):yield linedef activate(self, path=None, replace=False):"""Ensure distribution is importable on `path` (default=sys.path)"""if path is None:path = sys.pathself.insert_on(path, replace=replace)if path is sys.path:fixup_namespace_packages(self.location)for pkg in self._get_metadata('namespace_packages.txt'):if pkg in sys.modules:declare_namespace(pkg)def egg_name(self):"""Return what this distribution's standard .egg filename should be"""filename = "%s-%s-py%s" % (to_filename(self.project_name), to_filename(self.version),self.py_version or PY_MAJOR)if self.platform:filename += '-' + self.platformreturn filenamedef __repr__(self):if self.location:return "%s (%s)" % (self, self.location)else:return str(self)def 
__str__(self):try:version = getattr(self, 'version', None)except ValueError:version = Noneversion = version or "[unknown version]"return "%s %s" % (self.project_name, version)def __getattr__(self, attr):"""Delegate all unrecognized public attributes to .metadata provider"""if attr.startswith('_'):raise AttributeError(attr)return getattr(self._provider, attr)@classmethoddef from_filename(cls, filename, metadata=None, **kw):return cls.from_location(_normalize_cached(filename), os.path.basename(filename), metadata,**kw)def as_requirement(self):"""Return a ``Requirement`` that matches this distribution exactly"""if isinstance(self.parsed_version, packaging.version.Version):spec = "%s==%s" % (self.project_name, self.parsed_version)else:spec = "%s===%s" % (self.project_name, self.parsed_version)return Requirement.parse(spec)def load_entry_point(self, group, name):"""Return the `name` entry point of `group` or raise ImportError"""ep = self.get_entry_info(group, name)if ep is None:raise ImportError("Entry point %r not found" % ((group, name),))return ep.load()def get_entry_map(self, group=None):"""Return the entry point map for `group`, or the full entry map"""try:ep_map = self._ep_mapexcept AttributeError:ep_map = self._ep_map = EntryPoint.parse_map(self._get_metadata('entry_points.txt'), self)if group is not None:return ep_map.get(group, {})return ep_mapdef get_entry_info(self, group, name):"""Return the EntryPoint object for `group`+`name`, or ``None``"""return self.get_entry_map(group).get(name)def insert_on(self, path, loc=None, replace=False):"""Ensure self.location is on pathIf replace=False (default):- If location is already in path anywhere, do nothing.- Else:- If it's an egg and its parent directory is on path,insert just ahead of the parent.- Else: add to the end of path.If replace=True:- If location is already on path anywhere (not eggs)or higher priority than its parent (eggs)do nothing.- Else:- If it's an egg and its parent directory is on path,insert just 
ahead of the parent,removing any lower-priority entries.- Else: add it to the front of path."""loc = loc or self.locationif not loc:returnnloc = _normalize_cached(loc)bdir = os.path.dirname(nloc)npath = [(p and _normalize_cached(p) or p) for p in path]for p, item in enumerate(npath):if item == nloc:if replace:breakelse:# don't modify path (even removing duplicates) if found and not replacereturnelif item == bdir and self.precedence == EGG_DIST:# if it's an .egg, give it precedence over its directory# UNLESS it's already been added to sys.path and replace=Falseif (not replace) and nloc in npath[p:]:returnif path is sys.path:self.check_version_conflict()path.insert(p, loc)npath.insert(p, nloc)breakelse:if path is sys.path:self.check_version_conflict()if replace:path.insert(0, loc)else:path.append(loc)return# p is the spot where we found or inserted loc; now remove duplicateswhile True:try:np = npath.index(nloc, p + 1)except ValueError:breakelse:del npath[np], path[np]# ha!p = npreturndef check_version_conflict(self):if self.key == 'setuptools':# ignore the inevitable setuptools self-conflicts :(returnnsp = dict.fromkeys(self._get_metadata('namespace_packages.txt'))loc = normalize_path(self.location)for modname in self._get_metadata('top_level.txt'):if (modname not in sys.modules or modname in nspor modname in _namespace_packages):continueif modname in ('pkg_resources', 'setuptools', 'site'):continuefn = getattr(sys.modules[modname], '__file__', None)if fn and (normalize_path(fn).startswith(loc) orfn.startswith(self.location)):continueissue_warning("Module %s was already imported from %s, but %s is being added"" to sys.path" % (modname, fn, self.location),)def has_version(self):try:self.versionexcept ValueError:issue_warning("Unbuilt egg for " + repr(self))return Falsereturn Truedef clone(self, **kw):"""Copy this distribution, substituting in any changed keyword args"""names = 'project_name version py_version platform location precedence'for attr in 
names.split():kw.setdefault(attr, getattr(self, attr, None))kw.setdefault('metadata', self._provider)return self.__class__(**kw)@propertydef extras(self):return [dep for dep in self._dep_map if dep]class EggInfoDistribution(Distribution):def _reload_version(self):"""Packages installed by distutils (e.g. numpy or scipy),which uses an old safe_version, and sotheir version numbers can get mangled whenconverted to filenames (e.g., 1.11.0.dev0+2329eae to1.11.0.dev0_2329eae). These distributions will not beparsed properlydownstream by Distribution and safe_version, sotake an extra step and try to get the version number fromthe metadata file itself instead of the filename."""md_version = _version_from_file(self._get_metadata(self.PKG_INFO))if md_version:self._version = md_versionreturn selfclass DistInfoDistribution(Distribution):"""Wrap an actual or potential sys.path entry w/metadata, .dist-info style"""PKG_INFO = 'METADATA'EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])")@propertydef _parsed_pkg_info(self):"""Parse and cache metadata"""try:return self._pkg_infoexcept AttributeError:metadata = self.get_metadata(self.PKG_INFO)self._pkg_info = email.parser.Parser().parsestr(metadata)return self._pkg_info@propertydef _dep_map(self):try:return self.__dep_mapexcept AttributeError:self.__dep_map = self._compute_dependencies()return self.__dep_mapdef _compute_dependencies(self):"""Recompute this distribution's dependencies."""dm = self.__dep_map = {None: []}reqs = []# Including any condition expressionsfor req in self._parsed_pkg_info.get_all('Requires-Dist') or []:reqs.extend(parse_requirements(req))def reqs_for_extra(extra):for req in reqs:if not req.marker or req.marker.evaluate({'extra': extra}):yield reqcommon = frozenset(reqs_for_extra(None))dm[None].extend(common)for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []:s_extra = safe_extra(extra.strip())dm[s_extra] = list(frozenset(reqs_for_extra(extra)) - common)return dm_distributionImpl = {'.egg': 
Distribution,'.egg-info': EggInfoDistribution,'.dist-info': DistInfoDistribution,}def issue_warning(*args, **kw):level = 1g = globals()try:# find the first stack frame that is *not* code in# the pkg_resources module, to use for the warningwhile sys._getframe(level).f_globals is g:level += 1except ValueError:passwarnings.warn(stacklevel=level + 1, *args, **kw)class RequirementParseError(ValueError):def __str__(self):return ' '.join(self.args)def parse_requirements(strs):"""Yield ``Requirement`` objects for each specification in `strs``strs` must be a string, or a (possibly-nested) iterable thereof."""# create a steppable iterator, so we can handle \-continuationslines = iter(yield_lines(strs))for line in lines:# Drop comments -- a hash without a space may be in a URL.if ' #' in line:line = line[:line.find(' #')]# If there is a line continuation, drop it, and append the next line.if line.endswith('\\'):line = line[:-2].strip()line += next(lines)yield Requirement(line)class Requirement(packaging.requirements.Requirement):def __init__(self, requirement_string):"""DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""try:super(Requirement, self).__init__(requirement_string)except packaging.requirements.InvalidRequirement as e:raise RequirementParseError(str(e))self.unsafe_name = self.nameproject_name = safe_name(self.name)self.project_name, self.key = project_name, project_name.lower()self.specs = [(spec.operator, spec.version) for spec in self.specifier]self.extras = tuple(map(safe_extra, self.extras))self.hashCmp = (self.key,self.specifier,frozenset(self.extras),str(self.marker) if self.marker else None,)self.__hash = hash(self.hashCmp)def __eq__(self, other):return (isinstance(other, Requirement) andself.hashCmp == other.hashCmp)def __ne__(self, other):return not self == otherdef __contains__(self, item):if isinstance(item, Distribution):if item.key != self.key:return Falseitem = item.version# Allow prereleases always in order to match the previous 
    # (continuation of Requirement.__contains__ — the method's `def` line is
    # above this chunk)
    # behavior of
    # this method. In the future this should be smarter and follow PEP 440
    # more accurately.
    return self.specifier.contains(item, prereleases=True)

    def __hash__(self):
        # Hash precomputed at construction time; requirements are immutable.
        return self.__hash

    def __repr__(self):
        return "Requirement.parse(%r)" % str(self)

    @staticmethod
    def parse(s):
        # Parse exactly one requirement; the unpacking raises if `s` yields
        # zero or more than one requirement.
        req, = parse_requirements(s)
        return req


def _get_mro(cls):
    """Get an mro for a type or classic class"""
    if not isinstance(cls, type):
        # Old-style (classic) class: synthesize a new-style subclass so we can
        # read __mro__, then drop the synthetic class itself from the result.
        class cls(cls, object):
            pass
        return cls.__mro__[1:]
    return cls.__mro__


def _find_adapter(registry, ob):
    """Return an adapter factory for `ob` from `registry`"""
    # Walk the MRO so the most specific registered type wins.
    # Returns None implicitly when no adapter is registered.
    for t in _get_mro(getattr(ob, '__class__', type(ob))):
        if t in registry:
            return registry[t]


def ensure_directory(path):
    """Ensure that the parent directory of `path` exists"""
    dirname = os.path.dirname(path)
    if not os.path.isdir(dirname):
        os.makedirs(dirname)


def _bypass_ensure_directory(path):
    """Sandbox-bypassing version of ensure_directory()"""
    if not WRITE_SUPPORT:
        raise IOError('"os.mkdir" not supported on this platform.')
    dirname, filename = split(path)
    if dirname and filename and not isdir(dirname):
        # Recurse to create ancestors first, then this directory.
        _bypass_ensure_directory(dirname)
        mkdir(dirname, 0o755)


def split_sections(s):
    """Split a string or iterable thereof into (section, content) pairs

    Each ``section`` is a stripped version of the section header ("[section]")
    and each ``content`` is a list of stripped lines excluding blank lines and
    comment-only lines.  If there are any such lines before the first section
    header, they're returned in a first ``section`` of ``None``.
    """
    section = None
    content = []
    for line in yield_lines(s):
        if line.startswith("["):
            if line.endswith("]"):
                # Emit the previous section before starting a new one; the
                # leading ``None`` section is suppressed when it is empty.
                if section or content:
                    yield section, content
                section = line[1:-1].strip()
                content = []
            else:
                raise ValueError("Invalid section heading", line)
        else:
            content.append(line)

    # wrap up last segment
    yield section, content


def _mkstemp(*args, **kw):
    # tempfile.mkstemp calls os.open internally; swap in the un-sandboxed
    # os_open for the duration of the call.
    old_open = os.open
    try:
        # temporarily bypass sandboxing
        os.open = os_open
        return tempfile.mkstemp(*args, **kw)
    finally:
        # and then put it back
        os.open = old_open


# Silence the PEP440Warning by default, so that end users don't get hit by it
# randomly just because they use pkg_resources. We want to append the rule
# because we want earlier uses of filterwarnings to take precedence over this
# one.
warnings.filterwarnings("ignore", category=PEP440Warning, append=True)


# from jaraco.functools 1.3
def _call_aside(f, *args, **kwargs):
    # Decorator-style helper: invoke `f` immediately for its side effects and
    # return it unchanged so the name stays bound in the module.
    f(*args, **kwargs)
    return f


@_call_aside
def _initialize(g=globals()):
    "Set up global resource manager (deliberately not state-saved)"
    manager = ResourceManager()
    g['_manager'] = manager
    # Re-export every public ResourceManager method as a module-level name.
    for name in dir(manager):
        if not name.startswith('_'):
            g[name] = getattr(manager, name)


@_call_aside
def _initialize_master_working_set():
    """
    Prepare the master working set and make the ``require()``
    API available.

    This function has explicit effects on the global state
    of pkg_resources.  It is intended to be invoked once at
    the initialization of this module.

    Invocation by other packages is unsupported and done
    at their own risk.
    """
    working_set = WorkingSet._build_master()
    _declare_state('object', working_set=working_set)

    require = working_set.require
    iter_entry_points = working_set.iter_entry_points
    add_activation_listener = working_set.subscribe
    run_script = working_set.run_script
    # backward compatibility
    run_main = run_script
    # Activate all distributions already on sys.path with replace=False and
    # ensure that all distributions added to the working set in the future
    # (e.g. by calling ``require()``) will get activated as well,
    # with higher priority (replace=True).
    dist = None  # ensure dist is defined for del dist below
    for dist in working_set:
        dist.activate(replace=False)
    del dist
    add_activation_listener(
        lambda dist: dist.activate(replace=True), existing=False)
    working_set.entries = []
    # match order
    list(map(working_set.add_entry, sys.path))
    # Publish all the locals above (require, run_script, ...) as module-level
    # names in one shot.
    globals().update(locals())
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
#
# packaging.version: PEP 440 version parsing and ordering, with a
# LegacyVersion fallback for non-conforming (pre-PEP-440) version strings.
from __future__ import absolute_import, division, print_function

import collections
import itertools
import re

from ._structures import Infinity


__all__ = [
    "parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"
]


# Internal record of the parsed-out pieces of a PEP 440 version.
_Version = collections.namedtuple(
    "_Version",
    ["epoch", "release", "dev", "pre", "post", "local"],
)


def parse(version):
    """
    Parse the given version string and return either a :class:`Version` object
    or a :class:`LegacyVersion` object depending on if the given version is
    a valid PEP 440 version or a legacy version.
    """
    try:
        return Version(version)
    except InvalidVersion:
        return LegacyVersion(version)


class InvalidVersion(ValueError):
    """
    An invalid version was found, users should refer to PEP 440.
    """


class _BaseVersion(object):
    # Shared comparison machinery: subclasses set ``self._key`` to a totally
    # orderable tuple and all rich comparisons delegate to it.

    def __hash__(self):
        return hash(self._key)

    def __lt__(self, other):
        return self._compare(other, lambda s, o: s < o)

    def __le__(self, other):
        return self._compare(other, lambda s, o: s <= o)

    def __eq__(self, other):
        return self._compare(other, lambda s, o: s == o)

    def __ge__(self, other):
        return self._compare(other, lambda s, o: s >= o)

    def __gt__(self, other):
        return self._compare(other, lambda s, o: s > o)

    def __ne__(self, other):
        return self._compare(other, lambda s, o: s != o)

    def _compare(self, other, method):
        # Only versions compare with versions; anything else defers to the
        # other operand via NotImplemented.
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return method(self._key, other._key)


class LegacyVersion(_BaseVersion):
    # Wrapper for version strings that do not conform to PEP 440; ordering
    # follows the historical setuptools scheme (see _legacy_cmpkey).

    def __init__(self, version):
        self._version = str(version)
        self._key = _legacy_cmpkey(self._version)

    def __str__(self):
        return self._version

    def __repr__(self):
        return "<LegacyVersion({0})>".format(repr(str(self)))

    @property
    def public(self):
        return self._version

    @property
    def base_version(self):
        return self._version

    @property
    def local(self):
        # Legacy versions have no local segment by definition.
        return None

    @property
    def is_prerelease(self):
        return False

    @property
    def is_postrelease(self):
        return False


_legacy_version_component_re = re.compile(
    r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE,
)

_legacy_version_replacement_map = {
    "pre": "c", "preview": "c", "-": "final-", "rc": "c", "dev": "@",
}


def _parse_version_parts(s):
    # Tokenize a legacy version string into comparable parts: numeric runs
    # are zero-padded, alphabetic runs are prefixed with "*" so they sort
    # before numbers; a trailing "*final" marks the end of the version.
    for part in _legacy_version_component_re.split(s):
        part = _legacy_version_replacement_map.get(part, part)

        if not part or part == ".":
            continue

        if part[:1] in "0123456789":
            # pad for numeric comparison
            yield part.zfill(8)
        else:
            yield "*" + part

    # ensure that alpha/beta/candidate are before final
    yield "*final"


def _legacy_cmpkey(version):
    # We hardcode an epoch of -1 here. A PEP 440 version can only have a epoch
    # greater than or equal to 0. This will effectively put the LegacyVersion,
    # which uses the defacto standard originally implemented by setuptools,
    # as before all PEP 440 versions.
    epoch = -1

    # This scheme is taken from pkg_resources.parse_version setuptools prior to
    # it's adoption of the packaging library.
    parts = []
    for part in _parse_version_parts(version.lower()):
        if part.startswith("*"):
            # remove "-" before a prerelease tag
            if part < "*final":
                while parts and parts[-1] == "*final-":
                    parts.pop()

            # remove trailing zeros from each series of numeric parts
            while parts and parts[-1] == "00000000":
                parts.pop()

        parts.append(part)
    parts = tuple(parts)

    return epoch, parts


# Deliberately not anchored to the start and end of the string, to make it
# easier for 3rd party code to reuse
VERSION_PATTERN = r"""
    v?
    (?:
        (?:(?P<epoch>[0-9]+)!)?                           # epoch
        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
        (?P<pre>                                          # pre-release
            [-_\.]?
            (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
            [-_\.]?
            (?P<pre_n>[0-9]+)?
        )?
        (?P<post>                                         # post release
            (?:-(?P<post_n1>[0-9]+))
            |
            (?:
                [-_\.]?
                (?P<post_l>post|rev|r)
                [-_\.]?
                (?P<post_n2>[0-9]+)?
            )
        )?
        (?P<dev>                                          # dev release
            [-_\.]?
            (?P<dev_l>dev)
            [-_\.]?
            (?P<dev_n>[0-9]+)?
        )?
    )
    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
"""


class Version(_BaseVersion):
    # A PEP 440-conforming version; parsing raises InvalidVersion otherwise.

    _regex = re.compile(
        r"^\s*" + VERSION_PATTERN + r"\s*$",
        re.VERBOSE | re.IGNORECASE,
    )

    def __init__(self, version):
        # Validate the version and parse it into pieces
        match = self._regex.search(version)
        if not match:
            raise InvalidVersion("Invalid version: '{0}'".format(version))

        # Store the parsed out pieces of the version
        self._version = _Version(
            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
            release=tuple(int(i) for i in match.group("release").split(".")),
            pre=_parse_letter_version(
                match.group("pre_l"),
                match.group("pre_n"),
            ),
            post=_parse_letter_version(
                match.group("post_l"),
                match.group("post_n1") or match.group("post_n2"),
            ),
            dev=_parse_letter_version(
                match.group("dev_l"),
                match.group("dev_n"),
            ),
            local=_parse_local_version(match.group("local")),
        )

        # Generate a key which will be used for sorting
        self._key = _cmpkey(
            self._version.epoch,
            self._version.release,
            self._version.pre,
            self._version.post,
            self._version.dev,
            self._version.local,
        )

    def __repr__(self):
        return "<Version({0})>".format(repr(str(self)))

    def __str__(self):
        # Reassemble the canonical (normalized) string form segment by
        # segment, omitting segments that are absent.
        parts = []

        # Epoch
        if self._version.epoch != 0:
            parts.append("{0}!".format(self._version.epoch))

        # Release segment
        parts.append(".".join(str(x) for x in self._version.release))

        # Pre-release
        if self._version.pre is not None:
            parts.append("".join(str(x) for x in self._version.pre))

        # Post-release
        if self._version.post is not None:
            parts.append(".post{0}".format(self._version.post[1]))

        # Development release
        if self._version.dev is not None:
            parts.append(".dev{0}".format(self._version.dev[1]))

        # Local version segment
        if self._version.local is not None:
            parts.append(
                "+{0}".format(".".join(str(x) for x in self._version.local))
            )

        return "".join(parts)

    @property
    def public(self):
        # Everything before the local-version separator "+".
        return str(self).split("+", 1)[0]

    @property
    def base_version(self):
        # Epoch + release only: no pre/post/dev/local segments.
        parts = []

        # Epoch
        if self._version.epoch != 0:
            parts.append("{0}!".format(self._version.epoch))

        # Release segment
        parts.append(".".join(str(x) for x in self._version.release))

        return "".join(parts)

    @property
    def local(self):
        # Returns None implicitly when there is no local segment.
        version_string = str(self)
        if "+" in version_string:
            return version_string.split("+", 1)[1]

    @property
    def is_prerelease(self):
        return bool(self._version.dev or self._version.pre)

    @property
    def is_postrelease(self):
        return bool(self._version.post)


def _parse_letter_version(letter, number):
    # Normalize a (letter, number) pre/post/dev segment pair, or return None
    # implicitly when neither is present.
    if letter:
        # We consider there to be an implicit 0 in a pre-release if there is
        # not a numeral associated with it.
        if number is None:
            number = 0

        # We normalize any letters to their lower case form
        letter = letter.lower()

        # We consider some words to be alternate spellings of other words and
        # in those cases we want to normalize the spellings to our preferred
        # spelling.
        if letter == "alpha":
            letter = "a"
        elif letter == "beta":
            letter = "b"
        elif letter in ["c", "pre", "preview"]:
            letter = "rc"
        elif letter in ["rev", "r"]:
            letter = "post"

        return letter, int(number)
    if not letter and number:
        # We assume if we are given a number, but we are not given a letter
        # then this is using the implicit post release syntax (e.g. 1.0-1)
        letter = "post"

        return letter, int(number)


# NOTE: "seperators" is a historical misspelling kept for compatibility.
_local_version_seperators = re.compile(r"[\._-]")


def _parse_local_version(local):
    """
    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
    """
    if local is not None:
        return tuple(
            part.lower() if not part.isdigit() else int(part)
            for part in _local_version_seperators.split(local)
        )


def _cmpkey(epoch, release, pre, post, dev, local):
    # When we compare a release version, we want to compare it with all of the
    # trailing zeros removed.  So we'll use a reverse the list, drop all the now
    # leading zeros until we come to something non zero, then take the rest
    # re-reverse it back into the correct order and make it a tuple and use
    # that for our sorting key.
    release = tuple(
        reversed(list(
            itertools.dropwhile(
                lambda x: x == 0,
                reversed(release),
            )
        ))
    )

    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
    # We'll do this by abusing the pre segment, but we _only_ want to do this
    # if there is not a pre or a post segment. If we have one of those then
    # the normal sorting rules will handle this case correctly.
    if pre is None and post is None and dev is not None:
        pre = -Infinity
    # Versions without a pre-release (except as noted above) should sort after
    # those with one.
    elif pre is None:
        pre = Infinity

    # Versions without a post segment should sort before those with one.
    if post is None:
        post = -Infinity

    # Versions without a development segment should sort after those with one.
    if dev is None:
        dev = Infinity

    if local is None:
        # Versions without a local segment should sort before those with one.
        local = -Infinity
    else:
        # Versions with a local segment need that segment parsed to implement
        # the sorting rules in PEP440.
        # - Alpha numeric segments sort before numeric segments
        # - Alpha numeric segments sort lexicographically
        # - Numeric segments sort numerically
        # - Shorter versions sort before longer versions when the prefixes
        #   match exactly
        local = tuple(
            (i, "") if isinstance(i, int) else (-Infinity, i)
            for i in local
        )

    return epoch, release, pre, post, dev, local
# This file is dual licensed under the terms of the Apache License, Version# 2.0, and the BSD License. See the LICENSE file in the root of this repository# for complete details.from __future__ import absolute_import, division, print_functionimport re_canonicalize_regex = re.compile(r"[-_.]+")def canonicalize_name(name):# This is taken from PEP 503.return _canonicalize_regex.sub("-", name).lower()
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
#
# packaging.specifiers: PEP 440 version specifiers ("==1.0", ">=2,<3", ...),
# with a LegacySpecifier fallback for non-PEP-440 version strings.
from __future__ import absolute_import, division, print_function

import abc
import functools
import itertools
import re

from ._compat import string_types, with_metaclass
from .version import Version, LegacyVersion, parse


class InvalidSpecifier(ValueError):
    """
    An invalid specifier was found, users should refer to PEP 440.
    """


class BaseSpecifier(with_metaclass(abc.ABCMeta, object)):
    # Abstract interface shared by individual specifiers and SpecifierSet.

    @abc.abstractmethod
    def __str__(self):
        """
        Returns the str representation of this Specifier like object. This
        should be representative of the Specifier itself.
        """

    @abc.abstractmethod
    def __hash__(self):
        """
        Returns a hash value for this Specifier like object.
        """

    @abc.abstractmethod
    def __eq__(self, other):
        """
        Returns a boolean representing whether or not the two Specifier like
        objects are equal.
        """

    @abc.abstractmethod
    def __ne__(self, other):
        """
        Returns a boolean representing whether or not the two Specifier like
        objects are not equal.
        """

    @abc.abstractproperty
    def prereleases(self):
        """
        Returns whether or not pre-releases as a whole are allowed by this
        specifier.
        """

    @prereleases.setter
    def prereleases(self, value):
        """
        Sets whether or not pre-releases as a whole are allowed by this
        specifier.
        """

    @abc.abstractmethod
    def contains(self, item, prereleases=None):
        """
        Determines if the given item is contained within this specifier.
        """

    @abc.abstractmethod
    def filter(self, iterable, prereleases=None):
        """
        Takes an iterable of items and filters them so that only items which
        are contained within this specifier are allowed in it.
        """


class _IndividualSpecifier(BaseSpecifier):
    # Base for single-clause specifiers; subclasses supply ``_regex`` (with
    # "operator" and "version" groups) and the ``_operators`` dispatch map.

    _operators = {}

    def __init__(self, spec="", prereleases=None):
        match = self._regex.search(spec)
        if not match:
            raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec))

        self._spec = (
            match.group("operator").strip(),
            match.group("version").strip(),
        )

        # Store whether or not this Specifier should accept prereleases
        self._prereleases = prereleases

    def __repr__(self):
        pre = (
            ", prereleases={0!r}".format(self.prereleases)
            if self._prereleases is not None
            else ""
        )

        return "<{0}({1!r}{2})>".format(
            self.__class__.__name__,
            str(self),
            pre,
        )

    def __str__(self):
        return "{0}{1}".format(*self._spec)

    def __hash__(self):
        return hash(self._spec)

    def __eq__(self, other):
        # Strings are parsed into a specifier of the same class first.
        if isinstance(other, string_types):
            try:
                other = self.__class__(other)
            except InvalidSpecifier:
                return NotImplemented
        elif not isinstance(other, self.__class__):
            return NotImplemented

        return self._spec == other._spec

    def __ne__(self, other):
        if isinstance(other, string_types):
            try:
                other = self.__class__(other)
            except InvalidSpecifier:
                return NotImplemented
        elif not isinstance(other, self.__class__):
            return NotImplemented

        return self._spec != other._spec

    def _get_operator(self, op):
        # Map an operator token to its ``_compare_*`` method.
        return getattr(self, "_compare_{0}".format(self._operators[op]))

    def _coerce_version(self, version):
        if not isinstance(version, (LegacyVersion, Version)):
            version = parse(version)
        return version

    @property
    def operator(self):
        return self._spec[0]

    @property
    def version(self):
        return self._spec[1]

    @property
    def prereleases(self):
        return self._prereleases

    @prereleases.setter
    def prereleases(self, value):
        self._prereleases = value

    def __contains__(self, item):
        return self.contains(item)

    def contains(self, item, prereleases=None):
        # Determine if prereleases are to be allowed or not.
        if prereleases is None:
            prereleases = self.prereleases

        # Normalize item to a Version or LegacyVersion, this allows us to have
        # a shortcut for ``"2.0" in Specifier(">=2")
        item = self._coerce_version(item)

        # Determine if we should be supporting prereleases in this specifier
        # or not, if we do not support prereleases than we can short circuit
        # logic if this version is a prereleases.
        if item.is_prerelease and not prereleases:
            return False

        # Actually do the comparison to determine if this item is contained
        # within this Specifier or not.
        return self._get_operator(self.operator)(item, self.version)

    def filter(self, iterable, prereleases=None):
        yielded = False
        found_prereleases = []

        kw = {"prereleases": prereleases if prereleases is not None else True}

        # Attempt to iterate over all the values in the iterable and if any of
        # them match, yield them.
        for version in iterable:
            parsed_version = self._coerce_version(version)

            if self.contains(parsed_version, **kw):
                # If our version is a prerelease, and we were not set to allow
                # prereleases, then we'll store it for later incase nothing
                # else matches this specifier.
                if (parsed_version.is_prerelease and not
                        (prereleases or self.prereleases)):
                    found_prereleases.append(version)
                # Either this is not a prerelease, or we should have been
                # accepting prereleases from the begining.
                else:
                    yielded = True
                    yield version

        # Now that we've iterated over everything, determine if we've yielded
        # any values, and if we have not and we have any prereleases stored up
        # then we will go ahead and yield the prereleases.
        if not yielded and found_prereleases:
            for version in found_prereleases:
                yield version


class LegacySpecifier(_IndividualSpecifier):
    # Specifier for non-PEP-440 version strings (setuptools-style ordering).

    _regex_str = (
        r"""
        (?P<operator>(==|!=|<=|>=|<|>))
        \s*
        (?P<version>
            [^,;\s)]* # Since this is a "legacy" specifier, and the version
                      # string can be just about anything, we match everything
                      # except for whitespace, a semi-colon for marker support,
                      # a closing paren since versions can be enclosed in
                      # them, and a comma since it's a version separator.
        )
        """
    )

    _regex = re.compile(
        r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)

    _operators = {
        "==": "equal",
        "!=": "not_equal",
        "<=": "less_than_equal",
        ">=": "greater_than_equal",
        "<": "less_than",
        ">": "greater_than",
    }

    def _coerce_version(self, version):
        # Everything is compared as a LegacyVersion here.
        if not isinstance(version, LegacyVersion):
            version = LegacyVersion(str(version))
        return version

    def _compare_equal(self, prospective, spec):
        return prospective == self._coerce_version(spec)

    def _compare_not_equal(self, prospective, spec):
        return prospective != self._coerce_version(spec)

    def _compare_less_than_equal(self, prospective, spec):
        return prospective <= self._coerce_version(spec)

    def _compare_greater_than_equal(self, prospective, spec):
        return prospective >= self._coerce_version(spec)

    def _compare_less_than(self, prospective, spec):
        return prospective < self._coerce_version(spec)

    def _compare_greater_than(self, prospective, spec):
        return prospective > self._coerce_version(spec)


def _require_version_compare(fn):
    # Decorator: PEP 440 comparisons only apply to real Versions; anything
    # else (i.e. a LegacyVersion) simply does not match.
    @functools.wraps(fn)
    def wrapped(self, prospective, spec):
        if not isinstance(prospective, Version):
            return False
        return fn(self, prospective, spec)
    return wrapped


class Specifier(_IndividualSpecifier):
    # PEP 440 specifier with full operator support, including ~= and ===.

    _regex_str = (
        r"""
        (?P<operator>(~=|==|!=|<=|>=|<|>|===))
        (?P<version>
            (?:
                # The identity operators allow for an escape hatch that will
                # do an exact string match of the version you wish to install.
                # This will not be parsed by PEP 440 and we cannot determine
                # any semantic meaning from it. This operator is discouraged
                # but included entirely as an escape hatch.
                (?<====)  # Only match for the identity operator
                \s*
                [^\s]*    # We just match everything, except for whitespace
                          # since we are only testing for strict identity.
            )
            |
            (?:
                # The (non)equality operators allow for wild card and local
                # versions to be specified so we have to define these two
                # operators separately to enable that.
                (?<===|!=)            # Only match for equals and not equals
                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)*   # release
                (?:                   # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?

                # You cannot use a wild card and a dev or local version
                # together so group them with a | and make them optional.
                (?:
                    (?:[-_\.]?dev[-_\.]?[0-9]*)?         # dev release
                    (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local
                    |
                    \.\*  # Wild card syntax of .*
                )?
            )
            |
            (?:
                # The compatible operator requires at least two digits in the
                # release segment.
                (?<=~=)               # Only match for the compatible operator
                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)+   # release  (We have a + instead of a *)
                (?:                   # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?
                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
            )
            |
            (?:
                # All other operators only allow a sub set of what the
                # (non)equality operators do. Specifically they do not allow
                # local versions to be specified nor do they allow the prefix
                # matching wild cards.
                (?<!==|!=|~=)         # We have special cases for these
                                      # operators so we want to make sure they
                                      # don't match here.
                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)*   # release
                (?:                   # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?
                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
            )
        )
        """
    )

    _regex = re.compile(
        r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)

    _operators = {
        "~=": "compatible",
        "==": "equal",
        "!=": "not_equal",
        "<=": "less_than_equal",
        ">=": "greater_than_equal",
        "<": "less_than",
        ">": "greater_than",
        "===": "arbitrary",
    }

    @_require_version_compare
    def _compare_compatible(self, prospective, spec):
        # Compatible releases have an equivalent combination of >= and ==. That
        # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
        # implement this in terms of the other specifiers instead of
        # implementing it ourselves.  The only thing we need to do is construct
        # the other specifiers.

        # We want everything but the last item in the version, but we want to
        # ignore post and dev releases and we want to treat the pre-release as
        # it's own separate segment.
        prefix = ".".join(
            list(
                itertools.takewhile(
                    lambda x: (not x.startswith("post") and not
                               x.startswith("dev")),
                    _version_split(spec),
                )
            )[:-1]
        )

        # Add the prefix notation to the end of our string
        prefix += ".*"

        return (self._get_operator(">=")(prospective, spec) and
                self._get_operator("==")(prospective, prefix))

    @_require_version_compare
    def _compare_equal(self, prospective, spec):
        # We need special logic to handle prefix matching
        if spec.endswith(".*"):
            # In the case of prefix matching we want to ignore local segment.
            prospective = Version(prospective.public)
            # Split the spec out by dots, and pretend that there is an implicit
            # dot in between a release segment and a pre-release segment.
            spec = _version_split(spec[:-2])  # Remove the trailing .*

            # Split the prospective version out by dots, and pretend that there
            # is an implicit dot in between a release segment and a pre-release
            # segment.
            prospective = _version_split(str(prospective))

            # Shorten the prospective version to be the same length as the spec
            # so that we can determine if the specifier is a prefix of the
            # prospective version or not.
            prospective = prospective[:len(spec)]

            # Pad out our two sides with zeros so that they both equal the same
            # length.
            spec, prospective = _pad_version(spec, prospective)
        else:
            # Convert our spec string into a Version
            spec = Version(spec)

            # If the specifier does not have a local segment, then we want to
            # act as if the prospective version also does not have a local
            # segment.
            if not spec.local:
                prospective = Version(prospective.public)

        return prospective == spec

    @_require_version_compare
    def _compare_not_equal(self, prospective, spec):
        return not self._compare_equal(prospective, spec)

    @_require_version_compare
    def _compare_less_than_equal(self, prospective, spec):
        return prospective <= Version(spec)

    @_require_version_compare
    def _compare_greater_than_equal(self, prospective, spec):
        return prospective >= Version(spec)

    @_require_version_compare
    def _compare_less_than(self, prospective, spec):
        # Convert our spec to a Version instance, since we'll want to work with
        # it as a version.
        spec = Version(spec)

        # Check to see if the prospective version is less than the spec
        # version. If it's not we can short circuit and just return False now
        # instead of doing extra unneeded work.
        if not prospective < spec:
            return False

        # This special case is here so that, unless the specifier itself
        # includes is a pre-release version, that we do not accept pre-release
        # versions for the version mentioned in the specifier (e.g. <3.1 should
        # not match 3.1.dev0, but should match 3.0.dev0).
        if not spec.is_prerelease and prospective.is_prerelease:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False

        # If we've gotten to here, it means that prospective version is both
        # less than the spec version *and* it's not a pre-release of the same
        # version in the spec.
        return True

    @_require_version_compare
    def _compare_greater_than(self, prospective, spec):
        # Convert our spec to a Version instance, since we'll want to work with
        # it as a version.
        spec = Version(spec)

        # Check to see if the prospective version is greater than the spec
        # version. If it's not we can short circuit and just return False now
        # instead of doing extra unneeded work.
        if not prospective > spec:
            return False

        # This special case is here so that, unless the specifier itself
        # includes is a post-release version, that we do not accept
        # post-release versions for the version mentioned in the specifier
        # (e.g.  >3.1 should not match 3.0.post0, but should match 3.2.post0).
        if not spec.is_postrelease and prospective.is_postrelease:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False

        # Ensure that we do not allow a local version of the version mentioned
        # in the specifier, which is techincally greater than, to match.
        if prospective.local is not None:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False

        # If we've gotten to here, it means that prospective version is both
        # greater than the spec version *and* it's not a pre-release of the
        # same version in the spec.
        return True

    def _compare_arbitrary(self, prospective, spec):
        # === is a case-insensitive exact string match, no version semantics.
        return str(prospective).lower() == str(spec).lower()

    @property
    def prereleases(self):
        # If there is an explicit prereleases set for this, then we'll just
        # blindly use that.
        if self._prereleases is not None:
            return self._prereleases

        # Look at all of our specifiers and determine if they are inclusive
        # operators, and if they are if they are including an explicit
        # prerelease.
        operator, version = self._spec
        if operator in ["==", ">=", "<=", "~=", "==="]:
            # The == specifier can include a trailing .*, if it does we
            # want to remove before parsing.
            if operator == "==" and version.endswith(".*"):
                version = version[:-2]

            # Parse the version, and if it is a pre-release than this
            # specifier allows pre-releases.
            if parse(version).is_prerelease:
                return True

        return False

    @prereleases.setter
    def prereleases(self, value):
        self._prereleases = value


# Splits a release component like "1rc2" into its numeric and pre-release
# halves so prefix matching can treat them as separate dot segments.
_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")


def _version_split(version):
    result = []
    for item in version.split("."):
        match = _prefix_regex.search(item)
        if match:
            result.extend(match.groups())
        else:
            result.append(item)
    return result


def _pad_version(left, right):
    # Zero-pad the shorter release segment so "1.0" and "1.0.0" compare as
    # equal-length sequences during prefix matching.
    left_split, right_split = [], []

    # Get the release segment of our versions
    left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left)))
    right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))

    # Get the rest of our versions
    left_split.append(left[len(left_split[0]):])
    right_split.append(right[len(right_split[0]):])

    # Insert our padding
    left_split.insert(
        1,
        ["0"] * max(0, len(right_split[0]) - len(left_split[0])),
    )
    right_split.insert(
        1,
        ["0"] * max(0, len(left_split[0]) - len(right_split[0])),
    )

    return (
        list(itertools.chain(*left_split)),
        list(itertools.chain(*right_split)),
    )


class SpecifierSet(BaseSpecifier):
    # A comma-separated collection of individual specifiers combined with
    # logical AND semantics.

    def __init__(self, specifiers="", prereleases=None):
        # Split on , to break each indidivual specifier into it's own item, and
        # strip each item to remove leading/trailing whitespace.
        specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]

        # Parsed each individual specifier, attempting first to make it a
        # Specifier and falling back to a LegacySpecifier.
        parsed = set()
        for specifier in specifiers:
            try:
                parsed.add(Specifier(specifier))
            except InvalidSpecifier:
                parsed.add(LegacySpecifier(specifier))

        # Turn our parsed specifiers into a frozen set and save them for later.
        self._specs = frozenset(parsed)

        # Store our prereleases value so we can use it later to determine if
        # we accept prereleases or not.
        self._prereleases = prereleases

    def __repr__(self):
        pre = (
            ", prereleases={0!r}".format(self.prereleases)
            if self._prereleases is not None
            else ""
        )

        return "<SpecifierSet({0!r}{1})>".format(str(self), pre)

    def __str__(self):
        # Sorted for a deterministic string form.
        return ",".join(sorted(str(s) for s in self._specs))

    def __hash__(self):
        return hash(self._specs)

    def __and__(self, other):
        if isinstance(other, string_types):
            other = SpecifierSet(other)
        elif not isinstance(other, SpecifierSet):
            return NotImplemented

        specifier = SpecifierSet()
        specifier._specs = frozenset(self._specs | other._specs)

        # Merge the prereleases overrides; conflicting explicit overrides
        # cannot be reconciled and raise.
        if self._prereleases is None and other._prereleases is not None:
            specifier._prereleases = other._prereleases
        elif self._prereleases is not None and other._prereleases is None:
            specifier._prereleases = self._prereleases
        elif self._prereleases == other._prereleases:
            specifier._prereleases = self._prereleases
        else:
            raise ValueError(
                "Cannot combine SpecifierSets with True and False prerelease "
                "overrides."
            )

        return specifier

    def __eq__(self, other):
        if isinstance(other, string_types):
            other = SpecifierSet(other)
        elif isinstance(other, _IndividualSpecifier):
            other = SpecifierSet(str(other))
        elif not isinstance(other, SpecifierSet):
            return NotImplemented

        return self._specs == other._specs

    def __ne__(self, other):
        if isinstance(other, string_types):
            other = SpecifierSet(other)
        elif isinstance(other, _IndividualSpecifier):
            other = SpecifierSet(str(other))
        elif not isinstance(other, SpecifierSet):
            return NotImplemented

        return self._specs != other._specs

    def __len__(self):
        return len(self._specs)

    def __iter__(self):
        return iter(self._specs)

    @property
    def prereleases(self):
        # If we have been given an explicit prerelease modifier, then we'll
        # pass that through here.
        if self._prereleases is not None:
            return self._prereleases

        # If we don't have any specifiers, and we don't have a forced value,
        # then we'll just return None since we don't know if this should have
        # pre-releases or not.
        if not self._specs:
            return None

        # Otherwise we'll see if any of the given specifiers accept
        # prereleases, if any of them do we'll return True, otherwise False.
        return any(s.prereleases for s in self._specs)

    @prereleases.setter
    def prereleases(self, value):
        self._prereleases = value

    def __contains__(self, item):
        return self.contains(item)

    def contains(self, item, prereleases=None):
        # Ensure that our item is a Version or LegacyVersion instance.
        if not isinstance(item, (LegacyVersion, Version)):
            item = parse(item)

        # Determine if we're forcing a prerelease or not, if we're not forcing
        # one for this particular filter call, then we'll use whatever the
        # SpecifierSet thinks for whether or not we should support prereleases.
        if prereleases is None:
            prereleases = self.prereleases

        # We can determine if we're going to allow pre-releases by looking to
        # see if any of the underlying items supports them.  If none of them do
        # and this item is a pre-release then we do not allow it and we can
        # short circuit that here.
        # Note: This means that 1.0.dev1 would not be contained in something
        #       like >=1.0.devabc however it would be in >=1.0.debabc,>0.0.dev0
        if not prereleases and item.is_prerelease:
            return False

        # We simply dispatch to the underlying specs here to make sure that the
        # given version is contained within all of them.
        # Note: This use of all() here means that an empty set of specifiers
        #       will always return True, this is an explicit design decision.
        return all(
            s.contains(item, prereleases=prereleases)
            for s in self._specs
        )

    def filter(self, iterable, prereleases=None):
        # Determine if we're forcing a prerelease or not, if we're not forcing
        # one for this particular filter call, then we'll use whatever the
        # SpecifierSet thinks for whether or not we should support prereleases.
        if prereleases is None:
            prereleases = self.prereleases

        # If we have any specifiers, then we want to wrap our iterable in the
        # filter method for each one, this will act as a logical AND amongst
        # each specifier.
        if self._specs:
            for spec in self._specs:
                iterable = spec.filter(iterable, prereleases=bool(prereleases))
            return iterable
        # If we do not have any specifiers, then we need to have a rough filter
        # which will filter out any pre-releases, unless there are no final
        # releases, and which will filter out LegacyVersion in general.
        else:
            filtered = []
            found_prereleases = []

            for item in iterable:
                # Ensure that we some kind of Version class for this item.
                if not isinstance(item, (LegacyVersion, Version)):
                    parsed_version = parse(item)
                else:
                    parsed_version = item

                # Filter out any item which is parsed as a LegacyVersion
                if isinstance(parsed_version, LegacyVersion):
                    continue

                # Store any item which is a pre-release for later unless we've
                # already found a final version or we are accepting prereleases
                if parsed_version.is_prerelease and not prereleases:
                    if not filtered:
                        found_prereleases.append(item)
                else:
                    filtered.append(item)

            # If we've found no  items except for pre-releases, then we'll go
            # ahead and use the pre-releases
            if not filtered and found_prereleases and prereleases is None:
                return found_prereleases

            return filtered
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
#
# packaging.requirements: PEP 508 requirement string parsing, built on a
# pyparsing grammar.
from __future__ import absolute_import, division, print_function

import string
import re

from pip._vendor.pyparsing import (
    stringStart, stringEnd, originalTextFor, ParseException
)
from pip._vendor.pyparsing import ZeroOrMore, Word, Optional, Regex, Combine
from pip._vendor.pyparsing import Literal as L  # noqa
from pip._vendor.six.moves.urllib import parse as urlparse

from .markers import MARKER_EXPR, Marker
from .specifiers import LegacySpecifier, Specifier, SpecifierSet


class InvalidRequirement(ValueError):
    """
    An invalid requirement was found, users should refer to PEP 508.
    """


# --- Grammar building blocks (PEP 508) ------------------------------------
ALPHANUM = Word(string.ascii_letters + string.digits)

LBRACKET = L("[").suppress()
RBRACKET = L("]").suppress()
LPAREN = L("(").suppress()
RPAREN = L(")").suppress()
COMMA = L(",").suppress()
SEMICOLON = L(";").suppress()
AT = L("@").suppress()

PUNCTUATION = Word("-_.")
# Identifiers may contain internal punctuation but must end in alphanumerics.
IDENTIFIER_END = ALPHANUM | (ZeroOrMore(PUNCTUATION) + ALPHANUM)
IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END))

NAME = IDENTIFIER("name")
EXTRA = IDENTIFIER

URI = Regex(r'[^ ]+')("url")
URL = (AT + URI)

EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA)
EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras")

# Reuse the specifier regexes so the grammar matches exactly what the
# specifier classes will later accept.
VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE)
VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE)

VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY
VERSION_MANY = Combine(VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE),
                       joinString=",", adjacent=False)("_raw_spec")
# The version spec may optionally be wrapped in parentheses.
_VERSION_SPEC = Optional(((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY))
_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or '')

VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier")
VERSION_SPEC.setParseAction(lambda s, l, t: t[1])

# An environment marker after ";" is captured as its original text and
# converted into a Marker object.
MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker")
MARKER_EXPR.setParseAction(
    lambda s, l, t: Marker(s[t._original_start:t._original_end])
)
# NOTE: "SEPERATOR" is a historical misspelling kept for compatibility.
MARKER_SEPERATOR = SEMICOLON
MARKER = MARKER_SEPERATOR + MARKER_EXPR

VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER)
URL_AND_MARKER = URL + Optional(MARKER)

NAMED_REQUIREMENT = \
    NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER)

REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd


class Requirement(object):
    """Parse a requirement.

    Parse a given requirement string into its parts, such as name, specifier,
    URL, and extras. Raises InvalidRequirement on a badly-formed requirement
    string.
    """

    # TODO: Can we test whether something is contained within a requirement?
    #       If so how do we do that? Do we need to test against the _name_ of
    #       the thing as well as the version? What about the markers?
    # TODO: Can we normalize the name and extra name?

    def __init__(self, requirement_string):
        try:
            req = REQUIREMENT.parseString(requirement_string)
        except ParseException as e:
            # Include a short window of the offending text in the error.
            raise InvalidRequirement(
                "Invalid requirement, parse error at \"{0!r}\"".format(
                    requirement_string[e.loc:e.loc + 8]))

        self.name = req.name
        if req.url:
            # A URL requirement must have both a scheme and a network
            # location to be considered valid.
            parsed_url = urlparse.urlparse(req.url)
            if not (parsed_url.scheme and parsed_url.netloc) or (
                    not parsed_url.scheme and not parsed_url.netloc):
                raise InvalidRequirement("Invalid URL given")
            self.url = req.url
        else:
            self.url = None
        self.extras = set(req.extras.asList() if req.extras else [])
        self.specifier = SpecifierSet(req.specifier)
        self.marker = req.marker if req.marker else None

    def __str__(self):
        # Reassemble the canonical requirement string from the parsed parts.
        parts = [self.name]

        if self.extras:
            parts.append("[{0}]".format(",".join(sorted(self.extras))))

        if self.specifier:
            parts.append(str(self.specifier))

        if self.url:
            parts.append("@ {0}".format(self.url))

        if self.marker:
            parts.append("; {0}".format(self.marker))

        return "".join(parts)

    def __repr__(self):
        return "<Requirement({0!r})>".format(str(self))
# This file is dual licensed under the terms of the Apache License, Version# 2.0, and the BSD License. See the LICENSE file in the root of this repository# for complete details.from __future__ import absolute_import, division, print_functionimport operatorimport osimport platformimport sysfrom pip._vendor.pyparsing import (ParseException, ParseResults, stringStart, stringEnd,)from pip._vendor.pyparsing import ZeroOrMore, Group, Forward, QuotedStringfrom pip._vendor.pyparsing import Literal as L # noqafrom ._compat import string_typesfrom .specifiers import Specifier, InvalidSpecifier__all__ = ["InvalidMarker", "UndefinedComparison", "UndefinedEnvironmentName","Marker", "default_environment",]class InvalidMarker(ValueError):"""An invalid marker was found, users should refer to PEP 508."""class UndefinedComparison(ValueError):"""An invalid operation was attempted on a value that doesn't support it."""class UndefinedEnvironmentName(ValueError):"""A name was attempted to be used that does not exist inside of theenvironment."""class Node(object):def __init__(self, value):self.value = valuedef __str__(self):return str(self.value)def __repr__(self):return "<{0}({1!r})>".format(self.__class__.__name__, str(self))def serialize(self):raise NotImplementedErrorclass Variable(Node):def serialize(self):return str(self)class Value(Node):def serialize(self):return '"{0}"'.format(self)class Op(Node):def serialize(self):return str(self)VARIABLE = (L("implementation_version") |L("platform_python_implementation") |L("implementation_name") |L("python_full_version") |L("platform_release") |L("platform_version") |L("platform_machine") |L("platform_system") |L("python_version") |L("sys_platform") |L("os_name") |L("os.name") | # PEP-345L("sys.platform") | # PEP-345L("platform.version") | # PEP-345L("platform.machine") | # PEP-345L("platform.python_implementation") | # PEP-345L("python_implementation") | # undocumented setuptools legacyL("extra"))ALIASES = {'os.name': 
'os_name','sys.platform': 'sys_platform','platform.version': 'platform_version','platform.machine': 'platform_machine','platform.python_implementation': 'platform_python_implementation','python_implementation': 'platform_python_implementation'}VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0])))VERSION_CMP = (L("===") |L("==") |L(">=") |L("<=") |L("!=") |L("~=") |L(">") |L("<"))MARKER_OP = VERSION_CMP | L("not in") | L("in")MARKER_OP.setParseAction(lambda s, l, t: Op(t[0]))MARKER_VALUE = QuotedString("'") | QuotedString('"')MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0]))BOOLOP = L("and") | L("or")MARKER_VAR = VARIABLE | MARKER_VALUEMARKER_ITEM = Group(MARKER_VAR + MARKER_OP + MARKER_VAR)MARKER_ITEM.setParseAction(lambda s, l, t: tuple(t[0]))LPAREN = L("(").suppress()RPAREN = L(")").suppress()MARKER_EXPR = Forward()MARKER_ATOM = MARKER_ITEM | Group(LPAREN + MARKER_EXPR + RPAREN)MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR)MARKER = stringStart + MARKER_EXPR + stringEnddef _coerce_parse_result(results):if isinstance(results, ParseResults):return [_coerce_parse_result(i) for i in results]else:return resultsdef _format_marker(marker, first=True):assert isinstance(marker, (list, tuple, string_types))# Sometimes we have a structure like [[...]] which is a single item list# where the single item is itself it's own list. 
In that case we want skip# the rest of this function so that we don't get extraneous () on the# outside.if (isinstance(marker, list) and len(marker) == 1 andisinstance(marker[0], (list, tuple))):return _format_marker(marker[0])if isinstance(marker, list):inner = (_format_marker(m, first=False) for m in marker)if first:return " ".join(inner)else:return "(" + " ".join(inner) + ")"elif isinstance(marker, tuple):return " ".join([m.serialize() for m in marker])else:return marker_operators = {"in": lambda lhs, rhs: lhs in rhs,"not in": lambda lhs, rhs: lhs not in rhs,"<": operator.lt,"<=": operator.le,"==": operator.eq,"!=": operator.ne,">=": operator.ge,">": operator.gt,}def _eval_op(lhs, op, rhs):try:spec = Specifier("".join([op.serialize(), rhs]))except InvalidSpecifier:passelse:return spec.contains(lhs)oper = _operators.get(op.serialize())if oper is None:raise UndefinedComparison("Undefined {0!r} on {1!r} and {2!r}.".format(op, lhs, rhs))return oper(lhs, rhs)_undefined = object()def _get_env(environment, name):value = environment.get(name, _undefined)if value is _undefined:raise UndefinedEnvironmentName("{0!r} does not exist in evaluation environment.".format(name))return valuedef _evaluate_markers(markers, environment):groups = [[]]for marker in markers:assert isinstance(marker, (list, tuple, string_types))if isinstance(marker, list):groups[-1].append(_evaluate_markers(marker, environment))elif isinstance(marker, tuple):lhs, op, rhs = markerif isinstance(lhs, Variable):lhs_value = _get_env(environment, lhs.value)rhs_value = rhs.valueelse:lhs_value = lhs.valuerhs_value = _get_env(environment, rhs.value)groups[-1].append(_eval_op(lhs_value, op, rhs_value))else:assert marker in ["and", "or"]if marker == "or":groups.append([])return any(all(item) for item in groups)def format_full_version(info):version = '{0.major}.{0.minor}.{0.micro}'.format(info)kind = info.releaselevelif kind != 'final':version += kind[0] + str(info.serial)return versiondef default_environment():if 
hasattr(sys, 'implementation'):iver = format_full_version(sys.implementation.version)implementation_name = sys.implementation.nameelse:iver = '0'implementation_name = ''return {"implementation_name": implementation_name,"implementation_version": iver,"os_name": os.name,"platform_machine": platform.machine(),"platform_release": platform.release(),"platform_system": platform.system(),"platform_version": platform.version(),"python_full_version": platform.python_version(),"platform_python_implementation": platform.python_implementation(),"python_version": platform.python_version()[:3],"sys_platform": sys.platform,}class Marker(object):def __init__(self, marker):try:self._markers = _coerce_parse_result(MARKER.parseString(marker))except ParseException as e:err_str = "Invalid marker: {0!r}, parse error at {1!r}".format(marker, marker[e.loc:e.loc + 8])raise InvalidMarker(err_str)def __str__(self):return _format_marker(self._markers)def __repr__(self):return "<Marker({0!r})>".format(str(self))def evaluate(self, environment=None):"""Evaluate a marker.Return the boolean from evaluating the given marker against theenvironment. environment is an optional argument to override all orpart of the determined environment.The environment is determined from the current Python process."""current_environment = default_environment()if environment is not None:current_environment.update(environment)return _evaluate_markers(self._markers, current_environment)
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function


class Infinity(object):
    """A singleton that compares greater than every other value."""

    def __repr__(self):
        return "Infinity"

    def __hash__(self):
        return hash(repr(self))

    # Infinity is never less than anything...
    def __lt__(self, rhs):
        return False

    def __le__(self, rhs):
        return False

    # ...and always greater than everything.
    def __gt__(self, rhs):
        return True

    def __ge__(self, rhs):
        return True

    # Equal only to other instances of this same class.
    def __eq__(self, rhs):
        return isinstance(rhs, self.__class__)

    def __ne__(self, rhs):
        return not isinstance(rhs, self.__class__)

    def __neg__(self):
        return NegativeInfinity


# Replace the class with its single instance; callers only see the singleton.
Infinity = Infinity()


class NegativeInfinity(object):
    """A singleton that compares less than every other value."""

    def __repr__(self):
        return "-Infinity"

    def __hash__(self):
        return hash(repr(self))

    # NegativeInfinity is always less than everything...
    def __lt__(self, rhs):
        return True

    def __le__(self, rhs):
        return True

    # ...and never greater than anything.
    def __gt__(self, rhs):
        return False

    def __ge__(self, rhs):
        return False

    def __eq__(self, rhs):
        return isinstance(rhs, self.__class__)

    def __ne__(self, rhs):
        return not isinstance(rhs, self.__class__)

    def __neg__(self):
        return Infinity


NegativeInfinity = NegativeInfinity()
# This file is dual licensed under the terms of the Apache License, Version# 2.0, and the BSD License. See the LICENSE file in the root of this repository# for complete details.from __future__ import absolute_import, division, print_functionimport sysPY2 = sys.version_info[0] == 2PY3 = sys.version_info[0] == 3# flake8: noqaif PY3:string_types = str,else:string_types = basestring,def with_metaclass(meta, *bases):"""Create a base class with a metaclass."""# This requires a bit of explanation: the basic idea is to make a dummy# metaclass for one level of class instantiation that replaces itself with# the actual metaclass.class metaclass(meta):def __new__(cls, name, this_bases, d):return meta(name, bases, d)return type.__new__(metaclass, 'temporary_class', (), {})
# This file is dual licensed under the terms of the Apache License, Version# 2.0, and the BSD License. See the LICENSE file in the root of this repository# for complete details.from __future__ import absolute_import, division, print_functionfrom .__about__ import (__author__, __copyright__, __email__, __license__, __summary__, __title__,__uri__, __version__)__all__ = ["__title__", "__summary__", "__uri__", "__version__", "__author__","__email__", "__license__", "__copyright__",]
# This file is dual licensed under the terms of the Apache License, Version# 2.0, and the BSD License. See the LICENSE file in the root of this repository# for complete details.from __future__ import absolute_import, division, print_function__all__ = ["__title__", "__summary__", "__uri__", "__version__", "__author__","__email__", "__license__", "__copyright__",]__title__ = "packaging"__summary__ = "Core utilities for Python packages"__uri__ = "https://github.com/pypa/packaging"__version__ = "16.8"__author__ = "Donald Stufft and individual contributors"__email__ = "donald@stufft.io"__license__ = "BSD or Apache License, Version 2.0"__copyright__ = "Copyright 2014-2016 %s" % __author__
# Copyright (c) 2009 Raymond Hettinger
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.

# NOTE(review): this is the classic Python 2.4+ OrderedDict backport recipe;
# it depends on UserDict.DictMixin and .next(), so it is Python 2 only.
from UserDict import DictMixin


class OrderedDict(dict, DictMixin):
    """A dict that remembers insertion order.

    Order is tracked with a circular doubly linked list of [key, prev, next]
    nodes; ``self.__end`` is the sentinel node and ``self.__map`` maps each
    key to its node for O(1) unlinking.
    """

    def __init__(self, *args, **kwds):
        if len(args) > 1:
            raise TypeError('expected at most 1 arguments, got %d' % len(args))
        # Only initialize the linked list once; __init__ may be re-invoked
        # (e.g. by copy()) on an already-constructed instance.
        try:
            self.__end
        except AttributeError:
            self.clear()
        self.update(*args, **kwds)

    def clear(self):
        self.__end = end = []
        end += [None, end, end]         # sentinel node for doubly linked list
        self.__map = {}                 # key --> [key, prev, next]
        dict.clear(self)

    def __setitem__(self, key, value):
        # New keys are appended at the end of the linked list; updates to
        # existing keys keep their original position.
        if key not in self:
            end = self.__end
            curr = end[1]
            curr[2] = end[1] = self.__map[key] = [key, curr, end]
        dict.__setitem__(self, key, value)

    def __delitem__(self, key):
        dict.__delitem__(self, key)
        # Unlink the node from the doubly linked list.
        key, prev, next = self.__map.pop(key)
        prev[2] = next
        next[1] = prev

    def __iter__(self):
        # Walk the linked list forward, yielding keys in insertion order.
        end = self.__end
        curr = end[2]
        while curr is not end:
            yield curr[0]
            curr = curr[2]

    def __reversed__(self):
        # Walk the linked list backward.
        end = self.__end
        curr = end[1]
        while curr is not end:
            yield curr[0]
            curr = curr[1]

    def popitem(self, last=True):
        """Remove and return a (key, value) pair; LIFO if last is true."""
        if not self:
            raise KeyError('dictionary is empty')
        if last:
            key = reversed(self).next()
        else:
            key = iter(self).next()
        value = self.pop(key)
        return key, value

    def __reduce__(self):
        # Pickle support: temporarily drop the linked-list attributes so the
        # remaining instance dict (if any) can be pickled normally.
        items = [[k, self[k]] for k in self]
        tmp = self.__map, self.__end
        del self.__map, self.__end
        inst_dict = vars(self).copy()
        self.__map, self.__end = tmp
        if inst_dict:
            return (self.__class__, (items,), inst_dict)
        return self.__class__, (items,)

    def keys(self):
        return list(self)

    # Derived mapping methods provided by DictMixin in terms of the
    # primitives defined above.
    setdefault = DictMixin.setdefault
    update = DictMixin.update
    pop = DictMixin.pop
    values = DictMixin.values
    items = DictMixin.items
    iterkeys = DictMixin.iterkeys
    itervalues = DictMixin.itervalues
    iteritems = DictMixin.iteritems

    def __repr__(self):
        if not self:
            return '%s()' % (self.__class__.__name__,)
        return '%s(%r)' % (self.__class__.__name__, self.items())

    def copy(self):
        return self.__class__(self)

    @classmethod
    def fromkeys(cls, iterable, value=None):
        d = cls()
        for key in iterable:
            d[key] = value
        return d

    def __eq__(self, other):
        # Order-sensitive comparison against another OrderedDict; plain
        # order-insensitive dict equality otherwise.
        if isinstance(other, OrderedDict):
            if len(self) != len(other):
                return False
            for p, q in zip(self.items(), other.items()):
                if p != q:
                    return False
            return True
        return dict.__eq__(self, other)

    def __ne__(self, other):
        return not self == other
from __future__ import absolute_import

import os
import time

from . import (LockBase, NotLocked, NotMyLock, LockTimeout,
               AlreadyLocked)


class SymlinkLockFile(LockBase):
    """Lock access to a file using symlink(2).

    The lock file is a symlink whose target encodes the unique name of the
    owning locker; symlink creation is atomic on POSIX systems.
    """

    def __init__(self, path, threaded=True, timeout=None):
        # super(SymlinkLockFile).__init(...)
        LockBase.__init__(self, path, threaded, timeout)
        # split it back!
        # Keep only the basename: the full unique path from LockBase is not
        # needed because it is stored as the symlink's target, not created
        # as a real file.
        self.unique_name = os.path.split(self.unique_name)[1]

    def acquire(self, timeout=None):
        # Hopefully unnecessary for symlink.
        # try:
        #     open(self.unique_name, "wb").close()
        # except IOError:
        #     raise LockFailed("failed to create %s" % self.unique_name)
        timeout = timeout if timeout is not None else self.timeout
        end_time = time.time()
        if timeout is not None and timeout > 0:
            end_time += timeout

        while True:
            # Try and create a symbolic link to it.
            try:
                os.symlink(self.unique_name, self.lock_file)
            except OSError:
                # Link creation failed.  Maybe we've double-locked?
                if self.i_am_locking():
                    # Linked to our unique name.  Proceed.
                    return
                else:
                    # Otherwise the lock creation failed.
                    # timeout > 0: keep retrying until the deadline passes;
                    # timeout <= 0: fail immediately (end_time is "now").
                    if timeout is not None and time.time() > end_time:
                        if timeout > 0:
                            raise LockTimeout("Timeout waiting to acquire"
                                              " lock for %s" %
                                              self.path)
                        else:
                            raise AlreadyLocked("%s is already locked" %
                                                self.path)
                    time.sleep(timeout / 10 if timeout is not None else 0.1)
            else:
                # Link creation succeeded.  We're good to go.
                return

    def release(self):
        """Release the lock; raises NotLocked / NotMyLock on misuse."""
        if not self.is_locked():
            raise NotLocked("%s is not locked" % self.path)
        elif not self.i_am_locking():
            raise NotMyLock("%s is locked, but not by me" % self.path)
        os.unlink(self.lock_file)

    def is_locked(self):
        # The lock exists iff the symlink exists.
        return os.path.islink(self.lock_file)

    def i_am_locking(self):
        # We hold the lock iff the symlink's target is our unique name.
        return (os.path.islink(self.lock_file)
                and os.readlink(self.lock_file) == self.unique_name)

    def break_lock(self):
        if os.path.islink(self.lock_file):  # exists && link
            os.unlink(self.lock_file)
from __future__ import absolute_import, division

import time
import os

try:
    unicode
except NameError:
    # Python 3: no separate unicode type.
    unicode = str

from . import LockBase, NotLocked, NotMyLock, LockTimeout, AlreadyLocked


class SQLiteLockFile(LockBase):
    "Demonstrate SQL-based locking."

    # Class-level path of the shared throwaway SQLite database; created
    # lazily by the first instance and removed at interpreter exit.
    testdb = None

    def __init__(self, path, threaded=True, timeout=None):
        """
        >>> lock = SQLiteLockFile('somefile')
        >>> lock = SQLiteLockFile('somefile', threaded=False)
        """
        LockBase.__init__(self, path, threaded, timeout)
        self.lock_file = unicode(self.lock_file)
        self.unique_name = unicode(self.unique_name)

        if SQLiteLockFile.testdb is None:
            import tempfile
            _fd, testdb = tempfile.mkstemp()
            os.close(_fd)
            # Remove the placeholder file so sqlite3 creates the db fresh.
            os.unlink(testdb)
            del _fd, tempfile
            SQLiteLockFile.testdb = testdb

        import sqlite3
        self.connection = sqlite3.connect(SQLiteLockFile.testdb)

        c = self.connection.cursor()
        try:
            c.execute("create table locks"
                      "("
                      " lock_file varchar(32),"
                      " unique_name varchar(32)"
                      ")")
        except sqlite3.OperationalError:
            # Table already exists (another instance created it first).
            pass
        else:
            self.connection.commit()
            import atexit
            atexit.register(os.unlink, SQLiteLockFile.testdb)

    def acquire(self, timeout=None):
        timeout = timeout if timeout is not None else self.timeout
        end_time = time.time()
        if timeout is not None and timeout > 0:
            end_time += timeout

        # Choose the polling interval up front.
        if timeout is None:
            wait = 0.1
        elif timeout <= 0:
            wait = 0
        else:
            wait = timeout / 10

        cursor = self.connection.cursor()

        while True:
            if not self.is_locked():
                # Not locked.  Try to lock it.
                cursor.execute("insert into locks"
                               " (lock_file, unique_name)"
                               " values"
                               " (?, ?)",
                               (self.lock_file, self.unique_name))
                self.connection.commit()

                # Check to see if we are the only lock holder.
                cursor.execute("select * from locks"
                               " where unique_name = ?",
                               (self.unique_name,))
                rows = cursor.fetchall()
                if len(rows) > 1:
                    # Nope.  Someone else got there.  Remove our lock.
                    cursor.execute("delete from locks"
                                   " where unique_name = ?",
                                   (self.unique_name,))
                    self.connection.commit()
                else:
                    # Yup.  We're done, so go home.
                    return
            else:
                # Check to see if we are the only lock holder.
                cursor.execute("select * from locks"
                               " where unique_name = ?",
                               (self.unique_name,))
                rows = cursor.fetchall()
                if len(rows) == 1:
                    # We're the locker, so go home.
                    return

            # Maybe we should wait a bit longer.
            if timeout is not None and time.time() > end_time:
                if timeout > 0:
                    # No more waiting.
                    raise LockTimeout("Timeout waiting to acquire"
                                      " lock for %s" %
                                      self.path)
                else:
                    # Someone else has the lock and we are impatient..
                    raise AlreadyLocked("%s is already locked" % self.path)

            # Well, okay.  We'll give it a bit longer.
            time.sleep(wait)

    def release(self):
        """Release the lock; raises NotLocked / NotMyLock on misuse."""
        if not self.is_locked():
            raise NotLocked("%s is not locked" % self.path)
        if not self.i_am_locking():
            raise NotMyLock("%s is locked, but not by me (by %s)" %
                            (self.unique_name, self._who_is_locking()))
        cursor = self.connection.cursor()
        cursor.execute("delete from locks"
                       " where unique_name = ?",
                       (self.unique_name,))
        self.connection.commit()

    def _who_is_locking(self):
        # Return the unique_name of whoever holds the lock on our file.
        cursor = self.connection.cursor()
        cursor.execute("select unique_name from locks"
                       " where lock_file = ?",
                       (self.lock_file,))
        return cursor.fetchone()[0]

    def is_locked(self):
        cursor = self.connection.cursor()
        cursor.execute("select * from locks"
                       " where lock_file = ?",
                       (self.lock_file,))
        rows = cursor.fetchall()
        # "not not" coerces the row list to a plain bool.
        return not not rows

    def i_am_locking(self):
        cursor = self.connection.cursor()
        cursor.execute("select * from locks"
                       " where lock_file = ?"
                       " and unique_name = ?",
                       (self.lock_file, self.unique_name))
        return not not cursor.fetchall()

    def break_lock(self):
        # Unconditionally delete any lock rows for our file.
        cursor = self.connection.cursor()
        cursor.execute("delete from locks"
                       " where lock_file = ?",
                       (self.lock_file,))
        self.connection.commit()
# -*- coding: utf-8 -*-# pidlockfile.py## Copyright © 2008–2009 Ben Finney <ben+python@benfinney.id.au>## This is free software: you may copy, modify, and/or distribute this work# under the terms of the Python Software Foundation License, version 2 or# later as published by the Python Software Foundation.# No warranty expressed or implied. See the file LICENSE.PSF-2 for details.""" Lockfile behaviour implemented via Unix PID files."""from __future__ import absolute_importimport errnoimport osimport timefrom . import (LockBase, AlreadyLocked, LockFailed, NotLocked, NotMyLock,LockTimeout)class PIDLockFile(LockBase):""" Lockfile implemented as a Unix PID file.The lock file is a normal file named by the attribute `path`.A lock's PID file contains a single line of text, containingthe process ID (PID) of the process that acquired the lock.>>> lock = PIDLockFile('somefile')>>> lock = PIDLockFile('somefile')"""def __init__(self, path, threaded=False, timeout=None):# pid lockfiles don't support threaded operation, so always force# False as the threaded arg.LockBase.__init__(self, path, False, timeout)self.unique_name = self.pathdef read_pid(self):""" Get the PID from the lock file."""return read_pid_from_pidfile(self.path)def is_locked(self):""" Test if the lock is currently held.The lock is held if the PID file for this lock exists."""return os.path.exists(self.path)def i_am_locking(self):""" Test if the lock is held by the current process.Returns ``True`` if the current process ID matches thenumber stored in the PID file."""return self.is_locked() and os.getpid() == self.read_pid()def acquire(self, timeout=None):""" Acquire the lock.Creates the PID file for this lock, or raises an error ifthe lock could not be acquired."""timeout = timeout if timeout is not None else self.timeoutend_time = time.time()if timeout is not None and timeout > 0:end_time += timeoutwhile True:try:write_pid_to_pidfile(self.path)except OSError as exc:if exc.errno == errno.EEXIST:# The lock creation 
failed. Maybe sleep a bit.if time.time() > end_time:if timeout is not None and timeout > 0:raise LockTimeout("Timeout waiting to acquire"" lock for %s" %self.path)else:raise AlreadyLocked("%s is already locked" %self.path)time.sleep(timeout is not None and timeout / 10 or 0.1)else:raise LockFailed("failed to create %s" % self.path)else:returndef release(self):""" Release the lock.Removes the PID file to release the lock, or raises anerror if the current process does not hold the lock."""if not self.is_locked():raise NotLocked("%s is not locked" % self.path)if not self.i_am_locking():raise NotMyLock("%s is locked, but not by me" % self.path)remove_existing_pidfile(self.path)def break_lock(self):""" Break an existing lock.Removes the PID file if it already exists, otherwise doesnothing."""remove_existing_pidfile(self.path)def read_pid_from_pidfile(pidfile_path):""" Read the PID recorded in the named PID file.Read and return the numeric PID recorded as text in the namedPID file. If the PID file cannot be read, or if the content isnot a valid PID, return ``None``."""pid = Nonetry:pidfile = open(pidfile_path, 'r')except IOError:passelse:# According to the FHS 2.3 section on PID files in /var/run:## The file must consist of the process identifier in# ASCII-encoded decimal, followed by a newline character.## Programs that read PID files should be somewhat flexible# in what they accept; i.e., they should ignore extra# whitespace, leading zeroes, absence of the trailing# newline, or additional lines in the PID file.line = pidfile.readline().strip()try:pid = int(line)except ValueError:passpidfile.close()return piddef write_pid_to_pidfile(pidfile_path):""" Write the PID in the named PID file.Get the numeric process ID (“PID”) of the current processand write it to the named file as a line of text."""open_flags = (os.O_CREAT | os.O_EXCL | os.O_WRONLY)open_mode = 0o644pidfile_fd = os.open(pidfile_path, open_flags, open_mode)pidfile = os.fdopen(pidfile_fd, 'w')# According to the 
FHS 2.3 section on PID files in /var/run:## The file must consist of the process identifier in# ASCII-encoded decimal, followed by a newline character. For# example, if crond was process number 25, /var/run/crond.pid# would contain three characters: two, five, and newline.pid = os.getpid()pidfile.write("%s\n" % pid)pidfile.close()def remove_existing_pidfile(pidfile_path):""" Remove the named PID file if it exists.Removing a PID file that doesn't already exist puts us in thedesired state, so we ignore the condition if the file does notexist."""try:os.remove(pidfile_path)except OSError as exc:if exc.errno == errno.ENOENT:passelse:raise
from __future__ import absolute_import, division

import time
import os
import sys
import errno

from . import (LockBase, LockFailed, NotLocked, NotMyLock, LockTimeout,
               AlreadyLocked)


class MkdirLockFile(LockBase):
    """Lock file by creating a directory.

    mkdir(2) is atomic, so directory creation doubles as the lock; a file
    named after host/thread/pid inside the directory records the owner.
    """

    def __init__(self, path, threaded=True, timeout=None):
        """
        >>> lock = MkdirLockFile('somefile')
        >>> lock = MkdirLockFile('somefile', threaded=False)
        """
        LockBase.__init__(self, path, threaded, timeout)
        # Lock file itself is a directory.  Place the unique file name into
        # it.
        self.unique_name = os.path.join(self.lock_file,
                                        "%s.%s%s" % (self.hostname,
                                                     self.tname,
                                                     self.pid))

    def acquire(self, timeout=None):
        timeout = timeout if timeout is not None else self.timeout
        end_time = time.time()
        if timeout is not None and timeout > 0:
            end_time += timeout

        # Polling interval: a tenth of the timeout, or 0.1s when blocking
        # indefinitely.
        if timeout is None:
            wait = 0.1
        else:
            wait = max(0, timeout / 10)

        while True:
            try:
                os.mkdir(self.lock_file)
            except OSError:
                err = sys.exc_info()[1]
                if err.errno == errno.EEXIST:
                    # Already locked.
                    if os.path.exists(self.unique_name):
                        # Already locked by me.
                        return
                    if timeout is not None and time.time() > end_time:
                        if timeout > 0:
                            raise LockTimeout("Timeout waiting to acquire"
                                              " lock for %s" %
                                              self.path)
                        else:
                            # Someone else has the lock.
                            raise AlreadyLocked("%s is already locked" %
                                                self.path)
                    time.sleep(wait)
                else:
                    # Couldn't create the lock for some other reason
                    raise LockFailed("failed to create %s" % self.lock_file)
            else:
                # mkdir succeeded; drop our owner marker inside it.
                open(self.unique_name, "wb").close()
                return

    def release(self):
        """Release the lock; raises NotLocked / NotMyLock on misuse."""
        if not self.is_locked():
            raise NotLocked("%s is not locked" % self.path)
        elif not os.path.exists(self.unique_name):
            raise NotMyLock("%s is locked, but not by me" % self.path)
        os.unlink(self.unique_name)
        os.rmdir(self.lock_file)

    def is_locked(self):
        return os.path.exists(self.lock_file)

    def i_am_locking(self):
        # We hold the lock iff the directory exists and contains our marker.
        return (self.is_locked() and
                os.path.exists(self.unique_name))

    def break_lock(self):
        if os.path.exists(self.lock_file):
            # Remove any owner markers, then the lock directory itself.
            for name in os.listdir(self.lock_file):
                os.unlink(os.path.join(self.lock_file, name))
            os.rmdir(self.lock_file)
from __future__ import absolute_import

import time
import os

from . import (LockBase, LockFailed, NotLocked, NotMyLock, LockTimeout,
               AlreadyLocked)


class LinkLockFile(LockBase):
    """Lock access to a file using atomic property of link(2).

    >>> lock = LinkLockFile('somefile')
    >>> lock = LinkLockFile('somefile', threaded=False)
    """

    def acquire(self, timeout=None):
        # Create the per-owner unique file that the hard link will point at.
        try:
            open(self.unique_name, "wb").close()
        except IOError:
            raise LockFailed("failed to create %s" % self.unique_name)

        timeout = timeout if timeout is not None else self.timeout
        end_time = time.time()
        if timeout is not None and timeout > 0:
            end_time += timeout

        while True:
            # Try and create a hard link to it.
            try:
                os.link(self.unique_name, self.lock_file)
            except OSError:
                # Link creation failed.  Maybe we've double-locked?
                nlinks = os.stat(self.unique_name).st_nlink
                if nlinks == 2:
                    # The original link plus the one I created == 2.  We're
                    # good to go.
                    return
                else:
                    # Otherwise the lock creation failed.
                    if timeout is not None and time.time() > end_time:
                        # Clean up our unique file before failing.
                        os.unlink(self.unique_name)
                        if timeout > 0:
                            raise LockTimeout("Timeout waiting to acquire"
                                              " lock for %s" %
                                              self.path)
                        else:
                            raise AlreadyLocked("%s is already locked" %
                                                self.path)
                    time.sleep(timeout is not None and timeout / 10 or 0.1)
            else:
                # Link creation succeeded.  We're good to go.
                return

    def release(self):
        """Release the lock; raises NotLocked / NotMyLock on misuse."""
        if not self.is_locked():
            raise NotLocked("%s is not locked" % self.path)
        elif not os.path.exists(self.unique_name):
            raise NotMyLock("%s is locked, but not by me" % self.path)
        os.unlink(self.unique_name)
        os.unlink(self.lock_file)

    def is_locked(self):
        return os.path.exists(self.lock_file)

    def i_am_locking(self):
        # We hold the lock iff the lock file exists and is hard-linked to
        # our unique file (link count of 2).
        return (self.is_locked() and
                os.path.exists(self.unique_name) and
                os.stat(self.unique_name).st_nlink == 2)

    def break_lock(self):
        if os.path.exists(self.lock_file):
            os.unlink(self.lock_file)
# -*- coding: utf-8 -*-"""lockfile.py - Platform-independent advisory file locks.Requires Python 2.5 unless you apply 2.4.diffLocking is done on a per-thread basis instead of a per-process basis.Usage:>>> lock = LockFile('somefile')>>> try:... lock.acquire()... except AlreadyLocked:... print 'somefile', 'is locked already.'... except LockFailed:... print 'somefile', 'can\\'t be locked.'... else:... print 'got lock'got lock>>> print lock.is_locked()True>>> lock.release()>>> lock = LockFile('somefile')>>> print lock.is_locked()False>>> with lock:... print lock.is_locked()True>>> print lock.is_locked()False>>> lock = LockFile('somefile')>>> # It is okay to lock twice from the same thread...>>> with lock:... lock.acquire()...>>> # Though no counter is kept, so you can't unlock multiple times...>>> print lock.is_locked()FalseExceptions:Error - base class for other exceptionsLockError - base class for all locking exceptionsAlreadyLocked - Another thread or process already holds the lockLockFailed - Lock failed for some other reasonUnlockError - base class for all unlocking exceptionsAlreadyUnlocked - File was not locked.NotMyLock - File was locked but not by the current thread/process"""from __future__ import absolute_importimport functoolsimport osimport socketimport threadingimport warnings# Work with PEP8 and non-PEP8 versions of threading module.if not hasattr(threading, "current_thread"):threading.current_thread = threading.currentThreadif not hasattr(threading.Thread, "get_name"):threading.Thread.get_name = threading.Thread.getName__all__ = ['Error', 'LockError', 'LockTimeout', 'AlreadyLocked','LockFailed', 'UnlockError', 'NotLocked', 'NotMyLock','LinkFileLock', 'MkdirFileLock', 'SQLiteFileLock','LockBase', 'locked']class Error(Exception):"""Base class for other exceptions.>>> try:... raise Error... except Exception:... pass"""passclass LockError(Error):"""Base class for error arising from attempts to acquire the lock.>>> try:... raise LockError... except Error:... 
pass"""passclass LockTimeout(LockError):"""Raised when lock creation fails within a user-defined period of time.>>> try:... raise LockTimeout... except LockError:... pass"""passclass AlreadyLocked(LockError):"""Some other thread/process is locking the file.>>> try:... raise AlreadyLocked... except LockError:... pass"""passclass LockFailed(LockError):"""Lock file creation failed for some other reason.>>> try:... raise LockFailed... except LockError:... pass"""passclass UnlockError(Error):"""Base class for errors arising from attempts to release the lock.>>> try:... raise UnlockError... except Error:... pass"""passclass NotLocked(UnlockError):"""Raised when an attempt is made to unlock an unlocked file.>>> try:... raise NotLocked... except UnlockError:... pass"""passclass NotMyLock(UnlockError):"""Raised when an attempt is made to unlock a file someone else locked.>>> try:... raise NotMyLock... except UnlockError:... pass"""passclass _SharedBase(object):def __init__(self, path):self.path = pathdef acquire(self, timeout=None):"""Acquire the lock.* If timeout is omitted (or None), wait forever trying to lock thefile.* If timeout > 0, try to acquire the lock for that many seconds. 
Ifthe lock period expires and the file is still locked, raiseLockTimeout.* If timeout <= 0, raise AlreadyLocked immediately if the file isalready locked."""raise NotImplemented("implement in subclass")def release(self):"""Release the lock.If the file is not locked, raise NotLocked."""raise NotImplemented("implement in subclass")def __enter__(self):"""Context manager support."""self.acquire()return selfdef __exit__(self, *_exc):"""Context manager support."""self.release()def __repr__(self):return "<%s: %r>" % (self.__class__.__name__, self.path)class LockBase(_SharedBase):"""Base class for platform-specific lock classes."""def __init__(self, path, threaded=True, timeout=None):""">>> lock = LockBase('somefile')>>> lock = LockBase('somefile', threaded=False)"""super(LockBase, self).__init__(path)self.lock_file = os.path.abspath(path) + ".lock"self.hostname = socket.gethostname()self.pid = os.getpid()if threaded:t = threading.current_thread()# Thread objects in Python 2.4 and earlier do not have ident# attrs. Worm around that.ident = getattr(t, "ident", hash(t))self.tname = "-%x" % (ident & 0xffffffff)else:self.tname = ""dirname = os.path.dirname(self.lock_file)# unique name is mostly about the current process, but must# also contain the path -- otherwise, two adjacent locked# files conflict (one file gets locked, creating lock-file and# unique file, the other one gets locked, creating lock-file# and overwriting the already existing lock-file, then one# gets unlocked, deleting both lock-file and unique file,# finally the last lock errors out upon releasing.self.unique_name = os.path.join(dirname,"%s%s.%s%s" % (self.hostname,self.tname,self.pid,hash(self.path)))self.timeout = timeoutdef is_locked(self):"""Tell whether or not the file is locked."""raise NotImplemented("implement in subclass")def i_am_locking(self):"""Return True if this object is locking the file."""raise NotImplemented("implement in subclass")def break_lock(self):"""Remove a lock. 
Useful if a locking thread failed to unlock."""raise NotImplemented("implement in subclass")def __repr__(self):return "<%s: %r -- %r>" % (self.__class__.__name__, self.unique_name,self.path)def _fl_helper(cls, mod, *args, **kwds):warnings.warn("Import from %s module instead of lockfile package" % mod,DeprecationWarning, stacklevel=2)# This is a bit funky, but it's only for awhile. The way the unit tests# are constructed this function winds up as an unbound method, so it# actually takes three args, not two. We want to toss out self.if not isinstance(args[0], str):# We are testing, avoid the first argargs = args[1:]if len(args) == 1 and not kwds:kwds["threaded"] = Truereturn cls(*args, **kwds)def LinkFileLock(*args, **kwds):"""Factory function provided for backwards compatibility.Do not use in new code. Instead, import LinkLockFile from thelockfile.linklockfile module."""from . import linklockfilereturn _fl_helper(linklockfile.LinkLockFile, "lockfile.linklockfile",*args, **kwds)def MkdirFileLock(*args, **kwds):"""Factory function provided for backwards compatibility.Do not use in new code. Instead, import MkdirLockFile from thelockfile.mkdirlockfile module."""from . import mkdirlockfilereturn _fl_helper(mkdirlockfile.MkdirLockFile, "lockfile.mkdirlockfile",*args, **kwds)def SQLiteFileLock(*args, **kwds):"""Factory function provided for backwards compatibility.Do not use in new code. Instead, import SQLiteLockFile from thelockfile.mkdirlockfile module."""from . 
import sqlitelockfilereturn _fl_helper(sqlitelockfile.SQLiteLockFile, "lockfile.sqlitelockfile",*args, **kwds)def locked(path, timeout=None):"""Decorator which enables locks for decorated function.Arguments:- path: path for lockfile.- timeout (optional): Timeout for acquiring lock.Usage:@locked('/var/run/myname', timeout=0)def myname(...):..."""def decor(func):@functools.wraps(func)def wrapper(*args, **kwargs):lock = FileLock(path, timeout=timeout)lock.acquire()try:return func(*args, **kwargs)finally:lock.release()return wrapperreturn decorif hasattr(os, "link"):from . import linklockfile as _llfLockFile = _llf.LinkLockFileelse:from . import mkdirlockfile as _mlfLockFile = _mlf.MkdirLockFileFileLock = LockFile
# Copyright 2007 Google Inc.# Licensed to PSF under a Contributor Agreement."""A fast, lightweight IPv4/IPv6 manipulation library in Python.This library is used to create/poke/manipulate IPv4 and IPv6 addressesand networks."""from __future__ import unicode_literalsimport itertoolsimport struct__version__ = '1.0.17'# Compatibility functions_compat_int_types = (int,)try:_compat_int_types = (int, long)except NameError:passtry:_compat_str = unicodeexcept NameError:_compat_str = strassert bytes != strif b'\0'[0] == 0: # Python 3 semanticsdef _compat_bytes_to_byte_vals(byt):return bytelse:def _compat_bytes_to_byte_vals(byt):return [struct.unpack(b'!B', b)[0] for b in byt]try:_compat_int_from_byte_vals = int.from_bytesexcept AttributeError:def _compat_int_from_byte_vals(bytvals, endianess):assert endianess == 'big'res = 0for bv in bytvals:assert isinstance(bv, _compat_int_types)res = (res << 8) + bvreturn resdef _compat_to_bytes(intval, length, endianess):assert isinstance(intval, _compat_int_types)assert endianess == 'big'if length == 4:if intval < 0 or intval >= 2 ** 32:raise struct.error("integer out of range for 'I' format code")return struct.pack(b'!I', intval)elif length == 16:if intval < 0 or intval >= 2 ** 128:raise struct.error("integer out of range for 'QQ' format code")return struct.pack(b'!QQ', intval >> 64, intval & 0xffffffffffffffff)else:raise NotImplementedError()if hasattr(int, 'bit_length'):# Not int.bit_length , since that won't work in 2.7 where long existsdef _compat_bit_length(i):return i.bit_length()else:def _compat_bit_length(i):for res in itertools.count():if i >> res == 0:return resdef _compat_range(start, end, step=1):assert step > 0i = startwhile i < end:yield ii += stepclass _TotalOrderingMixin(object):__slots__ = ()# Helper that derives the other comparison operations from# __lt__ and __eq__# We avoid functools.total_ordering because it doesn't handle# NotImplemented correctly yet (http://bugs.python.org/issue10042)def __eq__(self, 
other):raise NotImplementedErrordef __ne__(self, other):equal = self.__eq__(other)if equal is NotImplemented:return NotImplementedreturn not equaldef __lt__(self, other):raise NotImplementedErrordef __le__(self, other):less = self.__lt__(other)if less is NotImplemented or not less:return self.__eq__(other)return lessdef __gt__(self, other):less = self.__lt__(other)if less is NotImplemented:return NotImplementedequal = self.__eq__(other)if equal is NotImplemented:return NotImplementedreturn not (less or equal)def __ge__(self, other):less = self.__lt__(other)if less is NotImplemented:return NotImplementedreturn not lessIPV4LENGTH = 32IPV6LENGTH = 128class AddressValueError(ValueError):"""A Value Error related to the address."""class NetmaskValueError(ValueError):"""A Value Error related to the netmask."""def ip_address(address):"""Take an IP string/int and return an object of the correct type.Args:address: A string or integer, the IP address. Either IPv4 orIPv6 addresses may be supplied; integers less than 2**32 willbe considered to be IPv4 by default.Returns:An IPv4Address or IPv6Address object.Raises:ValueError: if the *address* passed isn't either a v4 or a v6address"""try:return IPv4Address(address)except (AddressValueError, NetmaskValueError):passtry:return IPv6Address(address)except (AddressValueError, NetmaskValueError):passif isinstance(address, bytes):raise AddressValueError('%r does not appear to be an IPv4 or IPv6 address. ''Did you pass in a bytes (str in Python 2) instead of'' a unicode object?' % address)raise ValueError('%r does not appear to be an IPv4 or IPv6 address' %address)def ip_network(address, strict=True):"""Take an IP string/int and return an object of the correct type.Args:address: A string or integer, the IP network. 
Either IPv4 orIPv6 networks may be supplied; integers less than 2**32 willbe considered to be IPv4 by default.Returns:An IPv4Network or IPv6Network object.Raises:ValueError: if the string passed isn't either a v4 or a v6address. Or if the network has host bits set."""try:return IPv4Network(address, strict)except (AddressValueError, NetmaskValueError):passtry:return IPv6Network(address, strict)except (AddressValueError, NetmaskValueError):passif isinstance(address, bytes):raise AddressValueError('%r does not appear to be an IPv4 or IPv6 network. ''Did you pass in a bytes (str in Python 2) instead of'' a unicode object?' % address)raise ValueError('%r does not appear to be an IPv4 or IPv6 network' %address)def ip_interface(address):"""Take an IP string/int and return an object of the correct type.Args:address: A string or integer, the IP address. Either IPv4 orIPv6 addresses may be supplied; integers less than 2**32 willbe considered to be IPv4 by default.Returns:An IPv4Interface or IPv6Interface object.Raises:ValueError: if the string passed isn't either a v4 or a v6address.Notes:The IPv?Interface classes describe an Address on a particularNetwork, so they're basically a combination of both the Addressand Network classes."""try:return IPv4Interface(address)except (AddressValueError, NetmaskValueError):passtry:return IPv6Interface(address)except (AddressValueError, NetmaskValueError):passraise ValueError('%r does not appear to be an IPv4 or IPv6 interface' %address)def v4_int_to_packed(address):"""Represent an address as 4 packed bytes in network (big-endian) order.Args:address: An integer representation of an IPv4 IP address.Returns:The integer address packed as 4 bytes in network (big-endian) order.Raises:ValueError: If the integer is negative or too large to be anIPv4 IP address."""try:return _compat_to_bytes(address, 4, 'big')except (struct.error, OverflowError):raise ValueError("Address negative or too large for IPv4")def v6_int_to_packed(address):"""Represent 
an address as 16 packed bytes in network (big-endian) order.Args:address: An integer representation of an IPv6 IP address.Returns:The integer address packed as 16 bytes in network (big-endian) order."""try:return _compat_to_bytes(address, 16, 'big')except (struct.error, OverflowError):raise ValueError("Address negative or too large for IPv6")def _split_optional_netmask(address):"""Helper to split the netmask and raise AddressValueError if needed"""addr = _compat_str(address).split('/')if len(addr) > 2:raise AddressValueError("Only one '/' permitted in %r" % address)return addrdef _find_address_range(addresses):"""Find a sequence of sorted deduplicated IPv#Address.Args:addresses: a list of IPv#Address objects.Yields:A tuple containing the first and last IP addresses in the sequence."""it = iter(addresses)first = last = next(it)for ip in it:if ip._ip != last._ip + 1:yield first, lastfirst = iplast = ipyield first, lastdef _count_righthand_zero_bits(number, bits):"""Count the number of zero bits on the right hand side.Args:number: an integer.bits: maximum number of bits to count.Returns:The number of zero bits on the right hand side of the number."""if number == 0:return bitsreturn min(bits, _compat_bit_length(~number & (number - 1)))def summarize_address_range(first, last):"""Summarize a network range given the first and last IP addresses.Example:>>> list(summarize_address_range(IPv4Address('192.0.2.0'),... IPv4Address('192.0.2.130')))... 
def _collapse_addresses_internal(addresses):
    """Loops through the addresses, collapsing concurrent netblocks.

    Example:

        ip1 = IPv4Network('192.0.2.0/26')
        ip2 = IPv4Network('192.0.2.64/26')
        ip3 = IPv4Network('192.0.2.128/26')
        ip4 = IPv4Network('192.0.2.192/26')

        _collapse_addresses_internal([ip1, ip2, ip3, ip4]) ->
          [IPv4Network('192.0.2.0/24')]

        This shouldn't be called directly; it is called via
          collapse_addresses([]).

    Args:
        addresses: A list of IPv4Network's or IPv6Network's

    Returns:
        A list of IPv4Network's or IPv6Network's depending on what we were
        passed.

    """
    # First merge
    to_merge = list(addresses)
    subnets = {}
    while to_merge:
        net = to_merge.pop()
        supernet = net.supernet()
        existing = subnets.get(supernet)
        if existing is None:
            subnets[supernet] = net
        elif existing != net:
            # Merge consecutive subnets
            del subnets[supernet]
            to_merge.append(supernet)
    # Then iterate over resulting networks, skipping subsumed subnets
    last = None
    for net in sorted(subnets.values()):
        if last is not None:
            # Since they are sorted,
            # last.network_address <= net.network_address is a given.
            if last.broadcast_address >= net.broadcast_address:
                continue
        yield net
        last = net


def collapse_addresses(addresses):
    """Collapse a list of IP objects.

    Example:
        collapse_addresses([IPv4Network('192.0.2.0/25'),
                            IPv4Network('192.0.2.128/25')]) ->
                           [IPv4Network('192.0.2.0/24')]

    Args:
        addresses: An iterator of IPv4Network or IPv6Network objects.

    Returns:
        An iterator of the collapsed IPv(4|6)Network objects.

    Raises:
        TypeError: If passed a list of mixed version objects.

    """
    addrs = []
    ips = []
    nets = []

    # split IP addresses and networks
    for ip in addresses:
        if isinstance(ip, _BaseAddress):
            if ips and ips[-1]._version != ip._version:
                raise TypeError("%s and %s are not of the same version" % (
                                ip, ips[-1]))
            ips.append(ip)
        elif ip._prefixlen == ip._max_prefixlen:
            if ips and ips[-1]._version != ip._version:
                raise TypeError("%s and %s are not of the same version" % (
                                ip, ips[-1]))
            try:
                ips.append(ip.ip)
            except AttributeError:
                ips.append(ip.network_address)
        else:
            if nets and nets[-1]._version != ip._version:
                raise TypeError("%s and %s are not of the same version" % (
                                ip, nets[-1]))
            nets.append(ip)

    # sort and dedup
    ips = sorted(set(ips))

    # find consecutive address ranges in the sorted sequence and summarize them
    if ips:
        for first, last in _find_address_range(ips):
            addrs.extend(summarize_address_range(first, last))

    return _collapse_addresses_internal(addrs + nets)


def get_mixed_type_key(obj):
    """Return a key suitable for sorting between networks and addresses.

    Address and Network objects are not sortable by default; they're
    fundamentally different so the expression

        IPv4Address('192.0.2.0') <= IPv4Network('192.0.2.0/24')

    doesn't make any sense.  There are some times however, where you may wish
    to have ipaddress sort these for you anyway. If you need to do this, you
    can use this function as the key= argument to sorted().

    Args:
      obj: either a Network or Address object.
    Returns:
      appropriate key.

    """
    if isinstance(obj, _BaseNetwork):
        return obj._get_networks_key()
    elif isinstance(obj, _BaseAddress):
        return obj._get_address_key()
    return NotImplemented
class _IPAddressBase(_TotalOrderingMixin):

    """The mother class."""

    __slots__ = ()

    @property
    def exploded(self):
        """Return the longhand version of the IP address as a string."""
        return self._explode_shorthand_ip_string()

    @property
    def compressed(self):
        """Return the shorthand version of the IP address as a string."""
        return _compat_str(self)

    @property
    def reverse_pointer(self):
        """The name of the reverse DNS pointer for the IP address, e.g.:
            >>> ipaddress.ip_address("127.0.0.1").reverse_pointer
            '1.0.0.127.in-addr.arpa'
            >>> ipaddress.ip_address("2001:db8::1").reverse_pointer
            '1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa'

        """
        return self._reverse_pointer()

    @property
    def version(self):
        msg = '%200s has no version specified' % (type(self),)
        raise NotImplementedError(msg)

    def _check_int_address(self, address):
        if address < 0:
            msg = "%d (< 0) is not permitted as an IPv%d address"
            raise AddressValueError(msg % (address, self._version))
        if address > self._ALL_ONES:
            msg = "%d (>= 2**%d) is not permitted as an IPv%d address"
            raise AddressValueError(msg % (address, self._max_prefixlen,
                                           self._version))

    def _check_packed_address(self, address, expected_len):
        address_len = len(address)
        if address_len != expected_len:
            msg = ('%r (len %d != %d) is not permitted as an IPv%d address. '
                   'Did you pass in a bytes (str in Python 2) instead of'
                   ' a unicode object?')
            raise AddressValueError(msg % (address, address_len,
                                           expected_len, self._version))

    @classmethod
    def _ip_int_from_prefix(cls, prefixlen):
        """Turn the prefix length into a bitwise netmask

        Args:
            prefixlen: An integer, the prefix length.

        Returns:
            An integer.

        """
        return cls._ALL_ONES ^ (cls._ALL_ONES >> prefixlen)

    @classmethod
    def _prefix_from_ip_int(cls, ip_int):
        """Return prefix length from the bitwise netmask.

        Args:
            ip_int: An integer, the netmask in expanded bitwise format

        Returns:
            An integer, the prefix length.

        Raises:
            ValueError: If the input intermingles zeroes & ones
        """
        trailing_zeroes = _count_righthand_zero_bits(ip_int,
                                                     cls._max_prefixlen)
        prefixlen = cls._max_prefixlen - trailing_zeroes
        leading_ones = ip_int >> trailing_zeroes
        all_ones = (1 << prefixlen) - 1
        if leading_ones != all_ones:
            byteslen = cls._max_prefixlen // 8
            details = _compat_to_bytes(ip_int, byteslen, 'big')
            msg = 'Netmask pattern %r mixes zeroes & ones'
            raise ValueError(msg % details)
        return prefixlen

    @classmethod
    def _report_invalid_netmask(cls, netmask_str):
        msg = '%r is not a valid netmask' % netmask_str
        raise NetmaskValueError(msg)

    @classmethod
    def _prefix_from_prefix_string(cls, prefixlen_str):
        """Return prefix length from a numeric string

        Args:
            prefixlen_str: The string to be converted

        Returns:
            An integer, the prefix length.

        Raises:
            NetmaskValueError: If the input is not a valid netmask
        """
        # int allows a leading +/- as well as surrounding whitespace,
        # so we ensure that isn't the case
        if not _BaseV4._DECIMAL_DIGITS.issuperset(prefixlen_str):
            cls._report_invalid_netmask(prefixlen_str)
        try:
            prefixlen = int(prefixlen_str)
        except ValueError:
            cls._report_invalid_netmask(prefixlen_str)
        if not (0 <= prefixlen <= cls._max_prefixlen):
            cls._report_invalid_netmask(prefixlen_str)
        return prefixlen

    @classmethod
    def _prefix_from_ip_string(cls, ip_str):
        """Turn a netmask/hostmask string into a prefix length

        Args:
            ip_str: The netmask/hostmask to be converted

        Returns:
            An integer, the prefix length.

        Raises:
            NetmaskValueError: If the input is not a valid netmask/hostmask
        """
        # Parse the netmask/hostmask like an IP address.
        try:
            ip_int = cls._ip_int_from_string(ip_str)
        except AddressValueError:
            cls._report_invalid_netmask(ip_str)

        # Try matching a netmask (this would be /1*0*/ as a bitwise regexp).
        # Note that the two ambiguous cases (all-ones and all-zeroes) are
        # treated as netmasks.
        try:
            return cls._prefix_from_ip_int(ip_int)
        except ValueError:
            pass

        # Invert the bits, and try matching a /0+1+/ hostmask instead.
        ip_int ^= cls._ALL_ONES
        try:
            return cls._prefix_from_ip_int(ip_int)
        except ValueError:
            cls._report_invalid_netmask(ip_str)

    def __reduce__(self):
        return self.__class__, (_compat_str(self),)


class _BaseAddress(_IPAddressBase):

    """A generic IP object.

    This IP class contains the version independent methods which are
    used by single IP addresses.
    """

    __slots__ = ()

    def __int__(self):
        return self._ip

    def __eq__(self, other):
        try:
            return (self._ip == other._ip and
                    self._version == other._version)
        except AttributeError:
            return NotImplemented

    def __lt__(self, other):
        if not isinstance(other, _IPAddressBase):
            return NotImplemented
        if not isinstance(other, _BaseAddress):
            raise TypeError('%s and %s are not of the same type' % (
                            self, other))
        if self._version != other._version:
            raise TypeError('%s and %s are not of the same version' % (
                            self, other))
        if self._ip != other._ip:
            return self._ip < other._ip
        return False

    # Shorthand for Integer addition and subtraction. This is not
    # meant to ever support addition/subtraction of addresses.
    def __add__(self, other):
        if not isinstance(other, _compat_int_types):
            return NotImplemented
        return self.__class__(int(self) + other)

    def __sub__(self, other):
        if not isinstance(other, _compat_int_types):
            return NotImplemented
        return self.__class__(int(self) - other)

    def __repr__(self):
        return '%s(%r)' % (self.__class__.__name__, _compat_str(self))

    def __str__(self):
        return _compat_str(self._string_from_ip_int(self._ip))

    def __hash__(self):
        return hash(hex(int(self._ip)))

    def _get_address_key(self):
        return (self._version, self)

    def __reduce__(self):
        return self.__class__, (self._ip,)


class _BaseNetwork(_IPAddressBase):

    """A generic IP network object.

    This IP class contains the version independent methods which are
    used by networks.

    """

    def __init__(self, address):
        self._cache = {}

    def __repr__(self):
        return '%s(%r)' % (self.__class__.__name__, _compat_str(self))

    def __str__(self):
        return '%s/%d' % (self.network_address, self.prefixlen)

    def hosts(self):
        """Generate Iterator over usable hosts in a network.

        This is like __iter__ except it doesn't return the network
        or broadcast addresses.

        """
        network = int(self.network_address)
        broadcast = int(self.broadcast_address)
        for x in _compat_range(network + 1, broadcast):
            yield self._address_class(x)

    def __iter__(self):
        network = int(self.network_address)
        broadcast = int(self.broadcast_address)
        for x in _compat_range(network, broadcast + 1):
            yield self._address_class(x)

    def __getitem__(self, n):
        network = int(self.network_address)
        broadcast = int(self.broadcast_address)
        if n >= 0:
            if network + n > broadcast:
                raise IndexError('address out of range')
            return self._address_class(network + n)
        else:
            n += 1
            if broadcast + n < network:
                raise IndexError('address out of range')
            return self._address_class(broadcast + n)

    def __lt__(self, other):
        if not isinstance(other, _IPAddressBase):
            return NotImplemented
        if not isinstance(other, _BaseNetwork):
            raise TypeError('%s and %s are not of the same type' % (
                            self, other))
        if self._version != other._version:
            raise TypeError('%s and %s are not of the same version' % (
                            self, other))
        if self.network_address != other.network_address:
            return self.network_address < other.network_address
        if self.netmask != other.netmask:
            return self.netmask < other.netmask
        return False

    def __eq__(self, other):
        try:
            return (self._version == other._version and
                    self.network_address == other.network_address and
                    int(self.netmask) == int(other.netmask))
        except AttributeError:
            return NotImplemented

    def __hash__(self):
        return hash(int(self.network_address) ^ int(self.netmask))

    def __contains__(self, other):
        # always false if one is v4 and the other is v6.
        if self._version != other._version:
            return False
        # dealing with another network.
        if isinstance(other, _BaseNetwork):
            return False
        # dealing with another address
        else:
            # address
            return (int(self.network_address) <= int(other._ip) <=
                    int(self.broadcast_address))

    def overlaps(self, other):
        """Tell if self is partly contained in other."""
        return self.network_address in other or (
            self.broadcast_address in other or (
                other.network_address in self or (
                    other.broadcast_address in self)))

    @property
    def broadcast_address(self):
        x = self._cache.get('broadcast_address')
        if x is None:
            x = self._address_class(int(self.network_address) |
                                    int(self.hostmask))
            self._cache['broadcast_address'] = x
        return x

    @property
    def hostmask(self):
        x = self._cache.get('hostmask')
        if x is None:
            x = self._address_class(int(self.netmask) ^ self._ALL_ONES)
            self._cache['hostmask'] = x
        return x

    @property
    def with_prefixlen(self):
        return '%s/%d' % (self.network_address, self._prefixlen)

    @property
    def with_netmask(self):
        return '%s/%s' % (self.network_address, self.netmask)

    @property
    def with_hostmask(self):
        return '%s/%s' % (self.network_address, self.hostmask)

    @property
    def num_addresses(self):
        """Number of hosts in the current subnet."""
        return int(self.broadcast_address) - int(self.network_address) + 1

    @property
    def _address_class(self):
        # Returning bare address objects (rather than interfaces) allows for
        # more consistent behaviour across the network address, broadcast
        # address and individual host addresses.
        msg = '%200s has no associated address class' % (type(self),)
        raise NotImplementedError(msg)

    @property
    def prefixlen(self):
        return self._prefixlen

    def address_exclude(self, other):
        """Remove an address from a larger block.

        For example:

            addr1 = ip_network('192.0.2.0/28')
            addr2 = ip_network('192.0.2.1/32')
            list(addr1.address_exclude(addr2)) =
                [IPv4Network('192.0.2.0/32'), IPv4Network('192.0.2.2/31'),
                 IPv4Network('192.0.2.4/30'), IPv4Network('192.0.2.8/29')]

        or IPv6:

            addr1 = ip_network('2001:db8::1/32')
            addr2 = ip_network('2001:db8::1/128')
            list(addr1.address_exclude(addr2)) =
                [ip_network('2001:db8::1/128'),
                 ip_network('2001:db8::2/127'),
                 ip_network('2001:db8::4/126'),
                 ip_network('2001:db8::8/125'),
                 ...
                 ip_network('2001:db8:8000::/33')]

        Args:
            other: An IPv4Network or IPv6Network object of the same type.

        Returns:
            An iterator of the IPv(4|6)Network objects which is self
            minus other.

        Raises:
            TypeError: If self and other are of differing address
              versions, or if other is not a network object.
            ValueError: If other is not completely contained by self.

        """
        if not self._version == other._version:
            raise TypeError("%s and %s are not of the same version" % (
                            self, other))

        if not isinstance(other, _BaseNetwork):
            raise TypeError("%s is not a network object" % other)

        if not other.subnet_of(self):
            raise ValueError('%s not contained in %s' % (other, self))
        if other == self:
            return

        # Make sure we're comparing the network of other.
        other = other.__class__('%s/%s' % (other.network_address,
                                           other.prefixlen))

        s1, s2 = self.subnets()
        while s1 != other and s2 != other:
            if other.subnet_of(s1):
                yield s2
                s1, s2 = s1.subnets()
            elif other.subnet_of(s2):
                yield s1
                s1, s2 = s2.subnets()
            else:
                # If we got here, there's a bug somewhere.
                raise AssertionError('Error performing exclusion: '
                                     's1: %s s2: %s other: %s' %
                                     (s1, s2, other))
        if s1 == other:
            yield s2
        elif s2 == other:
            yield s1
        else:
            # If we got here, there's a bug somewhere.
            raise AssertionError('Error performing exclusion: '
                                 's1: %s s2: %s other: %s' %
                                 (s1, s2, other))

    def compare_networks(self, other):
        """Compare two IP objects.

        This is only concerned about the comparison of the integer
        representation of the network addresses.  This means that the
        host bits aren't considered at all in this method.  If you want
        to compare host bits, you can easily enough do a
        'HostA._ip < HostB._ip'

        Args:
            other: An IP object.

        Returns:
            If the IP versions of self and other are the same, returns:

            -1 if self < other:
              eg: IPv4Network('192.0.2.0/25') < IPv4Network('192.0.2.128/25')
              IPv6Network('2001:db8::1000/124') <
              IPv6Network('2001:db8::2000/124')
            0 if self == other
              eg: IPv4Network('192.0.2.0/24') == IPv4Network('192.0.2.0/24')
              IPv6Network('2001:db8::1000/124') ==
              IPv6Network('2001:db8::1000/124')
            1 if self > other
              eg: IPv4Network('192.0.2.128/25') > IPv4Network('192.0.2.0/25')
              IPv6Network('2001:db8::2000/124') >
              IPv6Network('2001:db8::1000/124')

          Raises:
              TypeError if the IP versions are different.

        """
        # does this need to raise a ValueError?
        if self._version != other._version:
            raise TypeError('%s and %s are not of the same type' % (
                            self, other))
        # self._version == other._version below here:
        if self.network_address < other.network_address:
            return -1
        if self.network_address > other.network_address:
            return 1
        # self.network_address == other.network_address below here:
        if self.netmask < other.netmask:
            return -1
        if self.netmask > other.netmask:
            return 1
        return 0

    def _get_networks_key(self):
        """Network-only key function.

        Returns an object that identifies this address' network and
        netmask. This function is a suitable "key" argument for sorted()
        and list.sort().

        """
        return (self._version, self.network_address, self.netmask)

    def subnets(self, prefixlen_diff=1, new_prefix=None):
        """The subnets which join to make the current subnet.

        In the case that self contains only one IP
        (self._prefixlen == 32 for IPv4 or self._prefixlen == 128
        for IPv6), yield an iterator with just ourself.

        Args:
            prefixlen_diff: An integer, the amount the prefix length
              should be increased by. This should not be set if
              new_prefix is also set.
            new_prefix: The desired new prefix length. This must be a
              larger number (smaller prefix) than the existing prefix.
              This should not be set if prefixlen_diff is also set.

        Returns:
            An iterator of IPv(4|6) objects.

        Raises:
            ValueError: The prefixlen_diff is too small or too large.
                OR
            prefixlen_diff and new_prefix are both set or new_prefix
              is a smaller number than the current prefix (smaller
              number means a larger network)

        """
        if self._prefixlen == self._max_prefixlen:
            yield self
            return

        if new_prefix is not None:
            if new_prefix < self._prefixlen:
                raise ValueError('new prefix must be longer')
            if prefixlen_diff != 1:
                raise ValueError('cannot set prefixlen_diff and new_prefix')
            prefixlen_diff = new_prefix - self._prefixlen

        if prefixlen_diff < 0:
            raise ValueError('prefix length diff must be > 0')
        new_prefixlen = self._prefixlen + prefixlen_diff

        if new_prefixlen > self._max_prefixlen:
            raise ValueError(
                'prefix length diff %d is invalid for netblock %s' % (
                    new_prefixlen, self))

        start = int(self.network_address)
        end = int(self.broadcast_address) + 1
        step = (int(self.hostmask) + 1) >> prefixlen_diff
        for new_addr in _compat_range(start, end, step):
            current = self.__class__((new_addr, new_prefixlen))
            yield current

    def supernet(self, prefixlen_diff=1, new_prefix=None):
        """The supernet containing the current network.

        Args:
            prefixlen_diff: An integer, the amount the prefix length of
              the network should be decreased by.  For example, given a
              /24 network and a prefixlen_diff of 3, a supernet with a
              /21 netmask is returned.

        Returns:
            An IPv4 network object.

        Raises:
            ValueError: If self.prefixlen - prefixlen_diff < 0. I.e., you have
              a negative prefix length.
                OR
            If prefixlen_diff and new_prefix are both set or new_prefix is a
              larger number than the current prefix (larger number means a
              smaller network)

        """
        if self._prefixlen == 0:
            return self

        if new_prefix is not None:
            if new_prefix > self._prefixlen:
                raise ValueError('new prefix must be shorter')
            if prefixlen_diff != 1:
                raise ValueError('cannot set prefixlen_diff and new_prefix')
            prefixlen_diff = self._prefixlen - new_prefix

        new_prefixlen = self.prefixlen - prefixlen_diff
        if new_prefixlen < 0:
            raise ValueError(
                'current prefixlen is %d, cannot have a prefixlen_diff of %d' %
                (self.prefixlen, prefixlen_diff))
        return self.__class__((
            int(self.network_address) & (int(self.netmask) << prefixlen_diff),
            new_prefixlen))

    @property
    def is_multicast(self):
        """Test if the address is reserved for multicast use.

        Returns:
            A boolean, True if the address is a multicast address.
            See RFC 2373 2.7 for details.

        """
        return (self.network_address.is_multicast and
                self.broadcast_address.is_multicast)

    def subnet_of(self, other):
        # always false if one is v4 and the other is v6.
        if self._version != other._version:
            return False
        # dealing with another network.
        if (hasattr(other, 'network_address') and
                hasattr(other, 'broadcast_address')):
            return (other.network_address <= self.network_address and
                    other.broadcast_address >= self.broadcast_address)
        # dealing with another address
        else:
            raise TypeError('Unable to test subnet containment with element '
                            'of type %s' % type(other))

    def supernet_of(self, other):
        # always false if one is v4 and the other is v6.
        if self._version != other._version:
            return False
        # dealing with another network.
        if (hasattr(other, 'network_address') and
                hasattr(other, 'broadcast_address')):
            return (other.network_address >= self.network_address and
                    other.broadcast_address <= self.broadcast_address)
        # dealing with another address
        else:
            raise TypeError('Unable to test subnet containment with element '
                            'of type %s' % type(other))

    @property
    def is_reserved(self):
        """Test if the address is otherwise IETF reserved.

        Returns:
            A boolean, True if the address is within one of the
            reserved IPv6 Network ranges.

        """
        return (self.network_address.is_reserved and
                self.broadcast_address.is_reserved)

    @property
    def is_link_local(self):
        """Test if the address is reserved for link-local.

        Returns:
            A boolean, True if the address is reserved per RFC 4291.

        """
        return (self.network_address.is_link_local and
                self.broadcast_address.is_link_local)

    @property
    def is_private(self):
        """Test if this address is allocated for private networks.

        Returns:
            A boolean, True if the address is reserved per
            iana-ipv4-special-registry or iana-ipv6-special-registry.

        """
        return (self.network_address.is_private and
                self.broadcast_address.is_private)

    @property
    def is_global(self):
        """Test if this address is allocated for public networks.

        Returns:
            A boolean, True if the address is not reserved per
            iana-ipv4-special-registry or iana-ipv6-special-registry.

        """
        return not self.is_private

    @property
    def is_unspecified(self):
        """Test if the address is unspecified.

        Returns:
            A boolean, True if this is the unspecified address as defined in
            RFC 2373 2.5.2.

        """
        return (self.network_address.is_unspecified and
                self.broadcast_address.is_unspecified)

    @property
    def is_loopback(self):
        """Test if the address is a loopback address.

        Returns:
            A boolean, True if the address is a loopback address as defined in
            RFC 2373 2.5.3.

        """
        return (self.network_address.is_loopback and
                self.broadcast_address.is_loopback)


class _BaseV4(object):

    """Base IPv4 object.

    The following methods are used by IPv4 objects in both single IP
    addresses and networks.

    """

    __slots__ = ()
    _version = 4
    # Equivalent to 255.255.255.255 or 32 bits of 1's.
    _ALL_ONES = (2 ** IPV4LENGTH) - 1
    _DECIMAL_DIGITS = frozenset('0123456789')

    # the valid octets for host and netmasks. only useful for IPv4.
    _valid_mask_octets = frozenset([255, 254, 252, 248, 240, 224, 192, 128, 0])

    _max_prefixlen = IPV4LENGTH
    # There are only a handful of valid v4 netmasks, so we cache them all
    # when constructed (see _make_netmask()).
    _netmask_cache = {}

    def _explode_shorthand_ip_string(self):
        return _compat_str(self)

    @classmethod
    def _make_netmask(cls, arg):
        """Make a (netmask, prefix_len) tuple from the given argument.

        Argument can be:
        - an integer (the prefix length)
        - a string representing the prefix length (e.g. "24")
        - a string representing the prefix netmask (e.g. "255.255.255.0")
        """
        if arg not in cls._netmask_cache:
            if isinstance(arg, _compat_int_types):
                prefixlen = arg
            else:
                try:
                    # Check for a netmask in prefix length form
                    prefixlen = cls._prefix_from_prefix_string(arg)
                except NetmaskValueError:
                    # Check for a netmask or hostmask in dotted-quad form.
                    # This may raise NetmaskValueError.
                    prefixlen = cls._prefix_from_ip_string(arg)
            netmask = IPv4Address(cls._ip_int_from_prefix(prefixlen))
            cls._netmask_cache[arg] = netmask, prefixlen
        return cls._netmask_cache[arg]

    @classmethod
    def _ip_int_from_string(cls, ip_str):
        """Turn the given IP string into an integer for comparison.

        Args:
            ip_str: A string, the IP ip_str.

        Returns:
            The IP ip_str as an integer.

        Raises:
            AddressValueError: if ip_str isn't a valid IPv4 Address.

        """
        if not ip_str:
            raise AddressValueError('Address cannot be empty')

        octets = ip_str.split('.')
        if len(octets) != 4:
            raise AddressValueError("Expected 4 octets in %r" % ip_str)

        try:
            return _compat_int_from_byte_vals(
                map(cls._parse_octet, octets), 'big')
        except ValueError as exc:
            raise AddressValueError("%s in %r" % (exc, ip_str))

    @classmethod
    def _parse_octet(cls, octet_str):
        """Convert a decimal octet into an integer.

        Args:
            octet_str: A string, the number to parse.

        Returns:
            The octet as an integer.

        Raises:
            ValueError: if the octet isn't strictly a decimal from [0..255].

        """
        if not octet_str:
            raise ValueError("Empty octet not permitted")
        # Whitelist the characters, since int() allows a lot of bizarre stuff.
        if not cls._DECIMAL_DIGITS.issuperset(octet_str):
            msg = "Only decimal digits permitted in %r"
            raise ValueError(msg % octet_str)
        # We do the length check second, since the invalid character error
        # is likely to be more informative for the user
        if len(octet_str) > 3:
            msg = "At most 3 characters permitted in %r"
            raise ValueError(msg % octet_str)
        # Convert to integer (we know digits are legal)
        octet_int = int(octet_str, 10)
        # Any octets that look like they *might* be written in octal,
        # and which don't look exactly the same in both octal and
        # decimal are rejected as ambiguous
        if octet_int > 7 and octet_str[0] == '0':
            msg = "Ambiguous (octal/decimal) value in %r not permitted"
            raise ValueError(msg % octet_str)
        if octet_int > 255:
            raise ValueError("Octet %d (> 255) not permitted" % octet_int)
        return octet_int

    @classmethod
    def _string_from_ip_int(cls, ip_int):
        """Turns a 32-bit integer into dotted decimal notation.

        Args:
            ip_int: An integer, the IP address.

        Returns:
            The IP address as a string in dotted decimal notation.

        """
        return '.'.join(_compat_str(struct.unpack(b'!B', b)[0]
                                    if isinstance(b, bytes)
                                    else b)
                        for b in _compat_to_bytes(ip_int, 4, 'big'))

    def _is_hostmask(self, ip_str):
        """Test if the IP string is a hostmask (rather than a netmask).

        Args:
            ip_str: A string, the potential hostmask.

        Returns:
            A boolean, True if the IP string is a hostmask.

        """
        bits = ip_str.split('.')
        try:
            parts = [x for x in map(int, bits) if x in self._valid_mask_octets]
        except ValueError:
            return False
        if len(parts) != len(bits):
            return False
        if parts[0] < parts[-1]:
            return True
        return False

    def _reverse_pointer(self):
        """Return the reverse DNS pointer name for the IPv4 address.

        This implements the method described in RFC1035 3.5.

        """
        reverse_octets = _compat_str(self).split('.')[::-1]
        return '.'.join(reverse_octets) + '.in-addr.arpa'

    @property
    def max_prefixlen(self):
        return self._max_prefixlen

    @property
    def version(self):
        return self._version
Addresses."""__slots__ = ('_ip', '__weakref__')def __init__(self, address):"""Args:address: A string or integer representing the IPAdditionally, an integer can be passed, soIPv4Address('192.0.2.1') == IPv4Address(3221225985).or, more generallyIPv4Address(int(IPv4Address('192.0.2.1'))) ==IPv4Address('192.0.2.1')Raises:AddressValueError: If ipaddress isn't a valid IPv4 address."""# Efficient constructor from integer.if isinstance(address, _compat_int_types):self._check_int_address(address)self._ip = addressreturn# Constructing from a packed addressif isinstance(address, bytes):self._check_packed_address(address, 4)bvs = _compat_bytes_to_byte_vals(address)self._ip = _compat_int_from_byte_vals(bvs, 'big')return# Assume input argument to be string or any object representation# which converts into a formatted IP string.addr_str = _compat_str(address)if '/' in addr_str:raise AddressValueError("Unexpected '/' in %r" % address)self._ip = self._ip_int_from_string(addr_str)@propertydef packed(self):"""The binary representation of this address."""return v4_int_to_packed(self._ip)@propertydef is_reserved(self):"""Test if the address is otherwise IETF reserved.Returns:A boolean, True if the address is within thereserved IPv4 Network range."""return self in self._constants._reserved_network@propertydef is_private(self):"""Test if this address is allocated for private networks.Returns:A boolean, True if the address is reserved periana-ipv4-special-registry."""return any(self in net for net in self._constants._private_networks)@propertydef is_global(self):return (self not in self._constants._public_network andnot self.is_private)@propertydef is_multicast(self):"""Test if the address is reserved for multicast use.Returns:A boolean, True if the address is multicast.See RFC 3171 for details."""return self in self._constants._multicast_network@propertydef is_unspecified(self):"""Test if the address is unspecified.Returns:A boolean, True if this is the unspecified address as defined 
class IPv4Interface(IPv4Address):
    """An IPv4 address together with the network it belongs to.

    Accepts the same input forms as IPv4Address, plus an optional
    prefix/netmask: a string such as '192.0.2.1/24', a tuple
    (address, prefixlen), packed bytes, or a plain integer (which
    implies a /32 network).
    """

    def __init__(self, address):
        # Integer or packed-bytes form: a bare address, implicitly /32.
        if isinstance(address, (bytes, _compat_int_types)):
            IPv4Address.__init__(self, address)
            self.network = IPv4Network(self._ip)
            self._prefixlen = self._max_prefixlen
            # Fix: keep this branch consistent with the tuple and string
            # branches below, which also set netmask/hostmask. Without
            # these, with_netmask/with_hostmask raise AttributeError for
            # int/bytes input.
            self.netmask = self.network.netmask
            self.hostmask = self.network.hostmask
            return

        # (address, prefixlen) tuple form.
        if isinstance(address, tuple):
            IPv4Address.__init__(self, address[0])
            if len(address) > 1:
                self._prefixlen = int(address[1])
            else:
                self._prefixlen = self._max_prefixlen

            self.network = IPv4Network(address, strict=False)
            self.netmask = self.network.netmask
            self.hostmask = self.network.hostmask
            return

        # String (or string-convertible) form, e.g. '192.0.2.1/24'.
        addr = _split_optional_netmask(address)
        IPv4Address.__init__(self, addr[0])

        self.network = IPv4Network(address, strict=False)
        self._prefixlen = self.network._prefixlen

        self.netmask = self.network.netmask
        self.hostmask = self.network.hostmask

    def __str__(self):
        return '%s/%d' % (self._string_from_ip_int(self._ip),
                          self.network.prefixlen)

    def __eq__(self, other):
        address_equal = IPv4Address.__eq__(self, other)
        if not address_equal or address_equal is NotImplemented:
            return address_equal
        try:
            return self.network == other.network
        except AttributeError:
            # An interface with an associated network is NOT the
            # same as an unassociated address. That's why the hash
            # takes the extra info into account.
            return False

    def __lt__(self, other):
        address_less = IPv4Address.__lt__(self, other)
        if address_less is NotImplemented:
            return NotImplemented
        try:
            return self.network < other.network
        except AttributeError:
            # We *do* allow addresses and interfaces to be sorted. The
            # unassociated address is considered less than all interfaces.
            return False

    def __hash__(self):
        return self._ip ^ self._prefixlen ^ int(self.network.network_address)

    __reduce__ = _IPAddressBase.__reduce__

    @property
    def ip(self):
        # The bare address, stripped of network information.
        return IPv4Address(self._ip)

    @property
    def with_prefixlen(self):
        return '%s/%s' % (self._string_from_ip_int(self._ip),
                          self._prefixlen)

    @property
    def with_netmask(self):
        return '%s/%s' % (self._string_from_ip_int(self._ip),
                          self.netmask)

    @property
    def with_hostmask(self):
        return '%s/%s' % (self._string_from_ip_int(self._ip),
                          self.hostmask)
If no mask is given, a default of /32 is used.Additionally, an integer can be passed, soIPv4Network('192.0.2.1') == IPv4Network(3221225985)or, more generallyIPv4Interface(int(IPv4Interface('192.0.2.1'))) ==IPv4Interface('192.0.2.1')Raises:AddressValueError: If ipaddress isn't a valid IPv4 address.NetmaskValueError: If the netmask isn't valid foran IPv4 address.ValueError: If strict is True and a network address is notsupplied."""_BaseNetwork.__init__(self, address)# Constructing from a packed address or integerif isinstance(address, (_compat_int_types, bytes)):self.network_address = IPv4Address(address)self.netmask, self._prefixlen = self._make_netmask(self._max_prefixlen)# fixme: address/network test here.returnif isinstance(address, tuple):if len(address) > 1:arg = address[1]else:# We weren't given an address[1]arg = self._max_prefixlenself.network_address = IPv4Address(address[0])self.netmask, self._prefixlen = self._make_netmask(arg)packed = int(self.network_address)if packed & int(self.netmask) != packed:if strict:raise ValueError('%s has host bits set' % self)else:self.network_address = IPv4Address(packed &int(self.netmask))return# Assume input argument to be string or any object representation# which converts into a formatted IP prefix string.addr = _split_optional_netmask(address)self.network_address = IPv4Address(self._ip_int_from_string(addr[0]))if len(addr) == 2:arg = addr[1]else:arg = self._max_prefixlenself.netmask, self._prefixlen = self._make_netmask(arg)if strict:if (IPv4Address(int(self.network_address) & int(self.netmask)) !=self.network_address):raise ValueError('%s has host bits set' % self)self.network_address = IPv4Address(int(self.network_address) &int(self.netmask))if self._prefixlen == (self._max_prefixlen - 1):self.hosts = self.__iter__@propertydef is_global(self):"""Test if this address is allocated for public networks.Returns:A boolean, True if the address is not reserved periana-ipv4-special-registry."""return (not (self.network_address 
class _IPv4Constants(object):
    # Registry of well-known IPv4 ranges consulted by the IPv4Address
    # is_* properties (assigned to IPv4Address._constants below).

    # RFC 3927 link-local block.
    _linklocal_network = IPv4Network('169.254.0.0/16')

    # Loopback block.
    _loopback_network = IPv4Network('127.0.0.0/8')

    # Multicast (class D) block.
    _multicast_network = IPv4Network('224.0.0.0/4')

    # Shared address space; excluded from is_global.
    _public_network = IPv4Network('100.64.0.0/10')

    # Ranges treated as private by is_private (per the
    # iana-ipv4-special-registry, per the is_private docstrings above).
    _private_networks = [
        IPv4Network('0.0.0.0/8'),
        IPv4Network('10.0.0.0/8'),
        IPv4Network('127.0.0.0/8'),
        IPv4Network('169.254.0.0/16'),
        IPv4Network('172.16.0.0/12'),
        IPv4Network('192.0.0.0/29'),
        IPv4Network('192.0.0.170/31'),
        IPv4Network('192.0.2.0/24'),
        IPv4Network('192.168.0.0/16'),
        IPv4Network('198.18.0.0/15'),
        IPv4Network('198.51.100.0/24'),
        IPv4Network('203.0.113.0/24'),
        IPv4Network('240.0.0.0/4'),
        IPv4Network('255.255.255.255/32'),
    ]

    # "Reserved for future use" block checked by is_reserved.
    _reserved_network = IPv4Network('240.0.0.0/4')

    # The all-zeros address checked by is_unspecified.
    _unspecified_address = IPv4Address('0.0.0.0')


# Wire the constant registry into the address class.
IPv4Address._constants = _IPv4Constants
"255.255.255.0")"""if arg not in cls._netmask_cache:if isinstance(arg, _compat_int_types):prefixlen = argelse:prefixlen = cls._prefix_from_prefix_string(arg)netmask = IPv6Address(cls._ip_int_from_prefix(prefixlen))cls._netmask_cache[arg] = netmask, prefixlenreturn cls._netmask_cache[arg]@classmethoddef _ip_int_from_string(cls, ip_str):"""Turn an IPv6 ip_str into an integer.Args:ip_str: A string, the IPv6 ip_str.Returns:An int, the IPv6 addressRaises:AddressValueError: if ip_str isn't a valid IPv6 Address."""if not ip_str:raise AddressValueError('Address cannot be empty')parts = ip_str.split(':')# An IPv6 address needs at least 2 colons (3 parts)._min_parts = 3if len(parts) < _min_parts:msg = "At least %d parts expected in %r" % (_min_parts, ip_str)raise AddressValueError(msg)# If the address has an IPv4-style suffix, convert it to hexadecimal.if '.' in parts[-1]:try:ipv4_int = IPv4Address(parts.pop())._ipexcept AddressValueError as exc:raise AddressValueError("%s in %r" % (exc, ip_str))parts.append('%x' % ((ipv4_int >> 16) & 0xFFFF))parts.append('%x' % (ipv4_int & 0xFFFF))# An IPv6 address can't have more than 8 colons (9 parts).# The extra colon comes from using the "::" notation for a single# leading or trailing zero part._max_parts = cls._HEXTET_COUNT + 1if len(parts) > _max_parts:msg = "At most %d colons permitted in %r" % (_max_parts - 1, ip_str)raise AddressValueError(msg)# Disregarding the endpoints, find '::' with nothing in between.# This indicates that a run of zeroes has been skipped.skip_index = Nonefor i in _compat_range(1, len(parts) - 1):if not parts[i]:if skip_index is not None:# Can't have more than one '::'msg = "At most one '::' permitted in %r" % ip_strraise AddressValueError(msg)skip_index = i# parts_hi is the number of parts to copy from above/before the '::'# parts_lo is the number of parts to copy from below/after the '::'if skip_index is not None:# If we found a '::', then check if it also covers the endpoints.parts_hi = skip_indexparts_lo 
= len(parts) - skip_index - 1if not parts[0]:parts_hi -= 1if parts_hi:msg = "Leading ':' only permitted as part of '::' in %r"raise AddressValueError(msg % ip_str) # ^: requires ^::if not parts[-1]:parts_lo -= 1if parts_lo:msg = "Trailing ':' only permitted as part of '::' in %r"raise AddressValueError(msg % ip_str) # :$ requires ::$parts_skipped = cls._HEXTET_COUNT - (parts_hi + parts_lo)if parts_skipped < 1:msg = "Expected at most %d other parts with '::' in %r"raise AddressValueError(msg % (cls._HEXTET_COUNT - 1, ip_str))else:# Otherwise, allocate the entire address to parts_hi. The# endpoints could still be empty, but _parse_hextet() will check# for that.if len(parts) != cls._HEXTET_COUNT:msg = "Exactly %d parts expected without '::' in %r"raise AddressValueError(msg % (cls._HEXTET_COUNT, ip_str))if not parts[0]:msg = "Leading ':' only permitted as part of '::' in %r"raise AddressValueError(msg % ip_str) # ^: requires ^::if not parts[-1]:msg = "Trailing ':' only permitted as part of '::' in %r"raise AddressValueError(msg % ip_str) # :$ requires ::$parts_hi = len(parts)parts_lo = 0parts_skipped = 0try:# Now, parse the hextets into a 128-bit integer.ip_int = 0for i in range(parts_hi):ip_int <<= 16ip_int |= cls._parse_hextet(parts[i])ip_int <<= 16 * parts_skippedfor i in range(-parts_lo, 0):ip_int <<= 16ip_int |= cls._parse_hextet(parts[i])return ip_intexcept ValueError as exc:raise AddressValueError("%s in %r" % (exc, ip_str))@classmethoddef _parse_hextet(cls, hextet_str):"""Convert an IPv6 hextet string into an integer.Args:hextet_str: A string, the number to parse.Returns:The hextet as an integer.Raises:ValueError: if the input isn't strictly a hex number from[0..FFFF]."""# Whitelist the characters, since int() allows a lot of bizarre stuff.if not cls._HEX_DIGITS.issuperset(hextet_str):raise ValueError("Only hex digits permitted in %r" % hextet_str)# We do the length check second, since the invalid character error# is likely to be more informative for the 
userif len(hextet_str) > 4:msg = "At most 4 characters permitted in %r"raise ValueError(msg % hextet_str)# Length check means we can skip checking the integer valuereturn int(hextet_str, 16)@classmethoddef _compress_hextets(cls, hextets):"""Compresses a list of hextets.Compresses a list of strings, replacing the longest continuoussequence of "0" in the list with "" and adding empty strings atthe beginning or at the end of the string such that subsequentlycalling ":".join(hextets) will produce the compressed version ofthe IPv6 address.Args:hextets: A list of strings, the hextets to compress.Returns:A list of strings."""best_doublecolon_start = -1best_doublecolon_len = 0doublecolon_start = -1doublecolon_len = 0for index, hextet in enumerate(hextets):if hextet == '0':doublecolon_len += 1if doublecolon_start == -1:# Start of a sequence of zeros.doublecolon_start = indexif doublecolon_len > best_doublecolon_len:# This is the longest sequence of zeros so far.best_doublecolon_len = doublecolon_lenbest_doublecolon_start = doublecolon_startelse:doublecolon_len = 0doublecolon_start = -1if best_doublecolon_len > 1:best_doublecolon_end = (best_doublecolon_start +best_doublecolon_len)# For zeros at the end of the address.if best_doublecolon_end == len(hextets):hextets += ['']hextets[best_doublecolon_start:best_doublecolon_end] = ['']# For zeros at the beginning of the address.if best_doublecolon_start == 0:hextets = [''] + hextetsreturn hextets@classmethoddef _string_from_ip_int(cls, ip_int=None):"""Turns a 128-bit integer into hexadecimal notation.Args:ip_int: An integer, the IP address.Returns:A string, the hexadecimal representation of the address.Raises:ValueError: The address is bigger than 128 bits of all ones."""if ip_int is None:ip_int = int(cls._ip)if ip_int > cls._ALL_ONES:raise ValueError('IPv6 address is too large')hex_str = '%032x' % ip_inthextets = ['%x' % int(hex_str[x:x + 4], 16) for x in range(0, 32, 4)]hextets = cls._compress_hextets(hextets)return 
':'.join(hextets)def _explode_shorthand_ip_string(self):"""Expand a shortened IPv6 address.Args:ip_str: A string, the IPv6 address.Returns:A string, the expanded IPv6 address."""if isinstance(self, IPv6Network):ip_str = _compat_str(self.network_address)elif isinstance(self, IPv6Interface):ip_str = _compat_str(self.ip)else:ip_str = _compat_str(self)ip_int = self._ip_int_from_string(ip_str)hex_str = '%032x' % ip_intparts = [hex_str[x:x + 4] for x in range(0, 32, 4)]if isinstance(self, (_BaseNetwork, IPv6Interface)):return '%s/%d' % (':'.join(parts), self._prefixlen)return ':'.join(parts)def _reverse_pointer(self):"""Return the reverse DNS pointer name for the IPv6 address.This implements the method described in RFC3596 2.5."""reverse_chars = self.exploded[::-1].replace(':', '')return '.'.join(reverse_chars) + '.ip6.arpa'@propertydef max_prefixlen(self):return self._max_prefixlen@propertydef version(self):return self._versionclass IPv6Address(_BaseV6, _BaseAddress):"""Represent and manipulate single IPv6 Addresses."""__slots__ = ('_ip', '__weakref__')def __init__(self, address):"""Instantiate a new IPv6 address object.Args:address: A string or integer representing the IPAdditionally, an integer can be passed, soIPv6Address('2001:db8::') ==IPv6Address(42540766411282592856903984951653826560)or, more generallyIPv6Address(int(IPv6Address('2001:db8::'))) ==IPv6Address('2001:db8::')Raises:AddressValueError: If address isn't a valid IPv6 address."""# Efficient constructor from integer.if isinstance(address, _compat_int_types):self._check_int_address(address)self._ip = addressreturn# Constructing from a packed addressif isinstance(address, bytes):self._check_packed_address(address, 16)bvs = _compat_bytes_to_byte_vals(address)self._ip = _compat_int_from_byte_vals(bvs, 'big')return# Assume input argument to be string or any object representation# which converts into a formatted IP string.addr_str = _compat_str(address)if '/' in addr_str:raise AddressValueError("Unexpected '/' 
in %r" % address)self._ip = self._ip_int_from_string(addr_str)@propertydef packed(self):"""The binary representation of this address."""return v6_int_to_packed(self._ip)@propertydef is_multicast(self):"""Test if the address is reserved for multicast use.Returns:A boolean, True if the address is a multicast address.See RFC 2373 2.7 for details."""return self in self._constants._multicast_network@propertydef is_reserved(self):"""Test if the address is otherwise IETF reserved.Returns:A boolean, True if the address is within one of thereserved IPv6 Network ranges."""return any(self in x for x in self._constants._reserved_networks)@propertydef is_link_local(self):"""Test if the address is reserved for link-local.Returns:A boolean, True if the address is reserved per RFC 4291."""return self in self._constants._linklocal_network@propertydef is_site_local(self):"""Test if the address is reserved for site-local.Note that the site-local address space has been deprecated by RFC 3879.Use is_private to test if this address is in the space of unique localaddresses as defined by RFC 4193.Returns:A boolean, True if the address is reserved per RFC 3513 2.5.6."""return self in self._constants._sitelocal_network@propertydef is_private(self):"""Test if this address is allocated for private networks.Returns:A boolean, True if the address is reserved periana-ipv6-special-registry."""return any(self in net for net in self._constants._private_networks)@propertydef is_global(self):"""Test if this address is allocated for public networks.Returns:A boolean, true if the address is not reserved periana-ipv6-special-registry."""return not self.is_private@propertydef is_unspecified(self):"""Test if the address is unspecified.Returns:A boolean, True if this is the unspecified address as defined inRFC 2373 2.5.2."""return self._ip == 0@propertydef is_loopback(self):"""Test if the address is a loopback address.Returns:A boolean, True if the address is a loopback address as defined inRFC 2373 
class IPv6Interface(IPv6Address):
    """An IPv6 address together with the network it belongs to.

    Accepts the same input forms as IPv6Address, plus an optional
    prefix/netmask: a string such as '2001:db8::1/64', a tuple
    (address, prefixlen), packed bytes, or a plain integer (which
    implies a /128 network).
    """

    def __init__(self, address):
        # Integer or packed-bytes form: a bare address, implicitly /128.
        if isinstance(address, (bytes, _compat_int_types)):
            IPv6Address.__init__(self, address)
            self.network = IPv6Network(self._ip)
            self._prefixlen = self._max_prefixlen
            # Fix: keep this branch consistent with the tuple and string
            # branches below, which also set netmask/hostmask. Without
            # these, with_netmask/with_hostmask raise AttributeError for
            # int/bytes input.
            self.netmask = self.network.netmask
            self.hostmask = self.network.hostmask
            return

        # (address, prefixlen) tuple form.
        if isinstance(address, tuple):
            IPv6Address.__init__(self, address[0])
            if len(address) > 1:
                self._prefixlen = int(address[1])
            else:
                self._prefixlen = self._max_prefixlen
            self.network = IPv6Network(address, strict=False)
            self.netmask = self.network.netmask
            self.hostmask = self.network.hostmask
            return

        # String (or string-convertible) form, e.g. '2001:db8::1/64'.
        addr = _split_optional_netmask(address)
        IPv6Address.__init__(self, addr[0])

        self.network = IPv6Network(address, strict=False)
        self.netmask = self.network.netmask
        self._prefixlen = self.network._prefixlen
        self.hostmask = self.network.hostmask

    def __str__(self):
        return '%s/%d' % (self._string_from_ip_int(self._ip),
                          self.network.prefixlen)

    def __eq__(self, other):
        address_equal = IPv6Address.__eq__(self, other)
        if not address_equal or address_equal is NotImplemented:
            return address_equal
        try:
            return self.network == other.network
        except AttributeError:
            # An interface with an associated network is NOT the
            # same as an unassociated address. That's why the hash
            # takes the extra info into account.
            return False

    def __lt__(self, other):
        address_less = IPv6Address.__lt__(self, other)
        if address_less is NotImplemented:
            return NotImplemented
        try:
            return self.network < other.network
        except AttributeError:
            # We *do* allow addresses and interfaces to be sorted. The
            # unassociated address is considered less than all interfaces.
            return False

    def __hash__(self):
        return self._ip ^ self._prefixlen ^ int(self.network.network_address)

    __reduce__ = _IPAddressBase.__reduce__

    @property
    def ip(self):
        # The bare address, stripped of network information.
        return IPv6Address(self._ip)

    @property
    def with_prefixlen(self):
        return '%s/%s' % (self._string_from_ip_int(self._ip),
                          self._prefixlen)

    @property
    def with_netmask(self):
        return '%s/%s' % (self._string_from_ip_int(self._ip),
                          self.netmask)

    @property
    def with_hostmask(self):
        return '%s/%s' % (self._string_from_ip_int(self._ip),
                          self.hostmask)

    @property
    def is_unspecified(self):
        # Both the address and its network must be unspecified.
        return self._ip == 0 and self.network.is_unspecified

    @property
    def is_loopback(self):
        # Both the address and its network must be loopback.
        return self._ip == 1 and self.network.is_loopback
That is to say,failing to provide a subnetmask will create an object witha mask of /128.Additionally, an integer can be passed, soIPv6Network('2001:db8::') ==IPv6Network(42540766411282592856903984951653826560)or, more generallyIPv6Network(int(IPv6Network('2001:db8::'))) ==IPv6Network('2001:db8::')strict: A boolean. If true, ensure that we have been passedA true network address, eg, 2001:db8::1000/124 and not anIP address on a network, eg, 2001:db8::1/124.Raises:AddressValueError: If address isn't a valid IPv6 address.NetmaskValueError: If the netmask isn't valid foran IPv6 address.ValueError: If strict was True and a network address was notsupplied."""_BaseNetwork.__init__(self, address)# Efficient constructor from integer or packed addressif isinstance(address, (bytes, _compat_int_types)):self.network_address = IPv6Address(address)self.netmask, self._prefixlen = self._make_netmask(self._max_prefixlen)returnif isinstance(address, tuple):if len(address) > 1:arg = address[1]else:arg = self._max_prefixlenself.netmask, self._prefixlen = self._make_netmask(arg)self.network_address = IPv6Address(address[0])packed = int(self.network_address)if packed & int(self.netmask) != packed:if strict:raise ValueError('%s has host bits set' % self)else:self.network_address = IPv6Address(packed &int(self.netmask))return# Assume input argument to be string or any object representation# which converts into a formatted IP prefix string.addr = _split_optional_netmask(address)self.network_address = IPv6Address(self._ip_int_from_string(addr[0]))if len(addr) == 2:arg = addr[1]else:arg = self._max_prefixlenself.netmask, self._prefixlen = self._make_netmask(arg)if strict:if (IPv6Address(int(self.network_address) & int(self.netmask)) !=self.network_address):raise ValueError('%s has host bits set' % self)self.network_address = IPv6Address(int(self.network_address) &int(self.netmask))if self._prefixlen == (self._max_prefixlen - 1):self.hosts = self.__iter__def hosts(self):"""Generate Iterator 
class _IPv6Constants(object):
    # Registry of well-known IPv6 ranges consulted by the IPv6Address
    # is_* properties (assigned to IPv6Address._constants below).

    # RFC 4291 link-local block (see is_link_local above).
    _linklocal_network = IPv6Network('fe80::/10')

    # Multicast block.
    _multicast_network = IPv6Network('ff00::/8')

    # Ranges treated as private by is_private (per the
    # iana-ipv6-special-registry, per the is_private docstrings above).
    _private_networks = [
        IPv6Network('::1/128'),
        IPv6Network('::/128'),
        IPv6Network('::ffff:0:0/96'),
        IPv6Network('100::/64'),
        IPv6Network('2001::/23'),
        IPv6Network('2001:2::/48'),
        IPv6Network('2001:db8::/32'),
        IPv6Network('2001:10::/28'),
        IPv6Network('fc00::/7'),
        IPv6Network('fe80::/10'),
    ]

    # Ranges checked by is_reserved (any membership counts).
    _reserved_networks = [
        IPv6Network('::/8'), IPv6Network('100::/8'),
        IPv6Network('200::/7'), IPv6Network('400::/6'),
        IPv6Network('800::/5'), IPv6Network('1000::/4'),
        IPv6Network('4000::/3'), IPv6Network('6000::/3'),
        IPv6Network('8000::/3'), IPv6Network('A000::/3'),
        IPv6Network('C000::/3'), IPv6Network('E000::/4'),
        IPv6Network('F000::/5'), IPv6Network('F800::/6'),
        IPv6Network('FE00::/9'),
    ]

    # Deprecated site-local block checked by is_site_local (RFC 3879,
    # per the is_site_local docstring above).
    _sitelocal_network = IPv6Network('fec0::/10')


# Wire the constant registry into the address class.
IPv6Address._constants = _IPv6Constants
from __future__ import absolute_import, division, unicode_literals

from genshi.core import QName
from genshi.core import START, END, XML_NAMESPACE, DOCTYPE, TEXT
from genshi.core import START_NS, END_NS, START_CDATA, END_CDATA, PI, COMMENT

from . import base

from ..constants import voidElements, namespaces


class TreeWalker(base.TreeWalker):
    """Tree walker that translates a genshi event stream into
    html5lib walker tokens.
    """

    def __iter__(self):
        # Buffer the events so we can pass in the following one
        previous = None
        for event in self.tree:
            if previous is not None:
                for token in self.tokens(previous, event):
                    yield token
            previous = event

        # Don't forget the final event!
        if previous is not None:
            for token in self.tokens(previous, None):
                yield token

    def tokens(self, event, next):
        """Yield the html5lib token(s) for one genshi event.

        ``event`` is the (kind, data, pos) tuple to translate; ``next``
        is the following event (or None at end of stream), used to
        detect whether a START element is immediately closed.
        """
        kind, data, _ = event
        if kind == START:
            tag, attribs = data
            name = tag.localname
            namespace = tag.namespace
            # Re-key the attributes as (namespace, localname) pairs, the
            # form the base walker's token builders expect.
            converted_attribs = {}
            for k, v in attribs:
                if isinstance(k, QName):
                    converted_attribs[(k.namespace, k.localname)] = v
                else:
                    converted_attribs[(None, k)] = v

            if namespace == namespaces["html"] and name in voidElements:
                # Void elements become a single emptyTag token; the last
                # argument flags whether a matching END event is missing
                # from the stream (i.e. the element has unexpected
                # content).
                for token in self.emptyTag(namespace, name,
                                           converted_attribs,
                                           not next or next[0] != END
                                           or next[1] != tag):
                    yield token
            else:
                yield self.startTag(namespace, name, converted_attribs)

        elif kind == END:
            name = data.localname
            namespace = data.namespace
            # END events for void HTML elements were already consumed by
            # the emptyTag branch above, so skip them here.
            if namespace != namespaces["html"] or name not in voidElements:
                yield self.endTag(namespace, name)

        elif kind == COMMENT:
            yield self.comment(data)

        elif kind == TEXT:
            for token in self.text(data):
                yield token

        elif kind == DOCTYPE:
            yield self.doctype(*data)

        elif kind in (XML_NAMESPACE, DOCTYPE, START_NS, END_NS,
                      START_CDATA, END_CDATA, PI):
            # Structural/namespace events with no html5lib equivalent are
            # deliberately dropped. NOTE(review): DOCTYPE here is
            # unreachable (handled by the branch above), and
            # XML_NAMESPACE looks like a QName constant rather than an
            # event kind — confirm against genshi.core.
            pass

        else:
            yield self.unknown(kind)
from __future__ import absolute_import, division, unicode_literals

from pip._vendor.six import text_type

from lxml import etree
from ..treebuilders.etree import tag_regexp

from . import base

from .. import _ihatexml


def ensure_str(s):
    """Return ``s`` as text: pass through None/text, decode bytes as ASCII."""
    if s is None:
        return None
    elif isinstance(s, text_type):
        return s
    else:
        return s.decode("ascii", "strict")


class Root(object):
    """Wrapper presenting an lxml document (or bare element) as a root node.

    lxml has no first-class document node, so this synthesizes one: its
    ``children`` list holds an optional Doctype plus the root element and
    any of its preceding/following siblings (comments, PIs).
    """

    def __init__(self, et):
        self.elementtree = et
        self.children = []

        try:
            if et.docinfo.internalDTD:
                self.children.append(Doctype(self,
                                             ensure_str(et.docinfo.root_name),
                                             ensure_str(et.docinfo.public_id),
                                             ensure_str(et.docinfo.system_url)))
        except AttributeError:
            # ``et`` is a bare element without docinfo
            pass

        try:
            node = et.getroot()
        except AttributeError:
            node = et

        # Walk back to the first top-level sibling, then collect all of
        # them in document order.
        while node.getprevious() is not None:
            node = node.getprevious()
        while node is not None:
            self.children.append(node)
            node = node.getnext()

        self.text = None
        self.tail = None

    def __getitem__(self, key):
        return self.children[key]

    def getnext(self):
        return None

    def __len__(self):
        return 1


class Doctype(object):
    """Synthetic doctype node attached to a ``Root``."""

    def __init__(self, root_node, name, public_id, system_id):
        self.root_node = root_node
        self.name = name
        self.public_id = public_id
        self.system_id = system_id

        self.text = None
        self.tail = None

    def getnext(self):
        # The doctype is always children[0]; its sibling is children[1].
        return self.root_node.children[1]


class FragmentRoot(Root):
    """Root wrapper for a fragment given as a list of children."""

    def __init__(self, children):
        self.children = [FragmentWrapper(self, child) for child in children]
        self.text = self.tail = None

    def getnext(self):
        return None


class FragmentWrapper(object):
    """Wraps a fragment child (element or string) with sibling navigation."""

    def __init__(self, fragment_root, obj):
        self.root_node = fragment_root
        self.obj = obj
        if hasattr(self.obj, 'text'):
            self.text = ensure_str(self.obj.text)
        else:
            self.text = None
        if hasattr(self.obj, 'tail'):
            self.tail = ensure_str(self.obj.tail)
        else:
            self.tail = None

    def __getattr__(self, name):
        # Delegate everything else to the wrapped object.
        return getattr(self.obj, name)

    def getnext(self):
        siblings = self.root_node.children
        idx = siblings.index(self)
        if idx < len(siblings) - 1:
            return siblings[idx + 1]
        else:
            return None

    def __getitem__(self, key):
        return self.obj[key]

    def __bool__(self):
        return bool(self.obj)

    def getparent(self):
        return None

    def __str__(self):
        return str(self.obj)

    def __unicode__(self):
        return str(self.obj)

    def __len__(self):
        return len(self.obj)


class TreeWalker(base.NonRecursiveTreeWalker):
    """Optimized walker for lxml trees.

    Text nodes have no identity in lxml (they are ``.text``/``.tail``
    attributes), so they are represented as ``(element, "text"|"tail")``
    tuples throughout the traversal callbacks.
    """

    def __init__(self, tree):
        # pylint:disable=redefined-variable-type
        if isinstance(tree, list):
            # Fragment: remember the top-level children so getParentNode
            # can report None for them.
            self.fragmentChildren = set(tree)
            tree = FragmentRoot(tree)
        else:
            self.fragmentChildren = set()
            tree = Root(tree)
        base.NonRecursiveTreeWalker.__init__(self, tree)
        self.filter = _ihatexml.InfosetFilter()

    def getNodeDetails(self, node):
        if isinstance(node, tuple):  # Text node
            node, key = node
            assert key in ("text", "tail"), "Text nodes are text or tail, found %s" % key
            return base.TEXT, ensure_str(getattr(node, key))

        elif isinstance(node, Root):
            return (base.DOCUMENT,)

        elif isinstance(node, Doctype):
            return base.DOCTYPE, node.name, node.public_id, node.system_id

        elif isinstance(node, FragmentWrapper) and not hasattr(node, "tag"):
            # Bare string in a fragment
            return base.TEXT, ensure_str(node.obj)

        elif node.tag == etree.Comment:
            return base.COMMENT, ensure_str(node.text)

        elif node.tag == etree.Entity:
            return base.ENTITY, ensure_str(node.text)[1:-1]  # strip &;

        else:
            # This is assumed to be an ordinary element
            match = tag_regexp.match(ensure_str(node.tag))
            if match:
                namespace, tag = match.groups()
            else:
                namespace = None
                tag = ensure_str(node.tag)
            attrs = {}
            for name, value in list(node.attrib.items()):
                name = ensure_str(name)
                value = ensure_str(value)
                match = tag_regexp.match(name)
                if match:
                    attrs[(match.group(1), match.group(2))] = value
                else:
                    attrs[(None, name)] = value
            return (base.ELEMENT, namespace, self.filter.fromXmlName(tag),
                    attrs, len(node) > 0 or node.text)

    def getFirstChild(self, node):
        assert not isinstance(node, tuple), "Text nodes have no children"

        assert len(node) or node.text, "Node has no children"
        # Leading text comes before the first child element.
        if node.text:
            return (node, "text")
        else:
            return node[0]

    def getNextSibling(self, node):
        if isinstance(node, tuple):  # Text node
            node, key = node
            assert key in ("text", "tail"), "Text nodes are text or tail, found %s" % key
            if key == "text":
                # XXX: we cannot use a "bool(node) and node[0] or None" construct here
                # because node[0] might evaluate to False if it has no child element
                if len(node):
                    return node[0]
                else:
                    return None
            else:  # tail
                return node.getnext()

        # Element: its "next sibling" is its own tail text, if any.
        return (node, "tail") if node.tail else node.getnext()

    def getParentNode(self, node):
        if isinstance(node, tuple):  # Text node
            node, key = node
            assert key in ("text", "tail"), "Text nodes are text or tail, found %s" % key
            if key == "text":
                return node
            # else: fallback to "normal" processing
        elif node in self.fragmentChildren:
            return None

        return node.getparent()
from __future__ import absolute_import, division, unicode_literals

try:
    from collections import OrderedDict
except ImportError:
    try:
        from ordereddict import OrderedDict
    except ImportError:
        # Last resort: attribute ordering is lost, but the walker works.
        OrderedDict = dict

import re

from pip._vendor.six import string_types

from . import base
from .._utils import moduleFactoryFactory

# Splits a Clark-notation tag "{namespace}localname" into its two parts.
tag_regexp = re.compile("{([^}]*)}(.*)")


def getETreeBuilder(ElementTreeImplementation):
    """Build a TreeWalker bound to one ElementTree implementation.

    Returns ``locals()`` so ``moduleFactoryFactory`` can expose the
    nested class as a module-like namespace.
    """
    ElementTree = ElementTreeImplementation
    # Comment elements have a callable as their .tag; capture it once.
    ElementTreeCommentType = ElementTree.Comment("asd").tag

    class TreeWalker(base.NonRecursiveTreeWalker):  # pylint:disable=unused-variable
        """Given the particular ElementTree representation, this implementation,
        to avoid using recursion, returns "nodes" as tuples with the following
        content:

        1. The current element

        2. The index of the element relative to its parent

        3. A stack of ancestor elements

        4. A flag "text", "tail" or None to indicate if the current node is a
           text node; either the text or tail of the current element (1)
        """
        def getNodeDetails(self, node):
            if isinstance(node, tuple):  # It might be the root Element
                elt, _, _, flag = node
                if flag in ("text", "tail"):
                    return base.TEXT, getattr(elt, flag)
                else:
                    node = elt

            if not(hasattr(node, "tag")):
                # An ElementTree instance rather than an Element.
                node = node.getroot()

            if node.tag in ("DOCUMENT_ROOT", "DOCUMENT_FRAGMENT"):
                return (base.DOCUMENT,)

            elif node.tag == "<!DOCTYPE>":
                return (base.DOCTYPE, node.text,
                        node.get("publicId"), node.get("systemId"))

            elif node.tag == ElementTreeCommentType:
                return base.COMMENT, node.text

            else:
                assert isinstance(node.tag, string_types), type(node.tag)
                # This is assumed to be an ordinary element
                match = tag_regexp.match(node.tag)
                if match:
                    namespace, tag = match.groups()
                else:
                    namespace = None
                    tag = node.tag
                attrs = OrderedDict()
                for name, value in list(node.attrib.items()):
                    match = tag_regexp.match(name)
                    if match:
                        attrs[(match.group(1), match.group(2))] = value
                    else:
                        attrs[(None, name)] = value
                # hasChildren is truthy if there are child elements OR text.
                return (base.ELEMENT, namespace, tag,
                        attrs, len(node) or node.text)

        def getFirstChild(self, node):
            if isinstance(node, tuple):
                element, key, parents, flag = node
            else:
                element, key, parents, flag = node, None, [], None

            if flag in ("text", "tail"):
                # Text nodes have no children.
                return None
            else:
                if element.text:
                    # Leading text precedes the first child element.
                    return element, key, parents, "text"
                elif len(element):
                    parents.append(element)
                    return element[0], 0, parents, None
                else:
                    return None

        def getNextSibling(self, node):
            if isinstance(node, tuple):
                element, key, parents, flag = node
            else:
                return None

            if flag == "text":
                # After the leading text comes the first child element.
                if len(element):
                    parents.append(element)
                    return element[0], 0, parents, None
                else:
                    return None
            else:
                # An element's tail text is its immediate next sibling.
                if element.tail and flag != "tail":
                    return element, key, parents, "tail"
                elif key < len(parents[-1]) - 1:
                    return parents[-1][key + 1], key + 1, parents, None
                else:
                    return None

        def getParentNode(self, node):
            if isinstance(node, tuple):
                element, key, parents, flag = node
            else:
                return None

            if flag == "text":
                if not parents:
                    return element
                else:
                    return element, key, parents, None
            else:
                parent = parents.pop()
                if not parents:
                    return parent
                else:
                    # Recompute the parent's index within the grandparent.
                    assert list(parents[-1]).count(parent) == 1
                    return parent, list(parents[-1]).index(parent), parents, None

    return locals()


getETreeModule = moduleFactoryFactory(getETreeBuilder)
from __future__ import absolute_import, division, unicode_literals

from xml.dom import Node

from . import base


class TreeWalker(base.NonRecursiveTreeWalker):
    """Tree walker over an ``xml.dom`` DOM tree (e.g. ``xml.dom.minidom``).

    Navigation delegates directly to the DOM node pointers; only
    ``getNodeDetails`` does real work, translating each DOM node type
    into the walker's tuple representation.
    """

    def getNodeDetails(self, node):
        """Classify ``node`` and return the (token-type, ...) tuple for it."""
        kind = node.nodeType

        if kind == Node.DOCUMENT_TYPE_NODE:
            return base.DOCTYPE, node.name, node.publicId, node.systemId

        if kind in (Node.TEXT_NODE, Node.CDATA_SECTION_NODE):
            return base.TEXT, node.nodeValue

        if kind == Node.ELEMENT_NODE:
            # Re-key the attributes as (namespaceURI, localName) pairs,
            # falling back to (None, name) for non-namespaced attributes.
            attrs = {}
            for attr_name in list(node.attributes.keys()):
                attr_node = node.getAttributeNode(attr_name)
                if attr_node.namespaceURI:
                    attrs[(attr_node.namespaceURI, attr_node.localName)] = attr_node.value
                else:
                    attrs[(None, attr_node.name)] = attr_node.value
            return (base.ELEMENT, node.namespaceURI, node.nodeName,
                    attrs, node.hasChildNodes())

        if kind == Node.COMMENT_NODE:
            return base.COMMENT, node.nodeValue

        if kind in (Node.DOCUMENT_NODE, Node.DOCUMENT_FRAGMENT_NODE):
            return (base.DOCUMENT,)

        return base.UNKNOWN, kind

    def getFirstChild(self, node):
        return node.firstChild

    def getNextSibling(self, node):
        return node.nextSibling

    def getParentNode(self, node):
        return node.parentNode
from __future__ import absolute_import, division, unicode_literals

from xml.dom import Node
from ..constants import namespaces, voidElements, spaceCharacters

__all__ = ["DOCUMENT", "DOCTYPE", "TEXT", "ELEMENT", "COMMENT", "ENTITY", "UNKNOWN",
           "TreeWalker", "NonRecursiveTreeWalker"]

# Node-kind constants reuse the DOM nodeType codes where one exists.
DOCUMENT = Node.DOCUMENT_NODE
DOCTYPE = Node.DOCUMENT_TYPE_NODE
TEXT = Node.TEXT_NODE
ELEMENT = Node.ELEMENT_NODE
COMMENT = Node.COMMENT_NODE
ENTITY = Node.ENTITY_NODE
UNKNOWN = "<#UNKNOWN#>"

# Rebind the imported frozenset of space characters as one string,
# suitable for str.lstrip/rstrip below.
spaceCharacters = "".join(spaceCharacters)


class TreeWalker(object):
    """Base class for all tree walkers: a factory of html5lib token dicts.

    Subclasses implement ``__iter__`` to traverse a concrete tree type
    and use the methods below to emit tokens.
    """

    def __init__(self, tree):
        self.tree = tree

    def __iter__(self):
        raise NotImplementedError

    def error(self, msg):
        """Return a SerializeError token carrying ``msg``."""
        return {"type": "SerializeError", "data": msg}

    def emptyTag(self, namespace, name, attrs, hasChildren=False):
        """Yield an EmptyTag token, plus an error if the void element has children."""
        yield {"type": "EmptyTag", "name": name,
               "namespace": namespace,
               "data": attrs}
        if hasChildren:
            yield self.error("Void element has children")

    def startTag(self, namespace, name, attrs):
        return {"type": "StartTag",
                "name": name,
                "namespace": namespace,
                "data": attrs}

    def endTag(self, namespace, name):
        return {"type": "EndTag",
                "name": name,
                "namespace": namespace}

    def text(self, data):
        """Split ``data`` into SpaceCharacters / Characters / SpaceCharacters tokens.

        Leading and trailing whitespace runs are emitted separately from
        the non-space middle, skipping any empty pieces.
        """
        data = data  # no-op assignment, kept as-is
        middle = data.lstrip(spaceCharacters)
        left = data[:len(data) - len(middle)]
        if left:
            yield {"type": "SpaceCharacters", "data": left}
        data = middle
        middle = data.rstrip(spaceCharacters)
        right = data[len(middle):]
        if middle:
            yield {"type": "Characters", "data": middle}
        if right:
            yield {"type": "SpaceCharacters", "data": right}

    def comment(self, data):
        return {"type": "Comment", "data": data}

    def doctype(self, name, publicId=None, systemId=None):
        return {"type": "Doctype",
                "name": name,
                "publicId": publicId,
                "systemId": systemId}

    def entity(self, name):
        return {"type": "Entity", "name": name}

    def unknown(self, nodeType):
        return self.error("Unknown node type: " + nodeType)


class NonRecursiveTreeWalker(TreeWalker):
    """Iterative walker driven by four navigation callbacks.

    Subclasses supply getNodeDetails / getFirstChild / getNextSibling /
    getParentNode; ``__iter__`` performs a depth-first traversal without
    recursion.
    """

    def getNodeDetails(self, node):
        raise NotImplementedError

    def getFirstChild(self, node):
        raise NotImplementedError

    def getNextSibling(self, node):
        raise NotImplementedError

    def getParentNode(self, node):
        raise NotImplementedError

    def __iter__(self):
        currentNode = self.tree
        while currentNode is not None:
            details = self.getNodeDetails(currentNode)
            # NOTE: local name ``type`` shadows the builtin; kept as-is.
            type, details = details[0], details[1:]
            hasChildren = False

            if type == DOCTYPE:
                yield self.doctype(*details)

            elif type == TEXT:
                for token in self.text(*details):
                    yield token

            elif type == ELEMENT:
                namespace, name, attributes, hasChildren = details
                if (not namespace or namespace == namespaces["html"]) and name in voidElements:
                    # Void HTML elements become EmptyTag; never descend.
                    for token in self.emptyTag(namespace, name, attributes,
                                               hasChildren):
                        yield token
                    hasChildren = False
                else:
                    yield self.startTag(namespace, name, attributes)

            elif type == COMMENT:
                yield self.comment(details[0])

            elif type == ENTITY:
                yield self.entity(details[0])

            elif type == DOCUMENT:
                hasChildren = True

            else:
                yield self.unknown(details[0])

            if hasChildren:
                firstChild = self.getFirstChild(currentNode)
            else:
                firstChild = None

            if firstChild is not None:
                # Descend.
                currentNode = firstChild
            else:
                # Emit EndTags while climbing back up, until a next
                # sibling is found or the root is reached.
                while currentNode is not None:
                    details = self.getNodeDetails(currentNode)
                    type, details = details[0], details[1:]
                    if type == ELEMENT:
                        namespace, name, attributes, hasChildren = details
                        if (namespace and namespace != namespaces["html"]) or name not in voidElements:
                            yield self.endTag(namespace, name)
                    if self.tree is currentNode:
                        currentNode = None
                        break
                    nextSibling = self.getNextSibling(currentNode)
                    if nextSibling is not None:
                        currentNode = nextSibling
                        break
                    else:
                        currentNode = self.getParentNode(currentNode)
"""A collection of modules for iterating through different kinds oftree, generating tokens identical to those produced by the tokenizermodule.To create a tree walker for a new type of tree, you need to doimplement a tree walker object (called TreeWalker by convention) thatimplements a 'serialize' method taking a tree as sole argument andreturning an iterator generating tokens."""from __future__ import absolute_import, division, unicode_literalsfrom .. import constantsfrom .._utils import default_etree__all__ = ["getTreeWalker", "pprint", "dom", "etree", "genshi", "etree_lxml"]treeWalkerCache = {}def getTreeWalker(treeType, implementation=None, **kwargs):"""Get a TreeWalker class for various types of tree with built-in supportArgs:treeType (str): the name of the tree type required (case-insensitive).Supported values are:- "dom": The xml.dom.minidom DOM implementation- "etree": A generic walker for tree implementations exposing anelementtree-like interface (known to work withElementTree, cElementTree and lxml.etree).- "lxml": Optimized walker for lxml.etree- "genshi": a Genshi streamImplementation: A module implementing the tree type e.g.xml.etree.ElementTree or cElementTree (Currently applies to the"etree" tree type only)."""treeType = treeType.lower()if treeType not in treeWalkerCache:if treeType == "dom":from . import domtreeWalkerCache[treeType] = dom.TreeWalkerelif treeType == "genshi":from . import genshitreeWalkerCache[treeType] = genshi.TreeWalkerelif treeType == "lxml":from . import etree_lxmltreeWalkerCache[treeType] = etree_lxml.TreeWalkerelif treeType == "etree":from . 
import etreeif implementation is None:implementation = default_etree# XXX: NEVER cache here, caching is done in the etree submodulereturn etree.getETreeModule(implementation, **kwargs).TreeWalkerreturn treeWalkerCache.get(treeType)def concatenateCharacterTokens(tokens):pendingCharacters = []for token in tokens:type = token["type"]if type in ("Characters", "SpaceCharacters"):pendingCharacters.append(token["data"])else:if pendingCharacters:yield {"type": "Characters", "data": "".join(pendingCharacters)}pendingCharacters = []yield tokenif pendingCharacters:yield {"type": "Characters", "data": "".join(pendingCharacters)}def pprint(walker):"""Pretty printer for tree walkers"""output = []indent = 0for token in concatenateCharacterTokens(walker):type = token["type"]if type in ("StartTag", "EmptyTag"):# tag nameif token["namespace"] and token["namespace"] != constants.namespaces["html"]:if token["namespace"] in constants.prefixes:ns = constants.prefixes[token["namespace"]]else:ns = token["namespace"]name = "%s %s" % (ns, token["name"])else:name = token["name"]output.append("%s<%s>" % (" " * indent, name))indent += 2# attributes (sorted for consistent ordering)attrs = token["data"]for (namespace, localname), value in sorted(attrs.items()):if namespace:if namespace in constants.prefixes:ns = constants.prefixes[namespace]else:ns = namespacename = "%s %s" % (ns, localname)else:name = localnameoutput.append("%s%s=\"%s\"" % (" " * indent, name, value))# self-closingif type == "EmptyTag":indent -= 2elif type == "EndTag":indent -= 2elif type == "Comment":output.append("%s<!-- %s -->" % (" " * indent, token["data"]))elif type == "Doctype":if token["name"]:if token["publicId"]:output.append("""%s<!DOCTYPE %s "%s" "%s">""" %(" " * indent,token["name"],token["publicId"],token["systemId"] if token["systemId"] else ""))elif token["systemId"]:output.append("""%s<!DOCTYPE %s "" "%s">""" %(" " * indent,token["name"],token["systemId"]))else:output.append("%s<!DOCTYPE %s>" % (" " * 
indent,token["name"]))else:output.append("%s<!DOCTYPE >" % (" " * indent,))elif type == "Characters":output.append("%s\"%s\"" % (" " * indent, token["data"]))elif type == "SpaceCharacters":assert False, "concatenateCharacterTokens should have got rid of all Space tokens"else:raise ValueError("Unknown token type, %s" % type)return "\n".join(output)
"""Module for supporting the lxml.etree library. The idea here is to use as muchof the native library as possible, without using fragile hacks like custom elementnames that break between releases. The downside of this is that we cannot representall possible trees; specifically the following are known to cause problems:Text or comments as siblings of the root elementDocypes with no nameWhen any of these things occur, we emit a DataLossWarning"""from __future__ import absolute_import, division, unicode_literals# pylint:disable=protected-accessimport warningsimport reimport sysfrom . import basefrom ..constants import DataLossWarningfrom .. import constantsfrom . import etree as etree_buildersfrom .. import _ihatexmlimport lxml.etree as etreefullTree = Truetag_regexp = re.compile("{([^}]*)}(.*)")comment_type = etree.Comment("asd").tagclass DocumentType(object):def __init__(self, name, publicId, systemId):self.name = nameself.publicId = publicIdself.systemId = systemIdclass Document(object):def __init__(self):self._elementTree = Noneself._childNodes = []def appendChild(self, element):self._elementTree.getroot().addnext(element._element)def _getChildNodes(self):return self._childNodeschildNodes = property(_getChildNodes)def testSerializer(element):rv = []infosetFilter = _ihatexml.InfosetFilter(preventDoubleDashComments=True)def serializeElement(element, indent=0):if not hasattr(element, "tag"):if hasattr(element, "getroot"):# Full tree caserv.append("#document")if element.docinfo.internalDTD:if not (element.docinfo.public_id orelement.docinfo.system_url):dtd_str = "<!DOCTYPE %s>" % element.docinfo.root_nameelse:dtd_str = """<!DOCTYPE %s "%s" "%s">""" % (element.docinfo.root_name,element.docinfo.public_id,element.docinfo.system_url)rv.append("|%s%s" % (' ' * (indent + 2), dtd_str))next_element = element.getroot()while next_element.getprevious() is not None:next_element = next_element.getprevious()while next_element is not None:serializeElement(next_element, indent + 
2)next_element = next_element.getnext()elif isinstance(element, str) or isinstance(element, bytes):# Text in a fragmentassert isinstance(element, str) or sys.version_info[0] == 2rv.append("|%s\"%s\"" % (' ' * indent, element))else:# Fragment caserv.append("#document-fragment")for next_element in element:serializeElement(next_element, indent + 2)elif element.tag == comment_type:rv.append("|%s<!-- %s -->" % (' ' * indent, element.text))if hasattr(element, "tail") and element.tail:rv.append("|%s\"%s\"" % (' ' * indent, element.tail))else:assert isinstance(element, etree._Element)nsmatch = etree_builders.tag_regexp.match(element.tag)if nsmatch is not None:ns = nsmatch.group(1)tag = nsmatch.group(2)prefix = constants.prefixes[ns]rv.append("|%s<%s %s>" % (' ' * indent, prefix,infosetFilter.fromXmlName(tag)))else:rv.append("|%s<%s>" % (' ' * indent,infosetFilter.fromXmlName(element.tag)))if hasattr(element, "attrib"):attributes = []for name, value in element.attrib.items():nsmatch = tag_regexp.match(name)if nsmatch is not None:ns, name = nsmatch.groups()name = infosetFilter.fromXmlName(name)prefix = constants.prefixes[ns]attr_string = "%s %s" % (prefix, name)else:attr_string = infosetFilter.fromXmlName(name)attributes.append((attr_string, value))for name, value in sorted(attributes):rv.append('|%s%s="%s"' % (' ' * (indent + 2), name, value))if element.text:rv.append("|%s\"%s\"" % (' ' * (indent + 2), element.text))indent += 2for child in element:serializeElement(child, indent)if hasattr(element, "tail") and element.tail:rv.append("|%s\"%s\"" % (' ' * (indent - 2), element.tail))serializeElement(element, 0)return "\n".join(rv)def tostring(element):"""Serialize an element and its child nodes to a string"""rv = []def serializeElement(element):if not hasattr(element, "tag"):if element.docinfo.internalDTD:if element.docinfo.doctype:dtd_str = element.docinfo.doctypeelse:dtd_str = "<!DOCTYPE %s>" % 
element.docinfo.root_namerv.append(dtd_str)serializeElement(element.getroot())elif element.tag == comment_type:rv.append("<!--%s-->" % (element.text,))else:# This is assumed to be an ordinary elementif not element.attrib:rv.append("<%s>" % (element.tag,))else:attr = " ".join(["%s=\"%s\"" % (name, value)for name, value in element.attrib.items()])rv.append("<%s %s>" % (element.tag, attr))if element.text:rv.append(element.text)for child in element:serializeElement(child)rv.append("</%s>" % (element.tag,))if hasattr(element, "tail") and element.tail:rv.append(element.tail)serializeElement(element)return "".join(rv)class TreeBuilder(base.TreeBuilder):documentClass = DocumentdoctypeClass = DocumentTypeelementClass = NonecommentClass = NonefragmentClass = Documentimplementation = etreedef __init__(self, namespaceHTMLElements, fullTree=False):builder = etree_builders.getETreeModule(etree, fullTree=fullTree)infosetFilter = self.infosetFilter = _ihatexml.InfosetFilter(preventDoubleDashComments=True)self.namespaceHTMLElements = namespaceHTMLElementsclass Attributes(dict):def __init__(self, element, value=None):if value is None:value = {}self._element = elementdict.__init__(self, value) # pylint:disable=non-parent-init-calledfor key, value in self.items():if isinstance(key, tuple):name = "{%s}%s" % (key[2], infosetFilter.coerceAttribute(key[1]))else:name = infosetFilter.coerceAttribute(key)self._element._element.attrib[name] = valuedef __setitem__(self, key, value):dict.__setitem__(self, key, value)if isinstance(key, tuple):name = "{%s}%s" % (key[2], infosetFilter.coerceAttribute(key[1]))else:name = infosetFilter.coerceAttribute(key)self._element._element.attrib[name] = valueclass Element(builder.Element):def __init__(self, name, namespace):name = infosetFilter.coerceElement(name)builder.Element.__init__(self, name, namespace=namespace)self._attributes = Attributes(self)def _setName(self, name):self._name = infosetFilter.coerceElement(name)self._element.tag = 
self._getETreeTag(self._name, self._namespace)def _getName(self):return infosetFilter.fromXmlName(self._name)name = property(_getName, _setName)def _getAttributes(self):return self._attributesdef _setAttributes(self, attributes):self._attributes = Attributes(self, attributes)attributes = property(_getAttributes, _setAttributes)def insertText(self, data, insertBefore=None):data = infosetFilter.coerceCharacters(data)builder.Element.insertText(self, data, insertBefore)def appendChild(self, child):builder.Element.appendChild(self, child)class Comment(builder.Comment):def __init__(self, data):data = infosetFilter.coerceComment(data)builder.Comment.__init__(self, data)def _setData(self, data):data = infosetFilter.coerceComment(data)self._element.text = datadef _getData(self):return self._element.textdata = property(_getData, _setData)self.elementClass = Elementself.commentClass = Comment# self.fragmentClass = builder.DocumentFragmentbase.TreeBuilder.__init__(self, namespaceHTMLElements)def reset(self):base.TreeBuilder.reset(self)self.insertComment = self.insertCommentInitialself.initial_comments = []self.doctype = Nonedef testSerializer(self, element):return testSerializer(element)def getDocument(self):if fullTree:return self.document._elementTreeelse:return self.document._elementTree.getroot()def getFragment(self):fragment = []element = self.openElements[0]._elementif element.text:fragment.append(element.text)fragment.extend(list(element))if element.tail:fragment.append(element.tail)return fragmentdef insertDoctype(self, token):name = token["name"]publicId = token["publicId"]systemId = token["systemId"]if not name:warnings.warn("lxml cannot represent empty doctype", DataLossWarning)self.doctype = Noneelse:coercedName = self.infosetFilter.coerceElement(name)if coercedName != name:warnings.warn("lxml cannot represent non-xml doctype", DataLossWarning)doctype = self.doctypeClass(coercedName, publicId, systemId)self.doctype = doctypedef insertCommentInitial(self, data, 
parent=None):assert parent is None or parent is self.documentassert self.document._elementTree is Noneself.initial_comments.append(data)def insertCommentMain(self, data, parent=None):if (parent == self.document andself.document._elementTree.getroot()[-1].tag == comment_type):warnings.warn("lxml cannot represent adjacent comments beyond the root elements", DataLossWarning)super(TreeBuilder, self).insertComment(data, parent)def insertRoot(self, token):"""Create the document root"""# Because of the way libxml2 works, it doesn't seem to be possible to# alter information like the doctype after the tree has been parsed.# Therefore we need to use the built-in parser to create our initial# tree, after which we can add elements like normaldocStr = ""if self.doctype:assert self.doctype.namedocStr += "<!DOCTYPE %s" % self.doctype.nameif (self.doctype.publicId is not None orself.doctype.systemId is not None):docStr += (' PUBLIC "%s" ' %(self.infosetFilter.coercePubid(self.doctype.publicId or "")))if self.doctype.systemId:sysid = self.doctype.systemIdif sysid.find("'") >= 0 and sysid.find('"') >= 0:warnings.warn("DOCTYPE system cannot contain single and double quotes", DataLossWarning)sysid = sysid.replace("'", 'U00027')if sysid.find("'") >= 0:docStr += '"%s"' % sysidelse:docStr += "'%s'" % sysidelse:docStr += "''"docStr += ">"if self.doctype.name != token["name"]:warnings.warn("lxml cannot represent doctype with a different name to the root element", DataLossWarning)docStr += "<THIS_SHOULD_NEVER_APPEAR_PUBLICLY/>"root = etree.fromstring(docStr)# Append the initial comments:for comment_token in self.initial_comments:comment = self.commentClass(comment_token["data"])root.addprevious(comment._element)# Create the root document and add the ElementTree to itself.document = self.documentClass()self.document._elementTree = root.getroottree()# Give the root element the right namename = token["name"]namespace = token.get("namespace", self.defaultNamespace)if namespace is None:etree_tag 
= nameelse:etree_tag = "{%s}%s" % (namespace, name)root.tag = etree_tag# Add the root element to the internal child/open data structuresroot_element = self.elementClass(name, namespace)root_element._element = rootself.document._childNodes.append(root_element)self.openElements.append(root_element)# Reset to the default insert comment functionself.insertComment = self.insertCommentMain
from __future__ import absolute_import, division, unicode_literals
# pylint:disable=protected-access

from pip._vendor.six import text_type

import re

from . import base
from .. import _ihatexml
from .. import constants
from ..constants import namespaces
from .._utils import moduleFactoryFactory

# Splits a Clark-notation tag "{namespace}localname" into its two parts.
tag_regexp = re.compile("{([^}]*)}(.*)")


def getETreeBuilder(ElementTreeImplementation, fullTree=False):
    """Build tree-builder classes bound to one ElementTree implementation.

    Returns ``locals()`` so ``moduleFactoryFactory`` can expose the nested
    classes as a module-like namespace.
    """
    ElementTree = ElementTreeImplementation
    # Comment elements have a callable as their .tag; capture it once.
    ElementTreeCommentType = ElementTree.Comment("asd").tag

    class Element(base.Node):
        """html5lib Node wrapping a single etree Element."""

        def __init__(self, name, namespace=None):
            self._name = name
            self._namespace = namespace
            self._element = ElementTree.Element(self._getETreeTag(name,
                                                                  namespace))
            if namespace is None:
                self.nameTuple = namespaces["html"], self._name
            else:
                self.nameTuple = self._namespace, self._name
            self.parent = None
            self._childNodes = []
            self._flags = []

        def _getETreeTag(self, name, namespace):
            if namespace is None:
                etree_tag = name
            else:
                etree_tag = "{%s}%s" % (namespace, name)
            return etree_tag

        def _setName(self, name):
            self._name = name
            self._element.tag = self._getETreeTag(self._name, self._namespace)

        def _getName(self):
            return self._name

        name = property(_getName, _setName)

        def _setNamespace(self, namespace):
            self._namespace = namespace
            self._element.tag = self._getETreeTag(self._name, self._namespace)

        def _getNamespace(self):
            return self._namespace

        namespace = property(_getNamespace, _setNamespace)

        def _getAttributes(self):
            return self._element.attrib

        def _setAttributes(self, attributes):
            # Delete existing attributes first
            # XXX - there may be a better way to do this...
            for key in list(self._element.attrib.keys()):
                del self._element.attrib[key]
            for key, value in attributes.items():
                if isinstance(key, tuple):
                    name = "{%s}%s" % (key[2], key[1])
                else:
                    name = key
                self._element.set(name, value)

        attributes = property(_getAttributes, _setAttributes)

        def _getChildNodes(self):
            return self._childNodes

        def _setChildNodes(self, value):
            del self._element[:]
            self._childNodes = []
            for element in value:
                self.insertChild(element)

        childNodes = property(_getChildNodes, _setChildNodes)

        def hasContent(self):
            """Return true if the node has children or text"""
            return bool(self._element.text or len(self._element))

        def appendChild(self, node):
            self._childNodes.append(node)
            self._element.append(node._element)
            node.parent = self

        def insertBefore(self, node, refNode):
            index = list(self._element).index(refNode._element)
            self._element.insert(index, node._element)
            node.parent = self

        def removeChild(self, node):
            self._childNodes.remove(node)
            self._element.remove(node._element)
            node.parent = None

        def insertText(self, data, insertBefore=None):
            # etree stores text as .text/.tail attributes, so inserting
            # text means appending to the appropriate attribute.
            if not(len(self._element)):
                if not self._element.text:
                    self._element.text = ""
                self._element.text += data
            elif insertBefore is None:
                # Insert the text as the tail of the last child element
                if not self._element[-1].tail:
                    self._element[-1].tail = ""
                self._element[-1].tail += data
            else:
                # Insert the text before the specified node
                children = list(self._element)
                index = children.index(insertBefore._element)
                if index > 0:
                    if not self._element[index - 1].tail:
                        self._element[index - 1].tail = ""
                    self._element[index - 1].tail += data
                else:
                    if not self._element.text:
                        self._element.text = ""
                    self._element.text += data

        def cloneNode(self):
            # Shallow clone: copies name, namespace and attributes only.
            element = type(self)(self.name, self.namespace)
            for name, value in self.attributes.items():
                element.attributes[name] = value
            return element

        def reparentChildren(self, newParent):
            if newParent.childNodes:
                # NOTE(review): this += assumes both the last child's tail
                # and self's text are non-None here — TODO confirm upstream.
                newParent.childNodes[-1]._element.tail += self._element.text
            else:
                if not newParent._element.text:
                    newParent._element.text = ""
                if self._element.text is not None:
                    newParent._element.text += self._element.text
            self._element.text = ""
            base.Node.reparentChildren(self, newParent)

    class Comment(Element):
        def __init__(self, data):
            # Use the superclass constructor to set all properties on the
            # wrapper element
            self._element = ElementTree.Comment(data)
            self.parent = None
            self._childNodes = []
            self._flags = []

        def _getData(self):
            return self._element.text

        def _setData(self, value):
            self._element.text = value

        data = property(_getData, _setData)

    class DocumentType(Element):
        # Doctype is faked as an element with the magic tag "<!DOCTYPE>".
        def __init__(self, name, publicId, systemId):
            Element.__init__(self, "<!DOCTYPE>")
            self._element.text = name
            self.publicId = publicId
            self.systemId = systemId

        def _getPublicId(self):
            return self._element.get("publicId", "")

        def _setPublicId(self, value):
            if value is not None:
                self._element.set("publicId", value)

        publicId = property(_getPublicId, _setPublicId)

        def _getSystemId(self):
            return self._element.get("systemId", "")

        def _setSystemId(self, value):
            if value is not None:
                self._element.set("systemId", value)

        systemId = property(_getSystemId, _setSystemId)

    class Document(Element):
        # The document node is faked as an element with a magic tag.
        def __init__(self):
            Element.__init__(self, "DOCUMENT_ROOT")

    class DocumentFragment(Element):
        def __init__(self):
            Element.__init__(self, "DOCUMENT_FRAGMENT")

    def testSerializer(element):
        """Serialize ``element`` in the html5lib test-suite line format."""
        rv = []

        def serializeElement(element, indent=0):
            if not(hasattr(element, "tag")):
                element = element.getroot()
            if element.tag == "<!DOCTYPE>":
                if element.get("publicId") or element.get("systemId"):
                    publicId = element.get("publicId") or ""
                    systemId = element.get("systemId") or ""
                    rv.append("""<!DOCTYPE %s "%s" "%s">""" %
                              (element.text, publicId, systemId))
                else:
                    rv.append("<!DOCTYPE %s>" % (element.text,))
            elif element.tag == "DOCUMENT_ROOT":
                rv.append("#document")
                if element.text is not None:
                    rv.append("|%s\"%s\"" % (' ' * (indent + 2), element.text))
                if element.tail is not None:
                    raise TypeError("Document node cannot have tail")
                if hasattr(element, "attrib") and len(element.attrib):
                    raise TypeError("Document node cannot have attributes")
            elif element.tag == ElementTreeCommentType:
                rv.append("|%s<!-- %s -->" % (' ' * indent, element.text))
            else:
                assert isinstance(element.tag, text_type), \
                    "Expected unicode, got %s, %s" % (type(element.tag), element.tag)
                nsmatch = tag_regexp.match(element.tag)

                if nsmatch is None:
                    name = element.tag
                else:
                    ns, name = nsmatch.groups()
                    prefix = constants.prefixes[ns]
                    name = "%s %s" % (prefix, name)
                rv.append("|%s<%s>" % (' ' * indent, name))

                if hasattr(element, "attrib"):
                    attributes = []
                    for name, value in element.attrib.items():
                        nsmatch = tag_regexp.match(name)
                        if nsmatch is not None:
                            ns, name = nsmatch.groups()
                            prefix = constants.prefixes[ns]
                            attr_string = "%s %s" % (prefix, name)
                        else:
                            attr_string = name
                        attributes.append((attr_string, value))

                    for name, value in sorted(attributes):
                        rv.append('|%s%s="%s"' % (' ' * (indent + 2), name, value))
                if element.text:
                    rv.append("|%s\"%s\"" % (' ' * (indent + 2), element.text))
            indent += 2
            for child in element:
                serializeElement(child, indent)
            if element.tail:
                rv.append("|%s\"%s\"" % (' ' * (indent - 2), element.tail))
        serializeElement(element, 0)

        return "\n".join(rv)

    def tostring(element):  # pylint:disable=unused-variable
        """Serialize an element and its child nodes to a string"""
        rv = []
        filter = _ihatexml.InfosetFilter()

        def serializeElement(element):
            if isinstance(element, ElementTree.ElementTree):
                element = element.getroot()

            if element.tag == "<!DOCTYPE>":
                if element.get("publicId") or element.get("systemId"):
                    publicId = element.get("publicId") or ""
                    systemId = element.get("systemId") or ""
                    rv.append("""<!DOCTYPE %s PUBLIC "%s" "%s">""" %
                              (element.text, publicId, systemId))
                else:
                    rv.append("<!DOCTYPE %s>" % (element.text,))
            elif element.tag == "DOCUMENT_ROOT":
                if element.text is not None:
                    rv.append(element.text)
                if element.tail is not None:
                    raise TypeError("Document node cannot have tail")
                if hasattr(element, "attrib") and len(element.attrib):
                    raise TypeError("Document node cannot have attributes")

                for child in element:
                    serializeElement(child)

            elif element.tag == ElementTreeCommentType:
                rv.append("<!--%s-->" % (element.text,))
            else:
                # This is assumed to be an ordinary element.
                # NOTE(review): the no-attribute branch filters the tag via
                # fromXmlName, but the attribute branch and the closing tag
                # use the raw element.tag — looks inconsistent; confirm
                # against upstream before relying on it.
                if not element.attrib:
                    rv.append("<%s>" % (filter.fromXmlName(element.tag),))
                else:
                    attr = " ".join(["%s=\"%s\"" % (
                        filter.fromXmlName(name), value)
                        for name, value in element.attrib.items()])
                    rv.append("<%s %s>" % (element.tag, attr))
                if element.text:
                    rv.append(element.text)

                for child in element:
                    serializeElement(child)

                rv.append("</%s>" % (element.tag,))

            if element.tail:
                rv.append(element.tail)

        serializeElement(element)

        return "".join(rv)

    class TreeBuilder(base.TreeBuilder):  # pylint:disable=unused-variable
        """Tree builder producing plain etree trees."""
        documentClass = Document
        doctypeClass = DocumentType
        elementClass = Element
        commentClass = Comment
        fragmentClass = DocumentFragment
        implementation = ElementTreeImplementation

        def testSerializer(self, element):
            return testSerializer(element)

        def getDocument(self):
            if fullTree:
                return self.document._element
            else:
                # Return only the <html> element, honouring the namespace
                # setting.
                if self.defaultNamespace is not None:
                    return self.document._element.find(
                        "{%s}html" % self.defaultNamespace)
                else:
                    return self.document._element.find("html")

        def getFragment(self):
            return base.TreeBuilder.getFragment(self)._element

    return locals()


getETreeModule = moduleFactoryFactory(getETreeBuilder)
from __future__ import absolute_import, division, unicode_literalsfrom collections import MutableMappingfrom xml.dom import minidom, Nodeimport weakreffrom . import basefrom .. import constantsfrom ..constants import namespacesfrom .._utils import moduleFactoryFactorydef getDomBuilder(DomImplementation):Dom = DomImplementationclass AttrList(MutableMapping):def __init__(self, element):self.element = elementdef __iter__(self):return iter(self.element.attributes.keys())def __setitem__(self, name, value):if isinstance(name, tuple):raise NotImplementedErrorelse:attr = self.element.ownerDocument.createAttribute(name)attr.value = valueself.element.attributes[name] = attrdef __len__(self):return len(self.element.attributes)def items(self):return list(self.element.attributes.items())def values(self):return list(self.element.attributes.values())def __getitem__(self, name):if isinstance(name, tuple):raise NotImplementedErrorelse:return self.element.attributes[name].valuedef __delitem__(self, name):if isinstance(name, tuple):raise NotImplementedErrorelse:del self.element.attributes[name]class NodeBuilder(base.Node):def __init__(self, element):base.Node.__init__(self, element.nodeName)self.element = elementnamespace = property(lambda self: hasattr(self.element, "namespaceURI") andself.element.namespaceURI or None)def appendChild(self, node):node.parent = selfself.element.appendChild(node.element)def insertText(self, data, insertBefore=None):text = self.element.ownerDocument.createTextNode(data)if insertBefore:self.element.insertBefore(text, insertBefore.element)else:self.element.appendChild(text)def insertBefore(self, node, refNode):self.element.insertBefore(node.element, refNode.element)node.parent = selfdef removeChild(self, node):if node.element.parentNode == self.element:self.element.removeChild(node.element)node.parent = Nonedef reparentChildren(self, newParent):while self.element.hasChildNodes():child = 
self.element.firstChildself.element.removeChild(child)newParent.element.appendChild(child)self.childNodes = []def getAttributes(self):return AttrList(self.element)def setAttributes(self, attributes):if attributes:for name, value in list(attributes.items()):if isinstance(name, tuple):if name[0] is not None:qualifiedName = (name[0] + ":" + name[1])else:qualifiedName = name[1]self.element.setAttributeNS(name[2], qualifiedName,value)else:self.element.setAttribute(name, value)attributes = property(getAttributes, setAttributes)def cloneNode(self):return NodeBuilder(self.element.cloneNode(False))def hasContent(self):return self.element.hasChildNodes()def getNameTuple(self):if self.namespace is None:return namespaces["html"], self.nameelse:return self.namespace, self.namenameTuple = property(getNameTuple)class TreeBuilder(base.TreeBuilder): # pylint:disable=unused-variabledef documentClass(self):self.dom = Dom.getDOMImplementation().createDocument(None, None, None)return weakref.proxy(self)def insertDoctype(self, token):name = token["name"]publicId = token["publicId"]systemId = token["systemId"]domimpl = Dom.getDOMImplementation()doctype = domimpl.createDocumentType(name, publicId, systemId)self.document.appendChild(NodeBuilder(doctype))if Dom == minidom:doctype.ownerDocument = self.domdef elementClass(self, name, namespace=None):if namespace is None and self.defaultNamespace is None:node = self.dom.createElement(name)else:node = self.dom.createElementNS(namespace, name)return NodeBuilder(node)def commentClass(self, data):return NodeBuilder(self.dom.createComment(data))def fragmentClass(self):return NodeBuilder(self.dom.createDocumentFragment())def appendChild(self, node):self.dom.appendChild(node.element)def testSerializer(self, element):return testSerializer(element)def getDocument(self):return self.domdef getFragment(self):return base.TreeBuilder.getFragment(self).elementdef insertText(self, data, parent=None):data = dataif parent != 
self:base.TreeBuilder.insertText(self, data, parent)else:# HACK: allow text nodes as children of the document nodeif hasattr(self.dom, '_child_node_types'):# pylint:disable=protected-accessif Node.TEXT_NODE not in self.dom._child_node_types:self.dom._child_node_types = list(self.dom._child_node_types)self.dom._child_node_types.append(Node.TEXT_NODE)self.dom.appendChild(self.dom.createTextNode(data))implementation = DomImplementationname = Nonedef testSerializer(element):element.normalize()rv = []def serializeElement(element, indent=0):if element.nodeType == Node.DOCUMENT_TYPE_NODE:if element.name:if element.publicId or element.systemId:publicId = element.publicId or ""systemId = element.systemId or ""rv.append("""|%s<!DOCTYPE %s "%s" "%s">""" %(' ' * indent, element.name, publicId, systemId))else:rv.append("|%s<!DOCTYPE %s>" % (' ' * indent, element.name))else:rv.append("|%s<!DOCTYPE >" % (' ' * indent,))elif element.nodeType == Node.DOCUMENT_NODE:rv.append("#document")elif element.nodeType == Node.DOCUMENT_FRAGMENT_NODE:rv.append("#document-fragment")elif element.nodeType == Node.COMMENT_NODE:rv.append("|%s<!-- %s -->" % (' ' * indent, element.nodeValue))elif element.nodeType == Node.TEXT_NODE:rv.append("|%s\"%s\"" % (' ' * indent, element.nodeValue))else:if (hasattr(element, "namespaceURI") andelement.namespaceURI is not None):name = "%s %s" % (constants.prefixes[element.namespaceURI],element.nodeName)else:name = element.nodeNamerv.append("|%s<%s>" % (' ' * indent, name))if element.hasAttributes():attributes = []for i in range(len(element.attributes)):attr = element.attributes.item(i)name = attr.nodeNamevalue = attr.valuens = attr.namespaceURIif ns:name = "%s %s" % (constants.prefixes[ns], attr.localName)else:name = attr.nodeNameattributes.append((name, value))for name, value in sorted(attributes):rv.append('|%s%s="%s"' % (' ' * (indent + 2), name, value))indent += 2for child in element.childNodes:serializeElement(child, indent)serializeElement(element, 0)return 
"\n".join(rv)return locals()# The actual means to get a module!getDomModule = moduleFactoryFactory(getDomBuilder)
from __future__ import absolute_import, division, unicode_literals
from pip._vendor.six import text_type

from ..constants import scopingElements, tableInsertModeElements, namespaces

# The scope markers are inserted when entering object elements,
# marquees, table cells, and table captions, and are used to prevent formatting
# from "leaking" into tables, object elements, and marquees.
Marker = None

# Maps an elementInScope() variant name to a (name-tuple set, invert) pair.
# With invert False, hitting a listed element ends the scope search; with
# invert True, hitting an element *not* in the set ends it.
listElementsMap = {
    None: (frozenset(scopingElements), False),
    "button": (frozenset(scopingElements | {(namespaces["html"], "button")}), False),
    "list": (frozenset(scopingElements | {(namespaces["html"], "ol"),
                                          (namespaces["html"], "ul")}), False),
    "table": (frozenset([(namespaces["html"], "html"),
                         (namespaces["html"], "table")]), False),
    "select": (frozenset([(namespaces["html"], "optgroup"),
                          (namespaces["html"], "option")]), True)
}


class Node(object):
    """Represents an item in the tree."""

    def __init__(self, name):
        """Create a node.

        name - The tag name associated with the node
        parent - The parent of the current node (or None for the document node)
        value - The value of the current node (applies to text nodes and
            comments)
        attributes - a dict holding name, value pairs for attributes of the node
        childNodes - a list of child nodes of the current node. This must
            include all elements but not necessarily other node types
        _flags - A list of miscellaneous flags that can be set on the node
        """
        self.name = name
        self.parent = None
        self.value = None
        self.attributes = {}
        self.childNodes = []
        self._flags = []

    def __str__(self):
        attributesStr = " ".join(["%s=\"%s\"" % (name, value)
                                  for name, value in self.attributes.items()])
        if attributesStr:
            return "<%s %s>" % (self.name, attributesStr)
        else:
            return "<%s>" % (self.name)

    def __repr__(self):
        return "<%s>" % (self.name)

    def appendChild(self, node):
        """Insert node as a child of the current node"""
        raise NotImplementedError

    def insertText(self, data, insertBefore=None):
        """Insert data as text in the current node, positioned before the
        start of node insertBefore or to the end of the node's text.
        """
        raise NotImplementedError

    def insertBefore(self, node, refNode):
        """Insert node as a child of the current node, before refNode in the
        list of child nodes. Raises ValueError if refNode is not a child of
        the current node"""
        raise NotImplementedError

    def removeChild(self, node):
        """Remove node from the children of the current node"""
        raise NotImplementedError

    def reparentChildren(self, newParent):
        """Move all the children of the current node to newParent.
        This is needed so that trees that don't store text as nodes move the
        text in the correct way
        """
        # XXX - should this method be made more general?
        for child in self.childNodes:
            newParent.appendChild(child)
        self.childNodes = []

    def cloneNode(self):
        """Return a shallow copy of the current node i.e. a node with the same
        name and attributes but with no parent or child nodes
        """
        raise NotImplementedError

    def hasContent(self):
        """Return true if the node has children or text, false otherwise"""
        raise NotImplementedError


class ActiveFormattingElements(list):
    """List of active formatting elements implementing the "Noah's Ark"
    clause: at most three equal entries may exist after the last Marker."""

    def append(self, node):
        # Count entries equal to node since the last Marker; once a third
        # one is found, drop it before appending the new node.
        equalCount = 0
        if node != Marker:
            for element in self[::-1]:
                if element == Marker:
                    break
                if self.nodesEqual(element, node):
                    equalCount += 1
                if equalCount == 3:
                    self.remove(element)
                    break
        list.append(self, node)

    def nodesEqual(self, node1, node2):
        # Equal iff both the (namespace, name) tuple and the attribute
        # dicts match.
        return (node1.nameTuple == node2.nameTuple and
                node1.attributes == node2.attributes)


class TreeBuilder(object):
    """Base treebuilder implementation

    documentClass - the class to use for the bottommost node of a document
    elementClass - the class to use for HTML Elements
    commentClass - the class to use for comments
    doctypeClass - the class to use for doctypes
    """
    # pylint:disable=not-callable

    # Document class
    documentClass = None

    # The class to use for creating a node
    elementClass = None

    # The class to use for creating comments
    commentClass = None

    # The class to use for creating doctypes
    doctypeClass = None

    # Fragment class
    fragmentClass = None

    def __init__(self, namespaceHTMLElements):
        if namespaceHTMLElements:
            self.defaultNamespace = "http://www.w3.org/1999/xhtml"
        else:
            self.defaultNamespace = None
        self.reset()

    def reset(self):
        self.openElements = []
        self.activeFormattingElements = ActiveFormattingElements()

        # XXX - rename these to headElement, formElement
        self.headPointer = None
        self.formPointer = None

        self.insertFromTable = False

        self.document = self.documentClass()

    def elementInScope(self, target, variant=None):
        # If we pass a node in we match that. if we pass a string
        # match any node with that name
        exactNode = hasattr(target, "nameTuple")
        if not exactNode:
            if isinstance(target, text_type):
                target = (namespaces["html"], target)
            assert isinstance(target, tuple)

        listElements, invert = listElementsMap[variant]

        for node in reversed(self.openElements):
            if exactNode and node == target:
                return True
            elif not exactNode and node.nameTuple == target:
                return True
            elif (invert ^ (node.nameTuple in listElements)):
                return False

        assert False  # We should never reach this point

    def reconstructActiveFormattingElements(self):
        # Within this algorithm the order of steps described in the
        # specification is not quite the same as the order of steps in the
        # code. It should still do the same though.

        # Step 1: stop the algorithm when there's nothing to do.
        if not self.activeFormattingElements:
            return

        # Step 2 and step 3: we start with the last element. So i is -1.
        i = len(self.activeFormattingElements) - 1
        entry = self.activeFormattingElements[i]
        if entry == Marker or entry in self.openElements:
            return

        # Step 6
        while entry != Marker and entry not in self.openElements:
            if i == 0:
                # This will be reset to 0 below
                i = -1
                break
            i -= 1
            # Step 5: let entry be one earlier in the list.
            entry = self.activeFormattingElements[i]

        while True:
            # Step 7
            i += 1

            # Step 8
            entry = self.activeFormattingElements[i]
            clone = entry.cloneNode()  # Mainly to get a new copy of the attributes

            # Step 9
            element = self.insertElement({"type": "StartTag",
                                          "name": clone.name,
                                          "namespace": clone.namespace,
                                          "data": clone.attributes})

            # Step 10
            self.activeFormattingElements[i] = element

            # Step 11
            if element == self.activeFormattingElements[-1]:
                break

    def clearActiveFormattingElements(self):
        entry = self.activeFormattingElements.pop()
        while self.activeFormattingElements and entry != Marker:
            entry = self.activeFormattingElements.pop()

    def elementInActiveFormattingElements(self, name):
        """Check if an element exists between the end of the active
        formatting elements and the last marker. If it does, return it, else
        return false"""
        for item in self.activeFormattingElements[::-1]:
            # Check for Marker first because if it's a Marker it doesn't have a
            # name attribute.
            if item == Marker:
                break
            elif item.name == name:
                return item
        return False

    def insertRoot(self, token):
        element = self.createElement(token)
        self.openElements.append(element)
        self.document.appendChild(element)

    def insertDoctype(self, token):
        name = token["name"]
        publicId = token["publicId"]
        systemId = token["systemId"]

        doctype = self.doctypeClass(name, publicId, systemId)
        self.document.appendChild(doctype)

    def insertComment(self, token, parent=None):
        if parent is None:
            parent = self.openElements[-1]
        parent.appendChild(self.commentClass(token["data"]))

    def createElement(self, token):
        """Create an element but don't insert it anywhere"""
        name = token["name"]
        namespace = token.get("namespace", self.defaultNamespace)
        element = self.elementClass(name, namespace)
        element.attributes = token["data"]
        return element

    def _getInsertFromTable(self):
        return self._insertFromTable

    def _setInsertFromTable(self, value):
        """Switch the function used to insert an element from the
        normal one to the misnested table one and back again"""
        self._insertFromTable = value
        if value:
            self.insertElement = self.insertElementTable
        else:
            self.insertElement = self.insertElementNormal

    insertFromTable = property(_getInsertFromTable, _setInsertFromTable)

    def insertElementNormal(self, token):
        name = token["name"]
        assert isinstance(name, text_type), "Element %s not unicode" % name
        namespace = token.get("namespace", self.defaultNamespace)
        element = self.elementClass(name, namespace)
        element.attributes = token["data"]
        self.openElements[-1].appendChild(element)
        self.openElements.append(element)
        return element

    def insertElementTable(self, token):
        """Create an element and insert it into the tree"""
        element = self.createElement(token)
        if self.openElements[-1].name not in tableInsertModeElements:
            return self.insertElementNormal(token)
        else:
            # We should be in the InTable mode. This means we want to do
            # special magic element rearranging
            parent, insertBefore = self.getTableMisnestedNodePosition()
            if insertBefore is None:
                parent.appendChild(element)
            else:
                parent.insertBefore(element, insertBefore)
            self.openElements.append(element)
        return element

    def insertText(self, data, parent=None):
        """Insert text data."""
        if parent is None:
            parent = self.openElements[-1]

        # Outside of table-insertion mode, or when the current element
        # accepts character children directly, just insert the text.
        if (not self.insertFromTable or
                self.openElements[-1].name not in tableInsertModeElements):
            parent.insertText(data)
        else:
            # We should be in the InTable mode. This means we want to do
            # special magic element rearranging
            parent, insertBefore = self.getTableMisnestedNodePosition()
            parent.insertText(data, insertBefore)

    def getTableMisnestedNodePosition(self):
        """Get the foster parent element, and sibling to insert before
        (or None) when inserting a misnested table node"""
        # The foster parent element is the one which comes before the most
        # recently opened table element
        # XXX - this is really inelegant
        lastTable = None
        fosterParent = None
        insertBefore = None
        for elm in self.openElements[::-1]:
            if elm.name == "table":
                lastTable = elm
                break
        if lastTable:
            # XXX - we should really check that this parent is actually a
            # node here
            if lastTable.parent:
                fosterParent = lastTable.parent
                insertBefore = lastTable
            else:
                fosterParent = self.openElements[
                    self.openElements.index(lastTable) - 1]
        else:
            fosterParent = self.openElements[0]
        return fosterParent, insertBefore

    def generateImpliedEndTags(self, exclude=None):
        name = self.openElements[-1].name
        # XXX td, th and tr are not actually needed
        if (name in frozenset(("dd", "dt", "li", "option", "optgroup", "p", "rp", "rt")) and
                name != exclude):
            self.openElements.pop()
            # XXX This is not entirely what the specification says. We should
            # investigate it more closely.
            self.generateImpliedEndTags(exclude)

    def getDocument(self):
        "Return the final tree"
        return self.document

    def getFragment(self):
        "Return the final fragment"
        # assert self.innerHTML
        fragment = self.fragmentClass()
        self.openElements[0].reparentChildren(fragment)
        return fragment

    def testSerializer(self, node):
        """Serialize the subtree of node in the format required by unit tests

        node - the node from which to start serializing"""
        raise NotImplementedError
"""A collection of modules for building different kinds of tree fromHTML documents.To create a treebuilder for a new type of tree, you need to doimplement several things:1) A set of classes for various types of elements: Document, Doctype,Comment, Element. These must implement the interface of_base.treebuilders.Node (although comment nodes have a differentsignature for their constructor, see treebuilders.etree.Comment)Textual content may also be implemented as another node type, or not, asyour tree implementation requires.2) A treebuilder object (called TreeBuilder by convention) thatinherits from treebuilders._base.TreeBuilder. This has 4 required attributes:documentClass - the class to use for the bottommost node of a documentelementClass - the class to use for HTML ElementscommentClass - the class to use for commentsdoctypeClass - the class to use for doctypesIt also has one required method:getDocument - Returns the root node of the complete document tree3) If you wish to run the unit tests, you must also create atestSerializer method on your treebuilder which accepts a node andreturns a string containing Node and its children serialized accordingto the format used in the unittests"""from __future__ import absolute_import, division, unicode_literalsfrom .._utils import default_etreetreeBuilderCache = {}def getTreeBuilder(treeType, implementation=None, **kwargs):"""Get a TreeBuilder class for various types of tree with built-in supporttreeType - the name of the tree type required (case-insensitive). Supportedvalues are:"dom" - A generic builder for DOM implementations, defaulting toa xml.dom.minidom based implementation."etree" - A generic builder for tree implementations exposing anElementTree-like interface, defaulting toxml.etree.cElementTree if available andxml.etree.ElementTree if not."lxml" - A etree-based builder for lxml.etree, handlinglimitations of lxml's implementation.implementation - (Currently applies to the "etree" and "dom" tree types). 
Amodule implementing the tree type e.g.xml.etree.ElementTree or xml.etree.cElementTree."""treeType = treeType.lower()if treeType not in treeBuilderCache:if treeType == "dom":from . import dom# Come up with a sane default (pref. from the stdlib)if implementation is None:from xml.dom import minidomimplementation = minidom# NEVER cache here, caching is done in the dom submodulereturn dom.getDomModule(implementation, **kwargs).TreeBuilderelif treeType == "lxml":from . import etree_lxmltreeBuilderCache[treeType] = etree_lxml.TreeBuilderelif treeType == "etree":from . import etreeif implementation is None:implementation = default_etree# NEVER cache here, caching is done in the etree submodulereturn etree.getETreeModule(implementation, **kwargs).TreeBuilderelse:raise ValueError("""Unrecognised treebuilder "%s" """ % treeType)return treeBuilderCache.get(treeType)
from __future__ import absolute_import, division, unicode_literalsfrom xml.sax.xmlreader import AttributesNSImplfrom ..constants import adjustForeignAttributes, unadjustForeignAttributesprefix_mapping = {}for prefix, localName, namespace in adjustForeignAttributes.values():if prefix is not None:prefix_mapping[prefix] = namespacedef to_sax(walker, handler):"""Call SAX-like content handler based on treewalker walker"""handler.startDocument()for prefix, namespace in prefix_mapping.items():handler.startPrefixMapping(prefix, namespace)for token in walker:type = token["type"]if type == "Doctype":continueelif type in ("StartTag", "EmptyTag"):attrs = AttributesNSImpl(token["data"],unadjustForeignAttributes)handler.startElementNS((token["namespace"], token["name"]),token["name"],attrs)if type == "EmptyTag":handler.endElementNS((token["namespace"], token["name"]),token["name"])elif type == "EndTag":handler.endElementNS((token["namespace"], token["name"]),token["name"])elif type in ("Characters", "SpaceCharacters"):handler.characters(token["data"])elif type == "Comment":passelse:assert False, "Unknown token type"for prefix, namespace in prefix_mapping.items():handler.endPrefixMapping(prefix)handler.endDocument()
from __future__ import absolute_import, division, unicode_literals

from genshi.core import QName, Attrs
from genshi.core import START, END, TEXT, COMMENT, DOCTYPE


def to_genshi(walker):
    """Adapt a html5lib treewalker token stream to a genshi event stream.

    Consecutive character tokens are coalesced into a single TEXT event;
    all events carry an unknown position of (None, -1, -1).
    """
    pending_text = []
    for token in walker:
        token_type = token["type"]
        if token_type in ("Characters", "SpaceCharacters"):
            pending_text.append(token["data"])
        elif pending_text:
            # A non-text token flushes any buffered text first.
            yield TEXT, "".join(pending_text), (None, -1, -1)
            pending_text = []

        if token_type in ("StartTag", "EmptyTag"):
            if token["namespace"]:
                name = "{%s}%s" % (token["namespace"], token["name"])
            else:
                name = token["name"]
            attrs = Attrs([(QName("{%s}%s" % attr if attr[0] is not None else attr[1]), value)
                           for attr, value in token["data"].items()])
            yield (START, (QName(name), attrs), (None, -1, -1))
            if token_type == "EmptyTag":
                # Fall through to the EndTag branch below to close it.
                token_type = "EndTag"

        if token_type == "EndTag":
            if token["namespace"]:
                name = "{%s}%s" % (token["namespace"], token["name"])
            else:
                name = token["name"]

            yield END, QName(name), (None, -1, -1)

        elif token_type == "Comment":
            yield COMMENT, token["data"], (None, -1, -1)

        elif token_type == "Doctype":
            yield DOCTYPE, (token["name"], token["publicId"],
                            token["systemId"]), (None, -1, -1)

        else:
            pass  # FIXME: What to do?

    if pending_text:
        yield TEXT, "".join(pending_text), (None, -1, -1)
from __future__ import absolute_import, division, unicode_literals

from . import sax

__all__ = ["sax"]

# The genshi adapter depends on the third-party genshi package; expose it
# only when that package is importable.
try:
    from . import genshi  # noqa
except ImportError:
    pass
else:
    __all__.append("genshi")
from __future__ import absolute_import, division, unicode_literalsfrom pip._vendor.six import text_typeimport refrom codecs import register_error, xmlcharrefreplace_errorsfrom .constants import voidElements, booleanAttributes, spaceCharactersfrom .constants import rcdataElements, entities, xmlEntitiesfrom . import treewalkers, _utilsfrom xml.sax.saxutils import escape_quoteAttributeSpecChars = "".join(spaceCharacters) + "\"'=<>`"_quoteAttributeSpec = re.compile("[" + _quoteAttributeSpecChars + "]")_quoteAttributeLegacy = re.compile("[" + _quoteAttributeSpecChars +"\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n""\x0b\x0c\r\x0e\x0f\x10\x11\x12\x13\x14\x15""\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f""\x20\x2f\x60\xa0\u1680\u180e\u180f\u2000""\u2001\u2002\u2003\u2004\u2005\u2006\u2007""\u2008\u2009\u200a\u2028\u2029\u202f\u205f""\u3000]")_encode_entity_map = {}_is_ucs4 = len("\U0010FFFF") == 1for k, v in list(entities.items()):# skip multi-character entitiesif ((_is_ucs4 and len(v) > 1) or(not _is_ucs4 and len(v) > 2)):continueif v != "&":if len(v) == 2:v = _utils.surrogatePairToCodepoint(v)else:v = ord(v)if v not in _encode_entity_map or k.islower():# prefer < over < and similarly for &, >, etc._encode_entity_map[v] = kdef htmlentityreplace_errors(exc):if isinstance(exc, (UnicodeEncodeError, UnicodeTranslateError)):res = []codepoints = []skip = Falsefor i, c in enumerate(exc.object[exc.start:exc.end]):if skip:skip = Falsecontinueindex = i + exc.startif _utils.isSurrogatePair(exc.object[index:min([exc.end, index + 2])]):codepoint = _utils.surrogatePairToCodepoint(exc.object[index:index + 2])skip = Trueelse:codepoint = ord(c)codepoints.append(codepoint)for cp in codepoints:e = _encode_entity_map.get(cp)if e:res.append("&")res.append(e)if not e.endswith(";"):res.append(";")else:res.append("&#x%s;" % (hex(cp)[2:]))return ("".join(res), exc.end)else:return xmlcharrefreplace_errors(exc)register_error("htmlentityreplace", htmlentityreplace_errors)def serialize(input, tree="etree", 
encoding=None, **serializer_opts):# XXX: Should we cache this?walker = treewalkers.getTreeWalker(tree)s = HTMLSerializer(**serializer_opts)return s.render(walker(input), encoding)class HTMLSerializer(object):# attribute quoting optionsquote_attr_values = "legacy" # be secure by defaultquote_char = '"'use_best_quote_char = True# tag syntax optionsomit_optional_tags = Trueminimize_boolean_attributes = Trueuse_trailing_solidus = Falsespace_before_trailing_solidus = True# escaping optionsescape_lt_in_attrs = Falseescape_rcdata = Falseresolve_entities = True# miscellaneous optionsalphabetical_attributes = Falseinject_meta_charset = Truestrip_whitespace = Falsesanitize = Falseoptions = ("quote_attr_values", "quote_char", "use_best_quote_char","omit_optional_tags", "minimize_boolean_attributes","use_trailing_solidus", "space_before_trailing_solidus","escape_lt_in_attrs", "escape_rcdata", "resolve_entities","alphabetical_attributes", "inject_meta_charset","strip_whitespace", "sanitize")def __init__(self, **kwargs):"""Initialize HTMLSerializer.Keyword options (default given first unless specified) include:inject_meta_charset=True|FalseWhether it insert a meta element to define the character set of thedocument.quote_attr_values="legacy"|"spec"|"always"Whether to quote attribute values that don't require quotingper legacy browser behaviour, when required by the standard, or always.quote_char=u'"'|u"'"Use given quote character for attribute quoting. Default is touse double quote unless attribute value contains a double quote,in which case single quotes are used instead.escape_lt_in_attrs=False|TrueWhether to escape < in attribute values.escape_rcdata=False|TrueWhether to escape characters that need to be escaped within normalelements within rcdata elements such as style.resolve_entities=True|FalseWhether to resolve named character entities that appear in thesource tree. 
The XML predefined entities < > & " 'are unaffected by this setting.strip_whitespace=False|TrueWhether to remove semantically meaningless whitespace. (Thiscompresses all whitespace to a single space except within pre.)minimize_boolean_attributes=True|FalseShortens boolean attributes to give just the attribute value,for example <input disabled="disabled"> becomes <input disabled>.use_trailing_solidus=False|TrueIncludes a close-tag slash at the end of the start tag of voidelements (empty elements whose end tag is forbidden). E.g. <hr/>.space_before_trailing_solidus=True|FalsePlaces a space immediately before the closing slash in a tagusing a trailing solidus. E.g. <hr />. Requires use_trailing_solidus.sanitize=False|TrueStrip all unsafe or unknown constructs from output.See `html5lib user documentation`_omit_optional_tags=True|FalseOmit start/end tags that are optional.alphabetical_attributes=False|TrueReorder attributes to be in alphabetical order... _html5lib user documentation: http://code.google.com/p/html5lib/wiki/UserDocumentation"""unexpected_args = frozenset(kwargs) - frozenset(self.options)if len(unexpected_args) > 0:raise TypeError("__init__() got an unexpected keyword argument '%s'" % next(iter(unexpected_args)))if 'quote_char' in kwargs:self.use_best_quote_char = Falsefor attr in self.options:setattr(self, attr, kwargs.get(attr, getattr(self, attr)))self.errors = []self.strict = Falsedef encode(self, string):assert(isinstance(string, text_type))if self.encoding:return string.encode(self.encoding, "htmlentityreplace")else:return stringdef encodeStrict(self, string):assert(isinstance(string, text_type))if self.encoding:return string.encode(self.encoding, "strict")else:return stringdef serialize(self, treewalker, encoding=None):# pylint:disable=too-many-nested-blocksself.encoding = encodingin_cdata = Falseself.errors = []if encoding and self.inject_meta_charset:from .filters.inject_meta_charset import Filtertreewalker = Filter(treewalker, encoding)# 
Alphabetical attributes is here under the assumption that none of# the later filters add or change order of attributes; it needs to be# before the sanitizer so escaped elements come out correctlyif self.alphabetical_attributes:from .filters.alphabeticalattributes import Filtertreewalker = Filter(treewalker)# WhitespaceFilter should be used before OptionalTagFilter# for maximum efficiently of this latter filterif self.strip_whitespace:from .filters.whitespace import Filtertreewalker = Filter(treewalker)if self.sanitize:from .filters.sanitizer import Filtertreewalker = Filter(treewalker)if self.omit_optional_tags:from .filters.optionaltags import Filtertreewalker = Filter(treewalker)for token in treewalker:type = token["type"]if type == "Doctype":doctype = "<!DOCTYPE %s" % token["name"]if token["publicId"]:doctype += ' PUBLIC "%s"' % token["publicId"]elif token["systemId"]:doctype += " SYSTEM"if token["systemId"]:if token["systemId"].find('"') >= 0:if token["systemId"].find("'") >= 0:self.serializeError("System identifer contains both single and double quote characters")quote_char = "'"else:quote_char = '"'doctype += " %s%s%s" % (quote_char, token["systemId"], quote_char)doctype += ">"yield self.encodeStrict(doctype)elif type in ("Characters", "SpaceCharacters"):if type == "SpaceCharacters" or in_cdata:if in_cdata and token["data"].find("</") >= 0:self.serializeError("Unexpected </ in CDATA")yield self.encode(token["data"])else:yield self.encode(escape(token["data"]))elif type in ("StartTag", "EmptyTag"):name = token["name"]yield self.encodeStrict("<%s" % name)if name in rcdataElements and not self.escape_rcdata:in_cdata = Trueelif in_cdata:self.serializeError("Unexpected child element of a CDATA element")for (_, attr_name), attr_value in token["data"].items():# TODO: Add namespace support herek = attr_namev = attr_valueyield self.encodeStrict(' ')yield self.encodeStrict(k)if not self.minimize_boolean_attributes or \(k not in booleanAttributes.get(name, tuple()) andk 
not in booleanAttributes.get("", tuple())):yield self.encodeStrict("=")if self.quote_attr_values == "always" or len(v) == 0:quote_attr = Trueelif self.quote_attr_values == "spec":quote_attr = _quoteAttributeSpec.search(v) is not Noneelif self.quote_attr_values == "legacy":quote_attr = _quoteAttributeLegacy.search(v) is not Noneelse:raise ValueError("quote_attr_values must be one of: ""'always', 'spec', or 'legacy'")v = v.replace("&", "&")if self.escape_lt_in_attrs:v = v.replace("<", "<")if quote_attr:quote_char = self.quote_charif self.use_best_quote_char:if "'" in v and '"' not in v:quote_char = '"'elif '"' in v and "'" not in v:quote_char = "'"if quote_char == "'":v = v.replace("'", "'")else:v = v.replace('"', """)yield self.encodeStrict(quote_char)yield self.encode(v)yield self.encodeStrict(quote_char)else:yield self.encode(v)if name in voidElements and self.use_trailing_solidus:if self.space_before_trailing_solidus:yield self.encodeStrict(" /")else:yield self.encodeStrict("/")yield self.encode(">")elif type == "EndTag":name = token["name"]if name in rcdataElements:in_cdata = Falseelif in_cdata:self.serializeError("Unexpected child element of a CDATA element")yield self.encodeStrict("</%s>" % name)elif type == "Comment":data = token["data"]if data.find("--") >= 0:self.serializeError("Comment contains --")yield self.encodeStrict("<!--%s-->" % token["data"])elif type == "Entity":name = token["name"]key = name + ";"if key not in entities:self.serializeError("Entity %s not recognized" % name)if self.resolve_entities and key not in xmlEntities:data = entities[key]else:data = "&%s;" % nameyield self.encodeStrict(data)else:self.serializeError(token["data"])def render(self, treewalker, encoding=None):if encoding:return b"".join(list(self.serialize(treewalker, encoding)))else:return "".join(list(self.serialize(treewalker)))def serializeError(self, data="XXX ERROR MESSAGE NEEDED"):# XXX The idea is to make data mandatory.self.errors.append(data)if self.strict:raise 
SerializeErrorclass SerializeError(Exception):"""Error in serialized tree"""pass
from __future__ import absolute_import, division, unicode_literalsfrom pip._vendor.six import with_metaclass, viewkeys, PY3import typestry:from collections import OrderedDictexcept ImportError:from pip._vendor.ordereddict import OrderedDictfrom . import _inputstreamfrom . import _tokenizerfrom . import treebuildersfrom .treebuilders.base import Markerfrom . import _utilsfrom .constants import (spaceCharacters, asciiUpper2Lower,specialElements, headingElements, cdataElements, rcdataElements,tokenTypes, tagTokenTypes,namespaces,htmlIntegrationPointElements, mathmlTextIntegrationPointElements,adjustForeignAttributes as adjustForeignAttributesMap,adjustMathMLAttributes, adjustSVGAttributes,E,ReparseException)def parse(doc, treebuilder="etree", namespaceHTMLElements=True, **kwargs):"""Parse a string or file-like object into a tree"""tb = treebuilders.getTreeBuilder(treebuilder)p = HTMLParser(tb, namespaceHTMLElements=namespaceHTMLElements)return p.parse(doc, **kwargs)def parseFragment(doc, container="div", treebuilder="etree", namespaceHTMLElements=True, **kwargs):tb = treebuilders.getTreeBuilder(treebuilder)p = HTMLParser(tb, namespaceHTMLElements=namespaceHTMLElements)return p.parseFragment(doc, container=container, **kwargs)def method_decorator_metaclass(function):class Decorated(type):def __new__(meta, classname, bases, classDict):for attributeName, attribute in classDict.items():if isinstance(attribute, types.FunctionType):attribute = function(attribute)classDict[attributeName] = attributereturn type.__new__(meta, classname, bases, classDict)return Decoratedclass HTMLParser(object):"""HTML parser. Generates a tree structure from a stream of (possiblymalformed) HTML"""def __init__(self, tree=None, strict=False, namespaceHTMLElements=True, debug=False):"""strict - raise an exception when a parse error is encounteredtree - a treebuilder class controlling the type of tree that will bereturned. 
Built in treebuilders can be accessed throughhtml5lib.treebuilders.getTreeBuilder(treeType)"""# Raise an exception on the first error encounteredself.strict = strictif tree is None:tree = treebuilders.getTreeBuilder("etree")self.tree = tree(namespaceHTMLElements)self.errors = []self.phases = dict([(name, cls(self, self.tree)) for name, cls ingetPhases(debug).items()])def _parse(self, stream, innerHTML=False, container="div", scripting=False, **kwargs):self.innerHTMLMode = innerHTMLself.container = containerself.scripting = scriptingself.tokenizer = _tokenizer.HTMLTokenizer(stream, parser=self, **kwargs)self.reset()try:self.mainLoop()except ReparseException:self.reset()self.mainLoop()def reset(self):self.tree.reset()self.firstStartTag = Falseself.errors = []self.log = [] # only used with debug mode# "quirks" / "limited quirks" / "no quirks"self.compatMode = "no quirks"if self.innerHTMLMode:self.innerHTML = self.container.lower()if self.innerHTML in cdataElements:self.tokenizer.state = self.tokenizer.rcdataStateelif self.innerHTML in rcdataElements:self.tokenizer.state = self.tokenizer.rawtextStateelif self.innerHTML == 'plaintext':self.tokenizer.state = self.tokenizer.plaintextStateelse:# state already is data state# self.tokenizer.state = self.tokenizer.dataStatepassself.phase = self.phases["beforeHtml"]self.phase.insertHtmlElement()self.resetInsertionMode()else:self.innerHTML = False # pylint:disable=redefined-variable-typeself.phase = self.phases["initial"]self.lastPhase = Noneself.beforeRCDataPhase = Noneself.framesetOK = True@propertydef documentEncoding(self):"""The name of the character encodingthat was used to decode the input stream,or :obj:`None` if that is not determined yet."""if not hasattr(self, 'tokenizer'):return Nonereturn self.tokenizer.stream.charEncoding[0].namedef isHTMLIntegrationPoint(self, element):if (element.name == "annotation-xml" andelement.namespace == namespaces["mathml"]):return ("encoding" in element.attributes 
andelement.attributes["encoding"].translate(asciiUpper2Lower) in("text/html", "application/xhtml+xml"))else:return (element.namespace, element.name) in htmlIntegrationPointElementsdef isMathMLTextIntegrationPoint(self, element):return (element.namespace, element.name) in mathmlTextIntegrationPointElementsdef mainLoop(self):CharactersToken = tokenTypes["Characters"]SpaceCharactersToken = tokenTypes["SpaceCharacters"]StartTagToken = tokenTypes["StartTag"]EndTagToken = tokenTypes["EndTag"]CommentToken = tokenTypes["Comment"]DoctypeToken = tokenTypes["Doctype"]ParseErrorToken = tokenTypes["ParseError"]for token in self.normalizedTokens():prev_token = Nonenew_token = tokenwhile new_token is not None:prev_token = new_tokencurrentNode = self.tree.openElements[-1] if self.tree.openElements else NonecurrentNodeNamespace = currentNode.namespace if currentNode else NonecurrentNodeName = currentNode.name if currentNode else Nonetype = new_token["type"]if type == ParseErrorToken:self.parseError(new_token["data"], new_token.get("datavars", {}))new_token = Noneelse:if (len(self.tree.openElements) == 0 orcurrentNodeNamespace == self.tree.defaultNamespace or(self.isMathMLTextIntegrationPoint(currentNode) and((type == StartTagToken andtoken["name"] not in frozenset(["mglyph", "malignmark"])) ortype in (CharactersToken, SpaceCharactersToken))) or(currentNodeNamespace == namespaces["mathml"] andcurrentNodeName == "annotation-xml" andtype == StartTagToken andtoken["name"] == "svg") or(self.isHTMLIntegrationPoint(currentNode) andtype in (StartTagToken, CharactersToken, SpaceCharactersToken))):phase = self.phaseelse:phase = self.phases["inForeignContent"]if type == CharactersToken:new_token = phase.processCharacters(new_token)elif type == SpaceCharactersToken:new_token = phase.processSpaceCharacters(new_token)elif type == StartTagToken:new_token = phase.processStartTag(new_token)elif type == EndTagToken:new_token = phase.processEndTag(new_token)elif type == CommentToken:new_token = 
phase.processComment(new_token)elif type == DoctypeToken:new_token = phase.processDoctype(new_token)if (type == StartTagToken and prev_token["selfClosing"] andnot prev_token["selfClosingAcknowledged"]):self.parseError("non-void-element-with-trailing-solidus",{"name": prev_token["name"]})# When the loop finishes it's EOFreprocess = Truephases = []while reprocess:phases.append(self.phase)reprocess = self.phase.processEOF()if reprocess:assert self.phase not in phasesdef normalizedTokens(self):for token in self.tokenizer:yield self.normalizeToken(token)def parse(self, stream, *args, **kwargs):"""Parse a HTML document into a well-formed treestream - a filelike object or string containing the HTML to be parsedThe optional encoding parameter must be a string that indicatesthe encoding. If specified, that encoding will be used,regardless of any BOM or later declaration (such as in a metaelement)scripting - treat noscript elements as if javascript was turned on"""self._parse(stream, False, None, *args, **kwargs)return self.tree.getDocument()def parseFragment(self, stream, *args, **kwargs):"""Parse a HTML fragment into a well-formed tree fragmentcontainer - name of the element we're setting the innerHTML propertyif set to None, default to 'div'stream - a filelike object or string containing the HTML to be parsedThe optional encoding parameter must be a string that indicatesthe encoding. 
If specified, that encoding will be used,regardless of any BOM or later declaration (such as in a metaelement)scripting - treat noscript elements as if javascript was turned on"""self._parse(stream, True, *args, **kwargs)return self.tree.getFragment()def parseError(self, errorcode="XXX-undefined-error", datavars=None):# XXX The idea is to make errorcode mandatory.if datavars is None:datavars = {}self.errors.append((self.tokenizer.stream.position(), errorcode, datavars))if self.strict:raise ParseError(E[errorcode] % datavars)def normalizeToken(self, token):""" HTML5 specific normalizations to the token stream """if token["type"] == tokenTypes["StartTag"]:raw = token["data"]token["data"] = OrderedDict(raw)if len(raw) > len(token["data"]):# we had some duplicated attribute, fix so first winstoken["data"].update(raw[::-1])return tokendef adjustMathMLAttributes(self, token):adjust_attributes(token, adjustMathMLAttributes)def adjustSVGAttributes(self, token):adjust_attributes(token, adjustSVGAttributes)def adjustForeignAttributes(self, token):adjust_attributes(token, adjustForeignAttributesMap)def reparseTokenNormal(self, token):# pylint:disable=unused-argumentself.parser.phase()def resetInsertionMode(self):# The name of this method is mostly historical. 
(It's also used in the# specification.)last = FalsenewModes = {"select": "inSelect","td": "inCell","th": "inCell","tr": "inRow","tbody": "inTableBody","thead": "inTableBody","tfoot": "inTableBody","caption": "inCaption","colgroup": "inColumnGroup","table": "inTable","head": "inBody","body": "inBody","frameset": "inFrameset","html": "beforeHead"}for node in self.tree.openElements[::-1]:nodeName = node.namenew_phase = Noneif node == self.tree.openElements[0]:assert self.innerHTMLlast = TruenodeName = self.innerHTML# Check for conditions that should only happen in the innerHTML# caseif nodeName in ("select", "colgroup", "head", "html"):assert self.innerHTMLif not last and node.namespace != self.tree.defaultNamespace:continueif nodeName in newModes:new_phase = self.phases[newModes[nodeName]]breakelif last:new_phase = self.phases["inBody"]breakself.phase = new_phasedef parseRCDataRawtext(self, token, contentType):"""Generic RCDATA/RAWTEXT Parsing algorithmcontentType - RCDATA or RAWTEXT"""assert contentType in ("RAWTEXT", "RCDATA")self.tree.insertElement(token)if contentType == "RAWTEXT":self.tokenizer.state = self.tokenizer.rawtextStateelse:self.tokenizer.state = self.tokenizer.rcdataStateself.originalPhase = self.phaseself.phase = self.phases["text"]@_utils.memoizedef getPhases(debug):def log(function):"""Logger that records which phase processes each token"""type_names = dict((value, key) for key, value intokenTypes.items())def wrapped(self, *args, **kwargs):if function.__name__.startswith("process") and len(args) > 0:token = args[0]try:info = {"type": type_names[token['type']]}except:raiseif token['type'] in tagTokenTypes:info["name"] = token['name']self.parser.log.append((self.parser.tokenizer.state.__name__,self.parser.phase.__class__.__name__,self.__class__.__name__,function.__name__,info))return function(self, *args, **kwargs)else:return function(self, *args, **kwargs)return wrappeddef getMetaclass(use_metaclass, metaclass_func):if use_metaclass:return 
method_decorator_metaclass(metaclass_func)else:return type# pylint:disable=unused-argumentclass Phase(with_metaclass(getMetaclass(debug, log))):"""Base class for helper object that implements each phase of processing"""def __init__(self, parser, tree):self.parser = parserself.tree = treedef processEOF(self):raise NotImplementedErrordef processComment(self, token):# For most phases the following is correct. Where it's not it will be# overridden.self.tree.insertComment(token, self.tree.openElements[-1])def processDoctype(self, token):self.parser.parseError("unexpected-doctype")def processCharacters(self, token):self.tree.insertText(token["data"])def processSpaceCharacters(self, token):self.tree.insertText(token["data"])def processStartTag(self, token):return self.startTagHandler[token["name"]](token)def startTagHtml(self, token):if not self.parser.firstStartTag and token["name"] == "html":self.parser.parseError("non-html-root")# XXX Need a check here to see if the first start tag token emitted is# this token... 
If it's not, invoke self.parser.parseError().for attr, value in token["data"].items():if attr not in self.tree.openElements[0].attributes:self.tree.openElements[0].attributes[attr] = valueself.parser.firstStartTag = Falsedef processEndTag(self, token):return self.endTagHandler[token["name"]](token)class InitialPhase(Phase):def processSpaceCharacters(self, token):passdef processComment(self, token):self.tree.insertComment(token, self.tree.document)def processDoctype(self, token):name = token["name"]publicId = token["publicId"]systemId = token["systemId"]correct = token["correct"]if (name != "html" or publicId is not None orsystemId is not None and systemId != "about:legacy-compat"):self.parser.parseError("unknown-doctype")if publicId is None:publicId = ""self.tree.insertDoctype(token)if publicId != "":publicId = publicId.translate(asciiUpper2Lower)if (not correct or token["name"] != "html" orpublicId.startswith(("+//silmaril//dtd html pro v0r11 19970101//","-//advasoft ltd//dtd html 3.0 aswedit + extensions//","-//as//dtd html 3.0 aswedit + extensions//","-//ietf//dtd html 2.0 level 1//","-//ietf//dtd html 2.0 level 2//","-//ietf//dtd html 2.0 strict level 1//","-//ietf//dtd html 2.0 strict level 2//","-//ietf//dtd html 2.0 strict//","-//ietf//dtd html 2.0//","-//ietf//dtd html 2.1e//","-//ietf//dtd html 3.0//","-//ietf//dtd html 3.2 final//","-//ietf//dtd html 3.2//","-//ietf//dtd html 3//","-//ietf//dtd html level 0//","-//ietf//dtd html level 1//","-//ietf//dtd html level 2//","-//ietf//dtd html level 3//","-//ietf//dtd html strict level 0//","-//ietf//dtd html strict level 1//","-//ietf//dtd html strict level 2//","-//ietf//dtd html strict level 3//","-//ietf//dtd html strict//","-//ietf//dtd html//","-//metrius//dtd metrius presentational//","-//microsoft//dtd internet explorer 2.0 html strict//","-//microsoft//dtd internet explorer 2.0 html//","-//microsoft//dtd internet explorer 2.0 tables//","-//microsoft//dtd internet explorer 3.0 html 
strict//","-//microsoft//dtd internet explorer 3.0 html//","-//microsoft//dtd internet explorer 3.0 tables//","-//netscape comm. corp.//dtd html//","-//netscape comm. corp.//dtd strict html//","-//o'reilly and associates//dtd html 2.0//","-//o'reilly and associates//dtd html extended 1.0//","-//o'reilly and associates//dtd html extended relaxed 1.0//","-//softquad software//dtd hotmetal pro 6.0::19990601::extensions to html 4.0//","-//softquad//dtd hotmetal pro 4.0::19971010::extensions to html 4.0//","-//spyglass//dtd html 2.0 extended//","-//sq//dtd html 2.0 hotmetal + extensions//","-//sun microsystems corp.//dtd hotjava html//","-//sun microsystems corp.//dtd hotjava strict html//","-//w3c//dtd html 3 1995-03-24//","-//w3c//dtd html 3.2 draft//","-//w3c//dtd html 3.2 final//","-//w3c//dtd html 3.2//","-//w3c//dtd html 3.2s draft//","-//w3c//dtd html 4.0 frameset//","-//w3c//dtd html 4.0 transitional//","-//w3c//dtd html experimental 19960712//","-//w3c//dtd html experimental 970421//","-//w3c//dtd w3 html//","-//w3o//dtd w3 html 3.0//","-//webtechs//dtd mozilla html 2.0//","-//webtechs//dtd mozilla html//")) orpublicId in ("-//w3o//dtd w3 html strict 3.0//en//","-/w3c/dtd html 4.0 transitional/en","html") orpublicId.startswith(("-//w3c//dtd html 4.01 frameset//","-//w3c//dtd html 4.01 transitional//")) andsystemId is None orsystemId and systemId.lower() == "http://www.ibm.com/data/dtd/v11/ibmxhtml1-transitional.dtd"):self.parser.compatMode = "quirks"elif (publicId.startswith(("-//w3c//dtd xhtml 1.0 frameset//","-//w3c//dtd xhtml 1.0 transitional//")) orpublicId.startswith(("-//w3c//dtd html 4.01 frameset//","-//w3c//dtd html 4.01 transitional//")) andsystemId is not None):self.parser.compatMode = "limited quirks"self.parser.phase = self.parser.phases["beforeHtml"]def anythingElse(self):self.parser.compatMode = "quirks"self.parser.phase = self.parser.phases["beforeHtml"]def processCharacters(self, 
token):self.parser.parseError("expected-doctype-but-got-chars")self.anythingElse()return tokendef processStartTag(self, token):self.parser.parseError("expected-doctype-but-got-start-tag",{"name": token["name"]})self.anythingElse()return tokendef processEndTag(self, token):self.parser.parseError("expected-doctype-but-got-end-tag",{"name": token["name"]})self.anythingElse()return tokendef processEOF(self):self.parser.parseError("expected-doctype-but-got-eof")self.anythingElse()return Trueclass BeforeHtmlPhase(Phase):# helper methodsdef insertHtmlElement(self):self.tree.insertRoot(impliedTagToken("html", "StartTag"))self.parser.phase = self.parser.phases["beforeHead"]# otherdef processEOF(self):self.insertHtmlElement()return Truedef processComment(self, token):self.tree.insertComment(token, self.tree.document)def processSpaceCharacters(self, token):passdef processCharacters(self, token):self.insertHtmlElement()return tokendef processStartTag(self, token):if token["name"] == "html":self.parser.firstStartTag = Trueself.insertHtmlElement()return tokendef processEndTag(self, token):if token["name"] not in ("head", "body", "html", "br"):self.parser.parseError("unexpected-end-tag-before-html",{"name": token["name"]})else:self.insertHtmlElement()return tokenclass BeforeHeadPhase(Phase):def __init__(self, parser, tree):Phase.__init__(self, parser, tree)self.startTagHandler = _utils.MethodDispatcher([("html", self.startTagHtml),("head", self.startTagHead)])self.startTagHandler.default = self.startTagOtherself.endTagHandler = _utils.MethodDispatcher([(("head", "body", "html", "br"), self.endTagImplyHead)])self.endTagHandler.default = self.endTagOtherdef processEOF(self):self.startTagHead(impliedTagToken("head", "StartTag"))return Truedef processSpaceCharacters(self, token):passdef processCharacters(self, token):self.startTagHead(impliedTagToken("head", "StartTag"))return tokendef startTagHtml(self, token):return self.parser.phases["inBody"].processStartTag(token)def 
startTagHead(self, token):self.tree.insertElement(token)self.tree.headPointer = self.tree.openElements[-1]self.parser.phase = self.parser.phases["inHead"]def startTagOther(self, token):self.startTagHead(impliedTagToken("head", "StartTag"))return tokendef endTagImplyHead(self, token):self.startTagHead(impliedTagToken("head", "StartTag"))return tokendef endTagOther(self, token):self.parser.parseError("end-tag-after-implied-root",{"name": token["name"]})class InHeadPhase(Phase):def __init__(self, parser, tree):Phase.__init__(self, parser, tree)self.startTagHandler = _utils.MethodDispatcher([("html", self.startTagHtml),("title", self.startTagTitle),(("noframes", "style"), self.startTagNoFramesStyle),("noscript", self.startTagNoscript),("script", self.startTagScript),(("base", "basefont", "bgsound", "command", "link"),self.startTagBaseLinkCommand),("meta", self.startTagMeta),("head", self.startTagHead)])self.startTagHandler.default = self.startTagOtherself.endTagHandler = _utils.MethodDispatcher([("head", self.endTagHead),(("br", "html", "body"), self.endTagHtmlBodyBr)])self.endTagHandler.default = self.endTagOther# the real thingdef processEOF(self):self.anythingElse()return Truedef processCharacters(self, token):self.anythingElse()return tokendef startTagHtml(self, token):return self.parser.phases["inBody"].processStartTag(token)def startTagHead(self, token):self.parser.parseError("two-heads-are-not-better-than-one")def startTagBaseLinkCommand(self, token):self.tree.insertElement(token)self.tree.openElements.pop()token["selfClosingAcknowledged"] = Truedef startTagMeta(self, token):self.tree.insertElement(token)self.tree.openElements.pop()token["selfClosingAcknowledged"] = Trueattributes = token["data"]if self.parser.tokenizer.stream.charEncoding[1] == "tentative":if "charset" in attributes:self.parser.tokenizer.stream.changeEncoding(attributes["charset"])elif ("content" in attributes and"http-equiv" in attributes andattributes["http-equiv"].lower() == 
"content-type"):# Encoding it as UTF-8 here is a hack, as really we should pass# the abstract Unicode string, and just use the# ContentAttrParser on that, but using UTF-8 allows all chars# to be encoded and as a ASCII-superset works.data = _inputstream.EncodingBytes(attributes["content"].encode("utf-8"))parser = _inputstream.ContentAttrParser(data)codec = parser.parse()self.parser.tokenizer.stream.changeEncoding(codec)def startTagTitle(self, token):self.parser.parseRCDataRawtext(token, "RCDATA")def startTagNoFramesStyle(self, token):# Need to decide whether to implement the scripting-disabled caseself.parser.parseRCDataRawtext(token, "RAWTEXT")def startTagNoscript(self, token):if self.parser.scripting:self.parser.parseRCDataRawtext(token, "RAWTEXT")else:self.tree.insertElement(token)self.parser.phase = self.parser.phases["inHeadNoscript"]def startTagScript(self, token):self.tree.insertElement(token)self.parser.tokenizer.state = self.parser.tokenizer.scriptDataStateself.parser.originalPhase = self.parser.phaseself.parser.phase = self.parser.phases["text"]def startTagOther(self, token):self.anythingElse()return tokendef endTagHead(self, token):node = self.parser.tree.openElements.pop()assert node.name == "head", "Expected head got %s" % node.nameself.parser.phase = self.parser.phases["afterHead"]def endTagHtmlBodyBr(self, token):self.anythingElse()return tokendef endTagOther(self, token):self.parser.parseError("unexpected-end-tag", {"name": token["name"]})def anythingElse(self):self.endTagHead(impliedTagToken("head"))class InHeadNoscriptPhase(Phase):def __init__(self, parser, tree):Phase.__init__(self, parser, tree)self.startTagHandler = _utils.MethodDispatcher([("html", self.startTagHtml),(("basefont", "bgsound", "link", "meta", "noframes", "style"), self.startTagBaseLinkCommand),(("head", "noscript"), self.startTagHeadNoscript),])self.startTagHandler.default = self.startTagOtherself.endTagHandler = _utils.MethodDispatcher([("noscript", self.endTagNoscript),("br", 
self.endTagBr),])self.endTagHandler.default = self.endTagOtherdef processEOF(self):self.parser.parseError("eof-in-head-noscript")self.anythingElse()return Truedef processComment(self, token):return self.parser.phases["inHead"].processComment(token)def processCharacters(self, token):self.parser.parseError("char-in-head-noscript")self.anythingElse()return tokendef processSpaceCharacters(self, token):return self.parser.phases["inHead"].processSpaceCharacters(token)def startTagHtml(self, token):return self.parser.phases["inBody"].processStartTag(token)def startTagBaseLinkCommand(self, token):return self.parser.phases["inHead"].processStartTag(token)def startTagHeadNoscript(self, token):self.parser.parseError("unexpected-start-tag", {"name": token["name"]})def startTagOther(self, token):self.parser.parseError("unexpected-inhead-noscript-tag", {"name": token["name"]})self.anythingElse()return tokendef endTagNoscript(self, token):node = self.parser.tree.openElements.pop()assert node.name == "noscript", "Expected noscript got %s" % node.nameself.parser.phase = self.parser.phases["inHead"]def endTagBr(self, token):self.parser.parseError("unexpected-inhead-noscript-tag", {"name": token["name"]})self.anythingElse()return tokendef endTagOther(self, token):self.parser.parseError("unexpected-end-tag", {"name": token["name"]})def anythingElse(self):# Caller must raise parse error first!self.endTagNoscript(impliedTagToken("noscript"))class AfterHeadPhase(Phase):def __init__(self, parser, tree):Phase.__init__(self, parser, tree)self.startTagHandler = _utils.MethodDispatcher([("html", self.startTagHtml),("body", self.startTagBody),("frameset", self.startTagFrameset),(("base", "basefont", "bgsound", "link", "meta", "noframes", "script","style", "title"),self.startTagFromHead),("head", self.startTagHead)])self.startTagHandler.default = self.startTagOtherself.endTagHandler = _utils.MethodDispatcher([(("body", "html", "br"),self.endTagHtmlBodyBr)])self.endTagHandler.default = 
self.endTagOtherdef processEOF(self):self.anythingElse()return Truedef processCharacters(self, token):self.anythingElse()return tokendef startTagHtml(self, token):return self.parser.phases["inBody"].processStartTag(token)def startTagBody(self, token):self.parser.framesetOK = Falseself.tree.insertElement(token)self.parser.phase = self.parser.phases["inBody"]def startTagFrameset(self, token):self.tree.insertElement(token)self.parser.phase = self.parser.phases["inFrameset"]def startTagFromHead(self, token):self.parser.parseError("unexpected-start-tag-out-of-my-head",{"name": token["name"]})self.tree.openElements.append(self.tree.headPointer)self.parser.phases["inHead"].processStartTag(token)for node in self.tree.openElements[::-1]:if node.name == "head":self.tree.openElements.remove(node)breakdef startTagHead(self, token):self.parser.parseError("unexpected-start-tag", {"name": token["name"]})def startTagOther(self, token):self.anythingElse()return tokendef endTagHtmlBodyBr(self, token):self.anythingElse()return tokendef endTagOther(self, token):self.parser.parseError("unexpected-end-tag", {"name": token["name"]})def anythingElse(self):self.tree.insertElement(impliedTagToken("body", "StartTag"))self.parser.phase = self.parser.phases["inBody"]self.parser.framesetOK = Trueclass InBodyPhase(Phase):# http://www.whatwg.org/specs/web-apps/current-work/#parsing-main-inbody# the really-really-really-very crazy modedef __init__(self, parser, tree):Phase.__init__(self, parser, tree)# Set this to the default handlerself.processSpaceCharacters = self.processSpaceCharactersNonPreself.startTagHandler = _utils.MethodDispatcher([("html", self.startTagHtml),(("base", "basefont", "bgsound", "command", "link", "meta","script", "style", "title"),self.startTagProcessInHead),("body", self.startTagBody),("frameset", self.startTagFrameset),(("address", "article", "aside", "blockquote", "center", "details","dir", "div", "dl", "fieldset", "figcaption", "figure","footer", "header", "hgroup", 
"main", "menu", "nav", "ol", "p","section", "summary", "ul"),self.startTagCloseP),(headingElements, self.startTagHeading),(("pre", "listing"), self.startTagPreListing),("form", self.startTagForm),(("li", "dd", "dt"), self.startTagListItem),("plaintext", self.startTagPlaintext),("a", self.startTagA),(("b", "big", "code", "em", "font", "i", "s", "small", "strike","strong", "tt", "u"), self.startTagFormatting),("nobr", self.startTagNobr),("button", self.startTagButton),(("applet", "marquee", "object"), self.startTagAppletMarqueeObject),("xmp", self.startTagXmp),("table", self.startTagTable),(("area", "br", "embed", "img", "keygen", "wbr"),self.startTagVoidFormatting),(("param", "source", "track"), self.startTagParamSource),("input", self.startTagInput),("hr", self.startTagHr),("image", self.startTagImage),("isindex", self.startTagIsIndex),("textarea", self.startTagTextarea),("iframe", self.startTagIFrame),("noscript", self.startTagNoscript),(("noembed", "noframes"), self.startTagRawtext),("select", self.startTagSelect),(("rp", "rt"), self.startTagRpRt),(("option", "optgroup"), self.startTagOpt),(("math"), self.startTagMath),(("svg"), self.startTagSvg),(("caption", "col", "colgroup", "frame", "head","tbody", "td", "tfoot", "th", "thead","tr"), self.startTagMisplaced)])self.startTagHandler.default = self.startTagOtherself.endTagHandler = _utils.MethodDispatcher([("body", self.endTagBody),("html", self.endTagHtml),(("address", "article", "aside", "blockquote", "button", "center","details", "dialog", "dir", "div", "dl", "fieldset", "figcaption", "figure","footer", "header", "hgroup", "listing", "main", "menu", "nav", "ol", "pre","section", "summary", "ul"), self.endTagBlock),("form", self.endTagForm),("p", self.endTagP),(("dd", "dt", "li"), self.endTagListItem),(headingElements, self.endTagHeading),(("a", "b", "big", "code", "em", "font", "i", "nobr", "s", "small","strike", "strong", "tt", "u"), self.endTagFormatting),(("applet", "marquee", "object"), 
self.endTagAppletMarqueeObject),("br", self.endTagBr),])self.endTagHandler.default = self.endTagOtherdef isMatchingFormattingElement(self, node1, node2):return (node1.name == node2.name andnode1.namespace == node2.namespace andnode1.attributes == node2.attributes)# helperdef addFormattingElement(self, token):self.tree.insertElement(token)element = self.tree.openElements[-1]matchingElements = []for node in self.tree.activeFormattingElements[::-1]:if node is Marker:breakelif self.isMatchingFormattingElement(node, element):matchingElements.append(node)assert len(matchingElements) <= 3if len(matchingElements) == 3:self.tree.activeFormattingElements.remove(matchingElements[-1])self.tree.activeFormattingElements.append(element)# the real dealdef processEOF(self):allowed_elements = frozenset(("dd", "dt", "li", "p", "tbody", "td","tfoot", "th", "thead", "tr", "body","html"))for node in self.tree.openElements[::-1]:if node.name not in allowed_elements:self.parser.parseError("expected-closing-tag-but-got-eof")break# Stop parsingdef processSpaceCharactersDropNewline(self, token):# Sometimes (start of <pre>, <listing>, and <textarea> blocks) we# want to drop leading newlinesdata = token["data"]self.processSpaceCharacters = self.processSpaceCharactersNonPreif (data.startswith("\n") andself.tree.openElements[-1].name in ("pre", "listing", "textarea") andnot self.tree.openElements[-1].hasContent()):data = data[1:]if data:self.tree.reconstructActiveFormattingElements()self.tree.insertText(data)def processCharacters(self, token):if token["data"] == "\u0000":# The tokenizer should always emit null on its ownreturnself.tree.reconstructActiveFormattingElements()self.tree.insertText(token["data"])# This must be bad for performanceif (self.parser.framesetOK andany([char not in spaceCharactersfor char in token["data"]])):self.parser.framesetOK = Falsedef processSpaceCharactersNonPre(self, token):self.tree.reconstructActiveFormattingElements()self.tree.insertText(token["data"])def 
    # --- continuation of the "in body" insertion-mode phase ---------------
    # NOTE(review): the enclosing class statement is above this chunk; the
    # `def` keyword of the first method below is reconstructed (only the
    # keyword lies outside the visible region — the signature is visible).

    def startTagProcessInHead(self, token):
        # Tags legal in both "in head" and "in body": delegate to inHead.
        return self.parser.phases["inHead"].processStartTag(token)

    def startTagBody(self, token):
        self.parser.parseError("unexpected-start-tag", {"name": "body"})
        if (len(self.tree.openElements) == 1 or
                self.tree.openElements[1].name != "body"):
            assert self.parser.innerHTML
        else:
            self.parser.framesetOK = False
            # Merge attributes of a duplicate <body> into the existing one,
            # without overwriting attributes that are already present.
            for attr, value in token["data"].items():
                if attr not in self.tree.openElements[1].attributes:
                    self.tree.openElements[1].attributes[attr] = value

    def startTagFrameset(self, token):
        self.parser.parseError("unexpected-start-tag", {"name": "frameset"})
        if (len(self.tree.openElements) == 1 or
                self.tree.openElements[1].name != "body"):
            assert self.parser.innerHTML
        elif not self.parser.framesetOK:
            # A late <frameset> after real content is ignored.
            pass
        else:
            # Replace the body with the frameset.
            if self.tree.openElements[1].parent:
                self.tree.openElements[1].parent.removeChild(self.tree.openElements[1])
            while self.tree.openElements[-1].name != "html":
                self.tree.openElements.pop()
            self.tree.insertElement(token)
            self.parser.phase = self.parser.phases["inFrameset"]

    def startTagCloseP(self, token):
        # Elements that implicitly close an open <p> first.
        if self.tree.elementInScope("p", variant="button"):
            self.endTagP(impliedTagToken("p"))
        self.tree.insertElement(token)

    def startTagPreListing(self, token):
        if self.tree.elementInScope("p", variant="button"):
            self.endTagP(impliedTagToken("p"))
        self.tree.insertElement(token)
        self.parser.framesetOK = False
        # A newline immediately after <pre>/<listing> is dropped.
        self.processSpaceCharacters = self.processSpaceCharactersDropNewline

    def startTagForm(self, token):
        if self.tree.formPointer:
            # Nested forms are ignored.
            self.parser.parseError("unexpected-start-tag", {"name": "form"})
        else:
            if self.tree.elementInScope("p", variant="button"):
                self.endTagP(impliedTagToken("p"))
            self.tree.insertElement(token)
            self.tree.formPointer = self.tree.openElements[-1]

    def startTagListItem(self, token):
        self.parser.framesetOK = False

        # A new list item closes any open sibling item of the same family.
        stopNamesMap = {"li": ["li"],
                        "dt": ["dt", "dd"],
                        "dd": ["dt", "dd"]}
        stopNames = stopNamesMap[token["name"]]
        for node in reversed(self.tree.openElements):
            if node.name in stopNames:
                self.parser.phase.processEndTag(
                    impliedTagToken(node.name, "EndTag"))
                break
            if (node.nameTuple in specialElements and
                    node.name not in ("address", "div", "p")):
                break

        if self.tree.elementInScope("p", variant="button"):
            self.parser.phase.processEndTag(
                impliedTagToken("p", "EndTag"))

        self.tree.insertElement(token)

    def startTagPlaintext(self, token):
        if self.tree.elementInScope("p", variant="button"):
            self.endTagP(impliedTagToken("p"))
        self.tree.insertElement(token)
        # <plaintext> switches the tokenizer for the remainder of the input.
        self.parser.tokenizer.state = self.parser.tokenizer.plaintextState

    def startTagHeading(self, token):
        if self.tree.elementInScope("p", variant="button"):
            self.endTagP(impliedTagToken("p"))
        if self.tree.openElements[-1].name in headingElements:
            # Headings do not nest; pop the open one.
            self.parser.parseError("unexpected-start-tag", {"name": token["name"]})
            self.tree.openElements.pop()
        self.tree.insertElement(token)

    def startTagA(self, token):
        # A new <a> implicitly closes a still-active previous <a>.
        afeAElement = self.tree.elementInActiveFormattingElements("a")
        if afeAElement:
            self.parser.parseError("unexpected-start-tag-implies-end-tag",
                                   {"startName": "a", "endName": "a"})
            self.endTagFormatting(impliedTagToken("a"))
            if afeAElement in self.tree.openElements:
                self.tree.openElements.remove(afeAElement)
            if afeAElement in self.tree.activeFormattingElements:
                self.tree.activeFormattingElements.remove(afeAElement)
        self.tree.reconstructActiveFormattingElements()
        self.addFormattingElement(token)

    def startTagFormatting(self, token):
        self.tree.reconstructActiveFormattingElements()
        self.addFormattingElement(token)

    def startTagNobr(self, token):
        self.tree.reconstructActiveFormattingElements()
        if self.tree.elementInScope("nobr"):
            self.parser.parseError("unexpected-start-tag-implies-end-tag",
                                   {"startName": "nobr", "endName": "nobr"})
            self.processEndTag(impliedTagToken("nobr"))
            # XXX Need tests that trigger the following
            self.tree.reconstructActiveFormattingElements()
        self.addFormattingElement(token)

    def startTagButton(self, token):
        if self.tree.elementInScope("button"):
            self.parser.parseError("unexpected-start-tag-implies-end-tag",
                                   {"startName": "button", "endName": "button"})
            self.processEndTag(impliedTagToken("button"))
            return token
        else:
            self.tree.reconstructActiveFormattingElements()
            self.tree.insertElement(token)
            self.parser.framesetOK = False

    def startTagAppletMarqueeObject(self, token):
        self.tree.reconstructActiveFormattingElements()
        self.tree.insertElement(token)
        # Marker scopes the active formatting elements inside this element.
        self.tree.activeFormattingElements.append(Marker)
        self.parser.framesetOK = False

    def startTagXmp(self, token):
        if self.tree.elementInScope("p", variant="button"):
            self.endTagP(impliedTagToken("p"))
        self.tree.reconstructActiveFormattingElements()
        self.parser.framesetOK = False
        self.parser.parseRCDataRawtext(token, "RAWTEXT")

    def startTagTable(self, token):
        # In quirks mode a <table> does NOT close an open <p>.
        if self.parser.compatMode != "quirks":
            if self.tree.elementInScope("p", variant="button"):
                self.processEndTag(impliedTagToken("p"))
        self.tree.insertElement(token)
        self.parser.framesetOK = False
        self.parser.phase = self.parser.phases["inTable"]

    def startTagVoidFormatting(self, token):
        self.tree.reconstructActiveFormattingElements()
        self.tree.insertElement(token)
        self.tree.openElements.pop()
        token["selfClosingAcknowledged"] = True
        self.parser.framesetOK = False

    def startTagInput(self, token):
        framesetOK = self.parser.framesetOK
        self.startTagVoidFormatting(token)
        if ("type" in token["data"] and
                token["data"]["type"].translate(asciiUpper2Lower) == "hidden"):
            # input type=hidden doesn't change framesetOK
            self.parser.framesetOK = framesetOK

    def startTagParamSource(self, token):
        self.tree.insertElement(token)
        self.tree.openElements.pop()
        token["selfClosingAcknowledged"] = True

    def startTagHr(self, token):
        if self.tree.elementInScope("p", variant="button"):
            self.endTagP(impliedTagToken("p"))
        self.tree.insertElement(token)
        self.tree.openElements.pop()
        token["selfClosingAcknowledged"] = True
        self.parser.framesetOK = False

    def startTagImage(self, token):
        # No really...
        self.parser.parseError("unexpected-start-tag-treated-as",
                               {"originalName": "image", "newName": "img"})
        self.processStartTag(impliedTagToken("img", "StartTag",
                                             attributes=token["data"],
                                             selfClosing=token["selfClosing"]))

    def startTagIsIndex(self, token):
        # <isindex> is expanded into a <form><hr><label>...<input><hr></form>
        # construct per the legacy behavior.
        self.parser.parseError("deprecated-tag", {"name": "isindex"})
        if self.tree.formPointer:
            return
        form_attrs = {}
        if "action" in token["data"]:
            form_attrs["action"] = token["data"]["action"]
        self.processStartTag(impliedTagToken("form", "StartTag",
                                             attributes=form_attrs))
        self.processStartTag(impliedTagToken("hr", "StartTag"))
        self.processStartTag(impliedTagToken("label", "StartTag"))
        # XXX Localization ...
        if "prompt" in token["data"]:
            prompt = token["data"]["prompt"]
        else:
            prompt = "This is a searchable index. Enter search keywords: "
        self.processCharacters(
            {"type": tokenTypes["Characters"], "data": prompt})
        attributes = token["data"].copy()
        if "action" in attributes:
            del attributes["action"]
        if "prompt" in attributes:
            del attributes["prompt"]
        attributes["name"] = "isindex"
        self.processStartTag(impliedTagToken("input", "StartTag",
                                             attributes=attributes,
                                             selfClosing=token["selfClosing"]))
        self.processEndTag(impliedTagToken("label"))
        self.processStartTag(impliedTagToken("hr", "StartTag"))
        self.processEndTag(impliedTagToken("form"))

    def startTagTextarea(self, token):
        self.tree.insertElement(token)
        self.parser.tokenizer.state = self.parser.tokenizer.rcdataState
        # A newline immediately after <textarea> is dropped.
        self.processSpaceCharacters = self.processSpaceCharactersDropNewline
        self.parser.framesetOK = False

    def startTagIFrame(self, token):
        self.parser.framesetOK = False
        self.startTagRawtext(token)

    def startTagNoscript(self, token):
        # <noscript> is raw text only when scripting is enabled.
        if self.parser.scripting:
            self.startTagRawtext(token)
        else:
            self.startTagOther(token)

    def startTagRawtext(self, token):
        """iframe, noembed, noframes, noscript (if scripting enabled)"""
        self.parser.parseRCDataRawtext(token, "RAWTEXT")

    def startTagOpt(self, token):
        if self.tree.openElements[-1].name == "option":
            self.parser.phase.processEndTag(impliedTagToken("option"))
        self.tree.reconstructActiveFormattingElements()
        self.parser.tree.insertElement(token)

    def startTagSelect(self, token):
        self.tree.reconstructActiveFormattingElements()
        self.tree.insertElement(token)
        self.parser.framesetOK = False
        # <select> inside a table context uses the special inSelectInTable mode.
        if self.parser.phase in (self.parser.phases["inTable"],
                                 self.parser.phases["inCaption"],
                                 self.parser.phases["inColumnGroup"],
                                 self.parser.phases["inTableBody"],
                                 self.parser.phases["inRow"],
                                 self.parser.phases["inCell"]):
            self.parser.phase = self.parser.phases["inSelectInTable"]
        else:
            self.parser.phase = self.parser.phases["inSelect"]

    def startTagRpRt(self, token):
        if self.tree.elementInScope("ruby"):
            self.tree.generateImpliedEndTags()
            if self.tree.openElements[-1].name != "ruby":
                self.parser.parseError()
        self.tree.insertElement(token)

    def startTagMath(self, token):
        self.tree.reconstructActiveFormattingElements()
        self.parser.adjustMathMLAttributes(token)
        self.parser.adjustForeignAttributes(token)
        token["namespace"] = namespaces["mathml"]
        self.tree.insertElement(token)
        # Need to get the parse error right for the case where the token
        # has a namespace not equal to the xmlns attribute
        if token["selfClosing"]:
            self.tree.openElements.pop()
            token["selfClosingAcknowledged"] = True

    def startTagSvg(self, token):
        self.tree.reconstructActiveFormattingElements()
        self.parser.adjustSVGAttributes(token)
        self.parser.adjustForeignAttributes(token)
        token["namespace"] = namespaces["svg"]
        self.tree.insertElement(token)
        # Need to get the parse error right for the case where the token
        # has a namespace not equal to the xmlns attribute
        if token["selfClosing"]:
            self.tree.openElements.pop()
            token["selfClosingAcknowledged"] = True

    def startTagMisplaced(self, token):
        """ Elements that should be children of other elements that have a
        different insertion mode; here they are ignored
        "caption", "col", "colgroup", "frame", "frameset", "head",
        "option", "optgroup", "tbody", "td", "tfoot", "th", "thead",
        "tr", "noscript"
        """
        self.parser.parseError("unexpected-start-tag-ignored",
                               {"name": token["name"]})

    def startTagOther(self, token):
        self.tree.reconstructActiveFormattingElements()
        self.tree.insertElement(token)

    def endTagP(self, token):
        if not self.tree.elementInScope("p", variant="button"):
            # No <p> to close: synthesize one, then close it.
            self.startTagCloseP(impliedTagToken("p", "StartTag"))
            self.parser.parseError("unexpected-end-tag", {"name": "p"})
            self.endTagP(impliedTagToken("p", "EndTag"))
        else:
            self.tree.generateImpliedEndTags("p")
            if self.tree.openElements[-1].name != "p":
                self.parser.parseError("unexpected-end-tag", {"name": "p"})
            node = self.tree.openElements.pop()
            while node.name != "p":
                node = self.tree.openElements.pop()

    def endTagBody(self, token):
        if not self.tree.elementInScope("body"):
            self.parser.parseError()
            return
        elif self.tree.openElements[-1].name != "body":
            for node in self.tree.openElements[2:]:
                if node.name not in frozenset(("dd", "dt", "li", "optgroup",
                                               "option", "p", "rp", "rt",
                                               "tbody", "td", "tfoot",
                                               "th", "thead", "tr", "body",
                                               "html")):
                    # Not sure this is the correct name for the parse error
                    self.parser.parseError(
                        "expected-one-end-tag-but-got-another",
                        {"gotName": "body", "expectedName": node.name})
                    break
        self.parser.phase = self.parser.phases["afterBody"]

    def endTagHtml(self, token):
        # We repeat the test for the body end tag token being ignored here
        if self.tree.elementInScope("body"):
            self.endTagBody(impliedTagToken("body"))
            return token

    def endTagBlock(self, token):
        # Put us back in the right whitespace handling mode
        if token["name"] == "pre":
            self.processSpaceCharacters = self.processSpaceCharactersNonPre
        inScope = self.tree.elementInScope(token["name"])
        if inScope:
            self.tree.generateImpliedEndTags()
        if self.tree.openElements[-1].name != token["name"]:
            self.parser.parseError("end-tag-too-early", {"name": token["name"]})
        if inScope:
            node = self.tree.openElements.pop()
            while node.name != token["name"]:
                node = self.tree.openElements.pop()

    def endTagForm(self, token):
        node = self.tree.formPointer
        self.tree.formPointer = None
        if node is None or not self.tree.elementInScope(node):
            self.parser.parseError("unexpected-end-tag",
                                   {"name": "form"})
        else:
            self.tree.generateImpliedEndTags()
            if self.tree.openElements[-1] != node:
                self.parser.parseError("end-tag-too-early-ignored",
                                       {"name": "form"})
            # The form element is removed wherever it sits in the stack.
            self.tree.openElements.remove(node)

    def endTagListItem(self, token):
        # </li> uses list-item scope; </dd> and </dt> use default scope.
        if token["name"] == "li":
            variant = "list"
        else:
            variant = None
        if not self.tree.elementInScope(token["name"], variant=variant):
            self.parser.parseError("unexpected-end-tag", {"name": token["name"]})
        else:
            self.tree.generateImpliedEndTags(exclude=token["name"])
            if self.tree.openElements[-1].name != token["name"]:
                self.parser.parseError("end-tag-too-early",
                                       {"name": token["name"]})
            node = self.tree.openElements.pop()
            while node.name != token["name"]:
                node = self.tree.openElements.pop()

    def endTagHeading(self, token):
        # Any heading end tag closes any open heading element.
        for item in headingElements:
            if self.tree.elementInScope(item):
                self.tree.generateImpliedEndTags()
                break
        if self.tree.openElements[-1].name != token["name"]:
            self.parser.parseError("end-tag-too-early", {"name": token["name"]})
        for item in headingElements:
            if self.tree.elementInScope(item):
                item = self.tree.openElements.pop()
                while item.name not in headingElements:
                    item = self.tree.openElements.pop()
                break

    def endTagFormatting(self, token):
        """The much-feared adoption agency algorithm"""
        # http://svn.whatwg.org/webapps/complete.html#adoptionAgency revision 7867
        # XXX Better parseError messages appreciated.

        # Step 1
        outerLoopCounter = 0

        # Step 2
        while outerLoopCounter < 8:

            # Step 3
            outerLoopCounter += 1

            # Step 4:

            # Let the formatting element be the last element in
            # the list of active formatting elements that:
            # - is between the end of the list and the last scope
            # marker in the list, if any, or the start of the list
            # otherwise, and
            # - has the same tag name as the token.
            formattingElement = self.tree.elementInActiveFormattingElements(
                token["name"])
            if (not formattingElement or
                    (formattingElement in self.tree.openElements and
                     not self.tree.elementInScope(formattingElement.name))):
                # If there is no such node, then abort these steps
                # and instead act as described in the "any other
                # end tag" entry below.
                self.endTagOther(token)
                return

            # Otherwise, if there is such a node, but that node is
            # not in the stack of open elements, then this is a
            # parse error; remove the element from the list, and
            # abort these steps.
            elif formattingElement not in self.tree.openElements:
                self.parser.parseError("adoption-agency-1.2", {"name": token["name"]})
                self.tree.activeFormattingElements.remove(formattingElement)
                return

            # Otherwise, if there is such a node, and that node is
            # also in the stack of open elements, but the element
            # is not in scope, then this is a parse error; ignore
            # the token, and abort these steps.
            elif not self.tree.elementInScope(formattingElement.name):
                self.parser.parseError("adoption-agency-4.4", {"name": token["name"]})
                return

            # Otherwise, there is a formatting element and that
            # element is in the stack and is in scope. If the
            # element is not the current node, this is a parse
            # error. In any case, proceed with the algorithm as
            # written in the following steps.
            else:
                if formattingElement != self.tree.openElements[-1]:
                    self.parser.parseError("adoption-agency-1.3", {"name": token["name"]})

            # Step 5:

            # Let the furthest block be the topmost node in the
            # stack of open elements that is lower in the stack
            # than the formatting element, and is an element in
            # the special category. There might not be one.
            afeIndex = self.tree.openElements.index(formattingElement)
            furthestBlock = None
            for element in self.tree.openElements[afeIndex:]:
                if element.nameTuple in specialElements:
                    furthestBlock = element
                    break

            # Step 6:

            # If there is no furthest block, then the UA must
            # first pop all the nodes from the bottom of the stack
            # of open elements, from the current node up to and
            # including the formatting element, then remove the
            # formatting element from the list of active
            # formatting elements, and finally abort these steps.
            if furthestBlock is None:
                element = self.tree.openElements.pop()
                while element != formattingElement:
                    element = self.tree.openElements.pop()
                self.tree.activeFormattingElements.remove(element)
                return

            # Step 7
            commonAncestor = self.tree.openElements[afeIndex - 1]

            # Step 8:
            # The bookmark is supposed to help us identify where to reinsert
            # nodes in step 15. We have to ensure that we reinsert nodes after
            # the node before the active formatting element. Note the bookmark
            # can move in step 9.7
            bookmark = self.tree.activeFormattingElements.index(formattingElement)

            # Step 9
            lastNode = node = furthestBlock
            innerLoopCounter = 0

            index = self.tree.openElements.index(node)
            while innerLoopCounter < 3:
                innerLoopCounter += 1
                # Node is element before node in open elements
                index -= 1
                node = self.tree.openElements[index]
                if node not in self.tree.activeFormattingElements:
                    self.tree.openElements.remove(node)
                    continue
                # Step 9.6
                if node == formattingElement:
                    break
                # Step 9.7
                if lastNode == furthestBlock:
                    bookmark = self.tree.activeFormattingElements.index(node) + 1
                # Step 9.8
                clone = node.cloneNode()
                # Replace node with clone
                self.tree.activeFormattingElements[
                    self.tree.activeFormattingElements.index(node)] = clone
                self.tree.openElements[
                    self.tree.openElements.index(node)] = clone
                node = clone
                # Step 9.9
                # Remove lastNode from its parents, if any
                if lastNode.parent:
                    lastNode.parent.removeChild(lastNode)
                node.appendChild(lastNode)
                # Step 9.10
                lastNode = node

            # Step 10
            # Foster parent lastNode if commonAncestor is a
            # table, tbody, tfoot, thead, or tr we need to foster
            # parent the lastNode
            if lastNode.parent:
                lastNode.parent.removeChild(lastNode)

            if commonAncestor.name in frozenset(("table", "tbody", "tfoot", "thead", "tr")):
                parent, insertBefore = self.tree.getTableMisnestedNodePosition()
                parent.insertBefore(lastNode, insertBefore)
            else:
                commonAncestor.appendChild(lastNode)

            # Step 11
            clone = formattingElement.cloneNode()

            # Step 12
            furthestBlock.reparentChildren(clone)

            # Step 13
            furthestBlock.appendChild(clone)

            # Step 14
            self.tree.activeFormattingElements.remove(formattingElement)
            self.tree.activeFormattingElements.insert(bookmark, clone)

            # Step 15
            self.tree.openElements.remove(formattingElement)
            self.tree.openElements.insert(
                self.tree.openElements.index(furthestBlock) + 1, clone)

    def endTagAppletMarqueeObject(self, token):
        if self.tree.elementInScope(token["name"]):
            self.tree.generateImpliedEndTags()
        if self.tree.openElements[-1].name != token["name"]:
            self.parser.parseError("end-tag-too-early", {"name": token["name"]})

        if self.tree.elementInScope(token["name"]):
            element = self.tree.openElements.pop()
            while element.name != token["name"]:
                element = self.tree.openElements.pop()
            self.tree.clearActiveFormattingElements()

    def endTagBr(self, token):
        # </br> is treated as a <br> start tag.
        self.parser.parseError("unexpected-end-tag-treated-as",
                               {"originalName": "br", "newName": "br element"})
        self.tree.reconstructActiveFormattingElements()
        self.tree.insertElement(impliedTagToken("br", "StartTag"))
        self.tree.openElements.pop()

    def endTagOther(self, token):
        # Generic "any other end tag" algorithm: walk the stack from the top.
        for node in self.tree.openElements[::-1]:
            if node.name == token["name"]:
                self.tree.generateImpliedEndTags(exclude=token["name"])
                if self.tree.openElements[-1].name != token["name"]:
                    self.parser.parseError("unexpected-end-tag", {"name": token["name"]})
                while self.tree.openElements.pop() != node:
                    pass
                break
            else:
                if node.nameTuple in specialElements:
                    self.parser.parseError("unexpected-end-tag", {"name": token["name"]})
                    break


class TextPhase(Phase):
    # Handles RCDATA/RAWTEXT element content (e.g. <script>, <style>).
    def __init__(self, parser, tree):
        Phase.__init__(self, parser, tree)
        self.startTagHandler = _utils.MethodDispatcher([])
        self.startTagHandler.default = self.startTagOther
        self.endTagHandler = _utils.MethodDispatcher([
            ("script", self.endTagScript)])
        self.endTagHandler.default = self.endTagOther

    def processCharacters(self, token):
        self.tree.insertText(token["data"])

    def processEOF(self):
        self.parser.parseError("expected-named-closing-tag-but-got-eof",
                               {"name": self.tree.openElements[-1].name})
        self.tree.openElements.pop()
        self.parser.phase = self.parser.originalPhase
        return True

    def startTagOther(self, token):
        assert False, "Tried to process start tag %s in RCDATA/RAWTEXT mode" % token['name']

    def endTagScript(self, token):
        node = self.tree.openElements.pop()
        assert node.name == "script"
        self.parser.phase = self.parser.originalPhase
        # The rest of this method is all stuff that only happens if
        # document.write works

    def endTagOther(self, token):
        self.tree.openElements.pop()
        self.parser.phase = self.parser.originalPhase


class InTablePhase(Phase):
    # http://www.whatwg.org/specs/web-apps/current-work/#in-table
    def __init__(self, parser, tree):
        Phase.__init__(self, parser, tree)
        self.startTagHandler = _utils.MethodDispatcher([
            ("html", self.startTagHtml),
            ("caption", self.startTagCaption),
            ("colgroup", self.startTagColgroup),
            ("col", self.startTagCol),
            (("tbody", "tfoot", "thead"), self.startTagRowGroup),
            (("td", "th", "tr"), self.startTagImplyTbody),
            ("table", self.startTagTable),
            (("style", "script"), self.startTagStyleScript),
            ("input", self.startTagInput),
            ("form", self.startTagForm)])
        self.startTagHandler.default = self.startTagOther

        self.endTagHandler = _utils.MethodDispatcher([
            ("table", self.endTagTable),
            (("body", "caption", "col", "colgroup", "html", "tbody", "td",
              "tfoot", "th", "thead", "tr"), self.endTagIgnore)])
        self.endTagHandler.default = self.endTagOther

    # helper methods
    def clearStackToTableContext(self):
        # "clear the stack back to a table context"
        while self.tree.openElements[-1].name not in ("table", "html"):
            # self.parser.parseError("unexpected-implied-end-tag-in-table",
            #  {"name":  self.tree.openElements[-1].name})
            self.tree.openElements.pop()
        # When the current node is <html> it's an innerHTML case

    # processing methods
    def processEOF(self):
        if self.tree.openElements[-1].name != "html":
            self.parser.parseError("eof-in-table")
        else:
            assert self.parser.innerHTML
        # Stop parsing

    def processSpaceCharacters(self, token):
        # Buffer character tokens via the inTableText sub-phase.
        originalPhase = self.parser.phase
        self.parser.phase = self.parser.phases["inTableText"]
        self.parser.phase.originalPhase = originalPhase
        self.parser.phase.processSpaceCharacters(token)

    def processCharacters(self, token):
        originalPhase = self.parser.phase
        self.parser.phase = self.parser.phases["inTableText"]
        self.parser.phase.originalPhase = originalPhase
        self.parser.phase.processCharacters(token)

    def insertText(self, token):
        # If we get here there must be at least one non-whitespace character
        # Do the table magic!
        self.tree.insertFromTable = True
        self.parser.phases["inBody"].processCharacters(token)
        self.tree.insertFromTable = False

    def startTagCaption(self, token):
        self.clearStackToTableContext()
        self.tree.activeFormattingElements.append(Marker)
        self.tree.insertElement(token)
        self.parser.phase = self.parser.phases["inCaption"]

    def startTagColgroup(self, token):
        self.clearStackToTableContext()
        self.tree.insertElement(token)
        self.parser.phase = self.parser.phases["inColumnGroup"]

    def startTagCol(self, token):
        # <col> implies a <colgroup>; reprocess the token there.
        self.startTagColgroup(impliedTagToken("colgroup", "StartTag"))
        return token

    def startTagRowGroup(self, token):
        self.clearStackToTableContext()
        self.tree.insertElement(token)
        self.parser.phase = self.parser.phases["inTableBody"]

    def startTagImplyTbody(self, token):
        # <td>/<th>/<tr> imply a <tbody>; reprocess the token there.
        self.startTagRowGroup(impliedTagToken("tbody", "StartTag"))
        return token

    def startTagTable(self, token):
        self.parser.parseError("unexpected-start-tag-implies-end-tag",
                               {"startName": "table", "endName": "table"})
        self.parser.phase.processEndTag(impliedTagToken("table"))
        if not self.parser.innerHTML:
            return token

    def startTagStyleScript(self, token):
        return self.parser.phases["inHead"].processStartTag(token)

    def startTagInput(self, token):
        if ("type" in token["data"] and
                token["data"]["type"].translate(asciiUpper2Lower) == "hidden"):
            self.parser.parseError("unexpected-hidden-input-in-table")
            self.tree.insertElement(token)
            # XXX associate with form
            self.tree.openElements.pop()
        else:
            self.startTagOther(token)

    def startTagForm(self, token):
        self.parser.parseError("unexpected-form-in-table")
        if self.tree.formPointer is None:
            self.tree.insertElement(token)
            self.tree.formPointer = self.tree.openElements[-1]
            self.tree.openElements.pop()

    def startTagOther(self, token):
        self.parser.parseError("unexpected-start-tag-implies-table-voodoo", {"name": token["name"]})
        # Do the table magic!
        self.tree.insertFromTable = True
        self.parser.phases["inBody"].processStartTag(token)
        self.tree.insertFromTable = False

    def endTagTable(self, token):
        if self.tree.elementInScope("table", variant="table"):
            self.tree.generateImpliedEndTags()
            if self.tree.openElements[-1].name != "table":
                self.parser.parseError("end-tag-too-early-named",
                                       {"gotName": "table",
                                        "expectedName": self.tree.openElements[-1].name})
            while self.tree.openElements[-1].name != "table":
                self.tree.openElements.pop()
            self.tree.openElements.pop()
            self.parser.resetInsertionMode()
        else:
            # innerHTML case
            assert self.parser.innerHTML
            self.parser.parseError()

    def endTagIgnore(self, token):
        self.parser.parseError("unexpected-end-tag", {"name": token["name"]})

    def endTagOther(self, token):
        self.parser.parseError("unexpected-end-tag-implies-table-voodoo", {"name": token["name"]})
        # Do the table magic!
        self.tree.insertFromTable = True
        self.parser.phases["inBody"].processEndTag(token)
        self.tree.insertFromTable = False


class InTableTextPhase(Phase):
    # Buffers character tokens seen in table context until a non-character
    # token arrives, then flushes them (foster-parenting non-whitespace).
    def __init__(self, parser, tree):
        Phase.__init__(self, parser, tree)
        self.originalPhase = None
        self.characterTokens = []

    def flushCharacters(self):
        data = "".join([item["data"] for item in self.characterTokens])
        if any([item not in spaceCharacters for item in data]):
            token = {"type": tokenTypes["Characters"], "data": data}
            self.parser.phases["inTable"].insertText(token)
        elif data:
            self.tree.insertText(data)
        self.characterTokens = []

    def processComment(self, token):
        self.flushCharacters()
        self.parser.phase = self.originalPhase
        return token

    def processEOF(self):
        self.flushCharacters()
        self.parser.phase = self.originalPhase
        return True

    def processCharacters(self, token):
        if token["data"] == "\u0000":
            return
        self.characterTokens.append(token)

    def processSpaceCharacters(self, token):
        # pretty sure we should never reach here
        self.characterTokens.append(token)
        # assert False

    def processStartTag(self, token):
        self.flushCharacters()
        self.parser.phase = self.originalPhase
        return token

    def processEndTag(self, token):
        self.flushCharacters()
        self.parser.phase = self.originalPhase
        return token


class InCaptionPhase(Phase):
    # http://www.whatwg.org/specs/web-apps/current-work/#in-caption
    def __init__(self, parser, tree):
        Phase.__init__(self, parser, tree)
        self.startTagHandler = _utils.MethodDispatcher([
            ("html", self.startTagHtml),
            (("caption", "col", "colgroup", "tbody", "td", "tfoot", "th",
              "thead", "tr"), self.startTagTableElement)])
        self.startTagHandler.default = self.startTagOther
        self.endTagHandler = _utils.MethodDispatcher([
            ("caption", self.endTagCaption),
            ("table", self.endTagTable),
            (("body", "col", "colgroup", "html", "tbody", "td", "tfoot", "th",
              "thead", "tr"), self.endTagIgnore)])
        self.endTagHandler.default = self.endTagOther

    def ignoreEndTagCaption(self):
        return not self.tree.elementInScope("caption", variant="table")

    def processEOF(self):
        self.parser.phases["inBody"].processEOF()

    def processCharacters(self, token):
        return self.parser.phases["inBody"].processCharacters(token)

    def startTagTableElement(self, token):
        self.parser.parseError()
        # XXX Have to duplicate logic here to find out if the tag is ignored
        ignoreEndTag = self.ignoreEndTagCaption()
        self.parser.phase.processEndTag(impliedTagToken("caption"))
        if not ignoreEndTag:
            return token

    def startTagOther(self, token):
        return self.parser.phases["inBody"].processStartTag(token)

    def endTagCaption(self, token):
        if not self.ignoreEndTagCaption():
            # AT this code is quite similar to endTagTable in "InTable"
            self.tree.generateImpliedEndTags()
            if self.tree.openElements[-1].name != "caption":
                self.parser.parseError("expected-one-end-tag-but-got-another",
                                       {"gotName": "caption",
                                        "expectedName": self.tree.openElements[-1].name})
            while self.tree.openElements[-1].name != "caption":
                self.tree.openElements.pop()
            self.tree.openElements.pop()
            self.tree.clearActiveFormattingElements()
            self.parser.phase = self.parser.phases["inTable"]
        else:
            # innerHTML case
            assert self.parser.innerHTML
            self.parser.parseError()

    def endTagTable(self, token):
        self.parser.parseError()
        ignoreEndTag = self.ignoreEndTagCaption()
        self.parser.phase.processEndTag(impliedTagToken("caption"))
        if not ignoreEndTag:
            return token

    def endTagIgnore(self, token):
        self.parser.parseError("unexpected-end-tag", {"name": token["name"]})

    def endTagOther(self, token):
        return self.parser.phases["inBody"].processEndTag(token)


class InColumnGroupPhase(Phase):
    # http://www.whatwg.org/specs/web-apps/current-work/#in-column
    def __init__(self, parser, tree):
        Phase.__init__(self, parser, tree)
        self.startTagHandler = _utils.MethodDispatcher([
            ("html", self.startTagHtml),
            ("col", self.startTagCol)])
        self.startTagHandler.default = self.startTagOther
        self.endTagHandler = _utils.MethodDispatcher([
            ("colgroup", self.endTagColgroup),
            ("col", self.endTagCol)])
        self.endTagHandler.default = self.endTagOther

    def ignoreEndTagColgroup(self):
        return self.tree.openElements[-1].name == "html"

    def processEOF(self):
        if self.tree.openElements[-1].name == "html":
            assert self.parser.innerHTML
            return
        else:
            ignoreEndTag = self.ignoreEndTagColgroup()
            self.endTagColgroup(impliedTagToken("colgroup"))
            if not ignoreEndTag:
                return True

    def processCharacters(self, token):
        ignoreEndTag = self.ignoreEndTagColgroup()
        self.endTagColgroup(impliedTagToken("colgroup"))
        if not ignoreEndTag:
            return token

    def startTagCol(self, token):
        self.tree.insertElement(token)
        self.tree.openElements.pop()
        token["selfClosingAcknowledged"] = True

    def startTagOther(self, token):
        ignoreEndTag = self.ignoreEndTagColgroup()
        self.endTagColgroup(impliedTagToken("colgroup"))
        if not ignoreEndTag:
            return token

    def endTagColgroup(self, token):
        if self.ignoreEndTagColgroup():
            # innerHTML case
            assert self.parser.innerHTML
            self.parser.parseError()
        else:
            self.tree.openElements.pop()
            self.parser.phase = self.parser.phases["inTable"]

    def endTagCol(self, token):
        self.parser.parseError("no-end-tag", {"name": "col"})

    def endTagOther(self, token):
        ignoreEndTag = self.ignoreEndTagColgroup()
        self.endTagColgroup(impliedTagToken("colgroup"))
        if not ignoreEndTag:
            return token


class InTableBodyPhase(Phase):
    # http://www.whatwg.org/specs/web-apps/current-work/#in-table0
    def __init__(self, parser, tree):
        Phase.__init__(self, parser, tree)
        self.startTagHandler = _utils.MethodDispatcher([
            ("html", self.startTagHtml),
            ("tr", self.startTagTr),
            (("td", "th"), self.startTagTableCell),
            (("caption", "col", "colgroup", "tbody", "tfoot", "thead"),
             self.startTagTableOther)])
        self.startTagHandler.default = self.startTagOther

        self.endTagHandler = _utils.MethodDispatcher([
            (("tbody", "tfoot", "thead"), self.endTagTableRowGroup),
            ("table", self.endTagTable),
            (("body", "caption", "col", "colgroup", "html", "td", "th",
              "tr"), self.endTagIgnore)])
        self.endTagHandler.default = self.endTagOther

    # helper methods
    def clearStackToTableBodyContext(self):
        while self.tree.openElements[-1].name not in ("tbody", "tfoot",
                                                      "thead", "html"):
            # self.parser.parseError("unexpected-implied-end-tag-in-table",
            #  {"name": self.tree.openElements[-1].name})
            self.tree.openElements.pop()
        if self.tree.openElements[-1].name == "html":
            assert self.parser.innerHTML

    # the rest
    def processEOF(self):
        self.parser.phases["inTable"].processEOF()

    def processSpaceCharacters(self, token):
        return self.parser.phases["inTable"].processSpaceCharacters(token)

    def processCharacters(self, token):
        return self.parser.phases["inTable"].processCharacters(token)

    def startTagTr(self, token):
        self.clearStackToTableBodyContext()
        self.tree.insertElement(token)
        self.parser.phase = self.parser.phases["inRow"]

    def startTagTableCell(self, token):
        self.parser.parseError("unexpected-cell-in-table-body",
                               {"name": token["name"]})
        self.startTagTr(impliedTagToken("tr", "StartTag"))
        return token

    def startTagTableOther(self, token):
        # XXX AT Any ideas on how to share this with endTagTable?
        if (self.tree.elementInScope("tbody", variant="table") or
                self.tree.elementInScope("thead", variant="table") or
                self.tree.elementInScope("tfoot", variant="table")):
            self.clearStackToTableBodyContext()
            self.endTagTableRowGroup(
                impliedTagToken(self.tree.openElements[-1].name))
            return token
        else:
            # innerHTML case
            assert self.parser.innerHTML
            self.parser.parseError()

    def startTagOther(self, token):
        return self.parser.phases["inTable"].processStartTag(token)

    def endTagTableRowGroup(self, token):
        if self.tree.elementInScope(token["name"], variant="table"):
            self.clearStackToTableBodyContext()
            self.tree.openElements.pop()
            self.parser.phase = self.parser.phases["inTable"]
        else:
            self.parser.parseError("unexpected-end-tag-in-table-body",
                                   {"name": token["name"]})

    def endTagTable(self, token):
        if (self.tree.elementInScope("tbody", variant="table") or
                self.tree.elementInScope("thead", variant="table") or
                self.tree.elementInScope("tfoot", variant="table")):
            self.clearStackToTableBodyContext()
            self.endTagTableRowGroup(
                impliedTagToken(self.tree.openElements[-1].name))
            return token
        else:
            # innerHTML case
            assert self.parser.innerHTML
            self.parser.parseError()

    def endTagIgnore(self, token):
        self.parser.parseError("unexpected-end-tag-in-table-body",
                               {"name": token["name"]})

    def endTagOther(self, token):
        return self.parser.phases["inTable"].processEndTag(token)


class InRowPhase(Phase):
    # http://www.whatwg.org/specs/web-apps/current-work/#in-row
    def __init__(self, parser, tree):
        Phase.__init__(self, parser, tree)
        self.startTagHandler = _utils.MethodDispatcher([
            ("html", self.startTagHtml),
            (("td", "th"), self.startTagTableCell),
            (("caption", "col", "colgroup", "tbody", "tfoot", "thead",
              "tr"), self.startTagTableOther)])
        self.startTagHandler.default = self.startTagOther

        self.endTagHandler = _utils.MethodDispatcher([
            ("tr", self.endTagTr),
            ("table", self.endTagTable),
            (("tbody", "tfoot", "thead"), self.endTagTableRowGroup),
            (("body", "caption", "col", "colgroup", "html", "td", "th"),
             self.endTagIgnore)])
        self.endTagHandler.default = self.endTagOther

    # helper methods (XXX unify this with other table helper methods)
    def clearStackToTableRowContext(self):
        while self.tree.openElements[-1].name not in ("tr", "html"):
            self.parser.parseError("unexpected-implied-end-tag-in-table-row",
                                   {"name": self.tree.openElements[-1].name})
            self.tree.openElements.pop()

    def ignoreEndTagTr(self):
        return not self.tree.elementInScope("tr", variant="table")

    # the rest
    def processEOF(self):
        self.parser.phases["inTable"].processEOF()

    def processSpaceCharacters(self, token):
        return self.parser.phases["inTable"].processSpaceCharacters(token)

    def processCharacters(self, token):
        return self.parser.phases["inTable"].processCharacters(token)

    def startTagTableCell(self, token):
        self.clearStackToTableRowContext()
        self.tree.insertElement(token)
        self.parser.phase = self.parser.phases["inCell"]
        self.tree.activeFormattingElements.append(Marker)

    def startTagTableOther(self, token):
        ignoreEndTag = self.ignoreEndTagTr()
        self.endTagTr(impliedTagToken("tr"))
        # XXX how are we sure it's always ignored in the innerHTML case?
        if not ignoreEndTag:
            return token

    def startTagOther(self, token):
        return self.parser.phases["inTable"].processStartTag(token)

    def endTagTr(self, token):
        if not self.ignoreEndTagTr():
            self.clearStackToTableRowContext()
            self.tree.openElements.pop()
            self.parser.phase = self.parser.phases["inTableBody"]
        else:
            # innerHTML case
            assert self.parser.innerHTML
            self.parser.parseError()

    def endTagTable(self, token):
        ignoreEndTag = self.ignoreEndTagTr()
        self.endTagTr(impliedTagToken("tr"))
        # Reprocess the current tag if the tr end tag was not ignored
        # XXX how are we sure it's always ignored in the innerHTML case?
        if not ignoreEndTag:
            return token

    def endTagTableRowGroup(self, token):
        if self.tree.elementInScope(token["name"], variant="table"):
            self.endTagTr(impliedTagToken("tr"))
            return token
        else:
            self.parser.parseError()

    def endTagIgnore(self, token):
        self.parser.parseError("unexpected-end-tag-in-table-row",
                               {"name": token["name"]})

    def endTagOther(self, token):
        return self.parser.phases["inTable"].processEndTag(token)


class InCellPhase(Phase):
    # http://www.whatwg.org/specs/web-apps/current-work/#in-cell
    def __init__(self, parser, tree):
        Phase.__init__(self, parser, tree)
        self.startTagHandler = _utils.MethodDispatcher([
            ("html", self.startTagHtml),
            (("caption", "col", "colgroup", "tbody", "td", "tfoot", "th",
              "thead", "tr"), self.startTagTableOther)])
        self.startTagHandler.default = self.startTagOther

        self.endTagHandler = _utils.MethodDispatcher([
            (("td", "th"), self.endTagTableCell),
            (("body", "caption", "col", "colgroup", "html"), self.endTagIgnore),
            (("table", "tbody", "tfoot", "thead", "tr"), self.endTagImply)])
        self.endTagHandler.default = self.endTagOther

    # helper
    def closeCell(self):
        if self.tree.elementInScope("td", variant="table"):
            self.endTagTableCell(impliedTagToken("td"))
        elif self.tree.elementInScope("th", variant="table"):
            self.endTagTableCell(impliedTagToken("th"))

    # the rest
    def processEOF(self):
        self.parser.phases["inBody"].processEOF()

    def processCharacters(self, token):
        return self.parser.phases["inBody"].processCharacters(token)

    def startTagTableOther(self, token):
        if (self.tree.elementInScope("td", variant="table") or
                self.tree.elementInScope("th", variant="table")):
            self.closeCell()
            return token
        else:
            # innerHTML case
            assert self.parser.innerHTML
            self.parser.parseError()

    def startTagOther(self, token):
        return self.parser.phases["inBody"].processStartTag(token)

    def endTagTableCell(self, token):
        if self.tree.elementInScope(token["name"], variant="table"):
            self.tree.generateImpliedEndTags(token["name"])
            if self.tree.openElements[-1].name != token["name"]:
                self.parser.parseError("unexpected-cell-end-tag",
                                       {"name": token["name"]})
                while True:
                    node = self.tree.openElements.pop()
                    if node.name == token["name"]:
                        break
            else:
                self.tree.openElements.pop()
            self.tree.clearActiveFormattingElements()
            self.parser.phase = self.parser.phases["inRow"]
        else:
            self.parser.parseError("unexpected-end-tag", {"name": token["name"]})

    def endTagIgnore(self, token):
        self.parser.parseError("unexpected-end-tag", {"name": token["name"]})

    def endTagImply(self, token):
        if self.tree.elementInScope(token["name"], variant="table"):
            self.closeCell()
            return token
        else:
            # sometimes innerHTML case
            self.parser.parseError()

    def endTagOther(self, token):
        return self.parser.phases["inBody"].processEndTag(token)


class InSelectPhase(Phase):
    # http://www.whatwg.org/specs/web-apps/current-work/#in-select
    def __init__(self, parser, tree):
        Phase.__init__(self, parser, tree)
        self.startTagHandler = _utils.MethodDispatcher([
            ("html", self.startTagHtml),
            ("option", self.startTagOption),
            ("optgroup", self.startTagOptgroup),
            ("select", self.startTagSelect),
            (("input", "keygen", "textarea"), self.startTagInput),
            ("script", self.startTagScript)])
        self.startTagHandler.default = self.startTagOther

        self.endTagHandler = _utils.MethodDispatcher([
            ("option", self.endTagOption),
            ("optgroup", self.endTagOptgroup),
            ("select", self.endTagSelect)])
        self.endTagHandler.default = self.endTagOther

    def processEOF(self):
        if self.tree.openElements[-1].name != "html":
            self.parser.parseError("eof-in-select")
        else:
            assert self.parser.innerHTML

    def processCharacters(self, token):
        if token["data"] == "\u0000":
            return
        self.tree.insertText(token["data"])

    def startTagOption(self, token):
        # We need to imply </option> if <option> is the current node.
        if self.tree.openElements[-1].name == "option":
            self.tree.openElements.pop()
        self.tree.insertElement(token)

    def startTagOptgroup(self, token):
        if self.tree.openElements[-1].name == "option":
            self.tree.openElements.pop()
        if self.tree.openElements[-1].name == "optgroup":
            self.tree.openElements.pop()
        self.tree.insertElement(token)

    def startTagSelect(self, token):
        self.parser.parseError("unexpected-select-in-select")
        self.endTagSelect(impliedTagToken("select"))

    def startTagInput(self, token):
        self.parser.parseError("unexpected-input-in-select")
        if self.tree.elementInScope("select", variant="select"):
            self.endTagSelect(impliedTagToken("select"))
            return token
        else:
            assert self.parser.innerHTML

    def startTagScript(self, token):
        return self.parser.phases["inHead"].processStartTag(token)

    def startTagOther(self, token):
        self.parser.parseError("unexpected-start-tag-in-select",
                               {"name": token["name"]})

    def endTagOption(self, token):
        if self.tree.openElements[-1].name == "option":
            self.tree.openElements.pop()
        else:
            self.parser.parseError("unexpected-end-tag-in-select",
                                   {"name": "option"})

    def endTagOptgroup(self, token):
        # </optgroup> implicitly closes <option>
        if (self.tree.openElements[-1].name == "option" and
                self.tree.openElements[-2].name == "optgroup"):
            self.tree.openElements.pop()
        # It also closes </optgroup>
        if self.tree.openElements[-1].name == "optgroup":
            self.tree.openElements.pop()
        # But nothing else
        else:
            self.parser.parseError("unexpected-end-tag-in-select",
                                   {"name": "optgroup"})

    def endTagSelect(self, token):
        if self.tree.elementInScope("select", variant="select"):
            node = self.tree.openElements.pop()
            while node.name != "select":
                node = self.tree.openElements.pop()
            self.parser.resetInsertionMode()
        else:
            # innerHTML case
            assert self.parser.innerHTML
            self.parser.parseError()

    def endTagOther(self, token):
        self.parser.parseError("unexpected-end-tag-in-select",
                               {"name": token["name"]})


class InSelectInTablePhase(Phase):
    # <select> opened inside a table context: table-structure tags force
    # the select closed, everything else is forwarded to inSelect.
    def __init__(self, parser, tree):
        Phase.__init__(self, parser, tree)
        self.startTagHandler = _utils.MethodDispatcher([
            (("caption", "table", "tbody", "tfoot", "thead", "tr", "td", "th"),
             self.startTagTable)])
        self.startTagHandler.default = self.startTagOther

        self.endTagHandler = _utils.MethodDispatcher([
            (("caption", "table", "tbody", "tfoot", "thead", "tr", "td", "th"),
             self.endTagTable)])
        self.endTagHandler.default = self.endTagOther

    def processEOF(self):
        self.parser.phases["inSelect"].processEOF()

    def processCharacters(self, token):
        return self.parser.phases["inSelect"].processCharacters(token)

    def startTagTable(self, token):
        self.parser.parseError("unexpected-table-element-start-tag-in-select-in-table", {"name": token["name"]})
        self.endTagOther(impliedTagToken("select"))
        return token

    def startTagOther(self, token):
        return self.parser.phases["inSelect"].processStartTag(token)

    def endTagTable(self, token):
        self.parser.parseError("unexpected-table-element-end-tag-in-select-in-table", {"name": token["name"]})
        if self.tree.elementInScope(token["name"], variant="table"):
            self.endTagOther(impliedTagToken("select"))
            return token

    def endTagOther(self, token):
        # NOTE(review): the source chunk is truncated at this point — the
        # expression after `return` lies beyond the visible region. Only the
        # visible tokens are reproduced here; confirm against the full file.
        return
self.parser.phases["inSelect"].processEndTag(token)class InForeignContentPhase(Phase):breakoutElements = frozenset(["b", "big", "blockquote", "body", "br","center", "code", "dd", "div", "dl", "dt","em", "embed", "h1", "h2", "h3","h4", "h5", "h6", "head", "hr", "i", "img","li", "listing", "menu", "meta", "nobr","ol", "p", "pre", "ruby", "s", "small","span", "strong", "strike", "sub", "sup","table", "tt", "u", "ul", "var"])def __init__(self, parser, tree):Phase.__init__(self, parser, tree)def adjustSVGTagNames(self, token):replacements = {"altglyph": "altGlyph","altglyphdef": "altGlyphDef","altglyphitem": "altGlyphItem","animatecolor": "animateColor","animatemotion": "animateMotion","animatetransform": "animateTransform","clippath": "clipPath","feblend": "feBlend","fecolormatrix": "feColorMatrix","fecomponenttransfer": "feComponentTransfer","fecomposite": "feComposite","feconvolvematrix": "feConvolveMatrix","fediffuselighting": "feDiffuseLighting","fedisplacementmap": "feDisplacementMap","fedistantlight": "feDistantLight","feflood": "feFlood","fefunca": "feFuncA","fefuncb": "feFuncB","fefuncg": "feFuncG","fefuncr": "feFuncR","fegaussianblur": "feGaussianBlur","feimage": "feImage","femerge": "feMerge","femergenode": "feMergeNode","femorphology": "feMorphology","feoffset": "feOffset","fepointlight": "fePointLight","fespecularlighting": "feSpecularLighting","fespotlight": "feSpotLight","fetile": "feTile","feturbulence": "feTurbulence","foreignobject": "foreignObject","glyphref": "glyphRef","lineargradient": "linearGradient","radialgradient": "radialGradient","textpath": "textPath"}if token["name"] in replacements:token["name"] = replacements[token["name"]]def processCharacters(self, token):if token["data"] == "\u0000":token["data"] = "\uFFFD"elif (self.parser.framesetOK andany(char not in spaceCharacters for char in token["data"])):self.parser.framesetOK = FalsePhase.processCharacters(self, token)def processStartTag(self, token):currentNode = 
self.tree.openElements[-1]if (token["name"] in self.breakoutElements or(token["name"] == "font" andset(token["data"].keys()) & set(["color", "face", "size"]))):self.parser.parseError("unexpected-html-element-in-foreign-content",{"name": token["name"]})while (self.tree.openElements[-1].namespace !=self.tree.defaultNamespace andnot self.parser.isHTMLIntegrationPoint(self.tree.openElements[-1]) andnot self.parser.isMathMLTextIntegrationPoint(self.tree.openElements[-1])):self.tree.openElements.pop()return tokenelse:if currentNode.namespace == namespaces["mathml"]:self.parser.adjustMathMLAttributes(token)elif currentNode.namespace == namespaces["svg"]:self.adjustSVGTagNames(token)self.parser.adjustSVGAttributes(token)self.parser.adjustForeignAttributes(token)token["namespace"] = currentNode.namespaceself.tree.insertElement(token)if token["selfClosing"]:self.tree.openElements.pop()token["selfClosingAcknowledged"] = Truedef processEndTag(self, token):nodeIndex = len(self.tree.openElements) - 1node = self.tree.openElements[-1]if node.name.translate(asciiUpper2Lower) != token["name"]:self.parser.parseError("unexpected-end-tag", {"name": token["name"]})while True:if node.name.translate(asciiUpper2Lower) == token["name"]:# XXX this isn't in the spec but it seems necessaryif self.parser.phase == self.parser.phases["inTableText"]:self.parser.phase.flushCharacters()self.parser.phase = self.parser.phase.originalPhasewhile self.tree.openElements.pop() != node:assert self.tree.openElementsnew_token = NonebreaknodeIndex -= 1node = self.tree.openElements[nodeIndex]if node.namespace != self.tree.defaultNamespace:continueelse:new_token = self.parser.phase.processEndTag(token)breakreturn new_tokenclass AfterBodyPhase(Phase):def __init__(self, parser, tree):Phase.__init__(self, parser, tree)self.startTagHandler = _utils.MethodDispatcher([("html", self.startTagHtml)])self.startTagHandler.default = self.startTagOtherself.endTagHandler = _utils.MethodDispatcher([("html", 
self.endTagHtml)])self.endTagHandler.default = self.endTagOtherdef processEOF(self):# Stop parsingpassdef processComment(self, token):# This is needed because data is to be appended to the <html> element# here and not to whatever is currently open.self.tree.insertComment(token, self.tree.openElements[0])def processCharacters(self, token):self.parser.parseError("unexpected-char-after-body")self.parser.phase = self.parser.phases["inBody"]return tokendef startTagHtml(self, token):return self.parser.phases["inBody"].processStartTag(token)def startTagOther(self, token):self.parser.parseError("unexpected-start-tag-after-body",{"name": token["name"]})self.parser.phase = self.parser.phases["inBody"]return tokendef endTagHtml(self, name):if self.parser.innerHTML:self.parser.parseError("unexpected-end-tag-after-body-innerhtml")else:self.parser.phase = self.parser.phases["afterAfterBody"]def endTagOther(self, token):self.parser.parseError("unexpected-end-tag-after-body",{"name": token["name"]})self.parser.phase = self.parser.phases["inBody"]return tokenclass InFramesetPhase(Phase):# http://www.whatwg.org/specs/web-apps/current-work/#in-framesetdef __init__(self, parser, tree):Phase.__init__(self, parser, tree)self.startTagHandler = _utils.MethodDispatcher([("html", self.startTagHtml),("frameset", self.startTagFrameset),("frame", self.startTagFrame),("noframes", self.startTagNoframes)])self.startTagHandler.default = self.startTagOtherself.endTagHandler = _utils.MethodDispatcher([("frameset", self.endTagFrameset)])self.endTagHandler.default = self.endTagOtherdef processEOF(self):if self.tree.openElements[-1].name != "html":self.parser.parseError("eof-in-frameset")else:assert self.parser.innerHTMLdef processCharacters(self, token):self.parser.parseError("unexpected-char-in-frameset")def startTagFrameset(self, token):self.tree.insertElement(token)def startTagFrame(self, token):self.tree.insertElement(token)self.tree.openElements.pop()def startTagNoframes(self, token):return 
self.parser.phases["inBody"].processStartTag(token)def startTagOther(self, token):self.parser.parseError("unexpected-start-tag-in-frameset",{"name": token["name"]})def endTagFrameset(self, token):if self.tree.openElements[-1].name == "html":# innerHTML caseself.parser.parseError("unexpected-frameset-in-frameset-innerhtml")else:self.tree.openElements.pop()if (not self.parser.innerHTML andself.tree.openElements[-1].name != "frameset"):# If we're not in innerHTML mode and the current node is not a# "frameset" element (anymore) then switch.self.parser.phase = self.parser.phases["afterFrameset"]def endTagOther(self, token):self.parser.parseError("unexpected-end-tag-in-frameset",{"name": token["name"]})class AfterFramesetPhase(Phase):# http://www.whatwg.org/specs/web-apps/current-work/#after3def __init__(self, parser, tree):Phase.__init__(self, parser, tree)self.startTagHandler = _utils.MethodDispatcher([("html", self.startTagHtml),("noframes", self.startTagNoframes)])self.startTagHandler.default = self.startTagOtherself.endTagHandler = _utils.MethodDispatcher([("html", self.endTagHtml)])self.endTagHandler.default = self.endTagOtherdef processEOF(self):# Stop parsingpassdef processCharacters(self, token):self.parser.parseError("unexpected-char-after-frameset")def startTagNoframes(self, token):return self.parser.phases["inHead"].processStartTag(token)def startTagOther(self, token):self.parser.parseError("unexpected-start-tag-after-frameset",{"name": token["name"]})def endTagHtml(self, token):self.parser.phase = self.parser.phases["afterAfterFrameset"]def endTagOther(self, token):self.parser.parseError("unexpected-end-tag-after-frameset",{"name": token["name"]})class AfterAfterBodyPhase(Phase):def __init__(self, parser, tree):Phase.__init__(self, parser, tree)self.startTagHandler = _utils.MethodDispatcher([("html", self.startTagHtml)])self.startTagHandler.default = self.startTagOtherdef processEOF(self):passdef processComment(self, token):self.tree.insertComment(token, 
self.tree.document)def processSpaceCharacters(self, token):return self.parser.phases["inBody"].processSpaceCharacters(token)def processCharacters(self, token):self.parser.parseError("expected-eof-but-got-char")self.parser.phase = self.parser.phases["inBody"]return tokendef startTagHtml(self, token):return self.parser.phases["inBody"].processStartTag(token)def startTagOther(self, token):self.parser.parseError("expected-eof-but-got-start-tag",{"name": token["name"]})self.parser.phase = self.parser.phases["inBody"]return tokendef processEndTag(self, token):self.parser.parseError("expected-eof-but-got-end-tag",{"name": token["name"]})self.parser.phase = self.parser.phases["inBody"]return tokenclass AfterAfterFramesetPhase(Phase):def __init__(self, parser, tree):Phase.__init__(self, parser, tree)self.startTagHandler = _utils.MethodDispatcher([("html", self.startTagHtml),("noframes", self.startTagNoFrames)])self.startTagHandler.default = self.startTagOtherdef processEOF(self):passdef processComment(self, token):self.tree.insertComment(token, self.tree.document)def processSpaceCharacters(self, token):return self.parser.phases["inBody"].processSpaceCharacters(token)def processCharacters(self, token):self.parser.parseError("expected-eof-but-got-char")def startTagHtml(self, token):return self.parser.phases["inBody"].processStartTag(token)def startTagNoFrames(self, token):return self.parser.phases["inHead"].processStartTag(token)def startTagOther(self, token):self.parser.parseError("expected-eof-but-got-start-tag",{"name": token["name"]})def processEndTag(self, token):self.parser.parseError("expected-eof-but-got-end-tag",{"name": token["name"]})# pylint:enable=unused-argumentreturn {"initial": InitialPhase,"beforeHtml": BeforeHtmlPhase,"beforeHead": BeforeHeadPhase,"inHead": InHeadPhase,"inHeadNoscript": InHeadNoscriptPhase,"afterHead": AfterHeadPhase,"inBody": InBodyPhase,"text": TextPhase,"inTable": InTablePhase,"inTableText": InTableTextPhase,"inCaption": 
InCaptionPhase,"inColumnGroup": InColumnGroupPhase,"inTableBody": InTableBodyPhase,"inRow": InRowPhase,"inCell": InCellPhase,"inSelect": InSelectPhase,"inSelectInTable": InSelectInTablePhase,"inForeignContent": InForeignContentPhase,"afterBody": AfterBodyPhase,"inFrameset": InFramesetPhase,"afterFrameset": AfterFramesetPhase,"afterAfterBody": AfterAfterBodyPhase,"afterAfterFrameset": AfterAfterFramesetPhase,# XXX after after frameset}def adjust_attributes(token, replacements):if PY3 or _utils.PY27:needs_adjustment = viewkeys(token['data']) & viewkeys(replacements)else:needs_adjustment = frozenset(token['data']) & frozenset(replacements)if needs_adjustment:token['data'] = OrderedDict((replacements.get(k, k), v)for k, v in token['data'].items())def impliedTagToken(name, type="EndTag", attributes=None,selfClosing=False):if attributes is None:attributes = {}return {"type": tokenTypes[type], "name": name, "data": attributes,"selfClosing": selfClosing}class ParseError(Exception):"""Error in parsed document"""pass
from __future__ import absolute_import, division, unicode_literalsimport refrom . import basefrom ..constants import rcdataElements, spaceCharactersspaceCharacters = "".join(spaceCharacters)SPACES_REGEX = re.compile("[%s]+" % spaceCharacters)class Filter(base.Filter):spacePreserveElements = frozenset(["pre", "textarea"] + list(rcdataElements))def __iter__(self):preserve = 0for token in base.Filter.__iter__(self):type = token["type"]if type == "StartTag" \and (preserve or token["name"] in self.spacePreserveElements):preserve += 1elif type == "EndTag" and preserve:preserve -= 1elif not preserve and type == "SpaceCharacters" and token["data"]:# Test on token["data"] above to not introduce spaces where there were nottoken["data"] = " "elif not preserve and type == "Characters":token["data"] = collapse_spaces(token["data"])yield tokendef collapse_spaces(text):return SPACES_REGEX.sub(' ', text)
from __future__ import absolute_import, division, unicode_literalsimport refrom xml.sax.saxutils import escape, unescapefrom pip._vendor.six.moves import urllib_parse as urlparsefrom . import basefrom ..constants import namespaces, prefixes__all__ = ["Filter"]allowed_elements = frozenset(((namespaces['html'], 'a'),(namespaces['html'], 'abbr'),(namespaces['html'], 'acronym'),(namespaces['html'], 'address'),(namespaces['html'], 'area'),(namespaces['html'], 'article'),(namespaces['html'], 'aside'),(namespaces['html'], 'audio'),(namespaces['html'], 'b'),(namespaces['html'], 'big'),(namespaces['html'], 'blockquote'),(namespaces['html'], 'br'),(namespaces['html'], 'button'),(namespaces['html'], 'canvas'),(namespaces['html'], 'caption'),(namespaces['html'], 'center'),(namespaces['html'], 'cite'),(namespaces['html'], 'code'),(namespaces['html'], 'col'),(namespaces['html'], 'colgroup'),(namespaces['html'], 'command'),(namespaces['html'], 'datagrid'),(namespaces['html'], 'datalist'),(namespaces['html'], 'dd'),(namespaces['html'], 'del'),(namespaces['html'], 'details'),(namespaces['html'], 'dfn'),(namespaces['html'], 'dialog'),(namespaces['html'], 'dir'),(namespaces['html'], 'div'),(namespaces['html'], 'dl'),(namespaces['html'], 'dt'),(namespaces['html'], 'em'),(namespaces['html'], 'event-source'),(namespaces['html'], 'fieldset'),(namespaces['html'], 'figcaption'),(namespaces['html'], 'figure'),(namespaces['html'], 'footer'),(namespaces['html'], 'font'),(namespaces['html'], 'form'),(namespaces['html'], 'header'),(namespaces['html'], 'h1'),(namespaces['html'], 'h2'),(namespaces['html'], 'h3'),(namespaces['html'], 'h4'),(namespaces['html'], 'h5'),(namespaces['html'], 'h6'),(namespaces['html'], 'hr'),(namespaces['html'], 'i'),(namespaces['html'], 'img'),(namespaces['html'], 'input'),(namespaces['html'], 'ins'),(namespaces['html'], 'keygen'),(namespaces['html'], 'kbd'),(namespaces['html'], 'label'),(namespaces['html'], 'legend'),(namespaces['html'], 'li'),(namespaces['html'], 
'm'),(namespaces['html'], 'map'),(namespaces['html'], 'menu'),(namespaces['html'], 'meter'),(namespaces['html'], 'multicol'),(namespaces['html'], 'nav'),(namespaces['html'], 'nextid'),(namespaces['html'], 'ol'),(namespaces['html'], 'output'),(namespaces['html'], 'optgroup'),(namespaces['html'], 'option'),(namespaces['html'], 'p'),(namespaces['html'], 'pre'),(namespaces['html'], 'progress'),(namespaces['html'], 'q'),(namespaces['html'], 's'),(namespaces['html'], 'samp'),(namespaces['html'], 'section'),(namespaces['html'], 'select'),(namespaces['html'], 'small'),(namespaces['html'], 'sound'),(namespaces['html'], 'source'),(namespaces['html'], 'spacer'),(namespaces['html'], 'span'),(namespaces['html'], 'strike'),(namespaces['html'], 'strong'),(namespaces['html'], 'sub'),(namespaces['html'], 'sup'),(namespaces['html'], 'table'),(namespaces['html'], 'tbody'),(namespaces['html'], 'td'),(namespaces['html'], 'textarea'),(namespaces['html'], 'time'),(namespaces['html'], 'tfoot'),(namespaces['html'], 'th'),(namespaces['html'], 'thead'),(namespaces['html'], 'tr'),(namespaces['html'], 'tt'),(namespaces['html'], 'u'),(namespaces['html'], 'ul'),(namespaces['html'], 'var'),(namespaces['html'], 'video'),(namespaces['mathml'], 'maction'),(namespaces['mathml'], 'math'),(namespaces['mathml'], 'merror'),(namespaces['mathml'], 'mfrac'),(namespaces['mathml'], 'mi'),(namespaces['mathml'], 'mmultiscripts'),(namespaces['mathml'], 'mn'),(namespaces['mathml'], 'mo'),(namespaces['mathml'], 'mover'),(namespaces['mathml'], 'mpadded'),(namespaces['mathml'], 'mphantom'),(namespaces['mathml'], 'mprescripts'),(namespaces['mathml'], 'mroot'),(namespaces['mathml'], 'mrow'),(namespaces['mathml'], 'mspace'),(namespaces['mathml'], 'msqrt'),(namespaces['mathml'], 'mstyle'),(namespaces['mathml'], 'msub'),(namespaces['mathml'], 'msubsup'),(namespaces['mathml'], 'msup'),(namespaces['mathml'], 'mtable'),(namespaces['mathml'], 'mtd'),(namespaces['mathml'], 'mtext'),(namespaces['mathml'], 
'mtr'),(namespaces['mathml'], 'munder'),(namespaces['mathml'], 'munderover'),(namespaces['mathml'], 'none'),(namespaces['svg'], 'a'),(namespaces['svg'], 'animate'),(namespaces['svg'], 'animateColor'),(namespaces['svg'], 'animateMotion'),(namespaces['svg'], 'animateTransform'),(namespaces['svg'], 'clipPath'),(namespaces['svg'], 'circle'),(namespaces['svg'], 'defs'),(namespaces['svg'], 'desc'),(namespaces['svg'], 'ellipse'),(namespaces['svg'], 'font-face'),(namespaces['svg'], 'font-face-name'),(namespaces['svg'], 'font-face-src'),(namespaces['svg'], 'g'),(namespaces['svg'], 'glyph'),(namespaces['svg'], 'hkern'),(namespaces['svg'], 'linearGradient'),(namespaces['svg'], 'line'),(namespaces['svg'], 'marker'),(namespaces['svg'], 'metadata'),(namespaces['svg'], 'missing-glyph'),(namespaces['svg'], 'mpath'),(namespaces['svg'], 'path'),(namespaces['svg'], 'polygon'),(namespaces['svg'], 'polyline'),(namespaces['svg'], 'radialGradient'),(namespaces['svg'], 'rect'),(namespaces['svg'], 'set'),(namespaces['svg'], 'stop'),(namespaces['svg'], 'svg'),(namespaces['svg'], 'switch'),(namespaces['svg'], 'text'),(namespaces['svg'], 'title'),(namespaces['svg'], 'tspan'),(namespaces['svg'], 'use'),))allowed_attributes = frozenset((# HTML attributes(None, 'abbr'),(None, 'accept'),(None, 'accept-charset'),(None, 'accesskey'),(None, 'action'),(None, 'align'),(None, 'alt'),(None, 'autocomplete'),(None, 'autofocus'),(None, 'axis'),(None, 'background'),(None, 'balance'),(None, 'bgcolor'),(None, 'bgproperties'),(None, 'border'),(None, 'bordercolor'),(None, 'bordercolordark'),(None, 'bordercolorlight'),(None, 'bottompadding'),(None, 'cellpadding'),(None, 'cellspacing'),(None, 'ch'),(None, 'challenge'),(None, 'char'),(None, 'charoff'),(None, 'choff'),(None, 'charset'),(None, 'checked'),(None, 'cite'),(None, 'class'),(None, 'clear'),(None, 'color'),(None, 'cols'),(None, 'colspan'),(None, 'compact'),(None, 'contenteditable'),(None, 'controls'),(None, 'coords'),(None, 'data'),(None, 'datafld'),(None, 
'datapagesize'),(None, 'datasrc'),(None, 'datetime'),(None, 'default'),(None, 'delay'),(None, 'dir'),(None, 'disabled'),(None, 'draggable'),(None, 'dynsrc'),(None, 'enctype'),(None, 'end'),(None, 'face'),(None, 'for'),(None, 'form'),(None, 'frame'),(None, 'galleryimg'),(None, 'gutter'),(None, 'headers'),(None, 'height'),(None, 'hidefocus'),(None, 'hidden'),(None, 'high'),(None, 'href'),(None, 'hreflang'),(None, 'hspace'),(None, 'icon'),(None, 'id'),(None, 'inputmode'),(None, 'ismap'),(None, 'keytype'),(None, 'label'),(None, 'leftspacing'),(None, 'lang'),(None, 'list'),(None, 'longdesc'),(None, 'loop'),(None, 'loopcount'),(None, 'loopend'),(None, 'loopstart'),(None, 'low'),(None, 'lowsrc'),(None, 'max'),(None, 'maxlength'),(None, 'media'),(None, 'method'),(None, 'min'),(None, 'multiple'),(None, 'name'),(None, 'nohref'),(None, 'noshade'),(None, 'nowrap'),(None, 'open'),(None, 'optimum'),(None, 'pattern'),(None, 'ping'),(None, 'point-size'),(None, 'poster'),(None, 'pqg'),(None, 'preload'),(None, 'prompt'),(None, 'radiogroup'),(None, 'readonly'),(None, 'rel'),(None, 'repeat-max'),(None, 'repeat-min'),(None, 'replace'),(None, 'required'),(None, 'rev'),(None, 'rightspacing'),(None, 'rows'),(None, 'rowspan'),(None, 'rules'),(None, 'scope'),(None, 'selected'),(None, 'shape'),(None, 'size'),(None, 'span'),(None, 'src'),(None, 'start'),(None, 'step'),(None, 'style'),(None, 'summary'),(None, 'suppress'),(None, 'tabindex'),(None, 'target'),(None, 'template'),(None, 'title'),(None, 'toppadding'),(None, 'type'),(None, 'unselectable'),(None, 'usemap'),(None, 'urn'),(None, 'valign'),(None, 'value'),(None, 'variable'),(None, 'volume'),(None, 'vspace'),(None, 'vrml'),(None, 'width'),(None, 'wrap'),(namespaces['xml'], 'lang'),# MathML attributes(None, 'actiontype'),(None, 'align'),(None, 'columnalign'),(None, 'columnalign'),(None, 'columnalign'),(None, 'columnlines'),(None, 'columnspacing'),(None, 'columnspan'),(None, 'depth'),(None, 'display'),(None, 'displaystyle'),(None, 
'equalcolumns'),(None, 'equalrows'),(None, 'fence'),(None, 'fontstyle'),(None, 'fontweight'),(None, 'frame'),(None, 'height'),(None, 'linethickness'),(None, 'lspace'),(None, 'mathbackground'),(None, 'mathcolor'),(None, 'mathvariant'),(None, 'mathvariant'),(None, 'maxsize'),(None, 'minsize'),(None, 'other'),(None, 'rowalign'),(None, 'rowalign'),(None, 'rowalign'),(None, 'rowlines'),(None, 'rowspacing'),(None, 'rowspan'),(None, 'rspace'),(None, 'scriptlevel'),(None, 'selection'),(None, 'separator'),(None, 'stretchy'),(None, 'width'),(None, 'width'),(namespaces['xlink'], 'href'),(namespaces['xlink'], 'show'),(namespaces['xlink'], 'type'),# SVG attributes(None, 'accent-height'),(None, 'accumulate'),(None, 'additive'),(None, 'alphabetic'),(None, 'arabic-form'),(None, 'ascent'),(None, 'attributeName'),(None, 'attributeType'),(None, 'baseProfile'),(None, 'bbox'),(None, 'begin'),(None, 'by'),(None, 'calcMode'),(None, 'cap-height'),(None, 'class'),(None, 'clip-path'),(None, 'color'),(None, 'color-rendering'),(None, 'content'),(None, 'cx'),(None, 'cy'),(None, 'd'),(None, 'dx'),(None, 'dy'),(None, 'descent'),(None, 'display'),(None, 'dur'),(None, 'end'),(None, 'fill'),(None, 'fill-opacity'),(None, 'fill-rule'),(None, 'font-family'),(None, 'font-size'),(None, 'font-stretch'),(None, 'font-style'),(None, 'font-variant'),(None, 'font-weight'),(None, 'from'),(None, 'fx'),(None, 'fy'),(None, 'g1'),(None, 'g2'),(None, 'glyph-name'),(None, 'gradientUnits'),(None, 'hanging'),(None, 'height'),(None, 'horiz-adv-x'),(None, 'horiz-origin-x'),(None, 'id'),(None, 'ideographic'),(None, 'k'),(None, 'keyPoints'),(None, 'keySplines'),(None, 'keyTimes'),(None, 'lang'),(None, 'marker-end'),(None, 'marker-mid'),(None, 'marker-start'),(None, 'markerHeight'),(None, 'markerUnits'),(None, 'markerWidth'),(None, 'mathematical'),(None, 'max'),(None, 'min'),(None, 'name'),(None, 'offset'),(None, 'opacity'),(None, 'orient'),(None, 'origin'),(None, 'overline-position'),(None, 'overline-thickness'),(None, 
'panose-1'),(None, 'path'),(None, 'pathLength'),(None, 'points'),(None, 'preserveAspectRatio'),(None, 'r'),(None, 'refX'),(None, 'refY'),(None, 'repeatCount'),(None, 'repeatDur'),(None, 'requiredExtensions'),(None, 'requiredFeatures'),(None, 'restart'),(None, 'rotate'),(None, 'rx'),(None, 'ry'),(None, 'slope'),(None, 'stemh'),(None, 'stemv'),(None, 'stop-color'),(None, 'stop-opacity'),(None, 'strikethrough-position'),(None, 'strikethrough-thickness'),(None, 'stroke'),(None, 'stroke-dasharray'),(None, 'stroke-dashoffset'),(None, 'stroke-linecap'),(None, 'stroke-linejoin'),(None, 'stroke-miterlimit'),(None, 'stroke-opacity'),(None, 'stroke-width'),(None, 'systemLanguage'),(None, 'target'),(None, 'text-anchor'),(None, 'to'),(None, 'transform'),(None, 'type'),(None, 'u1'),(None, 'u2'),(None, 'underline-position'),(None, 'underline-thickness'),(None, 'unicode'),(None, 'unicode-range'),(None, 'units-per-em'),(None, 'values'),(None, 'version'),(None, 'viewBox'),(None, 'visibility'),(None, 'width'),(None, 'widths'),(None, 'x'),(None, 'x-height'),(None, 'x1'),(None, 'x2'),(namespaces['xlink'], 'actuate'),(namespaces['xlink'], 'arcrole'),(namespaces['xlink'], 'href'),(namespaces['xlink'], 'role'),(namespaces['xlink'], 'show'),(namespaces['xlink'], 'title'),(namespaces['xlink'], 'type'),(namespaces['xml'], 'base'),(namespaces['xml'], 'lang'),(namespaces['xml'], 'space'),(None, 'y'),(None, 'y1'),(None, 'y2'),(None, 'zoomAndPan'),))attr_val_is_uri = frozenset(((None, 'href'),(None, 'src'),(None, 'cite'),(None, 'action'),(None, 'longdesc'),(None, 'poster'),(None, 'background'),(None, 'datasrc'),(None, 'dynsrc'),(None, 'lowsrc'),(None, 'ping'),(namespaces['xlink'], 'href'),(namespaces['xml'], 'base'),))svg_attr_val_allows_ref = frozenset(((None, 'clip-path'),(None, 'color-profile'),(None, 'cursor'),(None, 'fill'),(None, 'filter'),(None, 'marker'),(None, 'marker-start'),(None, 'marker-mid'),(None, 'marker-end'),(None, 'mask'),(None, 'stroke'),))svg_allow_local_href = 
frozenset(((None, 'altGlyph'),(None, 'animate'),(None, 'animateColor'),(None, 'animateMotion'),(None, 'animateTransform'),(None, 'cursor'),(None, 'feImage'),(None, 'filter'),(None, 'linearGradient'),(None, 'pattern'),(None, 'radialGradient'),(None, 'textpath'),(None, 'tref'),(None, 'set'),(None, 'use')))allowed_css_properties = frozenset(('azimuth','background-color','border-bottom-color','border-collapse','border-color','border-left-color','border-right-color','border-top-color','clear','color','cursor','direction','display','elevation','float','font','font-family','font-size','font-style','font-variant','font-weight','height','letter-spacing','line-height','overflow','pause','pause-after','pause-before','pitch','pitch-range','richness','speak','speak-header','speak-numeral','speak-punctuation','speech-rate','stress','text-align','text-decoration','text-indent','unicode-bidi','vertical-align','voice-family','volume','white-space','width',))allowed_css_keywords = frozenset(('auto','aqua','black','block','blue','bold','both','bottom','brown','center','collapse','dashed','dotted','fuchsia','gray','green','!important','italic','left','lime','maroon','medium','none','navy','normal','nowrap','olive','pointer','purple','red','right','solid','silver','teal','top','transparent','underline','white','yellow',))allowed_svg_properties = frozenset(('fill','fill-opacity','fill-rule','stroke','stroke-width','stroke-linecap','stroke-linejoin','stroke-opacity',))allowed_protocols = frozenset(('ed2k','ftp','http','https','irc','mailto','news','gopher','nntp','telnet','webcal','xmpp','callto','feed','urn','aim','rsync','tag','ssh','sftp','rtsp','afs','data',))allowed_content_types = frozenset(('image/png','image/jpeg','image/gif','image/webp','image/bmp','text/plain',))data_content_type = re.compile(r'''^# Match a content type <application>/<type>(?P<content_type>[-a-zA-Z0-9.]+/[-a-zA-Z0-9.]+)# Match any character set and 
encoding(?:(?:;charset=(?:[-a-zA-Z0-9]+)(?:;(?:base64))?)|(?:;(?:base64))?(?:;charset=(?:[-a-zA-Z0-9]+))?)# Assume the rest is data,.*$''',re.VERBOSE)class Filter(base.Filter):""" sanitization of XHTML+MathML+SVG and of inline style attributes."""def __init__(self,source,allowed_elements=allowed_elements,allowed_attributes=allowed_attributes,allowed_css_properties=allowed_css_properties,allowed_css_keywords=allowed_css_keywords,allowed_svg_properties=allowed_svg_properties,allowed_protocols=allowed_protocols,allowed_content_types=allowed_content_types,attr_val_is_uri=attr_val_is_uri,svg_attr_val_allows_ref=svg_attr_val_allows_ref,svg_allow_local_href=svg_allow_local_href):super(Filter, self).__init__(source)self.allowed_elements = allowed_elementsself.allowed_attributes = allowed_attributesself.allowed_css_properties = allowed_css_propertiesself.allowed_css_keywords = allowed_css_keywordsself.allowed_svg_properties = allowed_svg_propertiesself.allowed_protocols = allowed_protocolsself.allowed_content_types = allowed_content_typesself.attr_val_is_uri = attr_val_is_uriself.svg_attr_val_allows_ref = svg_attr_val_allows_refself.svg_allow_local_href = svg_allow_local_hrefdef __iter__(self):for token in base.Filter.__iter__(self):token = self.sanitize_token(token)if token:yield token# Sanitize the +html+, escaping all elements not in ALLOWED_ELEMENTS, and# stripping out all # attributes not in ALLOWED_ATTRIBUTES. 
Style# attributes are parsed, and a restricted set, # specified by# ALLOWED_CSS_PROPERTIES and ALLOWED_CSS_KEYWORDS, are allowed through.# attributes in ATTR_VAL_IS_URI are scanned, and only URI schemes specified# in ALLOWED_PROTOCOLS are allowed.## sanitize_html('<script> do_nasty_stuff() </script>')# => <script> do_nasty_stuff() </script># sanitize_html('<a href="javascript: sucker();">Click here for $100</a>')# => <a>Click here for $100</a>def sanitize_token(self, token):# accommodate filters which use token_type differentlytoken_type = token["type"]if token_type in ("StartTag", "EndTag", "EmptyTag"):name = token["name"]namespace = token["namespace"]if ((namespace, name) in self.allowed_elements or(namespace is None and(namespaces["html"], name) in self.allowed_elements)):return self.allowed_token(token)else:return self.disallowed_token(token)elif token_type == "Comment":passelse:return tokendef allowed_token(self, token):if "data" in token:attrs = token["data"]attr_names = set(attrs.keys())# Remove forbidden attributesfor to_remove in (attr_names - self.allowed_attributes):del token["data"][to_remove]attr_names.remove(to_remove)# Remove attributes with disallowed URL valuesfor attr in (attr_names & self.attr_val_is_uri):assert attr in attrs# I don't have a clue where this regexp comes from or why it matches those# characters, nor why we call unescape. I just know it's always been here.# Should you be worried by this comment in a sanitizer? Yes. 
On the other hand, all# this will do is remove *more* than it otherwise would.val_unescaped = re.sub("[`\x00-\x20\x7f-\xa0\s]+", '',unescape(attrs[attr])).lower()# remove replacement characters from unescaped charactersval_unescaped = val_unescaped.replace("\ufffd", "")try:uri = urlparse.urlparse(val_unescaped)except ValueError:uri = Nonedel attrs[attr]if uri and uri.scheme:if uri.scheme not in self.allowed_protocols:del attrs[attr]if uri.scheme == 'data':m = data_content_type.match(uri.path)if not m:del attrs[attr]elif m.group('content_type') not in self.allowed_content_types:del attrs[attr]for attr in self.svg_attr_val_allows_ref:if attr in attrs:attrs[attr] = re.sub(r'url\s*\(\s*[^#\s][^)]+?\)',' ',unescape(attrs[attr]))if (token["name"] in self.svg_allow_local_href and(namespaces['xlink'], 'href') in attrs and re.search('^\s*[^#\s].*',attrs[(namespaces['xlink'], 'href')])):del attrs[(namespaces['xlink'], 'href')]if (None, 'style') in attrs:attrs[(None, 'style')] = self.sanitize_css(attrs[(None, 'style')])token["data"] = attrsreturn tokendef disallowed_token(self, token):token_type = token["type"]if token_type == "EndTag":token["data"] = "</%s>" % token["name"]elif token["data"]:assert token_type in ("StartTag", "EmptyTag")attrs = []for (ns, name), v in token["data"].items():attrs.append(' %s="%s"' % (name if ns is None else "%s:%s" % (prefixes[ns], name), escape(v)))token["data"] = "<%s%s>" % (token["name"], ''.join(attrs))else:token["data"] = "<%s>" % token["name"]if token.get("selfClosing"):token["data"] = token["data"][:-1] + "/>"token["type"] = "Characters"del token["name"]return tokendef sanitize_css(self, style):# disallow urlsstyle = re.compile('url\s*\(\s*[^\s)]+?\s*\)\s*').sub(' ', style)# gauntletif not re.match("""^([:,;#%.\sa-zA-Z0-9!]|\w-\w|'[\s\w]+'|"[\s\w]+"|\([\d,\s]+\))*$""", style):return ''if not re.match("^\s*([-\w]+\s*:[^:;]*(;\s*|$))*$", style):return ''clean = []for prop, value in re.findall("([-\w]+)\s*:\s*([^:;]*)", style):if not 
value:continueif prop.lower() in self.allowed_css_properties:clean.append(prop + ': ' + value + ';')elif prop.split('-')[0].lower() in ['background', 'border', 'margin','padding']:for keyword in value.split():if keyword not in self.allowed_css_keywords and \not re.match("^(#[0-9a-f]+|rgb\(\d+%?,\d*%?,?\d*%?\)?|\d{0,2}\.?\d{0,2}(cm|em|ex|in|mm|pc|pt|px|%|,|\))?)$", keyword): # noqabreakelse:clean.append(prop + ': ' + value + ';')elif prop.lower() in self.allowed_svg_properties:clean.append(prop + ': ' + value + ';')return ' '.join(clean)
from __future__ import absolute_import, division, unicode_literals

from . import base


class Filter(base.Filter):
    """Removes start/end tags from the token stream when the HTML
    specification marks them optional and dropping them cannot change how
    the document re-parses (e.g. ``</li>`` immediately before another
    ``<li>``).
    """

    def slider(self):
        """Yield ``(previous, token, next)`` triples over the source stream.

        ``previous`` is None for the first token and ``next`` is None for
        the last, giving each decision one token of lookbehind/lookahead.
        """
        previous1 = previous2 = None
        for token in self.source:
            if previous1 is not None:
                yield previous2, previous1, token
            previous2 = previous1
            previous1 = token
        if previous1 is not None:
            # Flush the final token; it has no successor.
            yield previous2, previous1, None

    def __iter__(self):
        for previous, token, next in self.slider():
            type = token["type"]
            if type == "StartTag":
                # A start tag carrying attributes can never be omitted.
                if (token["data"] or
                        not self.is_optional_start(token["name"], previous, next)):
                    yield token
            elif type == "EndTag":
                if not self.is_optional_end(token["name"], next):
                    yield token
            else:
                yield token

    def is_optional_start(self, tagname, previous, next):
        """Return True when this start tag may be omitted per the spec."""
        type = next and next["type"] or None
        # BUG FIX: this previously read ``tagname in 'html'``, which is a
        # substring test (it would also match a tag literally named "m",
        # "l", "t", ...); equality against 'html' is what was intended.
        if tagname == 'html':
            # An html element's start tag may be omitted if the first thing
            # inside the html element is not a space character or a comment.
            return type not in ("Comment", "SpaceCharacters")
        elif tagname == 'head':
            # A head element's start tag may be omitted if the first thing
            # inside the head element is an element.
            # XXX: we also omit the start tag if the head element is empty
            if type in ("StartTag", "EmptyTag"):
                return True
            elif type == "EndTag":
                return next["name"] == "head"
        elif tagname == 'body':
            # A body element's start tag may be omitted if the first thing
            # inside the body element is not a space character or a comment,
            # except if the first thing inside the body element is a script
            # or style element and the node immediately preceding the body
            # element is a head element whose end tag has been omitted.
            if type in ("Comment", "SpaceCharacters"):
                return False
            elif type == "StartTag":
                # XXX: we do not look at the preceding event, so we never omit
                # the body element's start tag if it's followed by a script or
                # a style element.
                return next["name"] not in ('script', 'style')
            else:
                return True
        elif tagname == 'colgroup':
            # A colgroup element's start tag may be omitted if the first thing
            # inside the colgroup element is a col element, and if the element
            # is not immediately preceded by another colgroup element whose
            # end tag has been omitted.
            if type in ("StartTag", "EmptyTag"):
                # XXX: we do not look at the preceding event, so instead we never
                # omit the colgroup element's end tag when it is immediately
                # followed by another colgroup element. See is_optional_end.
                return next["name"] == "col"
            else:
                return False
        elif tagname == 'tbody':
            # A tbody element's start tag may be omitted if the first thing
            # inside the tbody element is a tr element, and if the element is
            # not immediately preceded by a tbody, thead, or tfoot element
            # whose end tag has been omitted.
            if type == "StartTag":
                # omit the thead and tfoot elements' end tag when they are
                # immediately followed by a tbody element. See is_optional_end.
                if previous and previous['type'] == 'EndTag' and \
                        previous['name'] in ('tbody', 'thead', 'tfoot'):
                    return False
                return next["name"] == 'tr'
            else:
                return False
        return False

    def is_optional_end(self, tagname, next):
        """Return True when this end tag may be omitted per the spec."""
        type = next and next["type"] or None
        if tagname in ('html', 'head', 'body'):
            # An html element's end tag may be omitted if the html element
            # is not immediately followed by a space character or a comment.
            return type not in ("Comment", "SpaceCharacters")
        elif tagname in ('li', 'optgroup', 'tr'):
            # A li element's end tag may be omitted if the li element is
            # immediately followed by another li element or if there is
            # no more content in the parent element.
            # An optgroup element's end tag may be omitted if the optgroup
            # element is immediately followed by another optgroup element,
            # or if there is no more content in the parent element.
            # A tr element's end tag may be omitted if the tr element is
            # immediately followed by another tr element, or if there is
            # no more content in the parent element.
            if type == "StartTag":
                return next["name"] == tagname
            else:
                return type == "EndTag" or type is None
        elif tagname in ('dt', 'dd'):
            # A dt element's end tag may be omitted if the dt element is
            # immediately followed by another dt element or a dd element.
            # A dd element's end tag may be omitted if the dd element is
            # immediately followed by another dd element or a dt element,
            # or if there is no more content in the parent element.
            if type == "StartTag":
                return next["name"] in ('dt', 'dd')
            elif tagname == 'dd':
                return type == "EndTag" or type is None
            else:
                return False
        elif tagname == 'p':
            # A p element's end tag may be omitted if the p element is
            # immediately followed by an address, article, aside,
            # blockquote, datagrid, dialog, dir, div, dl, fieldset,
            # footer, form, h1, h2, h3, h4, h5, h6, header, hr, menu,
            # nav, ol, p, pre, section, table, or ul, element, or if
            # there is no more content in the parent element.
            if type in ("StartTag", "EmptyTag"):
                return next["name"] in ('address', 'article', 'aside',
                                        'blockquote', 'datagrid', 'dialog',
                                        'dir', 'div', 'dl', 'fieldset', 'footer',
                                        'form', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6',
                                        'header', 'hr', 'menu', 'nav', 'ol',
                                        'p', 'pre', 'section', 'table', 'ul')
            else:
                return type == "EndTag" or type is None
        elif tagname == 'option':
            # An option element's end tag may be omitted if the option
            # element is immediately followed by another option element,
            # or if it is immediately followed by an <code>optgroup</code>
            # element, or if there is no more content in the parent
            # element.
            if type == "StartTag":
                return next["name"] in ('option', 'optgroup')
            else:
                return type == "EndTag" or type is None
        elif tagname in ('rt', 'rp'):
            # An rt element's end tag may be omitted if the rt element is
            # immediately followed by an rt or rp element, or if there is
            # no more content in the parent element.
            # An rp element's end tag may be omitted if the rp element is
            # immediately followed by an rt or rp element, or if there is
            # no more content in the parent element.
            if type == "StartTag":
                return next["name"] in ('rt', 'rp')
            else:
                return type == "EndTag" or type is None
        elif tagname == 'colgroup':
            # A colgroup element's end tag may be omitted if the colgroup
            # element is not immediately followed by a space character or
            # a comment.
            if type in ("Comment", "SpaceCharacters"):
                return False
            elif type == "StartTag":
                # XXX: we also look for an immediately following colgroup
                # element. See is_optional_start.
                return next["name"] != 'colgroup'
            else:
                return True
        elif tagname in ('thead', 'tbody'):
            # A thead element's end tag may be omitted if the thead element
            # is immediately followed by a tbody or tfoot element.
            # A tbody element's end tag may be omitted if the tbody element
            # is immediately followed by a tbody or tfoot element, or if
            # there is no more content in the parent element.
            # A tfoot element's end tag may be omitted if the tfoot element
            # is immediately followed by a tbody element, or if there is no
            # more content in the parent element.
            # XXX: we never omit the end tag when the following element is
            # a tbody. See is_optional_start.
            if type == "StartTag":
                return next["name"] in ['tbody', 'tfoot']
            elif tagname == 'tbody':
                return type == "EndTag" or type is None
            else:
                return False
        elif tagname == 'tfoot':
            # A tfoot element's end tag may be omitted if the tfoot element
            # is immediately followed by a tbody element, or if there is no
            # more content in the parent element.
            # XXX: we never omit the end tag when the following element is
            # a tbody. See is_optional_start.
            if type == "StartTag":
                return next["name"] == 'tbody'
            else:
                return type == "EndTag" or type is None
        elif tagname in ('td', 'th'):
            # A td element's end tag may be omitted if the td element is
            # immediately followed by a td or th element, or if there is
            # no more content in the parent element.
            # A th element's end tag may be omitted if the th element is
            # immediately followed by a td or th element, or if there is
            # no more content in the parent element.
            if type == "StartTag":
                return next["name"] in ('td', 'th')
            else:
                return type == "EndTag" or type is None
        return False
from __future__ import absolute_import, division, unicode_literals

from pip._vendor.six import text_type

from . import base
from ..constants import namespaces, voidElements

from ..constants import spaceCharacters
spaceCharacters = "".join(spaceCharacters)


class Filter(base.Filter):
    """Lints a token stream: asserts the structural invariants every token
    type must satisfy, then yields each token through unchanged.

    :arg source: the token stream to wrap
    :arg require_matching_tags: when True (default), additionally assert
        that every EndTag matches the most recently opened StartTag.
    """

    def __init__(self, source, require_matching_tags=True):
        super(Filter, self).__init__(source)
        self.require_matching_tags = require_matching_tags

    def __iter__(self):
        # Stack of (namespace, name) for currently open elements; only
        # maintained when require_matching_tags is set.
        open_elements = []
        for token in base.Filter.__iter__(self):
            type = token["type"]
            if type in ("StartTag", "EmptyTag"):
                namespace = token["namespace"]
                name = token["name"]
                assert namespace is None or isinstance(namespace, text_type)
                assert namespace != ""
                assert isinstance(name, text_type)
                assert name != ""
                assert isinstance(token["data"], dict)
                # Void HTML elements must come through as EmptyTag, all
                # other elements as StartTag.
                if (not namespace or namespace == namespaces["html"]) and name in voidElements:
                    assert type == "EmptyTag"
                else:
                    assert type == "StartTag"
                if type == "StartTag" and self.require_matching_tags:
                    open_elements.append((namespace, name))
                for (namespace, name), value in token["data"].items():
                    assert namespace is None or isinstance(namespace, text_type)
                    assert namespace != ""
                    assert isinstance(name, text_type)
                    assert name != ""
                    assert isinstance(value, text_type)
            elif type == "EndTag":
                namespace = token["namespace"]
                name = token["name"]
                assert namespace is None or isinstance(namespace, text_type)
                assert namespace != ""
                assert isinstance(name, text_type)
                assert name != ""
                if (not namespace or namespace == namespaces["html"]) and name in voidElements:
                    assert False, "Void element reported as EndTag token: %(tag)s" % {"tag": name}
                elif self.require_matching_tags:
                    start = open_elements.pop()
                    assert start == (namespace, name)
            elif type == "Comment":
                data = token["data"]
                assert isinstance(data, text_type)
            elif type in ("Characters", "SpaceCharacters"):
                data = token["data"]
                assert isinstance(data, text_type)
                assert data != ""
                if type == "SpaceCharacters":
                    assert data.strip(spaceCharacters) == ""
            elif type == "Doctype":
                name = token["name"]
                assert name is None or isinstance(name, text_type)
                # BUG FIX: the next two assertions previously re-checked
                # ``name`` (copy-paste error) instead of the publicId /
                # systemId values they are meant to guard.
                assert token["publicId"] is None or isinstance(token["publicId"], text_type)
                assert token["systemId"] is None or isinstance(token["systemId"], text_type)
            elif type == "Entity":
                assert isinstance(token["name"], text_type)
            elif type == "SerializerError":
                assert isinstance(token["data"], text_type)
            else:
                assert False, "Unknown token type: %(type)s" % {"type": type}

            yield token
from __future__ import absolute_import, division, unicode_literals

from . import base


class Filter(base.Filter):
    # Injects (or rewrites) a <meta charset> declaration inside <head> so the
    # serialized document advertises ``encoding``.  If ``encoding`` is None
    # the stream passes through untouched.
    def __init__(self, source, encoding):
        base.Filter.__init__(self, source)
        self.encoding = encoding

    def __iter__(self):
        # State machine: "pre_head" -> "in_head" -> "post_head".  While
        # in_head, tokens are buffered in ``pending`` so a meta tag can be
        # inserted right after the <head> start tag if none was found.
        state = "pre_head"
        meta_found = (self.encoding is None)
        pending = []

        for token in base.Filter.__iter__(self):
            type = token["type"]
            if type == "StartTag":
                if token["name"].lower() == "head":
                    state = "in_head"

            elif type == "EmptyTag":
                if token["name"].lower() == "meta":
                    # replace charset with actual encoding
                    has_http_equiv_content_type = False
                    for (namespace, name), value in token["data"].items():
                        if namespace is not None:
                            continue
                        elif name.lower() == 'charset':
                            token["data"][(namespace, name)] = self.encoding
                            meta_found = True
                            break
                        elif name == 'http-equiv' and value.lower() == 'content-type':
                            has_http_equiv_content_type = True
                    else:
                        # for/else: no charset attribute was found; rewrite a
                        # Content-Type meta's content attribute instead.
                        if has_http_equiv_content_type and (None, "content") in token["data"]:
                            token["data"][(None, "content")] = 'text/html; charset=%s' % self.encoding
                            meta_found = True

                elif token["name"].lower() == "head" and not meta_found:
                    # insert meta into empty head (an EmptyTag "head" means
                    # <head/> with no children)
                    yield {"type": "StartTag", "name": "head",
                           "data": token["data"]}
                    yield {"type": "EmptyTag", "name": "meta",
                           "data": {(None, "charset"): self.encoding}}
                    yield {"type": "EndTag", "name": "head"}
                    meta_found = True
                    continue

            elif type == "EndTag":
                if token["name"].lower() == "head" and pending:
                    # insert meta into head (if necessary) and flush pending queue
                    yield pending.pop(0)
                    if not meta_found:
                        yield {"type": "EmptyTag", "name": "meta",
                               "data": {(None, "charset"): self.encoding}}
                    while pending:
                        yield pending.pop(0)
                    meta_found = True
                    state = "post_head"

            # Buffer while inside head; otherwise pass the token through.
            if state == "in_head":
                pending.append(token)
            else:
                yield token
from __future__ import absolute_import, division, unicode_literals


class Filter(object):
    """Minimal pass-through base class for token-stream filters.

    Wraps an iterable ``source`` of tokens: iterating the filter iterates
    the source unchanged, and attribute lookups that fail on the filter are
    forwarded to the source so the wrapped object's API stays reachable.
    """

    def __init__(self, source):
        # Keep a reference to the wrapped token stream.
        self.source = source

    def __iter__(self):
        # Iterate the wrapped stream directly, with no transformation.
        wrapped = self.source
        return iter(wrapped)

    def __getattr__(self, name):
        # Only invoked when normal attribute lookup fails; delegate to the
        # wrapped source.
        target = self.source
        return getattr(target, name)
from __future__ import absolute_import, division, unicode_literals

from . import base

try:
    from collections import OrderedDict
except ImportError:
    # Python 2.6 fallback package.
    from ordereddict import OrderedDict


class Filter(base.Filter):
    """Rewrites every StartTag/EmptyTag token so its attribute mapping is
    ordered alphabetically by (namespace, name) key; other tokens pass
    through untouched."""

    def __iter__(self):
        for token in base.Filter.__iter__(self):
            if token["type"] not in ("StartTag", "EmptyTag"):
                yield token
                continue
            # Sort the attribute items by their (namespace, name) key and
            # rebuild the mapping in that order.
            ordered_items = sorted(token["data"].items(),
                                   key=lambda item: item[0])
            token["data"] = OrderedDict(ordered_items)
            yield token
from __future__ import absolute_import, division, unicode_literalsimport stringEOF = NoneE = {"null-character":"Null character in input stream, replaced with U+FFFD.","invalid-codepoint":"Invalid codepoint in stream.","incorrectly-placed-solidus":"Solidus (/) incorrectly placed in tag.","incorrect-cr-newline-entity":"Incorrect CR newline entity, replaced with LF.","illegal-windows-1252-entity":"Entity used with illegal number (windows-1252 reference).","cant-convert-numeric-entity":"Numeric entity couldn't be converted to character ""(codepoint U+%(charAsInt)08x).","illegal-codepoint-for-numeric-entity":"Numeric entity represents an illegal codepoint: ""U+%(charAsInt)08x.","numeric-entity-without-semicolon":"Numeric entity didn't end with ';'.","expected-numeric-entity-but-got-eof":"Numeric entity expected. Got end of file instead.","expected-numeric-entity":"Numeric entity expected but none found.","named-entity-without-semicolon":"Named entity didn't end with ';'.","expected-named-entity":"Named entity expected. Got none.","attributes-in-end-tag":"End tag contains unexpected attributes.",'self-closing-flag-on-end-tag':"End tag contains unexpected self-closing flag.","expected-tag-name-but-got-right-bracket":"Expected tag name. Got '>' instead.","expected-tag-name-but-got-question-mark":"Expected tag name. Got '?' instead. (HTML doesn't ""support processing instructions.)","expected-tag-name":"Expected tag name. Got something else instead","expected-closing-tag-but-got-right-bracket":"Expected closing tag. Got '>' instead. Ignoring '</>'.","expected-closing-tag-but-got-eof":"Expected closing tag. Unexpected end of file.","expected-closing-tag-but-got-char":"Expected closing tag. Unexpected character '%(data)s' found.","eof-in-tag-name":"Unexpected end of file in the tag name.","expected-attribute-name-but-got-eof":"Unexpected end of file. 
Expected attribute name instead.","eof-in-attribute-name":"Unexpected end of file in attribute name.","invalid-character-in-attribute-name":"Invalid character in attribute name","duplicate-attribute":"Dropped duplicate attribute on tag.","expected-end-of-tag-name-but-got-eof":"Unexpected end of file. Expected = or end of tag.","expected-attribute-value-but-got-eof":"Unexpected end of file. Expected attribute value.","expected-attribute-value-but-got-right-bracket":"Expected attribute value. Got '>' instead.",'equals-in-unquoted-attribute-value':"Unexpected = in unquoted attribute",'unexpected-character-in-unquoted-attribute-value':"Unexpected character in unquoted attribute","invalid-character-after-attribute-name":"Unexpected character after attribute name.","unexpected-character-after-attribute-value":"Unexpected character after attribute value.","eof-in-attribute-value-double-quote":"Unexpected end of file in attribute value (\").","eof-in-attribute-value-single-quote":"Unexpected end of file in attribute value (').","eof-in-attribute-value-no-quotes":"Unexpected end of file in attribute value.","unexpected-EOF-after-solidus-in-tag":"Unexpected end of file in tag. Expected >","unexpected-character-after-solidus-in-tag":"Unexpected character after / in tag. Expected >","expected-dashes-or-doctype":"Expected '--' or 'DOCTYPE'. Not found.","unexpected-bang-after-double-dash-in-comment":"Unexpected ! 
after -- in comment","unexpected-space-after-double-dash-in-comment":"Unexpected space after -- in comment","incorrect-comment":"Incorrect comment.","eof-in-comment":"Unexpected end of file in comment.","eof-in-comment-end-dash":"Unexpected end of file in comment (-)","unexpected-dash-after-double-dash-in-comment":"Unexpected '-' after '--' found in comment.","eof-in-comment-double-dash":"Unexpected end of file in comment (--).","eof-in-comment-end-space-state":"Unexpected end of file in comment.","eof-in-comment-end-bang-state":"Unexpected end of file in comment.","unexpected-char-in-comment":"Unexpected character in comment found.","need-space-after-doctype":"No space after literal string 'DOCTYPE'.","expected-doctype-name-but-got-right-bracket":"Unexpected > character. Expected DOCTYPE name.","expected-doctype-name-but-got-eof":"Unexpected end of file. Expected DOCTYPE name.","eof-in-doctype-name":"Unexpected end of file in DOCTYPE name.","eof-in-doctype":"Unexpected end of file in DOCTYPE.","expected-space-or-right-bracket-in-doctype":"Expected space or '>'. Got '%(data)s'","unexpected-end-of-doctype":"Unexpected end of DOCTYPE.","unexpected-char-in-doctype":"Unexpected character in DOCTYPE.","eof-in-innerhtml":"XXX innerHTML EOF","unexpected-doctype":"Unexpected DOCTYPE. Ignored.","non-html-root":"html needs to be the first start tag.","expected-doctype-but-got-eof":"Unexpected End of file. Expected DOCTYPE.","unknown-doctype":"Erroneous DOCTYPE.","expected-doctype-but-got-chars":"Unexpected non-space characters. Expected DOCTYPE.","expected-doctype-but-got-start-tag":"Unexpected start tag (%(name)s). Expected DOCTYPE.","expected-doctype-but-got-end-tag":"Unexpected end tag (%(name)s). Expected DOCTYPE.","end-tag-after-implied-root":"Unexpected end tag (%(name)s) after the (implied) root element.","expected-named-closing-tag-but-got-eof":"Unexpected end of file. 
Expected end tag (%(name)s).","two-heads-are-not-better-than-one":"Unexpected start tag head in existing head. Ignored.","unexpected-end-tag":"Unexpected end tag (%(name)s). Ignored.","unexpected-start-tag-out-of-my-head":"Unexpected start tag (%(name)s) that can be in head. Moved.","unexpected-start-tag":"Unexpected start tag (%(name)s).","missing-end-tag":"Missing end tag (%(name)s).","missing-end-tags":"Missing end tags (%(name)s).","unexpected-start-tag-implies-end-tag":"Unexpected start tag (%(startName)s) ""implies end tag (%(endName)s).","unexpected-start-tag-treated-as":"Unexpected start tag (%(originalName)s). Treated as %(newName)s.","deprecated-tag":"Unexpected start tag %(name)s. Don't use it!","unexpected-start-tag-ignored":"Unexpected start tag %(name)s. Ignored.","expected-one-end-tag-but-got-another":"Unexpected end tag (%(gotName)s). ""Missing end tag (%(expectedName)s).","end-tag-too-early":"End tag (%(name)s) seen too early. Expected other end tag.","end-tag-too-early-named":"Unexpected end tag (%(gotName)s). Expected end tag (%(expectedName)s).","end-tag-too-early-ignored":"End tag (%(name)s) seen too early. Ignored.","adoption-agency-1.1":"End tag (%(name)s) violates step 1, ""paragraph 1 of the adoption agency algorithm.","adoption-agency-1.2":"End tag (%(name)s) violates step 1, ""paragraph 2 of the adoption agency algorithm.","adoption-agency-1.3":"End tag (%(name)s) violates step 1, ""paragraph 3 of the adoption agency algorithm.","adoption-agency-4.4":"End tag (%(name)s) violates step 4, ""paragraph 4 of the adoption agency algorithm.","unexpected-end-tag-treated-as":"Unexpected end tag (%(originalName)s). 
Treated as %(newName)s.","no-end-tag":"This element (%(name)s) has no end tag.","unexpected-implied-end-tag-in-table":"Unexpected implied end tag (%(name)s) in the table phase.","unexpected-implied-end-tag-in-table-body":"Unexpected implied end tag (%(name)s) in the table body phase.","unexpected-char-implies-table-voodoo":"Unexpected non-space characters in ""table context caused voodoo mode.","unexpected-hidden-input-in-table":"Unexpected input with type hidden in table context.","unexpected-form-in-table":"Unexpected form in table context.","unexpected-start-tag-implies-table-voodoo":"Unexpected start tag (%(name)s) in ""table context caused voodoo mode.","unexpected-end-tag-implies-table-voodoo":"Unexpected end tag (%(name)s) in ""table context caused voodoo mode.","unexpected-cell-in-table-body":"Unexpected table cell start tag (%(name)s) ""in the table body phase.","unexpected-cell-end-tag":"Got table cell end tag (%(name)s) ""while required end tags are missing.","unexpected-end-tag-in-table-body":"Unexpected end tag (%(name)s) in the table body phase. Ignored.","unexpected-implied-end-tag-in-table-row":"Unexpected implied end tag (%(name)s) in the table row phase.","unexpected-end-tag-in-table-row":"Unexpected end tag (%(name)s) in the table row phase. Ignored.","unexpected-select-in-select":"Unexpected select start tag in the select phase ""treated as select end tag.","unexpected-input-in-select":"Unexpected input start tag in the select phase.","unexpected-start-tag-in-select":"Unexpected start tag token (%(name)s in the select phase. ""Ignored.","unexpected-end-tag-in-select":"Unexpected end tag (%(name)s) in the select phase. 
Ignored.","unexpected-table-element-start-tag-in-select-in-table":"Unexpected table element start tag (%(name)s) in the select in table phase.","unexpected-table-element-end-tag-in-select-in-table":"Unexpected table element end tag (%(name)s) in the select in table phase.","unexpected-char-after-body":"Unexpected non-space characters in the after body phase.","unexpected-start-tag-after-body":"Unexpected start tag token (%(name)s)"" in the after body phase.","unexpected-end-tag-after-body":"Unexpected end tag token (%(name)s)"" in the after body phase.","unexpected-char-in-frameset":"Unexpected characters in the frameset phase. Characters ignored.","unexpected-start-tag-in-frameset":"Unexpected start tag token (%(name)s)"" in the frameset phase. Ignored.","unexpected-frameset-in-frameset-innerhtml":"Unexpected end tag token (frameset) ""in the frameset phase (innerHTML).","unexpected-end-tag-in-frameset":"Unexpected end tag token (%(name)s)"" in the frameset phase. Ignored.","unexpected-char-after-frameset":"Unexpected non-space characters in the ""after frameset phase. Ignored.","unexpected-start-tag-after-frameset":"Unexpected start tag (%(name)s)"" in the after frameset phase. Ignored.","unexpected-end-tag-after-frameset":"Unexpected end tag (%(name)s)"" in the after frameset phase. Ignored.","unexpected-end-tag-after-body-innerhtml":"Unexpected end tag after body(innerHtml)","expected-eof-but-got-char":"Unexpected non-space characters. Expected end of file.","expected-eof-but-got-start-tag":"Unexpected start tag (%(name)s)"". Expected end of file.","expected-eof-but-got-end-tag":"Unexpected end tag (%(name)s)"". Expected end of file.","eof-in-table":"Unexpected end of file. Expected table content.","eof-in-select":"Unexpected end of file. Expected select content.","eof-in-frameset":"Unexpected end of file. Expected frameset content.","eof-in-script-in-script":"Unexpected end of file. Expected script content.","eof-in-foreign-lands":"Unexpected end of file. 
Expected foreign content","non-void-element-with-trailing-solidus":"Trailing solidus not allowed on element %(name)s","unexpected-html-element-in-foreign-content":"Element %(name)s not allowed in a non-html context","unexpected-end-tag-before-html":"Unexpected end tag (%(name)s) before html.","unexpected-inhead-noscript-tag":"Element %(name)s not allowed in a inhead-noscript context","eof-in-head-noscript":"Unexpected end of file. Expected inhead-noscript content","char-in-head-noscript":"Unexpected non-space character. Expected inhead-noscript content","XXX-undefined-error":"Undefined error (this sucks and should be fixed)",}namespaces = {"html": "http://www.w3.org/1999/xhtml","mathml": "http://www.w3.org/1998/Math/MathML","svg": "http://www.w3.org/2000/svg","xlink": "http://www.w3.org/1999/xlink","xml": "http://www.w3.org/XML/1998/namespace","xmlns": "http://www.w3.org/2000/xmlns/"}scopingElements = frozenset([(namespaces["html"], "applet"),(namespaces["html"], "caption"),(namespaces["html"], "html"),(namespaces["html"], "marquee"),(namespaces["html"], "object"),(namespaces["html"], "table"),(namespaces["html"], "td"),(namespaces["html"], "th"),(namespaces["mathml"], "mi"),(namespaces["mathml"], "mo"),(namespaces["mathml"], "mn"),(namespaces["mathml"], "ms"),(namespaces["mathml"], "mtext"),(namespaces["mathml"], "annotation-xml"),(namespaces["svg"], "foreignObject"),(namespaces["svg"], "desc"),(namespaces["svg"], "title"),])formattingElements = frozenset([(namespaces["html"], "a"),(namespaces["html"], "b"),(namespaces["html"], "big"),(namespaces["html"], "code"),(namespaces["html"], "em"),(namespaces["html"], "font"),(namespaces["html"], "i"),(namespaces["html"], "nobr"),(namespaces["html"], "s"),(namespaces["html"], "small"),(namespaces["html"], "strike"),(namespaces["html"], "strong"),(namespaces["html"], "tt"),(namespaces["html"], "u")])specialElements = frozenset([(namespaces["html"], "address"),(namespaces["html"], "applet"),(namespaces["html"], 
"area"),(namespaces["html"], "article"),(namespaces["html"], "aside"),(namespaces["html"], "base"),(namespaces["html"], "basefont"),(namespaces["html"], "bgsound"),(namespaces["html"], "blockquote"),(namespaces["html"], "body"),(namespaces["html"], "br"),(namespaces["html"], "button"),(namespaces["html"], "caption"),(namespaces["html"], "center"),(namespaces["html"], "col"),(namespaces["html"], "colgroup"),(namespaces["html"], "command"),(namespaces["html"], "dd"),(namespaces["html"], "details"),(namespaces["html"], "dir"),(namespaces["html"], "div"),(namespaces["html"], "dl"),(namespaces["html"], "dt"),(namespaces["html"], "embed"),(namespaces["html"], "fieldset"),(namespaces["html"], "figure"),(namespaces["html"], "footer"),(namespaces["html"], "form"),(namespaces["html"], "frame"),(namespaces["html"], "frameset"),(namespaces["html"], "h1"),(namespaces["html"], "h2"),(namespaces["html"], "h3"),(namespaces["html"], "h4"),(namespaces["html"], "h5"),(namespaces["html"], "h6"),(namespaces["html"], "head"),(namespaces["html"], "header"),(namespaces["html"], "hr"),(namespaces["html"], "html"),(namespaces["html"], "iframe"),# Note that image is commented out in the spec as "this isn't an# element that can end up on the stack, so it doesn't matter,"(namespaces["html"], "image"),(namespaces["html"], "img"),(namespaces["html"], "input"),(namespaces["html"], "isindex"),(namespaces["html"], "li"),(namespaces["html"], "link"),(namespaces["html"], "listing"),(namespaces["html"], "marquee"),(namespaces["html"], "menu"),(namespaces["html"], "meta"),(namespaces["html"], "nav"),(namespaces["html"], "noembed"),(namespaces["html"], "noframes"),(namespaces["html"], "noscript"),(namespaces["html"], "object"),(namespaces["html"], "ol"),(namespaces["html"], "p"),(namespaces["html"], "param"),(namespaces["html"], "plaintext"),(namespaces["html"], "pre"),(namespaces["html"], "script"),(namespaces["html"], "section"),(namespaces["html"], "select"),(namespaces["html"], 
"style"),(namespaces["html"], "table"),(namespaces["html"], "tbody"),(namespaces["html"], "td"),(namespaces["html"], "textarea"),(namespaces["html"], "tfoot"),(namespaces["html"], "th"),(namespaces["html"], "thead"),(namespaces["html"], "title"),(namespaces["html"], "tr"),(namespaces["html"], "ul"),(namespaces["html"], "wbr"),(namespaces["html"], "xmp"),(namespaces["svg"], "foreignObject")])htmlIntegrationPointElements = frozenset([(namespaces["mathml"], "annotaion-xml"),(namespaces["svg"], "foreignObject"),(namespaces["svg"], "desc"),(namespaces["svg"], "title")])mathmlTextIntegrationPointElements = frozenset([(namespaces["mathml"], "mi"),(namespaces["mathml"], "mo"),(namespaces["mathml"], "mn"),(namespaces["mathml"], "ms"),(namespaces["mathml"], "mtext")])adjustSVGAttributes = {"attributename": "attributeName","attributetype": "attributeType","basefrequency": "baseFrequency","baseprofile": "baseProfile","calcmode": "calcMode","clippathunits": "clipPathUnits","contentscripttype": "contentScriptType","contentstyletype": "contentStyleType","diffuseconstant": "diffuseConstant","edgemode": "edgeMode","externalresourcesrequired": "externalResourcesRequired","filterres": "filterRes","filterunits": "filterUnits","glyphref": "glyphRef","gradienttransform": "gradientTransform","gradientunits": "gradientUnits","kernelmatrix": "kernelMatrix","kernelunitlength": "kernelUnitLength","keypoints": "keyPoints","keysplines": "keySplines","keytimes": "keyTimes","lengthadjust": "lengthAdjust","limitingconeangle": "limitingConeAngle","markerheight": "markerHeight","markerunits": "markerUnits","markerwidth": "markerWidth","maskcontentunits": "maskContentUnits","maskunits": "maskUnits","numoctaves": "numOctaves","pathlength": "pathLength","patterncontentunits": "patternContentUnits","patterntransform": "patternTransform","patternunits": "patternUnits","pointsatx": "pointsAtX","pointsaty": "pointsAtY","pointsatz": "pointsAtZ","preservealpha": "preserveAlpha","preserveaspectratio": 
"preserveAspectRatio","primitiveunits": "primitiveUnits","refx": "refX","refy": "refY","repeatcount": "repeatCount","repeatdur": "repeatDur","requiredextensions": "requiredExtensions","requiredfeatures": "requiredFeatures","specularconstant": "specularConstant","specularexponent": "specularExponent","spreadmethod": "spreadMethod","startoffset": "startOffset","stddeviation": "stdDeviation","stitchtiles": "stitchTiles","surfacescale": "surfaceScale","systemlanguage": "systemLanguage","tablevalues": "tableValues","targetx": "targetX","targety": "targetY","textlength": "textLength","viewbox": "viewBox","viewtarget": "viewTarget","xchannelselector": "xChannelSelector","ychannelselector": "yChannelSelector","zoomandpan": "zoomAndPan"}adjustMathMLAttributes = {"definitionurl": "definitionURL"}adjustForeignAttributes = {"xlink:actuate": ("xlink", "actuate", namespaces["xlink"]),"xlink:arcrole": ("xlink", "arcrole", namespaces["xlink"]),"xlink:href": ("xlink", "href", namespaces["xlink"]),"xlink:role": ("xlink", "role", namespaces["xlink"]),"xlink:show": ("xlink", "show", namespaces["xlink"]),"xlink:title": ("xlink", "title", namespaces["xlink"]),"xlink:type": ("xlink", "type", namespaces["xlink"]),"xml:base": ("xml", "base", namespaces["xml"]),"xml:lang": ("xml", "lang", namespaces["xml"]),"xml:space": ("xml", "space", namespaces["xml"]),"xmlns": (None, "xmlns", namespaces["xmlns"]),"xmlns:xlink": ("xmlns", "xlink", namespaces["xmlns"])}unadjustForeignAttributes = dict([((ns, local), qname) for qname, (prefix, local, ns) inadjustForeignAttributes.items()])spaceCharacters = frozenset(["\t","\n","\u000C"," ","\r"])tableInsertModeElements = frozenset(["table","tbody","tfoot","thead","tr"])asciiLowercase = frozenset(string.ascii_lowercase)asciiUppercase = frozenset(string.ascii_uppercase)asciiLetters = frozenset(string.ascii_letters)digits = frozenset(string.digits)hexDigits = frozenset(string.hexdigits)asciiUpper2Lower = dict([(ord(c), ord(c.lower()))for c in 
string.ascii_uppercase])# Heading elements need to be orderedheadingElements = ("h1","h2","h3","h4","h5","h6")voidElements = frozenset(["base","command","event-source","link","meta","hr","br","img","embed","param","area","col","input","source","track"])cdataElements = frozenset(['title', 'textarea'])rcdataElements = frozenset(['style','script','xmp','iframe','noembed','noframes','noscript'])booleanAttributes = {"": frozenset(["irrelevant"]),"style": frozenset(["scoped"]),"img": frozenset(["ismap"]),"audio": frozenset(["autoplay", "controls"]),"video": frozenset(["autoplay", "controls"]),"script": frozenset(["defer", "async"]),"details": frozenset(["open"]),"datagrid": frozenset(["multiple", "disabled"]),"command": frozenset(["hidden", "disabled", "checked", "default"]),"hr": frozenset(["noshade"]),"menu": frozenset(["autosubmit"]),"fieldset": frozenset(["disabled", "readonly"]),"option": frozenset(["disabled", "readonly", "selected"]),"optgroup": frozenset(["disabled", "readonly"]),"button": frozenset(["disabled", "autofocus"]),"input": frozenset(["disabled", "readonly", "required", "autofocus", "checked", "ismap"]),"select": frozenset(["disabled", "readonly", "autofocus", "multiple"]),"output": frozenset(["disabled", "readonly"]),}# entitiesWindows1252 has to be _ordered_ and needs to have an index. 
It# therefore can't be a frozenset.entitiesWindows1252 = (8364, # 0x80 0x20AC EURO SIGN65533, # 0x81 UNDEFINED8218, # 0x82 0x201A SINGLE LOW-9 QUOTATION MARK402, # 0x83 0x0192 LATIN SMALL LETTER F WITH HOOK8222, # 0x84 0x201E DOUBLE LOW-9 QUOTATION MARK8230, # 0x85 0x2026 HORIZONTAL ELLIPSIS8224, # 0x86 0x2020 DAGGER8225, # 0x87 0x2021 DOUBLE DAGGER710, # 0x88 0x02C6 MODIFIER LETTER CIRCUMFLEX ACCENT8240, # 0x89 0x2030 PER MILLE SIGN352, # 0x8A 0x0160 LATIN CAPITAL LETTER S WITH CARON8249, # 0x8B 0x2039 SINGLE LEFT-POINTING ANGLE QUOTATION MARK338, # 0x8C 0x0152 LATIN CAPITAL LIGATURE OE65533, # 0x8D UNDEFINED381, # 0x8E 0x017D LATIN CAPITAL LETTER Z WITH CARON65533, # 0x8F UNDEFINED65533, # 0x90 UNDEFINED8216, # 0x91 0x2018 LEFT SINGLE QUOTATION MARK8217, # 0x92 0x2019 RIGHT SINGLE QUOTATION MARK8220, # 0x93 0x201C LEFT DOUBLE QUOTATION MARK8221, # 0x94 0x201D RIGHT DOUBLE QUOTATION MARK8226, # 0x95 0x2022 BULLET8211, # 0x96 0x2013 EN DASH8212, # 0x97 0x2014 EM DASH732, # 0x98 0x02DC SMALL TILDE8482, # 0x99 0x2122 TRADE MARK SIGN353, # 0x9A 0x0161 LATIN SMALL LETTER S WITH CARON8250, # 0x9B 0x203A SINGLE RIGHT-POINTING ANGLE QUOTATION MARK339, # 0x9C 0x0153 LATIN SMALL LIGATURE OE65533, # 0x9D UNDEFINED382, # 0x9E 0x017E LATIN SMALL LETTER Z WITH CARON376 # 0x9F 0x0178 LATIN CAPITAL LETTER Y WITH DIAERESIS)xmlEntities = frozenset(['lt;', 'gt;', 'amp;', 'apos;', 'quot;'])entities = {"AElig": "\xc6","AElig;": "\xc6","AMP": "&","AMP;": "&","Aacute": "\xc1","Aacute;": "\xc1","Abreve;": "\u0102","Acirc": "\xc2","Acirc;": "\xc2","Acy;": "\u0410","Afr;": "\U0001d504","Agrave": "\xc0","Agrave;": "\xc0","Alpha;": "\u0391","Amacr;": "\u0100","And;": "\u2a53","Aogon;": "\u0104","Aopf;": "\U0001d538","ApplyFunction;": "\u2061","Aring": "\xc5","Aring;": "\xc5","Ascr;": "\U0001d49c","Assign;": "\u2254","Atilde": "\xc3","Atilde;": "\xc3","Auml": "\xc4","Auml;": "\xc4","Backslash;": "\u2216","Barv;": "\u2ae7","Barwed;": "\u2306","Bcy;": "\u0411","Because;": 
"\u2235","Bernoullis;": "\u212c","Beta;": "\u0392","Bfr;": "\U0001d505","Bopf;": "\U0001d539","Breve;": "\u02d8","Bscr;": "\u212c","Bumpeq;": "\u224e","CHcy;": "\u0427","COPY": "\xa9","COPY;": "\xa9","Cacute;": "\u0106","Cap;": "\u22d2","CapitalDifferentialD;": "\u2145","Cayleys;": "\u212d","Ccaron;": "\u010c","Ccedil": "\xc7","Ccedil;": "\xc7","Ccirc;": "\u0108","Cconint;": "\u2230","Cdot;": "\u010a","Cedilla;": "\xb8","CenterDot;": "\xb7","Cfr;": "\u212d","Chi;": "\u03a7","CircleDot;": "\u2299","CircleMinus;": "\u2296","CirclePlus;": "\u2295","CircleTimes;": "\u2297","ClockwiseContourIntegral;": "\u2232","CloseCurlyDoubleQuote;": "\u201d","CloseCurlyQuote;": "\u2019","Colon;": "\u2237","Colone;": "\u2a74","Congruent;": "\u2261","Conint;": "\u222f","ContourIntegral;": "\u222e","Copf;": "\u2102","Coproduct;": "\u2210","CounterClockwiseContourIntegral;": "\u2233","Cross;": "\u2a2f","Cscr;": "\U0001d49e","Cup;": "\u22d3","CupCap;": "\u224d","DD;": "\u2145","DDotrahd;": "\u2911","DJcy;": "\u0402","DScy;": "\u0405","DZcy;": "\u040f","Dagger;": "\u2021","Darr;": "\u21a1","Dashv;": "\u2ae4","Dcaron;": "\u010e","Dcy;": "\u0414","Del;": "\u2207","Delta;": "\u0394","Dfr;": "\U0001d507","DiacriticalAcute;": "\xb4","DiacriticalDot;": "\u02d9","DiacriticalDoubleAcute;": "\u02dd","DiacriticalGrave;": "`","DiacriticalTilde;": "\u02dc","Diamond;": "\u22c4","DifferentialD;": "\u2146","Dopf;": "\U0001d53b","Dot;": "\xa8","DotDot;": "\u20dc","DotEqual;": "\u2250","DoubleContourIntegral;": "\u222f","DoubleDot;": "\xa8","DoubleDownArrow;": "\u21d3","DoubleLeftArrow;": "\u21d0","DoubleLeftRightArrow;": "\u21d4","DoubleLeftTee;": "\u2ae4","DoubleLongLeftArrow;": "\u27f8","DoubleLongLeftRightArrow;": "\u27fa","DoubleLongRightArrow;": "\u27f9","DoubleRightArrow;": "\u21d2","DoubleRightTee;": "\u22a8","DoubleUpArrow;": "\u21d1","DoubleUpDownArrow;": "\u21d5","DoubleVerticalBar;": "\u2225","DownArrow;": "\u2193","DownArrowBar;": "\u2913","DownArrowUpArrow;": "\u21f5","DownBreve;": 
"\u0311","DownLeftRightVector;": "\u2950","DownLeftTeeVector;": "\u295e","DownLeftVector;": "\u21bd","DownLeftVectorBar;": "\u2956","DownRightTeeVector;": "\u295f","DownRightVector;": "\u21c1","DownRightVectorBar;": "\u2957","DownTee;": "\u22a4","DownTeeArrow;": "\u21a7","Downarrow;": "\u21d3","Dscr;": "\U0001d49f","Dstrok;": "\u0110","ENG;": "\u014a","ETH": "\xd0","ETH;": "\xd0","Eacute": "\xc9","Eacute;": "\xc9","Ecaron;": "\u011a","Ecirc": "\xca","Ecirc;": "\xca","Ecy;": "\u042d","Edot;": "\u0116","Efr;": "\U0001d508","Egrave": "\xc8","Egrave;": "\xc8","Element;": "\u2208","Emacr;": "\u0112","EmptySmallSquare;": "\u25fb","EmptyVerySmallSquare;": "\u25ab","Eogon;": "\u0118","Eopf;": "\U0001d53c","Epsilon;": "\u0395","Equal;": "\u2a75","EqualTilde;": "\u2242","Equilibrium;": "\u21cc","Escr;": "\u2130","Esim;": "\u2a73","Eta;": "\u0397","Euml": "\xcb","Euml;": "\xcb","Exists;": "\u2203","ExponentialE;": "\u2147","Fcy;": "\u0424","Ffr;": "\U0001d509","FilledSmallSquare;": "\u25fc","FilledVerySmallSquare;": "\u25aa","Fopf;": "\U0001d53d","ForAll;": "\u2200","Fouriertrf;": "\u2131","Fscr;": "\u2131","GJcy;": "\u0403","GT": ">","GT;": ">","Gamma;": "\u0393","Gammad;": "\u03dc","Gbreve;": "\u011e","Gcedil;": "\u0122","Gcirc;": "\u011c","Gcy;": "\u0413","Gdot;": "\u0120","Gfr;": "\U0001d50a","Gg;": "\u22d9","Gopf;": "\U0001d53e","GreaterEqual;": "\u2265","GreaterEqualLess;": "\u22db","GreaterFullEqual;": "\u2267","GreaterGreater;": "\u2aa2","GreaterLess;": "\u2277","GreaterSlantEqual;": "\u2a7e","GreaterTilde;": "\u2273","Gscr;": "\U0001d4a2","Gt;": "\u226b","HARDcy;": "\u042a","Hacek;": "\u02c7","Hat;": "^","Hcirc;": "\u0124","Hfr;": "\u210c","HilbertSpace;": "\u210b","Hopf;": "\u210d","HorizontalLine;": "\u2500","Hscr;": "\u210b","Hstrok;": "\u0126","HumpDownHump;": "\u224e","HumpEqual;": "\u224f","IEcy;": "\u0415","IJlig;": "\u0132","IOcy;": "\u0401","Iacute": "\xcd","Iacute;": "\xcd","Icirc": "\xce","Icirc;": "\xce","Icy;": "\u0418","Idot;": "\u0130","Ifr;": 
"\u2111","Igrave": "\xcc","Igrave;": "\xcc","Im;": "\u2111","Imacr;": "\u012a","ImaginaryI;": "\u2148","Implies;": "\u21d2","Int;": "\u222c","Integral;": "\u222b","Intersection;": "\u22c2","InvisibleComma;": "\u2063","InvisibleTimes;": "\u2062","Iogon;": "\u012e","Iopf;": "\U0001d540","Iota;": "\u0399","Iscr;": "\u2110","Itilde;": "\u0128","Iukcy;": "\u0406","Iuml": "\xcf","Iuml;": "\xcf","Jcirc;": "\u0134","Jcy;": "\u0419","Jfr;": "\U0001d50d","Jopf;": "\U0001d541","Jscr;": "\U0001d4a5","Jsercy;": "\u0408","Jukcy;": "\u0404","KHcy;": "\u0425","KJcy;": "\u040c","Kappa;": "\u039a","Kcedil;": "\u0136","Kcy;": "\u041a","Kfr;": "\U0001d50e","Kopf;": "\U0001d542","Kscr;": "\U0001d4a6","LJcy;": "\u0409","LT": "<","LT;": "<","Lacute;": "\u0139","Lambda;": "\u039b","Lang;": "\u27ea","Laplacetrf;": "\u2112","Larr;": "\u219e","Lcaron;": "\u013d","Lcedil;": "\u013b","Lcy;": "\u041b","LeftAngleBracket;": "\u27e8","LeftArrow;": "\u2190","LeftArrowBar;": "\u21e4","LeftArrowRightArrow;": "\u21c6","LeftCeiling;": "\u2308","LeftDoubleBracket;": "\u27e6","LeftDownTeeVector;": "\u2961","LeftDownVector;": "\u21c3","LeftDownVectorBar;": "\u2959","LeftFloor;": "\u230a","LeftRightArrow;": "\u2194","LeftRightVector;": "\u294e","LeftTee;": "\u22a3","LeftTeeArrow;": "\u21a4","LeftTeeVector;": "\u295a","LeftTriangle;": "\u22b2","LeftTriangleBar;": "\u29cf","LeftTriangleEqual;": "\u22b4","LeftUpDownVector;": "\u2951","LeftUpTeeVector;": "\u2960","LeftUpVector;": "\u21bf","LeftUpVectorBar;": "\u2958","LeftVector;": "\u21bc","LeftVectorBar;": "\u2952","Leftarrow;": "\u21d0","Leftrightarrow;": "\u21d4","LessEqualGreater;": "\u22da","LessFullEqual;": "\u2266","LessGreater;": "\u2276","LessLess;": "\u2aa1","LessSlantEqual;": "\u2a7d","LessTilde;": "\u2272","Lfr;": "\U0001d50f","Ll;": "\u22d8","Lleftarrow;": "\u21da","Lmidot;": "\u013f","LongLeftArrow;": "\u27f5","LongLeftRightArrow;": "\u27f7","LongRightArrow;": "\u27f6","Longleftarrow;": "\u27f8","Longleftrightarrow;": "\u27fa","Longrightarrow;": 
"\u27f9","Lopf;": "\U0001d543","LowerLeftArrow;": "\u2199","LowerRightArrow;": "\u2198","Lscr;": "\u2112","Lsh;": "\u21b0","Lstrok;": "\u0141","Lt;": "\u226a","Map;": "\u2905","Mcy;": "\u041c","MediumSpace;": "\u205f","Mellintrf;": "\u2133","Mfr;": "\U0001d510","MinusPlus;": "\u2213","Mopf;": "\U0001d544","Mscr;": "\u2133","Mu;": "\u039c","NJcy;": "\u040a","Nacute;": "\u0143","Ncaron;": "\u0147","Ncedil;": "\u0145","Ncy;": "\u041d","NegativeMediumSpace;": "\u200b","NegativeThickSpace;": "\u200b","NegativeThinSpace;": "\u200b","NegativeVeryThinSpace;": "\u200b","NestedGreaterGreater;": "\u226b","NestedLessLess;": "\u226a","NewLine;": "\n","Nfr;": "\U0001d511","NoBreak;": "\u2060","NonBreakingSpace;": "\xa0","Nopf;": "\u2115","Not;": "\u2aec","NotCongruent;": "\u2262","NotCupCap;": "\u226d","NotDoubleVerticalBar;": "\u2226","NotElement;": "\u2209","NotEqual;": "\u2260","NotEqualTilde;": "\u2242\u0338","NotExists;": "\u2204","NotGreater;": "\u226f","NotGreaterEqual;": "\u2271","NotGreaterFullEqual;": "\u2267\u0338","NotGreaterGreater;": "\u226b\u0338","NotGreaterLess;": "\u2279","NotGreaterSlantEqual;": "\u2a7e\u0338","NotGreaterTilde;": "\u2275","NotHumpDownHump;": "\u224e\u0338","NotHumpEqual;": "\u224f\u0338","NotLeftTriangle;": "\u22ea","NotLeftTriangleBar;": "\u29cf\u0338","NotLeftTriangleEqual;": "\u22ec","NotLess;": "\u226e","NotLessEqual;": "\u2270","NotLessGreater;": "\u2278","NotLessLess;": "\u226a\u0338","NotLessSlantEqual;": "\u2a7d\u0338","NotLessTilde;": "\u2274","NotNestedGreaterGreater;": "\u2aa2\u0338","NotNestedLessLess;": "\u2aa1\u0338","NotPrecedes;": "\u2280","NotPrecedesEqual;": "\u2aaf\u0338","NotPrecedesSlantEqual;": "\u22e0","NotReverseElement;": "\u220c","NotRightTriangle;": "\u22eb","NotRightTriangleBar;": "\u29d0\u0338","NotRightTriangleEqual;": "\u22ed","NotSquareSubset;": "\u228f\u0338","NotSquareSubsetEqual;": "\u22e2","NotSquareSuperset;": "\u2290\u0338","NotSquareSupersetEqual;": "\u22e3","NotSubset;": "\u2282\u20d2","NotSubsetEqual;": 
"\u2288","NotSucceeds;": "\u2281","NotSucceedsEqual;": "\u2ab0\u0338","NotSucceedsSlantEqual;": "\u22e1","NotSucceedsTilde;": "\u227f\u0338","NotSuperset;": "\u2283\u20d2","NotSupersetEqual;": "\u2289","NotTilde;": "\u2241","NotTildeEqual;": "\u2244","NotTildeFullEqual;": "\u2247","NotTildeTilde;": "\u2249","NotVerticalBar;": "\u2224","Nscr;": "\U0001d4a9","Ntilde": "\xd1","Ntilde;": "\xd1","Nu;": "\u039d","OElig;": "\u0152","Oacute": "\xd3","Oacute;": "\xd3","Ocirc": "\xd4","Ocirc;": "\xd4","Ocy;": "\u041e","Odblac;": "\u0150","Ofr;": "\U0001d512","Ograve": "\xd2","Ograve;": "\xd2","Omacr;": "\u014c","Omega;": "\u03a9","Omicron;": "\u039f","Oopf;": "\U0001d546","OpenCurlyDoubleQuote;": "\u201c","OpenCurlyQuote;": "\u2018","Or;": "\u2a54","Oscr;": "\U0001d4aa","Oslash": "\xd8","Oslash;": "\xd8","Otilde": "\xd5","Otilde;": "\xd5","Otimes;": "\u2a37","Ouml": "\xd6","Ouml;": "\xd6","OverBar;": "\u203e","OverBrace;": "\u23de","OverBracket;": "\u23b4","OverParenthesis;": "\u23dc","PartialD;": "\u2202","Pcy;": "\u041f","Pfr;": "\U0001d513","Phi;": "\u03a6","Pi;": "\u03a0","PlusMinus;": "\xb1","Poincareplane;": "\u210c","Popf;": "\u2119","Pr;": "\u2abb","Precedes;": "\u227a","PrecedesEqual;": "\u2aaf","PrecedesSlantEqual;": "\u227c","PrecedesTilde;": "\u227e","Prime;": "\u2033","Product;": "\u220f","Proportion;": "\u2237","Proportional;": "\u221d","Pscr;": "\U0001d4ab","Psi;": "\u03a8","QUOT": "\"","QUOT;": "\"","Qfr;": "\U0001d514","Qopf;": "\u211a","Qscr;": "\U0001d4ac","RBarr;": "\u2910","REG": "\xae","REG;": "\xae","Racute;": "\u0154","Rang;": "\u27eb","Rarr;": "\u21a0","Rarrtl;": "\u2916","Rcaron;": "\u0158","Rcedil;": "\u0156","Rcy;": "\u0420","Re;": "\u211c","ReverseElement;": "\u220b","ReverseEquilibrium;": "\u21cb","ReverseUpEquilibrium;": "\u296f","Rfr;": "\u211c","Rho;": "\u03a1","RightAngleBracket;": "\u27e9","RightArrow;": "\u2192","RightArrowBar;": "\u21e5","RightArrowLeftArrow;": "\u21c4","RightCeiling;": "\u2309","RightDoubleBracket;": 
"\u27e7","RightDownTeeVector;": "\u295d","RightDownVector;": "\u21c2","RightDownVectorBar;": "\u2955","RightFloor;": "\u230b","RightTee;": "\u22a2","RightTeeArrow;": "\u21a6","RightTeeVector;": "\u295b","RightTriangle;": "\u22b3","RightTriangleBar;": "\u29d0","RightTriangleEqual;": "\u22b5","RightUpDownVector;": "\u294f","RightUpTeeVector;": "\u295c","RightUpVector;": "\u21be","RightUpVectorBar;": "\u2954","RightVector;": "\u21c0","RightVectorBar;": "\u2953","Rightarrow;": "\u21d2","Ropf;": "\u211d","RoundImplies;": "\u2970","Rrightarrow;": "\u21db","Rscr;": "\u211b","Rsh;": "\u21b1","RuleDelayed;": "\u29f4","SHCHcy;": "\u0429","SHcy;": "\u0428","SOFTcy;": "\u042c","Sacute;": "\u015a","Sc;": "\u2abc","Scaron;": "\u0160","Scedil;": "\u015e","Scirc;": "\u015c","Scy;": "\u0421","Sfr;": "\U0001d516","ShortDownArrow;": "\u2193","ShortLeftArrow;": "\u2190","ShortRightArrow;": "\u2192","ShortUpArrow;": "\u2191","Sigma;": "\u03a3","SmallCircle;": "\u2218","Sopf;": "\U0001d54a","Sqrt;": "\u221a","Square;": "\u25a1","SquareIntersection;": "\u2293","SquareSubset;": "\u228f","SquareSubsetEqual;": "\u2291","SquareSuperset;": "\u2290","SquareSupersetEqual;": "\u2292","SquareUnion;": "\u2294","Sscr;": "\U0001d4ae","Star;": "\u22c6","Sub;": "\u22d0","Subset;": "\u22d0","SubsetEqual;": "\u2286","Succeeds;": "\u227b","SucceedsEqual;": "\u2ab0","SucceedsSlantEqual;": "\u227d","SucceedsTilde;": "\u227f","SuchThat;": "\u220b","Sum;": "\u2211","Sup;": "\u22d1","Superset;": "\u2283","SupersetEqual;": "\u2287","Supset;": "\u22d1","THORN": "\xde","THORN;": "\xde","TRADE;": "\u2122","TSHcy;": "\u040b","TScy;": "\u0426","Tab;": "\t","Tau;": "\u03a4","Tcaron;": "\u0164","Tcedil;": "\u0162","Tcy;": "\u0422","Tfr;": "\U0001d517","Therefore;": "\u2234","Theta;": "\u0398","ThickSpace;": "\u205f\u200a","ThinSpace;": "\u2009","Tilde;": "\u223c","TildeEqual;": "\u2243","TildeFullEqual;": "\u2245","TildeTilde;": "\u2248","Topf;": "\U0001d54b","TripleDot;": "\u20db","Tscr;": "\U0001d4af","Tstrok;": 
"\u0166","Uacute": "\xda","Uacute;": "\xda","Uarr;": "\u219f","Uarrocir;": "\u2949","Ubrcy;": "\u040e","Ubreve;": "\u016c","Ucirc": "\xdb","Ucirc;": "\xdb","Ucy;": "\u0423","Udblac;": "\u0170","Ufr;": "\U0001d518","Ugrave": "\xd9","Ugrave;": "\xd9","Umacr;": "\u016a","UnderBar;": "_","UnderBrace;": "\u23df","UnderBracket;": "\u23b5","UnderParenthesis;": "\u23dd","Union;": "\u22c3","UnionPlus;": "\u228e","Uogon;": "\u0172","Uopf;": "\U0001d54c","UpArrow;": "\u2191","UpArrowBar;": "\u2912","UpArrowDownArrow;": "\u21c5","UpDownArrow;": "\u2195","UpEquilibrium;": "\u296e","UpTee;": "\u22a5","UpTeeArrow;": "\u21a5","Uparrow;": "\u21d1","Updownarrow;": "\u21d5","UpperLeftArrow;": "\u2196","UpperRightArrow;": "\u2197","Upsi;": "\u03d2","Upsilon;": "\u03a5","Uring;": "\u016e","Uscr;": "\U0001d4b0","Utilde;": "\u0168","Uuml": "\xdc","Uuml;": "\xdc","VDash;": "\u22ab","Vbar;": "\u2aeb","Vcy;": "\u0412","Vdash;": "\u22a9","Vdashl;": "\u2ae6","Vee;": "\u22c1","Verbar;": "\u2016","Vert;": "\u2016","VerticalBar;": "\u2223","VerticalLine;": "|","VerticalSeparator;": "\u2758","VerticalTilde;": "\u2240","VeryThinSpace;": "\u200a","Vfr;": "\U0001d519","Vopf;": "\U0001d54d","Vscr;": "\U0001d4b1","Vvdash;": "\u22aa","Wcirc;": "\u0174","Wedge;": "\u22c0","Wfr;": "\U0001d51a","Wopf;": "\U0001d54e","Wscr;": "\U0001d4b2","Xfr;": "\U0001d51b","Xi;": "\u039e","Xopf;": "\U0001d54f","Xscr;": "\U0001d4b3","YAcy;": "\u042f","YIcy;": "\u0407","YUcy;": "\u042e","Yacute": "\xdd","Yacute;": "\xdd","Ycirc;": "\u0176","Ycy;": "\u042b","Yfr;": "\U0001d51c","Yopf;": "\U0001d550","Yscr;": "\U0001d4b4","Yuml;": "\u0178","ZHcy;": "\u0416","Zacute;": "\u0179","Zcaron;": "\u017d","Zcy;": "\u0417","Zdot;": "\u017b","ZeroWidthSpace;": "\u200b","Zeta;": "\u0396","Zfr;": "\u2128","Zopf;": "\u2124","Zscr;": "\U0001d4b5","aacute": "\xe1","aacute;": "\xe1","abreve;": "\u0103","ac;": "\u223e","acE;": "\u223e\u0333","acd;": "\u223f","acirc": "\xe2","acirc;": "\xe2","acute": "\xb4","acute;": "\xb4","acy;": 
"\u0430","aelig": "\xe6","aelig;": "\xe6","af;": "\u2061","afr;": "\U0001d51e","agrave": "\xe0","agrave;": "\xe0","alefsym;": "\u2135","aleph;": "\u2135","alpha;": "\u03b1","amacr;": "\u0101","amalg;": "\u2a3f","amp": "&","amp;": "&","and;": "\u2227","andand;": "\u2a55","andd;": "\u2a5c","andslope;": "\u2a58","andv;": "\u2a5a","ang;": "\u2220","ange;": "\u29a4","angle;": "\u2220","angmsd;": "\u2221","angmsdaa;": "\u29a8","angmsdab;": "\u29a9","angmsdac;": "\u29aa","angmsdad;": "\u29ab","angmsdae;": "\u29ac","angmsdaf;": "\u29ad","angmsdag;": "\u29ae","angmsdah;": "\u29af","angrt;": "\u221f","angrtvb;": "\u22be","angrtvbd;": "\u299d","angsph;": "\u2222","angst;": "\xc5","angzarr;": "\u237c","aogon;": "\u0105","aopf;": "\U0001d552","ap;": "\u2248","apE;": "\u2a70","apacir;": "\u2a6f","ape;": "\u224a","apid;": "\u224b","apos;": "'","approx;": "\u2248","approxeq;": "\u224a","aring": "\xe5","aring;": "\xe5","ascr;": "\U0001d4b6","ast;": "*","asymp;": "\u2248","asympeq;": "\u224d","atilde": "\xe3","atilde;": "\xe3","auml": "\xe4","auml;": "\xe4","awconint;": "\u2233","awint;": "\u2a11","bNot;": "\u2aed","backcong;": "\u224c","backepsilon;": "\u03f6","backprime;": "\u2035","backsim;": "\u223d","backsimeq;": "\u22cd","barvee;": "\u22bd","barwed;": "\u2305","barwedge;": "\u2305","bbrk;": "\u23b5","bbrktbrk;": "\u23b6","bcong;": "\u224c","bcy;": "\u0431","bdquo;": "\u201e","becaus;": "\u2235","because;": "\u2235","bemptyv;": "\u29b0","bepsi;": "\u03f6","bernou;": "\u212c","beta;": "\u03b2","beth;": "\u2136","between;": "\u226c","bfr;": "\U0001d51f","bigcap;": "\u22c2","bigcirc;": "\u25ef","bigcup;": "\u22c3","bigodot;": "\u2a00","bigoplus;": "\u2a01","bigotimes;": "\u2a02","bigsqcup;": "\u2a06","bigstar;": "\u2605","bigtriangledown;": "\u25bd","bigtriangleup;": "\u25b3","biguplus;": "\u2a04","bigvee;": "\u22c1","bigwedge;": "\u22c0","bkarow;": "\u290d","blacklozenge;": "\u29eb","blacksquare;": "\u25aa","blacktriangle;": "\u25b4","blacktriangledown;": 
"\u25be","blacktriangleleft;": "\u25c2","blacktriangleright;": "\u25b8","blank;": "\u2423","blk12;": "\u2592","blk14;": "\u2591","blk34;": "\u2593","block;": "\u2588","bne;": "=\u20e5","bnequiv;": "\u2261\u20e5","bnot;": "\u2310","bopf;": "\U0001d553","bot;": "\u22a5","bottom;": "\u22a5","bowtie;": "\u22c8","boxDL;": "\u2557","boxDR;": "\u2554","boxDl;": "\u2556","boxDr;": "\u2553","boxH;": "\u2550","boxHD;": "\u2566","boxHU;": "\u2569","boxHd;": "\u2564","boxHu;": "\u2567","boxUL;": "\u255d","boxUR;": "\u255a","boxUl;": "\u255c","boxUr;": "\u2559","boxV;": "\u2551","boxVH;": "\u256c","boxVL;": "\u2563","boxVR;": "\u2560","boxVh;": "\u256b","boxVl;": "\u2562","boxVr;": "\u255f","boxbox;": "\u29c9","boxdL;": "\u2555","boxdR;": "\u2552","boxdl;": "\u2510","boxdr;": "\u250c","boxh;": "\u2500","boxhD;": "\u2565","boxhU;": "\u2568","boxhd;": "\u252c","boxhu;": "\u2534","boxminus;": "\u229f","boxplus;": "\u229e","boxtimes;": "\u22a0","boxuL;": "\u255b","boxuR;": "\u2558","boxul;": "\u2518","boxur;": "\u2514","boxv;": "\u2502","boxvH;": "\u256a","boxvL;": "\u2561","boxvR;": "\u255e","boxvh;": "\u253c","boxvl;": "\u2524","boxvr;": "\u251c","bprime;": "\u2035","breve;": "\u02d8","brvbar": "\xa6","brvbar;": "\xa6","bscr;": "\U0001d4b7","bsemi;": "\u204f","bsim;": "\u223d","bsime;": "\u22cd","bsol;": "\\","bsolb;": "\u29c5","bsolhsub;": "\u27c8","bull;": "\u2022","bullet;": "\u2022","bump;": "\u224e","bumpE;": "\u2aae","bumpe;": "\u224f","bumpeq;": "\u224f","cacute;": "\u0107","cap;": "\u2229","capand;": "\u2a44","capbrcup;": "\u2a49","capcap;": "\u2a4b","capcup;": "\u2a47","capdot;": "\u2a40","caps;": "\u2229\ufe00","caret;": "\u2041","caron;": "\u02c7","ccaps;": "\u2a4d","ccaron;": "\u010d","ccedil": "\xe7","ccedil;": "\xe7","ccirc;": "\u0109","ccups;": "\u2a4c","ccupssm;": "\u2a50","cdot;": "\u010b","cedil": "\xb8","cedil;": "\xb8","cemptyv;": "\u29b2","cent": "\xa2","cent;": "\xa2","centerdot;": "\xb7","cfr;": "\U0001d520","chcy;": "\u0447","check;": 
"\u2713","checkmark;": "\u2713","chi;": "\u03c7","cir;": "\u25cb","cirE;": "\u29c3","circ;": "\u02c6","circeq;": "\u2257","circlearrowleft;": "\u21ba","circlearrowright;": "\u21bb","circledR;": "\xae","circledS;": "\u24c8","circledast;": "\u229b","circledcirc;": "\u229a","circleddash;": "\u229d","cire;": "\u2257","cirfnint;": "\u2a10","cirmid;": "\u2aef","cirscir;": "\u29c2","clubs;": "\u2663","clubsuit;": "\u2663","colon;": ":","colone;": "\u2254","coloneq;": "\u2254","comma;": ",","commat;": "@","comp;": "\u2201","compfn;": "\u2218","complement;": "\u2201","complexes;": "\u2102","cong;": "\u2245","congdot;": "\u2a6d","conint;": "\u222e","copf;": "\U0001d554","coprod;": "\u2210","copy": "\xa9","copy;": "\xa9","copysr;": "\u2117","crarr;": "\u21b5","cross;": "\u2717","cscr;": "\U0001d4b8","csub;": "\u2acf","csube;": "\u2ad1","csup;": "\u2ad0","csupe;": "\u2ad2","ctdot;": "\u22ef","cudarrl;": "\u2938","cudarrr;": "\u2935","cuepr;": "\u22de","cuesc;": "\u22df","cularr;": "\u21b6","cularrp;": "\u293d","cup;": "\u222a","cupbrcap;": "\u2a48","cupcap;": "\u2a46","cupcup;": "\u2a4a","cupdot;": "\u228d","cupor;": "\u2a45","cups;": "\u222a\ufe00","curarr;": "\u21b7","curarrm;": "\u293c","curlyeqprec;": "\u22de","curlyeqsucc;": "\u22df","curlyvee;": "\u22ce","curlywedge;": "\u22cf","curren": "\xa4","curren;": "\xa4","curvearrowleft;": "\u21b6","curvearrowright;": "\u21b7","cuvee;": "\u22ce","cuwed;": "\u22cf","cwconint;": "\u2232","cwint;": "\u2231","cylcty;": "\u232d","dArr;": "\u21d3","dHar;": "\u2965","dagger;": "\u2020","daleth;": "\u2138","darr;": "\u2193","dash;": "\u2010","dashv;": "\u22a3","dbkarow;": "\u290f","dblac;": "\u02dd","dcaron;": "\u010f","dcy;": "\u0434","dd;": "\u2146","ddagger;": "\u2021","ddarr;": "\u21ca","ddotseq;": "\u2a77","deg": "\xb0","deg;": "\xb0","delta;": "\u03b4","demptyv;": "\u29b1","dfisht;": "\u297f","dfr;": "\U0001d521","dharl;": "\u21c3","dharr;": "\u21c2","diam;": "\u22c4","diamond;": "\u22c4","diamondsuit;": "\u2666","diams;": 
"\u2666","die;": "\xa8","digamma;": "\u03dd","disin;": "\u22f2","div;": "\xf7","divide": "\xf7","divide;": "\xf7","divideontimes;": "\u22c7","divonx;": "\u22c7","djcy;": "\u0452","dlcorn;": "\u231e","dlcrop;": "\u230d","dollar;": "$","dopf;": "\U0001d555","dot;": "\u02d9","doteq;": "\u2250","doteqdot;": "\u2251","dotminus;": "\u2238","dotplus;": "\u2214","dotsquare;": "\u22a1","doublebarwedge;": "\u2306","downarrow;": "\u2193","downdownarrows;": "\u21ca","downharpoonleft;": "\u21c3","downharpoonright;": "\u21c2","drbkarow;": "\u2910","drcorn;": "\u231f","drcrop;": "\u230c","dscr;": "\U0001d4b9","dscy;": "\u0455","dsol;": "\u29f6","dstrok;": "\u0111","dtdot;": "\u22f1","dtri;": "\u25bf","dtrif;": "\u25be","duarr;": "\u21f5","duhar;": "\u296f","dwangle;": "\u29a6","dzcy;": "\u045f","dzigrarr;": "\u27ff","eDDot;": "\u2a77","eDot;": "\u2251","eacute": "\xe9","eacute;": "\xe9","easter;": "\u2a6e","ecaron;": "\u011b","ecir;": "\u2256","ecirc": "\xea","ecirc;": "\xea","ecolon;": "\u2255","ecy;": "\u044d","edot;": "\u0117","ee;": "\u2147","efDot;": "\u2252","efr;": "\U0001d522","eg;": "\u2a9a","egrave": "\xe8","egrave;": "\xe8","egs;": "\u2a96","egsdot;": "\u2a98","el;": "\u2a99","elinters;": "\u23e7","ell;": "\u2113","els;": "\u2a95","elsdot;": "\u2a97","emacr;": "\u0113","empty;": "\u2205","emptyset;": "\u2205","emptyv;": "\u2205","emsp13;": "\u2004","emsp14;": "\u2005","emsp;": "\u2003","eng;": "\u014b","ensp;": "\u2002","eogon;": "\u0119","eopf;": "\U0001d556","epar;": "\u22d5","eparsl;": "\u29e3","eplus;": "\u2a71","epsi;": "\u03b5","epsilon;": "\u03b5","epsiv;": "\u03f5","eqcirc;": "\u2256","eqcolon;": "\u2255","eqsim;": "\u2242","eqslantgtr;": "\u2a96","eqslantless;": "\u2a95","equals;": "=","equest;": "\u225f","equiv;": "\u2261","equivDD;": "\u2a78","eqvparsl;": "\u29e5","erDot;": "\u2253","erarr;": "\u2971","escr;": "\u212f","esdot;": "\u2250","esim;": "\u2242","eta;": "\u03b7","eth": "\xf0","eth;": "\xf0","euml": "\xeb","euml;": "\xeb","euro;": "\u20ac","excl;": 
"!","exist;": "\u2203","expectation;": "\u2130","exponentiale;": "\u2147","fallingdotseq;": "\u2252","fcy;": "\u0444","female;": "\u2640","ffilig;": "\ufb03","fflig;": "\ufb00","ffllig;": "\ufb04","ffr;": "\U0001d523","filig;": "\ufb01","fjlig;": "fj","flat;": "\u266d","fllig;": "\ufb02","fltns;": "\u25b1","fnof;": "\u0192","fopf;": "\U0001d557","forall;": "\u2200","fork;": "\u22d4","forkv;": "\u2ad9","fpartint;": "\u2a0d","frac12": "\xbd","frac12;": "\xbd","frac13;": "\u2153","frac14": "\xbc","frac14;": "\xbc","frac15;": "\u2155","frac16;": "\u2159","frac18;": "\u215b","frac23;": "\u2154","frac25;": "\u2156","frac34": "\xbe","frac34;": "\xbe","frac35;": "\u2157","frac38;": "\u215c","frac45;": "\u2158","frac56;": "\u215a","frac58;": "\u215d","frac78;": "\u215e","frasl;": "\u2044","frown;": "\u2322","fscr;": "\U0001d4bb","gE;": "\u2267","gEl;": "\u2a8c","gacute;": "\u01f5","gamma;": "\u03b3","gammad;": "\u03dd","gap;": "\u2a86","gbreve;": "\u011f","gcirc;": "\u011d","gcy;": "\u0433","gdot;": "\u0121","ge;": "\u2265","gel;": "\u22db","geq;": "\u2265","geqq;": "\u2267","geqslant;": "\u2a7e","ges;": "\u2a7e","gescc;": "\u2aa9","gesdot;": "\u2a80","gesdoto;": "\u2a82","gesdotol;": "\u2a84","gesl;": "\u22db\ufe00","gesles;": "\u2a94","gfr;": "\U0001d524","gg;": "\u226b","ggg;": "\u22d9","gimel;": "\u2137","gjcy;": "\u0453","gl;": "\u2277","glE;": "\u2a92","gla;": "\u2aa5","glj;": "\u2aa4","gnE;": "\u2269","gnap;": "\u2a8a","gnapprox;": "\u2a8a","gne;": "\u2a88","gneq;": "\u2a88","gneqq;": "\u2269","gnsim;": "\u22e7","gopf;": "\U0001d558","grave;": "`","gscr;": "\u210a","gsim;": "\u2273","gsime;": "\u2a8e","gsiml;": "\u2a90","gt": ">","gt;": ">","gtcc;": "\u2aa7","gtcir;": "\u2a7a","gtdot;": "\u22d7","gtlPar;": "\u2995","gtquest;": "\u2a7c","gtrapprox;": "\u2a86","gtrarr;": "\u2978","gtrdot;": "\u22d7","gtreqless;": "\u22db","gtreqqless;": "\u2a8c","gtrless;": "\u2277","gtrsim;": "\u2273","gvertneqq;": "\u2269\ufe00","gvnE;": "\u2269\ufe00","hArr;": "\u21d4","hairsp;": 
"\u200a","half;": "\xbd","hamilt;": "\u210b","hardcy;": "\u044a","harr;": "\u2194","harrcir;": "\u2948","harrw;": "\u21ad","hbar;": "\u210f","hcirc;": "\u0125","hearts;": "\u2665","heartsuit;": "\u2665","hellip;": "\u2026","hercon;": "\u22b9","hfr;": "\U0001d525","hksearow;": "\u2925","hkswarow;": "\u2926","hoarr;": "\u21ff","homtht;": "\u223b","hookleftarrow;": "\u21a9","hookrightarrow;": "\u21aa","hopf;": "\U0001d559","horbar;": "\u2015","hscr;": "\U0001d4bd","hslash;": "\u210f","hstrok;": "\u0127","hybull;": "\u2043","hyphen;": "\u2010","iacute": "\xed","iacute;": "\xed","ic;": "\u2063","icirc": "\xee","icirc;": "\xee","icy;": "\u0438","iecy;": "\u0435","iexcl": "\xa1","iexcl;": "\xa1","iff;": "\u21d4","ifr;": "\U0001d526","igrave": "\xec","igrave;": "\xec","ii;": "\u2148","iiiint;": "\u2a0c","iiint;": "\u222d","iinfin;": "\u29dc","iiota;": "\u2129","ijlig;": "\u0133","imacr;": "\u012b","image;": "\u2111","imagline;": "\u2110","imagpart;": "\u2111","imath;": "\u0131","imof;": "\u22b7","imped;": "\u01b5","in;": "\u2208","incare;": "\u2105","infin;": "\u221e","infintie;": "\u29dd","inodot;": "\u0131","int;": "\u222b","intcal;": "\u22ba","integers;": "\u2124","intercal;": "\u22ba","intlarhk;": "\u2a17","intprod;": "\u2a3c","iocy;": "\u0451","iogon;": "\u012f","iopf;": "\U0001d55a","iota;": "\u03b9","iprod;": "\u2a3c","iquest": "\xbf","iquest;": "\xbf","iscr;": "\U0001d4be","isin;": "\u2208","isinE;": "\u22f9","isindot;": "\u22f5","isins;": "\u22f4","isinsv;": "\u22f3","isinv;": "\u2208","it;": "\u2062","itilde;": "\u0129","iukcy;": "\u0456","iuml": "\xef","iuml;": "\xef","jcirc;": "\u0135","jcy;": "\u0439","jfr;": "\U0001d527","jmath;": "\u0237","jopf;": "\U0001d55b","jscr;": "\U0001d4bf","jsercy;": "\u0458","jukcy;": "\u0454","kappa;": "\u03ba","kappav;": "\u03f0","kcedil;": "\u0137","kcy;": "\u043a","kfr;": "\U0001d528","kgreen;": "\u0138","khcy;": "\u0445","kjcy;": "\u045c","kopf;": "\U0001d55c","kscr;": "\U0001d4c0","lAarr;": "\u21da","lArr;": 
"\u21d0","lAtail;": "\u291b","lBarr;": "\u290e","lE;": "\u2266","lEg;": "\u2a8b","lHar;": "\u2962","lacute;": "\u013a","laemptyv;": "\u29b4","lagran;": "\u2112","lambda;": "\u03bb","lang;": "\u27e8","langd;": "\u2991","langle;": "\u27e8","lap;": "\u2a85","laquo": "\xab","laquo;": "\xab","larr;": "\u2190","larrb;": "\u21e4","larrbfs;": "\u291f","larrfs;": "\u291d","larrhk;": "\u21a9","larrlp;": "\u21ab","larrpl;": "\u2939","larrsim;": "\u2973","larrtl;": "\u21a2","lat;": "\u2aab","latail;": "\u2919","late;": "\u2aad","lates;": "\u2aad\ufe00","lbarr;": "\u290c","lbbrk;": "\u2772","lbrace;": "{","lbrack;": "[","lbrke;": "\u298b","lbrksld;": "\u298f","lbrkslu;": "\u298d","lcaron;": "\u013e","lcedil;": "\u013c","lceil;": "\u2308","lcub;": "{","lcy;": "\u043b","ldca;": "\u2936","ldquo;": "\u201c","ldquor;": "\u201e","ldrdhar;": "\u2967","ldrushar;": "\u294b","ldsh;": "\u21b2","le;": "\u2264","leftarrow;": "\u2190","leftarrowtail;": "\u21a2","leftharpoondown;": "\u21bd","leftharpoonup;": "\u21bc","leftleftarrows;": "\u21c7","leftrightarrow;": "\u2194","leftrightarrows;": "\u21c6","leftrightharpoons;": "\u21cb","leftrightsquigarrow;": "\u21ad","leftthreetimes;": "\u22cb","leg;": "\u22da","leq;": "\u2264","leqq;": "\u2266","leqslant;": "\u2a7d","les;": "\u2a7d","lescc;": "\u2aa8","lesdot;": "\u2a7f","lesdoto;": "\u2a81","lesdotor;": "\u2a83","lesg;": "\u22da\ufe00","lesges;": "\u2a93","lessapprox;": "\u2a85","lessdot;": "\u22d6","lesseqgtr;": "\u22da","lesseqqgtr;": "\u2a8b","lessgtr;": "\u2276","lesssim;": "\u2272","lfisht;": "\u297c","lfloor;": "\u230a","lfr;": "\U0001d529","lg;": "\u2276","lgE;": "\u2a91","lhard;": "\u21bd","lharu;": "\u21bc","lharul;": "\u296a","lhblk;": "\u2584","ljcy;": "\u0459","ll;": "\u226a","llarr;": "\u21c7","llcorner;": "\u231e","llhard;": "\u296b","lltri;": "\u25fa","lmidot;": "\u0140","lmoust;": "\u23b0","lmoustache;": "\u23b0","lnE;": "\u2268","lnap;": "\u2a89","lnapprox;": "\u2a89","lne;": "\u2a87","lneq;": "\u2a87","lneqq;": 
"\u2268","lnsim;": "\u22e6","loang;": "\u27ec","loarr;": "\u21fd","lobrk;": "\u27e6","longleftarrow;": "\u27f5","longleftrightarrow;": "\u27f7","longmapsto;": "\u27fc","longrightarrow;": "\u27f6","looparrowleft;": "\u21ab","looparrowright;": "\u21ac","lopar;": "\u2985","lopf;": "\U0001d55d","loplus;": "\u2a2d","lotimes;": "\u2a34","lowast;": "\u2217","lowbar;": "_","loz;": "\u25ca","lozenge;": "\u25ca","lozf;": "\u29eb","lpar;": "(","lparlt;": "\u2993","lrarr;": "\u21c6","lrcorner;": "\u231f","lrhar;": "\u21cb","lrhard;": "\u296d","lrm;": "\u200e","lrtri;": "\u22bf","lsaquo;": "\u2039","lscr;": "\U0001d4c1","lsh;": "\u21b0","lsim;": "\u2272","lsime;": "\u2a8d","lsimg;": "\u2a8f","lsqb;": "[","lsquo;": "\u2018","lsquor;": "\u201a","lstrok;": "\u0142","lt": "<","lt;": "<","ltcc;": "\u2aa6","ltcir;": "\u2a79","ltdot;": "\u22d6","lthree;": "\u22cb","ltimes;": "\u22c9","ltlarr;": "\u2976","ltquest;": "\u2a7b","ltrPar;": "\u2996","ltri;": "\u25c3","ltrie;": "\u22b4","ltrif;": "\u25c2","lurdshar;": "\u294a","luruhar;": "\u2966","lvertneqq;": "\u2268\ufe00","lvnE;": "\u2268\ufe00","mDDot;": "\u223a","macr": "\xaf","macr;": "\xaf","male;": "\u2642","malt;": "\u2720","maltese;": "\u2720","map;": "\u21a6","mapsto;": "\u21a6","mapstodown;": "\u21a7","mapstoleft;": "\u21a4","mapstoup;": "\u21a5","marker;": "\u25ae","mcomma;": "\u2a29","mcy;": "\u043c","mdash;": "\u2014","measuredangle;": "\u2221","mfr;": "\U0001d52a","mho;": "\u2127","micro": "\xb5","micro;": "\xb5","mid;": "\u2223","midast;": "*","midcir;": "\u2af0","middot": "\xb7","middot;": "\xb7","minus;": "\u2212","minusb;": "\u229f","minusd;": "\u2238","minusdu;": "\u2a2a","mlcp;": "\u2adb","mldr;": "\u2026","mnplus;": "\u2213","models;": "\u22a7","mopf;": "\U0001d55e","mp;": "\u2213","mscr;": "\U0001d4c2","mstpos;": "\u223e","mu;": "\u03bc","multimap;": "\u22b8","mumap;": "\u22b8","nGg;": "\u22d9\u0338","nGt;": "\u226b\u20d2","nGtv;": "\u226b\u0338","nLeftarrow;": "\u21cd","nLeftrightarrow;": "\u21ce","nLl;": 
"\u22d8\u0338","nLt;": "\u226a\u20d2","nLtv;": "\u226a\u0338","nRightarrow;": "\u21cf","nVDash;": "\u22af","nVdash;": "\u22ae","nabla;": "\u2207","nacute;": "\u0144","nang;": "\u2220\u20d2","nap;": "\u2249","napE;": "\u2a70\u0338","napid;": "\u224b\u0338","napos;": "\u0149","napprox;": "\u2249","natur;": "\u266e","natural;": "\u266e","naturals;": "\u2115","nbsp": "\xa0","nbsp;": "\xa0","nbump;": "\u224e\u0338","nbumpe;": "\u224f\u0338","ncap;": "\u2a43","ncaron;": "\u0148","ncedil;": "\u0146","ncong;": "\u2247","ncongdot;": "\u2a6d\u0338","ncup;": "\u2a42","ncy;": "\u043d","ndash;": "\u2013","ne;": "\u2260","neArr;": "\u21d7","nearhk;": "\u2924","nearr;": "\u2197","nearrow;": "\u2197","nedot;": "\u2250\u0338","nequiv;": "\u2262","nesear;": "\u2928","nesim;": "\u2242\u0338","nexist;": "\u2204","nexists;": "\u2204","nfr;": "\U0001d52b","ngE;": "\u2267\u0338","nge;": "\u2271","ngeq;": "\u2271","ngeqq;": "\u2267\u0338","ngeqslant;": "\u2a7e\u0338","nges;": "\u2a7e\u0338","ngsim;": "\u2275","ngt;": "\u226f","ngtr;": "\u226f","nhArr;": "\u21ce","nharr;": "\u21ae","nhpar;": "\u2af2","ni;": "\u220b","nis;": "\u22fc","nisd;": "\u22fa","niv;": "\u220b","njcy;": "\u045a","nlArr;": "\u21cd","nlE;": "\u2266\u0338","nlarr;": "\u219a","nldr;": "\u2025","nle;": "\u2270","nleftarrow;": "\u219a","nleftrightarrow;": "\u21ae","nleq;": "\u2270","nleqq;": "\u2266\u0338","nleqslant;": "\u2a7d\u0338","nles;": "\u2a7d\u0338","nless;": "\u226e","nlsim;": "\u2274","nlt;": "\u226e","nltri;": "\u22ea","nltrie;": "\u22ec","nmid;": "\u2224","nopf;": "\U0001d55f","not": "\xac","not;": "\xac","notin;": "\u2209","notinE;": "\u22f9\u0338","notindot;": "\u22f5\u0338","notinva;": "\u2209","notinvb;": "\u22f7","notinvc;": "\u22f6","notni;": "\u220c","notniva;": "\u220c","notnivb;": "\u22fe","notnivc;": "\u22fd","npar;": "\u2226","nparallel;": "\u2226","nparsl;": "\u2afd\u20e5","npart;": "\u2202\u0338","npolint;": "\u2a14","npr;": "\u2280","nprcue;": "\u22e0","npre;": "\u2aaf\u0338","nprec;": 
"\u2280","npreceq;": "\u2aaf\u0338","nrArr;": "\u21cf","nrarr;": "\u219b","nrarrc;": "\u2933\u0338","nrarrw;": "\u219d\u0338","nrightarrow;": "\u219b","nrtri;": "\u22eb","nrtrie;": "\u22ed","nsc;": "\u2281","nsccue;": "\u22e1","nsce;": "\u2ab0\u0338","nscr;": "\U0001d4c3","nshortmid;": "\u2224","nshortparallel;": "\u2226","nsim;": "\u2241","nsime;": "\u2244","nsimeq;": "\u2244","nsmid;": "\u2224","nspar;": "\u2226","nsqsube;": "\u22e2","nsqsupe;": "\u22e3","nsub;": "\u2284","nsubE;": "\u2ac5\u0338","nsube;": "\u2288","nsubset;": "\u2282\u20d2","nsubseteq;": "\u2288","nsubseteqq;": "\u2ac5\u0338","nsucc;": "\u2281","nsucceq;": "\u2ab0\u0338","nsup;": "\u2285","nsupE;": "\u2ac6\u0338","nsupe;": "\u2289","nsupset;": "\u2283\u20d2","nsupseteq;": "\u2289","nsupseteqq;": "\u2ac6\u0338","ntgl;": "\u2279","ntilde": "\xf1","ntilde;": "\xf1","ntlg;": "\u2278","ntriangleleft;": "\u22ea","ntrianglelefteq;": "\u22ec","ntriangleright;": "\u22eb","ntrianglerighteq;": "\u22ed","nu;": "\u03bd","num;": "#","numero;": "\u2116","numsp;": "\u2007","nvDash;": "\u22ad","nvHarr;": "\u2904","nvap;": "\u224d\u20d2","nvdash;": "\u22ac","nvge;": "\u2265\u20d2","nvgt;": ">\u20d2","nvinfin;": "\u29de","nvlArr;": "\u2902","nvle;": "\u2264\u20d2","nvlt;": "<\u20d2","nvltrie;": "\u22b4\u20d2","nvrArr;": "\u2903","nvrtrie;": "\u22b5\u20d2","nvsim;": "\u223c\u20d2","nwArr;": "\u21d6","nwarhk;": "\u2923","nwarr;": "\u2196","nwarrow;": "\u2196","nwnear;": "\u2927","oS;": "\u24c8","oacute": "\xf3","oacute;": "\xf3","oast;": "\u229b","ocir;": "\u229a","ocirc": "\xf4","ocirc;": "\xf4","ocy;": "\u043e","odash;": "\u229d","odblac;": "\u0151","odiv;": "\u2a38","odot;": "\u2299","odsold;": "\u29bc","oelig;": "\u0153","ofcir;": "\u29bf","ofr;": "\U0001d52c","ogon;": "\u02db","ograve": "\xf2","ograve;": "\xf2","ogt;": "\u29c1","ohbar;": "\u29b5","ohm;": "\u03a9","oint;": "\u222e","olarr;": "\u21ba","olcir;": "\u29be","olcross;": "\u29bb","oline;": "\u203e","olt;": "\u29c0","omacr;": "\u014d","omega;": 
"\u03c9","omicron;": "\u03bf","omid;": "\u29b6","ominus;": "\u2296","oopf;": "\U0001d560","opar;": "\u29b7","operp;": "\u29b9","oplus;": "\u2295","or;": "\u2228","orarr;": "\u21bb","ord;": "\u2a5d","order;": "\u2134","orderof;": "\u2134","ordf": "\xaa","ordf;": "\xaa","ordm": "\xba","ordm;": "\xba","origof;": "\u22b6","oror;": "\u2a56","orslope;": "\u2a57","orv;": "\u2a5b","oscr;": "\u2134","oslash": "\xf8","oslash;": "\xf8","osol;": "\u2298","otilde": "\xf5","otilde;": "\xf5","otimes;": "\u2297","otimesas;": "\u2a36","ouml": "\xf6","ouml;": "\xf6","ovbar;": "\u233d","par;": "\u2225","para": "\xb6","para;": "\xb6","parallel;": "\u2225","parsim;": "\u2af3","parsl;": "\u2afd","part;": "\u2202","pcy;": "\u043f","percnt;": "%","period;": ".","permil;": "\u2030","perp;": "\u22a5","pertenk;": "\u2031","pfr;": "\U0001d52d","phi;": "\u03c6","phiv;": "\u03d5","phmmat;": "\u2133","phone;": "\u260e","pi;": "\u03c0","pitchfork;": "\u22d4","piv;": "\u03d6","planck;": "\u210f","planckh;": "\u210e","plankv;": "\u210f","plus;": "+","plusacir;": "\u2a23","plusb;": "\u229e","pluscir;": "\u2a22","plusdo;": "\u2214","plusdu;": "\u2a25","pluse;": "\u2a72","plusmn": "\xb1","plusmn;": "\xb1","plussim;": "\u2a26","plustwo;": "\u2a27","pm;": "\xb1","pointint;": "\u2a15","popf;": "\U0001d561","pound": "\xa3","pound;": "\xa3","pr;": "\u227a","prE;": "\u2ab3","prap;": "\u2ab7","prcue;": "\u227c","pre;": "\u2aaf","prec;": "\u227a","precapprox;": "\u2ab7","preccurlyeq;": "\u227c","preceq;": "\u2aaf","precnapprox;": "\u2ab9","precneqq;": "\u2ab5","precnsim;": "\u22e8","precsim;": "\u227e","prime;": "\u2032","primes;": "\u2119","prnE;": "\u2ab5","prnap;": "\u2ab9","prnsim;": "\u22e8","prod;": "\u220f","profalar;": "\u232e","profline;": "\u2312","profsurf;": "\u2313","prop;": "\u221d","propto;": "\u221d","prsim;": "\u227e","prurel;": "\u22b0","pscr;": "\U0001d4c5","psi;": "\u03c8","puncsp;": "\u2008","qfr;": "\U0001d52e","qint;": "\u2a0c","qopf;": "\U0001d562","qprime;": "\u2057","qscr;": 
"\U0001d4c6","quaternions;": "\u210d","quatint;": "\u2a16","quest;": "?","questeq;": "\u225f","quot": "\"","quot;": "\"","rAarr;": "\u21db","rArr;": "\u21d2","rAtail;": "\u291c","rBarr;": "\u290f","rHar;": "\u2964","race;": "\u223d\u0331","racute;": "\u0155","radic;": "\u221a","raemptyv;": "\u29b3","rang;": "\u27e9","rangd;": "\u2992","range;": "\u29a5","rangle;": "\u27e9","raquo": "\xbb","raquo;": "\xbb","rarr;": "\u2192","rarrap;": "\u2975","rarrb;": "\u21e5","rarrbfs;": "\u2920","rarrc;": "\u2933","rarrfs;": "\u291e","rarrhk;": "\u21aa","rarrlp;": "\u21ac","rarrpl;": "\u2945","rarrsim;": "\u2974","rarrtl;": "\u21a3","rarrw;": "\u219d","ratail;": "\u291a","ratio;": "\u2236","rationals;": "\u211a","rbarr;": "\u290d","rbbrk;": "\u2773","rbrace;": "}","rbrack;": "]","rbrke;": "\u298c","rbrksld;": "\u298e","rbrkslu;": "\u2990","rcaron;": "\u0159","rcedil;": "\u0157","rceil;": "\u2309","rcub;": "}","rcy;": "\u0440","rdca;": "\u2937","rdldhar;": "\u2969","rdquo;": "\u201d","rdquor;": "\u201d","rdsh;": "\u21b3","real;": "\u211c","realine;": "\u211b","realpart;": "\u211c","reals;": "\u211d","rect;": "\u25ad","reg": "\xae","reg;": "\xae","rfisht;": "\u297d","rfloor;": "\u230b","rfr;": "\U0001d52f","rhard;": "\u21c1","rharu;": "\u21c0","rharul;": "\u296c","rho;": "\u03c1","rhov;": "\u03f1","rightarrow;": "\u2192","rightarrowtail;": "\u21a3","rightharpoondown;": "\u21c1","rightharpoonup;": "\u21c0","rightleftarrows;": "\u21c4","rightleftharpoons;": "\u21cc","rightrightarrows;": "\u21c9","rightsquigarrow;": "\u219d","rightthreetimes;": "\u22cc","ring;": "\u02da","risingdotseq;": "\u2253","rlarr;": "\u21c4","rlhar;": "\u21cc","rlm;": "\u200f","rmoust;": "\u23b1","rmoustache;": "\u23b1","rnmid;": "\u2aee","roang;": "\u27ed","roarr;": "\u21fe","robrk;": "\u27e7","ropar;": "\u2986","ropf;": "\U0001d563","roplus;": "\u2a2e","rotimes;": "\u2a35","rpar;": ")","rpargt;": "\u2994","rppolint;": "\u2a12","rrarr;": "\u21c9","rsaquo;": "\u203a","rscr;": "\U0001d4c7","rsh;": 
"\u21b1","rsqb;": "]","rsquo;": "\u2019","rsquor;": "\u2019","rthree;": "\u22cc","rtimes;": "\u22ca","rtri;": "\u25b9","rtrie;": "\u22b5","rtrif;": "\u25b8","rtriltri;": "\u29ce","ruluhar;": "\u2968","rx;": "\u211e","sacute;": "\u015b","sbquo;": "\u201a","sc;": "\u227b","scE;": "\u2ab4","scap;": "\u2ab8","scaron;": "\u0161","sccue;": "\u227d","sce;": "\u2ab0","scedil;": "\u015f","scirc;": "\u015d","scnE;": "\u2ab6","scnap;": "\u2aba","scnsim;": "\u22e9","scpolint;": "\u2a13","scsim;": "\u227f","scy;": "\u0441","sdot;": "\u22c5","sdotb;": "\u22a1","sdote;": "\u2a66","seArr;": "\u21d8","searhk;": "\u2925","searr;": "\u2198","searrow;": "\u2198","sect": "\xa7","sect;": "\xa7","semi;": ";","seswar;": "\u2929","setminus;": "\u2216","setmn;": "\u2216","sext;": "\u2736","sfr;": "\U0001d530","sfrown;": "\u2322","sharp;": "\u266f","shchcy;": "\u0449","shcy;": "\u0448","shortmid;": "\u2223","shortparallel;": "\u2225","shy": "\xad","shy;": "\xad","sigma;": "\u03c3","sigmaf;": "\u03c2","sigmav;": "\u03c2","sim;": "\u223c","simdot;": "\u2a6a","sime;": "\u2243","simeq;": "\u2243","simg;": "\u2a9e","simgE;": "\u2aa0","siml;": "\u2a9d","simlE;": "\u2a9f","simne;": "\u2246","simplus;": "\u2a24","simrarr;": "\u2972","slarr;": "\u2190","smallsetminus;": "\u2216","smashp;": "\u2a33","smeparsl;": "\u29e4","smid;": "\u2223","smile;": "\u2323","smt;": "\u2aaa","smte;": "\u2aac","smtes;": "\u2aac\ufe00","softcy;": "\u044c","sol;": "/","solb;": "\u29c4","solbar;": "\u233f","sopf;": "\U0001d564","spades;": "\u2660","spadesuit;": "\u2660","spar;": "\u2225","sqcap;": "\u2293","sqcaps;": "\u2293\ufe00","sqcup;": "\u2294","sqcups;": "\u2294\ufe00","sqsub;": "\u228f","sqsube;": "\u2291","sqsubset;": "\u228f","sqsubseteq;": "\u2291","sqsup;": "\u2290","sqsupe;": "\u2292","sqsupset;": "\u2290","sqsupseteq;": "\u2292","squ;": "\u25a1","square;": "\u25a1","squarf;": "\u25aa","squf;": "\u25aa","srarr;": "\u2192","sscr;": "\U0001d4c8","ssetmn;": "\u2216","ssmile;": "\u2323","sstarf;": 
"\u22c6","star;": "\u2606","starf;": "\u2605","straightepsilon;": "\u03f5","straightphi;": "\u03d5","strns;": "\xaf","sub;": "\u2282","subE;": "\u2ac5","subdot;": "\u2abd","sube;": "\u2286","subedot;": "\u2ac3","submult;": "\u2ac1","subnE;": "\u2acb","subne;": "\u228a","subplus;": "\u2abf","subrarr;": "\u2979","subset;": "\u2282","subseteq;": "\u2286","subseteqq;": "\u2ac5","subsetneq;": "\u228a","subsetneqq;": "\u2acb","subsim;": "\u2ac7","subsub;": "\u2ad5","subsup;": "\u2ad3","succ;": "\u227b","succapprox;": "\u2ab8","succcurlyeq;": "\u227d","succeq;": "\u2ab0","succnapprox;": "\u2aba","succneqq;": "\u2ab6","succnsim;": "\u22e9","succsim;": "\u227f","sum;": "\u2211","sung;": "\u266a","sup1": "\xb9","sup1;": "\xb9","sup2": "\xb2","sup2;": "\xb2","sup3": "\xb3","sup3;": "\xb3","sup;": "\u2283","supE;": "\u2ac6","supdot;": "\u2abe","supdsub;": "\u2ad8","supe;": "\u2287","supedot;": "\u2ac4","suphsol;": "\u27c9","suphsub;": "\u2ad7","suplarr;": "\u297b","supmult;": "\u2ac2","supnE;": "\u2acc","supne;": "\u228b","supplus;": "\u2ac0","supset;": "\u2283","supseteq;": "\u2287","supseteqq;": "\u2ac6","supsetneq;": "\u228b","supsetneqq;": "\u2acc","supsim;": "\u2ac8","supsub;": "\u2ad4","supsup;": "\u2ad6","swArr;": "\u21d9","swarhk;": "\u2926","swarr;": "\u2199","swarrow;": "\u2199","swnwar;": "\u292a","szlig": "\xdf","szlig;": "\xdf","target;": "\u2316","tau;": "\u03c4","tbrk;": "\u23b4","tcaron;": "\u0165","tcedil;": "\u0163","tcy;": "\u0442","tdot;": "\u20db","telrec;": "\u2315","tfr;": "\U0001d531","there4;": "\u2234","therefore;": "\u2234","theta;": "\u03b8","thetasym;": "\u03d1","thetav;": "\u03d1","thickapprox;": "\u2248","thicksim;": "\u223c","thinsp;": "\u2009","thkap;": "\u2248","thksim;": "\u223c","thorn": "\xfe","thorn;": "\xfe","tilde;": "\u02dc","times": "\xd7","times;": "\xd7","timesb;": "\u22a0","timesbar;": "\u2a31","timesd;": "\u2a30","tint;": "\u222d","toea;": "\u2928","top;": "\u22a4","topbot;": "\u2336","topcir;": "\u2af1","topf;": 
"\U0001d565","topfork;": "\u2ada","tosa;": "\u2929","tprime;": "\u2034","trade;": "\u2122","triangle;": "\u25b5","triangledown;": "\u25bf","triangleleft;": "\u25c3","trianglelefteq;": "\u22b4","triangleq;": "\u225c","triangleright;": "\u25b9","trianglerighteq;": "\u22b5","tridot;": "\u25ec","trie;": "\u225c","triminus;": "\u2a3a","triplus;": "\u2a39","trisb;": "\u29cd","tritime;": "\u2a3b","trpezium;": "\u23e2","tscr;": "\U0001d4c9","tscy;": "\u0446","tshcy;": "\u045b","tstrok;": "\u0167","twixt;": "\u226c","twoheadleftarrow;": "\u219e","twoheadrightarrow;": "\u21a0","uArr;": "\u21d1","uHar;": "\u2963","uacute": "\xfa","uacute;": "\xfa","uarr;": "\u2191","ubrcy;": "\u045e","ubreve;": "\u016d","ucirc": "\xfb","ucirc;": "\xfb","ucy;": "\u0443","udarr;": "\u21c5","udblac;": "\u0171","udhar;": "\u296e","ufisht;": "\u297e","ufr;": "\U0001d532","ugrave": "\xf9","ugrave;": "\xf9","uharl;": "\u21bf","uharr;": "\u21be","uhblk;": "\u2580","ulcorn;": "\u231c","ulcorner;": "\u231c","ulcrop;": "\u230f","ultri;": "\u25f8","umacr;": "\u016b","uml": "\xa8","uml;": "\xa8","uogon;": "\u0173","uopf;": "\U0001d566","uparrow;": "\u2191","updownarrow;": "\u2195","upharpoonleft;": "\u21bf","upharpoonright;": "\u21be","uplus;": "\u228e","upsi;": "\u03c5","upsih;": "\u03d2","upsilon;": "\u03c5","upuparrows;": "\u21c8","urcorn;": "\u231d","urcorner;": "\u231d","urcrop;": "\u230e","uring;": "\u016f","urtri;": "\u25f9","uscr;": "\U0001d4ca","utdot;": "\u22f0","utilde;": "\u0169","utri;": "\u25b5","utrif;": "\u25b4","uuarr;": "\u21c8","uuml": "\xfc","uuml;": "\xfc","uwangle;": "\u29a7","vArr;": "\u21d5","vBar;": "\u2ae8","vBarv;": "\u2ae9","vDash;": "\u22a8","vangrt;": "\u299c","varepsilon;": "\u03f5","varkappa;": "\u03f0","varnothing;": "\u2205","varphi;": "\u03d5","varpi;": "\u03d6","varpropto;": "\u221d","varr;": "\u2195","varrho;": "\u03f1","varsigma;": "\u03c2","varsubsetneq;": "\u228a\ufe00","varsubsetneqq;": "\u2acb\ufe00","varsupsetneq;": "\u228b\ufe00","varsupsetneqq;": 
"\u2acc\ufe00","vartheta;": "\u03d1","vartriangleleft;": "\u22b2","vartriangleright;": "\u22b3","vcy;": "\u0432","vdash;": "\u22a2","vee;": "\u2228","veebar;": "\u22bb","veeeq;": "\u225a","vellip;": "\u22ee","verbar;": "|","vert;": "|","vfr;": "\U0001d533","vltri;": "\u22b2","vnsub;": "\u2282\u20d2","vnsup;": "\u2283\u20d2","vopf;": "\U0001d567","vprop;": "\u221d","vrtri;": "\u22b3","vscr;": "\U0001d4cb","vsubnE;": "\u2acb\ufe00","vsubne;": "\u228a\ufe00","vsupnE;": "\u2acc\ufe00","vsupne;": "\u228b\ufe00","vzigzag;": "\u299a","wcirc;": "\u0175","wedbar;": "\u2a5f","wedge;": "\u2227","wedgeq;": "\u2259","weierp;": "\u2118","wfr;": "\U0001d534","wopf;": "\U0001d568","wp;": "\u2118","wr;": "\u2240","wreath;": "\u2240","wscr;": "\U0001d4cc","xcap;": "\u22c2","xcirc;": "\u25ef","xcup;": "\u22c3","xdtri;": "\u25bd","xfr;": "\U0001d535","xhArr;": "\u27fa","xharr;": "\u27f7","xi;": "\u03be","xlArr;": "\u27f8","xlarr;": "\u27f5","xmap;": "\u27fc","xnis;": "\u22fb","xodot;": "\u2a00","xopf;": "\U0001d569","xoplus;": "\u2a01","xotime;": "\u2a02","xrArr;": "\u27f9","xrarr;": "\u27f6","xscr;": "\U0001d4cd","xsqcup;": "\u2a06","xuplus;": "\u2a04","xutri;": "\u25b3","xvee;": "\u22c1","xwedge;": "\u22c0","yacute": "\xfd","yacute;": "\xfd","yacy;": "\u044f","ycirc;": "\u0177","ycy;": "\u044b","yen": "\xa5","yen;": "\xa5","yfr;": "\U0001d536","yicy;": "\u0457","yopf;": "\U0001d56a","yscr;": "\U0001d4ce","yucy;": "\u044e","yuml": "\xff","yuml;": "\xff","zacute;": "\u017a","zcaron;": "\u017e","zcy;": "\u0437","zdot;": "\u017c","zeetrf;": "\u2128","zeta;": "\u03b6","zfr;": "\U0001d537","zhcy;": "\u0436","zigrarr;": "\u21dd","zopf;": "\U0001d56b","zscr;": "\U0001d4cf","zwj;": "\u200d","zwnj;": "\u200c",}replacementCharacters = {0x0: "\uFFFD",0x0d: "\u000D",0x80: "\u20AC",0x81: "\u0081",0x82: "\u201A",0x83: "\u0192",0x84: "\u201E",0x85: "\u2026",0x86: "\u2020",0x87: "\u2021",0x88: "\u02C6",0x89: "\u2030",0x8A: "\u0160",0x8B: "\u2039",0x8C: "\u0152",0x8D: "\u008D",0x8E: "\u017D",0x8F: 
"\u008F",0x90: "\u0090",0x91: "\u2018",0x92: "\u2019",0x93: "\u201C",0x94: "\u201D",0x95: "\u2022",0x96: "\u2013",0x97: "\u2014",0x98: "\u02DC",0x99: "\u2122",0x9A: "\u0161",0x9B: "\u203A",0x9C: "\u0153",0x9D: "\u009D",0x9E: "\u017E",0x9F: "\u0178",}tokenTypes = {"Doctype": 0,"Characters": 1,"SpaceCharacters": 2,"StartTag": 3,"EndTag": 4,"EmptyTag": 5,"Comment": 6,"ParseError": 7}tagTokenTypes = frozenset([tokenTypes["StartTag"], tokenTypes["EndTag"],tokenTypes["EmptyTag"]])prefixes = dict([(v, k) for k, v in namespaces.items()])prefixes["http://www.w3.org/1998/Math/MathML"] = "math"class DataLossWarning(UserWarning):passclass ReparseException(Exception):pass
from __future__ import absolute_import, division, unicode_literalsimport sysfrom types import ModuleTypefrom pip._vendor.six import text_typetry:import xml.etree.cElementTree as default_etreeexcept ImportError:import xml.etree.ElementTree as default_etree__all__ = ["default_etree", "MethodDispatcher", "isSurrogatePair","surrogatePairToCodepoint", "moduleFactoryFactory","supports_lone_surrogates", "PY27"]PY27 = sys.version_info[0] == 2 and sys.version_info[1] >= 7# Platforms not supporting lone surrogates (\uD800-\uDFFF) should be# caught by the below test. In general this would be any platform# using UTF-16 as its encoding of unicode strings, such as# Jython. This is because UTF-16 itself is based on the use of such# surrogates, and there is no mechanism to further escape such# escapes.try:_x = eval('"\\uD800"') # pylint:disable=eval-usedif not isinstance(_x, text_type):# We need this with u"" because of http://bugs.jython.org/issue2039_x = eval('u"\\uD800"') # pylint:disable=eval-usedassert isinstance(_x, text_type)except: # pylint:disable=bare-exceptsupports_lone_surrogates = Falseelse:supports_lone_surrogates = Trueclass MethodDispatcher(dict):"""Dict with 2 special properties:On initiation, keys that are lists, sets or tuples are converted tomultiple keys so accessing any one of the items in the originallist-like object returns the matching valuemd = MethodDispatcher({("foo", "bar"):"baz"})md["foo"] == "baz"A default value which can be set through the default attribute."""def __init__(self, items=()):# Using _dictEntries instead of directly assigning to self is about# twice as fast. 
Please do careful performance testing before changing# anything here._dictEntries = []for name, value in items:if isinstance(name, (list, tuple, frozenset, set)):for item in name:_dictEntries.append((item, value))else:_dictEntries.append((name, value))dict.__init__(self, _dictEntries)assert len(self) == len(_dictEntries)self.default = Nonedef __getitem__(self, key):return dict.get(self, key, self.default)# Some utility functions to deal with weirdness around UCS2 vs UCS4# python buildsdef isSurrogatePair(data):return (len(data) == 2 andord(data[0]) >= 0xD800 and ord(data[0]) <= 0xDBFF andord(data[1]) >= 0xDC00 and ord(data[1]) <= 0xDFFF)def surrogatePairToCodepoint(data):char_val = (0x10000 + (ord(data[0]) - 0xD800) * 0x400 +(ord(data[1]) - 0xDC00))return char_val# Module Factory Factory (no, this isn't Java, I know)# Here to stop this being duplicated all over the place.def moduleFactoryFactory(factory):moduleCache = {}def moduleFactory(baseModule, *args, **kwargs):if isinstance(ModuleType.__name__, type("")):name = "_%s_factory" % baseModule.__name__else:name = b"_%s_factory" % baseModule.__name__kwargs_tuple = tuple(kwargs.items())try:return moduleCache[name][args][kwargs_tuple]except KeyError:mod = ModuleType(name)objs = factory(baseModule, *args, **kwargs)mod.__dict__.update(objs)if "name" not in moduleCache:moduleCache[name] = {}if "args" not in moduleCache[name]:moduleCache[name][args] = {}if "kwargs" not in moduleCache[name][args]:moduleCache[name][args][kwargs_tuple] = {}moduleCache[name][args][kwargs_tuple] = modreturn modreturn moduleFactorydef memoize(func):cache = {}def wrapped(*args, **kwargs):key = (tuple(args), tuple(kwargs.items()))if key not in cache:cache[key] = func(*args, **kwargs)return cache[key]return wrapped
from __future__ import absolute_import, division, unicode_literalsfrom pip._vendor.six import text_typefrom bisect import bisect_leftfrom ._base import Trie as ABCTrieclass Trie(ABCTrie):def __init__(self, data):if not all(isinstance(x, text_type) for x in data.keys()):raise TypeError("All keys must be strings")self._data = dataself._keys = sorted(data.keys())self._cachestr = ""self._cachepoints = (0, len(data))def __contains__(self, key):return key in self._datadef __len__(self):return len(self._data)def __iter__(self):return iter(self._data)def __getitem__(self, key):return self._data[key]def keys(self, prefix=None):if prefix is None or prefix == "" or not self._keys:return set(self._keys)if prefix.startswith(self._cachestr):lo, hi = self._cachepointsstart = i = bisect_left(self._keys, prefix, lo, hi)else:start = i = bisect_left(self._keys, prefix)keys = set()if start == len(self._keys):return keyswhile self._keys[i].startswith(prefix):keys.add(self._keys[i])i += 1self._cachestr = prefixself._cachepoints = (start, i)return keysdef has_keys_with_prefix(self, prefix):if prefix in self._data:return Trueif prefix.startswith(self._cachestr):lo, hi = self._cachepointsi = bisect_left(self._keys, prefix, lo, hi)else:i = bisect_left(self._keys, prefix)if i == len(self._keys):return Falsereturn self._keys[i].startswith(prefix)
from __future__ import absolute_import, division, unicode_literalsfrom datrie import Trie as DATriefrom pip._vendor.six import text_typefrom ._base import Trie as ABCTrieclass Trie(ABCTrie):def __init__(self, data):chars = set()for key in data.keys():if not isinstance(key, text_type):raise TypeError("All keys must be strings")for char in key:chars.add(char)self._data = DATrie("".join(chars))for key, value in data.items():self._data[key] = valuedef __contains__(self, key):return key in self._datadef __len__(self):return len(self._data)def __iter__(self):raise NotImplementedError()def __getitem__(self, key):return self._data[key]def keys(self, prefix=None):return self._data.keys(prefix)def has_keys_with_prefix(self, prefix):return self._data.has_keys_with_prefix(prefix)def longest_prefix(self, prefix):return self._data.longest_prefix(prefix)def longest_prefix_item(self, prefix):return self._data.longest_prefix_item(prefix)
from __future__ import absolute_import, division, unicode_literalsfrom collections import Mappingclass Trie(Mapping):"""Abstract base class for tries"""def keys(self, prefix=None):# pylint:disable=arguments-differkeys = super(Trie, self).keys()if prefix is None:return set(keys)# Python 2.6: no set comprehensionsreturn set([x for x in keys if x.startswith(prefix)])def has_keys_with_prefix(self, prefix):for key in self.keys():if key.startswith(prefix):return Truereturn Falsedef longest_prefix(self, prefix):if prefix in self:return prefixfor i in range(1, len(prefix) + 1):if prefix[:-i] in self:return prefix[:-i]raise KeyError(prefix)def longest_prefix_item(self, prefix):lprefix = self.longest_prefix(prefix)return (lprefix, self[lprefix])
from __future__ import absolute_import, division, unicode_literalsfrom .py import Trie as PyTrieTrie = PyTrie# pylint:disable=wrong-import-positiontry:from .datrie import Trie as DATrieexcept ImportError:passelse:Trie = DATrie# pylint:enable=wrong-import-position
from __future__ import absolute_import, division, unicode_literalsfrom pip._vendor.six import unichr as chrfrom collections import dequefrom .constants import spaceCharactersfrom .constants import entitiesfrom .constants import asciiLetters, asciiUpper2Lowerfrom .constants import digits, hexDigits, EOFfrom .constants import tokenTypes, tagTokenTypesfrom .constants import replacementCharactersfrom ._inputstream import HTMLInputStreamfrom ._trie import TrieentitiesTrie = Trie(entities)class HTMLTokenizer(object):""" This class takes care of tokenizing HTML.* self.currentTokenHolds the token that is currently being processed.* self.stateHolds a reference to the method to be invoked... XXX* self.streamPoints to HTMLInputStream object."""def __init__(self, stream, parser=None, **kwargs):self.stream = HTMLInputStream(stream, **kwargs)self.parser = parser# Setup the initial tokenizer stateself.escapeFlag = Falseself.lastFourChars = []self.state = self.dataStateself.escape = False# The current token being createdself.currentToken = Nonesuper(HTMLTokenizer, self).__init__()def __iter__(self):""" This is where the magic happens.We do our usually processing through the states and when we have a tokento return we yield the token which pauses processing until the next tokenis requested."""self.tokenQueue = deque([])# Start processing. When EOF is reached self.state will return False# instead of True and the loop will terminate.while self.state():while self.stream.errors:yield {"type": tokenTypes["ParseError"], "data": self.stream.errors.pop(0)}while self.tokenQueue:yield self.tokenQueue.popleft()def consumeNumberEntity(self, isHex):"""This function returns either U+FFFD or the character based on thedecimal or hexadecimal representation. 
It also discards ";" if present.If not present self.tokenQueue.append({"type": tokenTypes["ParseError"]}) is invoked."""allowed = digitsradix = 10if isHex:allowed = hexDigitsradix = 16charStack = []# Consume all the characters that are in range while making sure we# don't hit an EOF.c = self.stream.char()while c in allowed and c is not EOF:charStack.append(c)c = self.stream.char()# Convert the set of characters consumed to an int.charAsInt = int("".join(charStack), radix)# Certain characters get replaced with othersif charAsInt in replacementCharacters:char = replacementCharacters[charAsInt]self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"illegal-codepoint-for-numeric-entity","datavars": {"charAsInt": charAsInt}})elif ((0xD800 <= charAsInt <= 0xDFFF) or(charAsInt > 0x10FFFF)):char = "\uFFFD"self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"illegal-codepoint-for-numeric-entity","datavars": {"charAsInt": charAsInt}})else:# Should speed up this check somehow (e.g. move the set to a constant)if ((0x0001 <= charAsInt <= 0x0008) or(0x000E <= charAsInt <= 0x001F) or(0x007F <= charAsInt <= 0x009F) or(0xFDD0 <= charAsInt <= 0xFDEF) orcharAsInt in frozenset([0x000B, 0xFFFE, 0xFFFF, 0x1FFFE,0x1FFFF, 0x2FFFE, 0x2FFFF, 0x3FFFE,0x3FFFF, 0x4FFFE, 0x4FFFF, 0x5FFFE,0x5FFFF, 0x6FFFE, 0x6FFFF, 0x7FFFE,0x7FFFF, 0x8FFFE, 0x8FFFF, 0x9FFFE,0x9FFFF, 0xAFFFE, 0xAFFFF, 0xBFFFE,0xBFFFF, 0xCFFFE, 0xCFFFF, 0xDFFFE,0xDFFFF, 0xEFFFE, 0xEFFFF, 0xFFFFE,0xFFFFF, 0x10FFFE, 0x10FFFF])):self.tokenQueue.append({"type": tokenTypes["ParseError"],"data":"illegal-codepoint-for-numeric-entity","datavars": {"charAsInt": charAsInt}})try:# Try/except needed as UCS-2 Python builds' unichar only works# within the BMP.char = chr(charAsInt)except ValueError:v = charAsInt - 0x10000char = chr(0xD800 | (v >> 10)) + chr(0xDC00 | (v & 0x3FF))# Discard the ; if present. 
Otherwise, put it back on the queue and# invoke parseError on parser.if c != ";":self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"numeric-entity-without-semicolon"})self.stream.unget(c)return chardef consumeEntity(self, allowedChar=None, fromAttribute=False):# Initialise to the default output for when no entity is matchedoutput = "&"charStack = [self.stream.char()]if (charStack[0] in spaceCharacters or charStack[0] in (EOF, "<", "&") or(allowedChar is not None and allowedChar == charStack[0])):self.stream.unget(charStack[0])elif charStack[0] == "#":# Read the next character to see if it's hex or decimalhex = FalsecharStack.append(self.stream.char())if charStack[-1] in ("x", "X"):hex = TruecharStack.append(self.stream.char())# charStack[-1] should be the first digitif (hex and charStack[-1] in hexDigits) \or (not hex and charStack[-1] in digits):# At least one digit found, so consume the whole numberself.stream.unget(charStack[-1])output = self.consumeNumberEntity(hex)else:# No digits foundself.tokenQueue.append({"type": tokenTypes["ParseError"],"data": "expected-numeric-entity"})self.stream.unget(charStack.pop())output = "&" + "".join(charStack)else:# At this point in the process might have named entity. 
Entities# are stored in the global variable "entities".## Consume characters and compare to these to a substring of the# entity names in the list until the substring no longer matches.while (charStack[-1] is not EOF):if not entitiesTrie.has_keys_with_prefix("".join(charStack)):breakcharStack.append(self.stream.char())# At this point we have a string that starts with some characters# that may match an entity# Try to find the longest entity the string will match to take care# of ¬i for instance.try:entityName = entitiesTrie.longest_prefix("".join(charStack[:-1]))entityLength = len(entityName)except KeyError:entityName = Noneif entityName is not None:if entityName[-1] != ";":self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"named-entity-without-semicolon"})if (entityName[-1] != ";" and fromAttribute and(charStack[entityLength] in asciiLetters orcharStack[entityLength] in digits orcharStack[entityLength] == "=")):self.stream.unget(charStack.pop())output = "&" + "".join(charStack)else:output = entities[entityName]self.stream.unget(charStack.pop())output += "".join(charStack[entityLength:])else:self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"expected-named-entity"})self.stream.unget(charStack.pop())output = "&" + "".join(charStack)if fromAttribute:self.currentToken["data"][-1][1] += outputelse:if output in spaceCharacters:tokenType = "SpaceCharacters"else:tokenType = "Characters"self.tokenQueue.append({"type": tokenTypes[tokenType], "data": output})def processEntityInAttribute(self, allowedChar):"""This method replaces the need for "entityInAttributeValueState"."""self.consumeEntity(allowedChar=allowedChar, fromAttribute=True)def emitCurrentToken(self):"""This method is a generic handler for emitting the tags. 
It also setsthe state to "data" because that's what's needed after a token has beenemitted."""token = self.currentToken# Add token to the queue to be yieldedif (token["type"] in tagTokenTypes):token["name"] = token["name"].translate(asciiUpper2Lower)if token["type"] == tokenTypes["EndTag"]:if token["data"]:self.tokenQueue.append({"type": tokenTypes["ParseError"],"data": "attributes-in-end-tag"})if token["selfClosing"]:self.tokenQueue.append({"type": tokenTypes["ParseError"],"data": "self-closing-flag-on-end-tag"})self.tokenQueue.append(token)self.state = self.dataState# Below are the various tokenizer states worked out.def dataState(self):data = self.stream.char()if data == "&":self.state = self.entityDataStateelif data == "<":self.state = self.tagOpenStateelif data == "\u0000":self.tokenQueue.append({"type": tokenTypes["ParseError"],"data": "invalid-codepoint"})self.tokenQueue.append({"type": tokenTypes["Characters"],"data": "\u0000"})elif data is EOF:# Tokenization ends.return Falseelif data in spaceCharacters:# Directly after emitting a token you switch back to the "data# state". 
At that point spaceCharacters are important so they are# emitted separately.self.tokenQueue.append({"type": tokenTypes["SpaceCharacters"], "data":data + self.stream.charsUntil(spaceCharacters, True)})# No need to update lastFourChars here, since the first space will# have already been appended to lastFourChars and will have broken# any <!-- or --> sequenceselse:chars = self.stream.charsUntil(("&", "<", "\u0000"))self.tokenQueue.append({"type": tokenTypes["Characters"], "data":data + chars})return Truedef entityDataState(self):self.consumeEntity()self.state = self.dataStatereturn Truedef rcdataState(self):data = self.stream.char()if data == "&":self.state = self.characterReferenceInRcdataelif data == "<":self.state = self.rcdataLessThanSignStateelif data == EOF:# Tokenization ends.return Falseelif data == "\u0000":self.tokenQueue.append({"type": tokenTypes["ParseError"],"data": "invalid-codepoint"})self.tokenQueue.append({"type": tokenTypes["Characters"],"data": "\uFFFD"})elif data in spaceCharacters:# Directly after emitting a token you switch back to the "data# state". 
At that point spaceCharacters are important so they are# emitted separately.self.tokenQueue.append({"type": tokenTypes["SpaceCharacters"], "data":data + self.stream.charsUntil(spaceCharacters, True)})# No need to update lastFourChars here, since the first space will# have already been appended to lastFourChars and will have broken# any <!-- or --> sequenceselse:chars = self.stream.charsUntil(("&", "<", "\u0000"))self.tokenQueue.append({"type": tokenTypes["Characters"], "data":data + chars})return Truedef characterReferenceInRcdata(self):self.consumeEntity()self.state = self.rcdataStatereturn Truedef rawtextState(self):data = self.stream.char()if data == "<":self.state = self.rawtextLessThanSignStateelif data == "\u0000":self.tokenQueue.append({"type": tokenTypes["ParseError"],"data": "invalid-codepoint"})self.tokenQueue.append({"type": tokenTypes["Characters"],"data": "\uFFFD"})elif data == EOF:# Tokenization ends.return Falseelse:chars = self.stream.charsUntil(("<", "\u0000"))self.tokenQueue.append({"type": tokenTypes["Characters"], "data":data + chars})return Truedef scriptDataState(self):data = self.stream.char()if data == "<":self.state = self.scriptDataLessThanSignStateelif data == "\u0000":self.tokenQueue.append({"type": tokenTypes["ParseError"],"data": "invalid-codepoint"})self.tokenQueue.append({"type": tokenTypes["Characters"],"data": "\uFFFD"})elif data == EOF:# Tokenization ends.return Falseelse:chars = self.stream.charsUntil(("<", "\u0000"))self.tokenQueue.append({"type": tokenTypes["Characters"], "data":data + chars})return Truedef plaintextState(self):data = self.stream.char()if data == EOF:# Tokenization ends.return Falseelif data == "\u0000":self.tokenQueue.append({"type": tokenTypes["ParseError"],"data": "invalid-codepoint"})self.tokenQueue.append({"type": tokenTypes["Characters"],"data": "\uFFFD"})else:self.tokenQueue.append({"type": tokenTypes["Characters"], "data":data + self.stream.charsUntil("\u0000")})return Truedef tagOpenState(self):data = 
self.stream.char()if data == "!":self.state = self.markupDeclarationOpenStateelif data == "/":self.state = self.closeTagOpenStateelif data in asciiLetters:self.currentToken = {"type": tokenTypes["StartTag"],"name": data, "data": [],"selfClosing": False,"selfClosingAcknowledged": False}self.state = self.tagNameStateelif data == ">":# XXX In theory it could be something besides a tag name. But# do we really care?self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"expected-tag-name-but-got-right-bracket"})self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<>"})self.state = self.dataStateelif data == "?":# XXX In theory it could be something besides a tag name. But# do we really care?self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"expected-tag-name-but-got-question-mark"})self.stream.unget(data)self.state = self.bogusCommentStateelse:# XXXself.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"expected-tag-name"})self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"})self.stream.unget(data)self.state = self.dataStatereturn Truedef closeTagOpenState(self):data = self.stream.char()if data in asciiLetters:self.currentToken = {"type": tokenTypes["EndTag"], "name": data,"data": [], "selfClosing": False}self.state = self.tagNameStateelif data == ">":self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"expected-closing-tag-but-got-right-bracket"})self.state = self.dataStateelif data is EOF:self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"expected-closing-tag-but-got-eof"})self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "</"})self.state = self.dataStateelse:# XXX data can be _'_...self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"expected-closing-tag-but-got-char","datavars": {"data": data}})self.stream.unget(data)self.state = self.bogusCommentStatereturn Truedef tagNameState(self):data = self.stream.char()if data in 
spaceCharacters:self.state = self.beforeAttributeNameStateelif data == ">":self.emitCurrentToken()elif data is EOF:self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"eof-in-tag-name"})self.state = self.dataStateelif data == "/":self.state = self.selfClosingStartTagStateelif data == "\u0000":self.tokenQueue.append({"type": tokenTypes["ParseError"],"data": "invalid-codepoint"})self.currentToken["name"] += "\uFFFD"else:self.currentToken["name"] += data# (Don't use charsUntil here, because tag names are# very short and it's faster to not do anything fancy)return Truedef rcdataLessThanSignState(self):data = self.stream.char()if data == "/":self.temporaryBuffer = ""self.state = self.rcdataEndTagOpenStateelse:self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"})self.stream.unget(data)self.state = self.rcdataStatereturn Truedef rcdataEndTagOpenState(self):data = self.stream.char()if data in asciiLetters:self.temporaryBuffer += dataself.state = self.rcdataEndTagNameStateelse:self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "</"})self.stream.unget(data)self.state = self.rcdataStatereturn Truedef rcdataEndTagNameState(self):appropriate = self.currentToken and self.currentToken["name"].lower() == self.temporaryBuffer.lower()data = self.stream.char()if data in spaceCharacters and appropriate:self.currentToken = {"type": tokenTypes["EndTag"],"name": self.temporaryBuffer,"data": [], "selfClosing": False}self.state = self.beforeAttributeNameStateelif data == "/" and appropriate:self.currentToken = {"type": tokenTypes["EndTag"],"name": self.temporaryBuffer,"data": [], "selfClosing": False}self.state = self.selfClosingStartTagStateelif data == ">" and appropriate:self.currentToken = {"type": tokenTypes["EndTag"],"name": self.temporaryBuffer,"data": [], "selfClosing": False}self.emitCurrentToken()self.state = self.dataStateelif data in asciiLetters:self.temporaryBuffer += dataelse:self.tokenQueue.append({"type": 
tokenTypes["Characters"],"data": "</" + self.temporaryBuffer})self.stream.unget(data)self.state = self.rcdataStatereturn Truedef rawtextLessThanSignState(self):data = self.stream.char()if data == "/":self.temporaryBuffer = ""self.state = self.rawtextEndTagOpenStateelse:self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"})self.stream.unget(data)self.state = self.rawtextStatereturn Truedef rawtextEndTagOpenState(self):data = self.stream.char()if data in asciiLetters:self.temporaryBuffer += dataself.state = self.rawtextEndTagNameStateelse:self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "</"})self.stream.unget(data)self.state = self.rawtextStatereturn Truedef rawtextEndTagNameState(self):appropriate = self.currentToken and self.currentToken["name"].lower() == self.temporaryBuffer.lower()data = self.stream.char()if data in spaceCharacters and appropriate:self.currentToken = {"type": tokenTypes["EndTag"],"name": self.temporaryBuffer,"data": [], "selfClosing": False}self.state = self.beforeAttributeNameStateelif data == "/" and appropriate:self.currentToken = {"type": tokenTypes["EndTag"],"name": self.temporaryBuffer,"data": [], "selfClosing": False}self.state = self.selfClosingStartTagStateelif data == ">" and appropriate:self.currentToken = {"type": tokenTypes["EndTag"],"name": self.temporaryBuffer,"data": [], "selfClosing": False}self.emitCurrentToken()self.state = self.dataStateelif data in asciiLetters:self.temporaryBuffer += dataelse:self.tokenQueue.append({"type": tokenTypes["Characters"],"data": "</" + self.temporaryBuffer})self.stream.unget(data)self.state = self.rawtextStatereturn Truedef scriptDataLessThanSignState(self):data = self.stream.char()if data == "/":self.temporaryBuffer = ""self.state = self.scriptDataEndTagOpenStateelif data == "!":self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<!"})self.state = self.scriptDataEscapeStartStateelse:self.tokenQueue.append({"type": tokenTypes["Characters"], 
"data": "<"})self.stream.unget(data)self.state = self.scriptDataStatereturn Truedef scriptDataEndTagOpenState(self):data = self.stream.char()if data in asciiLetters:self.temporaryBuffer += dataself.state = self.scriptDataEndTagNameStateelse:self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "</"})self.stream.unget(data)self.state = self.scriptDataStatereturn Truedef scriptDataEndTagNameState(self):appropriate = self.currentToken and self.currentToken["name"].lower() == self.temporaryBuffer.lower()data = self.stream.char()if data in spaceCharacters and appropriate:self.currentToken = {"type": tokenTypes["EndTag"],"name": self.temporaryBuffer,"data": [], "selfClosing": False}self.state = self.beforeAttributeNameStateelif data == "/" and appropriate:self.currentToken = {"type": tokenTypes["EndTag"],"name": self.temporaryBuffer,"data": [], "selfClosing": False}self.state = self.selfClosingStartTagStateelif data == ">" and appropriate:self.currentToken = {"type": tokenTypes["EndTag"],"name": self.temporaryBuffer,"data": [], "selfClosing": False}self.emitCurrentToken()self.state = self.dataStateelif data in asciiLetters:self.temporaryBuffer += dataelse:self.tokenQueue.append({"type": tokenTypes["Characters"],"data": "</" + self.temporaryBuffer})self.stream.unget(data)self.state = self.scriptDataStatereturn Truedef scriptDataEscapeStartState(self):data = self.stream.char()if data == "-":self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"})self.state = self.scriptDataEscapeStartDashStateelse:self.stream.unget(data)self.state = self.scriptDataStatereturn Truedef scriptDataEscapeStartDashState(self):data = self.stream.char()if data == "-":self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"})self.state = self.scriptDataEscapedDashDashStateelse:self.stream.unget(data)self.state = self.scriptDataStatereturn Truedef scriptDataEscapedState(self):data = self.stream.char()if data == "-":self.tokenQueue.append({"type": 
tokenTypes["Characters"], "data": "-"})self.state = self.scriptDataEscapedDashStateelif data == "<":self.state = self.scriptDataEscapedLessThanSignStateelif data == "\u0000":self.tokenQueue.append({"type": tokenTypes["ParseError"],"data": "invalid-codepoint"})self.tokenQueue.append({"type": tokenTypes["Characters"],"data": "\uFFFD"})elif data == EOF:self.state = self.dataStateelse:chars = self.stream.charsUntil(("<", "-", "\u0000"))self.tokenQueue.append({"type": tokenTypes["Characters"], "data":data + chars})return Truedef scriptDataEscapedDashState(self):data = self.stream.char()if data == "-":self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"})self.state = self.scriptDataEscapedDashDashStateelif data == "<":self.state = self.scriptDataEscapedLessThanSignStateelif data == "\u0000":self.tokenQueue.append({"type": tokenTypes["ParseError"],"data": "invalid-codepoint"})self.tokenQueue.append({"type": tokenTypes["Characters"],"data": "\uFFFD"})self.state = self.scriptDataEscapedStateelif data == EOF:self.state = self.dataStateelse:self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data})self.state = self.scriptDataEscapedStatereturn Truedef scriptDataEscapedDashDashState(self):data = self.stream.char()if data == "-":self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"})elif data == "<":self.state = self.scriptDataEscapedLessThanSignStateelif data == ">":self.tokenQueue.append({"type": tokenTypes["Characters"], "data": ">"})self.state = self.scriptDataStateelif data == "\u0000":self.tokenQueue.append({"type": tokenTypes["ParseError"],"data": "invalid-codepoint"})self.tokenQueue.append({"type": tokenTypes["Characters"],"data": "\uFFFD"})self.state = self.scriptDataEscapedStateelif data == EOF:self.state = self.dataStateelse:self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data})self.state = self.scriptDataEscapedStatereturn Truedef scriptDataEscapedLessThanSignState(self):data = self.stream.char()if 
data == "/":self.temporaryBuffer = ""self.state = self.scriptDataEscapedEndTagOpenStateelif data in asciiLetters:self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<" + data})self.temporaryBuffer = dataself.state = self.scriptDataDoubleEscapeStartStateelse:self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"})self.stream.unget(data)self.state = self.scriptDataEscapedStatereturn Truedef scriptDataEscapedEndTagOpenState(self):data = self.stream.char()if data in asciiLetters:self.temporaryBuffer = dataself.state = self.scriptDataEscapedEndTagNameStateelse:self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "</"})self.stream.unget(data)self.state = self.scriptDataEscapedStatereturn Truedef scriptDataEscapedEndTagNameState(self):appropriate = self.currentToken and self.currentToken["name"].lower() == self.temporaryBuffer.lower()data = self.stream.char()if data in spaceCharacters and appropriate:self.currentToken = {"type": tokenTypes["EndTag"],"name": self.temporaryBuffer,"data": [], "selfClosing": False}self.state = self.beforeAttributeNameStateelif data == "/" and appropriate:self.currentToken = {"type": tokenTypes["EndTag"],"name": self.temporaryBuffer,"data": [], "selfClosing": False}self.state = self.selfClosingStartTagStateelif data == ">" and appropriate:self.currentToken = {"type": tokenTypes["EndTag"],"name": self.temporaryBuffer,"data": [], "selfClosing": False}self.emitCurrentToken()self.state = self.dataStateelif data in asciiLetters:self.temporaryBuffer += dataelse:self.tokenQueue.append({"type": tokenTypes["Characters"],"data": "</" + self.temporaryBuffer})self.stream.unget(data)self.state = self.scriptDataEscapedStatereturn Truedef scriptDataDoubleEscapeStartState(self):data = self.stream.char()if data in (spaceCharacters | frozenset(("/", ">"))):self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data})if self.temporaryBuffer.lower() == "script":self.state = 
self.scriptDataDoubleEscapedStateelse:self.state = self.scriptDataEscapedStateelif data in asciiLetters:self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data})self.temporaryBuffer += dataelse:self.stream.unget(data)self.state = self.scriptDataEscapedStatereturn Truedef scriptDataDoubleEscapedState(self):data = self.stream.char()if data == "-":self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"})self.state = self.scriptDataDoubleEscapedDashStateelif data == "<":self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"})self.state = self.scriptDataDoubleEscapedLessThanSignStateelif data == "\u0000":self.tokenQueue.append({"type": tokenTypes["ParseError"],"data": "invalid-codepoint"})self.tokenQueue.append({"type": tokenTypes["Characters"],"data": "\uFFFD"})elif data == EOF:self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"eof-in-script-in-script"})self.state = self.dataStateelse:self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data})return Truedef scriptDataDoubleEscapedDashState(self):data = self.stream.char()if data == "-":self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"})self.state = self.scriptDataDoubleEscapedDashDashStateelif data == "<":self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"})self.state = self.scriptDataDoubleEscapedLessThanSignStateelif data == "\u0000":self.tokenQueue.append({"type": tokenTypes["ParseError"],"data": "invalid-codepoint"})self.tokenQueue.append({"type": tokenTypes["Characters"],"data": "\uFFFD"})self.state = self.scriptDataDoubleEscapedStateelif data == EOF:self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"eof-in-script-in-script"})self.state = self.dataStateelse:self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data})self.state = self.scriptDataDoubleEscapedStatereturn Truedef scriptDataDoubleEscapedDashDashState(self):data = self.stream.char()if data == 
"-":self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"})elif data == "<":self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"})self.state = self.scriptDataDoubleEscapedLessThanSignStateelif data == ">":self.tokenQueue.append({"type": tokenTypes["Characters"], "data": ">"})self.state = self.scriptDataStateelif data == "\u0000":self.tokenQueue.append({"type": tokenTypes["ParseError"],"data": "invalid-codepoint"})self.tokenQueue.append({"type": tokenTypes["Characters"],"data": "\uFFFD"})self.state = self.scriptDataDoubleEscapedStateelif data == EOF:self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"eof-in-script-in-script"})self.state = self.dataStateelse:self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data})self.state = self.scriptDataDoubleEscapedStatereturn Truedef scriptDataDoubleEscapedLessThanSignState(self):data = self.stream.char()if data == "/":self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "/"})self.temporaryBuffer = ""self.state = self.scriptDataDoubleEscapeEndStateelse:self.stream.unget(data)self.state = self.scriptDataDoubleEscapedStatereturn Truedef scriptDataDoubleEscapeEndState(self):data = self.stream.char()if data in (spaceCharacters | frozenset(("/", ">"))):self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data})if self.temporaryBuffer.lower() == "script":self.state = self.scriptDataEscapedStateelse:self.state = self.scriptDataDoubleEscapedStateelif data in asciiLetters:self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data})self.temporaryBuffer += dataelse:self.stream.unget(data)self.state = self.scriptDataDoubleEscapedStatereturn Truedef beforeAttributeNameState(self):data = self.stream.char()if data in spaceCharacters:self.stream.charsUntil(spaceCharacters, True)elif data in asciiLetters:self.currentToken["data"].append([data, ""])self.state = self.attributeNameStateelif data == ">":self.emitCurrentToken()elif data == 
"/":self.state = self.selfClosingStartTagStateelif data in ("'", '"', "=", "<"):self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"invalid-character-in-attribute-name"})self.currentToken["data"].append([data, ""])self.state = self.attributeNameStateelif data == "\u0000":self.tokenQueue.append({"type": tokenTypes["ParseError"],"data": "invalid-codepoint"})self.currentToken["data"].append(["\uFFFD", ""])self.state = self.attributeNameStateelif data is EOF:self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"expected-attribute-name-but-got-eof"})self.state = self.dataStateelse:self.currentToken["data"].append([data, ""])self.state = self.attributeNameStatereturn Truedef attributeNameState(self):data = self.stream.char()leavingThisState = TrueemitToken = Falseif data == "=":self.state = self.beforeAttributeValueStateelif data in asciiLetters:self.currentToken["data"][-1][0] += data +\self.stream.charsUntil(asciiLetters, True)leavingThisState = Falseelif data == ">":# XXX If we emit here the attributes are converted to a dict# without being checked and when the code below runs we error# because data is a dict not a listemitToken = Trueelif data in spaceCharacters:self.state = self.afterAttributeNameStateelif data == "/":self.state = self.selfClosingStartTagStateelif data == "\u0000":self.tokenQueue.append({"type": tokenTypes["ParseError"],"data": "invalid-codepoint"})self.currentToken["data"][-1][0] += "\uFFFD"leavingThisState = Falseelif data in ("'", '"', "<"):self.tokenQueue.append({"type": tokenTypes["ParseError"],"data":"invalid-character-in-attribute-name"})self.currentToken["data"][-1][0] += dataleavingThisState = Falseelif data is EOF:self.tokenQueue.append({"type": tokenTypes["ParseError"],"data": "eof-in-attribute-name"})self.state = self.dataStateelse:self.currentToken["data"][-1][0] += dataleavingThisState = Falseif leavingThisState:# Attributes are not dropped at this stage. 
That happens when the# start tag token is emitted so values can still be safely appended# to attributes, but we do want to report the parse error in time.self.currentToken["data"][-1][0] = (self.currentToken["data"][-1][0].translate(asciiUpper2Lower))for name, _ in self.currentToken["data"][:-1]:if self.currentToken["data"][-1][0] == name:self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"duplicate-attribute"})break# XXX Fix for above XXXif emitToken:self.emitCurrentToken()return Truedef afterAttributeNameState(self):data = self.stream.char()if data in spaceCharacters:self.stream.charsUntil(spaceCharacters, True)elif data == "=":self.state = self.beforeAttributeValueStateelif data == ">":self.emitCurrentToken()elif data in asciiLetters:self.currentToken["data"].append([data, ""])self.state = self.attributeNameStateelif data == "/":self.state = self.selfClosingStartTagStateelif data == "\u0000":self.tokenQueue.append({"type": tokenTypes["ParseError"],"data": "invalid-codepoint"})self.currentToken["data"].append(["\uFFFD", ""])self.state = self.attributeNameStateelif data in ("'", '"', "<"):self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"invalid-character-after-attribute-name"})self.currentToken["data"].append([data, ""])self.state = self.attributeNameStateelif data is EOF:self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"expected-end-of-tag-but-got-eof"})self.state = self.dataStateelse:self.currentToken["data"].append([data, ""])self.state = self.attributeNameStatereturn Truedef beforeAttributeValueState(self):data = self.stream.char()if data in spaceCharacters:self.stream.charsUntil(spaceCharacters, True)elif data == "\"":self.state = self.attributeValueDoubleQuotedStateelif data == "&":self.state = self.attributeValueUnQuotedStateself.stream.unget(data)elif data == "'":self.state = self.attributeValueSingleQuotedStateelif data == ">":self.tokenQueue.append({"type": tokenTypes["ParseError"], 
"data":"expected-attribute-value-but-got-right-bracket"})self.emitCurrentToken()elif data == "\u0000":self.tokenQueue.append({"type": tokenTypes["ParseError"],"data": "invalid-codepoint"})self.currentToken["data"][-1][1] += "\uFFFD"self.state = self.attributeValueUnQuotedStateelif data in ("=", "<", "`"):self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"equals-in-unquoted-attribute-value"})self.currentToken["data"][-1][1] += dataself.state = self.attributeValueUnQuotedStateelif data is EOF:self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"expected-attribute-value-but-got-eof"})self.state = self.dataStateelse:self.currentToken["data"][-1][1] += dataself.state = self.attributeValueUnQuotedStatereturn Truedef attributeValueDoubleQuotedState(self):data = self.stream.char()if data == "\"":self.state = self.afterAttributeValueStateelif data == "&":self.processEntityInAttribute('"')elif data == "\u0000":self.tokenQueue.append({"type": tokenTypes["ParseError"],"data": "invalid-codepoint"})self.currentToken["data"][-1][1] += "\uFFFD"elif data is EOF:self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"eof-in-attribute-value-double-quote"})self.state = self.dataStateelse:self.currentToken["data"][-1][1] += data +\self.stream.charsUntil(("\"", "&", "\u0000"))return Truedef attributeValueSingleQuotedState(self):data = self.stream.char()if data == "'":self.state = self.afterAttributeValueStateelif data == "&":self.processEntityInAttribute("'")elif data == "\u0000":self.tokenQueue.append({"type": tokenTypes["ParseError"],"data": "invalid-codepoint"})self.currentToken["data"][-1][1] += "\uFFFD"elif data is EOF:self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"eof-in-attribute-value-single-quote"})self.state = self.dataStateelse:self.currentToken["data"][-1][1] += data +\self.stream.charsUntil(("'", "&", "\u0000"))return Truedef attributeValueUnQuotedState(self):data = self.stream.char()if data in 
spaceCharacters:self.state = self.beforeAttributeNameStateelif data == "&":self.processEntityInAttribute(">")elif data == ">":self.emitCurrentToken()elif data in ('"', "'", "=", "<", "`"):self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"unexpected-character-in-unquoted-attribute-value"})self.currentToken["data"][-1][1] += dataelif data == "\u0000":self.tokenQueue.append({"type": tokenTypes["ParseError"],"data": "invalid-codepoint"})self.currentToken["data"][-1][1] += "\uFFFD"elif data is EOF:self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"eof-in-attribute-value-no-quotes"})self.state = self.dataStateelse:self.currentToken["data"][-1][1] += data + self.stream.charsUntil(frozenset(("&", ">", '"', "'", "=", "<", "`", "\u0000")) | spaceCharacters)return Truedef afterAttributeValueState(self):data = self.stream.char()if data in spaceCharacters:self.state = self.beforeAttributeNameStateelif data == ">":self.emitCurrentToken()elif data == "/":self.state = self.selfClosingStartTagStateelif data is EOF:self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"unexpected-EOF-after-attribute-value"})self.stream.unget(data)self.state = self.dataStateelse:self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"unexpected-character-after-attribute-value"})self.stream.unget(data)self.state = self.beforeAttributeNameStatereturn Truedef selfClosingStartTagState(self):data = self.stream.char()if data == ">":self.currentToken["selfClosing"] = Trueself.emitCurrentToken()elif data is EOF:self.tokenQueue.append({"type": tokenTypes["ParseError"],"data":"unexpected-EOF-after-solidus-in-tag"})self.stream.unget(data)self.state = self.dataStateelse:self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"unexpected-character-after-solidus-in-tag"})self.stream.unget(data)self.state = self.beforeAttributeNameStatereturn Truedef bogusCommentState(self):# Make a new comment token and give it as value all the characters# until the 
first > or EOF (charsUntil checks for EOF automatically)# and emit it.data = self.stream.charsUntil(">")data = data.replace("\u0000", "\uFFFD")self.tokenQueue.append({"type": tokenTypes["Comment"], "data": data})# Eat the character directly after the bogus comment which is either a# ">" or an EOF.self.stream.char()self.state = self.dataStatereturn Truedef markupDeclarationOpenState(self):charStack = [self.stream.char()]if charStack[-1] == "-":charStack.append(self.stream.char())if charStack[-1] == "-":self.currentToken = {"type": tokenTypes["Comment"], "data": ""}self.state = self.commentStartStatereturn Trueelif charStack[-1] in ('d', 'D'):matched = Truefor expected in (('o', 'O'), ('c', 'C'), ('t', 'T'),('y', 'Y'), ('p', 'P'), ('e', 'E')):charStack.append(self.stream.char())if charStack[-1] not in expected:matched = Falsebreakif matched:self.currentToken = {"type": tokenTypes["Doctype"],"name": "","publicId": None, "systemId": None,"correct": True}self.state = self.doctypeStatereturn Trueelif (charStack[-1] == "[" andself.parser is not None andself.parser.tree.openElements andself.parser.tree.openElements[-1].namespace != self.parser.tree.defaultNamespace):matched = Truefor expected in ["C", "D", "A", "T", "A", "["]:charStack.append(self.stream.char())if charStack[-1] != expected:matched = Falsebreakif matched:self.state = self.cdataSectionStatereturn Trueself.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"expected-dashes-or-doctype"})while charStack:self.stream.unget(charStack.pop())self.state = self.bogusCommentStatereturn Truedef commentStartState(self):data = self.stream.char()if data == "-":self.state = self.commentStartDashStateelif data == "\u0000":self.tokenQueue.append({"type": tokenTypes["ParseError"],"data": "invalid-codepoint"})self.currentToken["data"] += "\uFFFD"elif data == ">":self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"incorrect-comment"})self.tokenQueue.append(self.currentToken)self.state = self.dataStateelif 
data is EOF:self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"eof-in-comment"})self.tokenQueue.append(self.currentToken)self.state = self.dataStateelse:self.currentToken["data"] += dataself.state = self.commentStatereturn Truedef commentStartDashState(self):data = self.stream.char()if data == "-":self.state = self.commentEndStateelif data == "\u0000":self.tokenQueue.append({"type": tokenTypes["ParseError"],"data": "invalid-codepoint"})self.currentToken["data"] += "-\uFFFD"elif data == ">":self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"incorrect-comment"})self.tokenQueue.append(self.currentToken)self.state = self.dataStateelif data is EOF:self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"eof-in-comment"})self.tokenQueue.append(self.currentToken)self.state = self.dataStateelse:self.currentToken["data"] += "-" + dataself.state = self.commentStatereturn Truedef commentState(self):data = self.stream.char()if data == "-":self.state = self.commentEndDashStateelif data == "\u0000":self.tokenQueue.append({"type": tokenTypes["ParseError"],"data": "invalid-codepoint"})self.currentToken["data"] += "\uFFFD"elif data is EOF:self.tokenQueue.append({"type": tokenTypes["ParseError"],"data": "eof-in-comment"})self.tokenQueue.append(self.currentToken)self.state = self.dataStateelse:self.currentToken["data"] += data + \self.stream.charsUntil(("-", "\u0000"))return Truedef commentEndDashState(self):data = self.stream.char()if data == "-":self.state = self.commentEndStateelif data == "\u0000":self.tokenQueue.append({"type": tokenTypes["ParseError"],"data": "invalid-codepoint"})self.currentToken["data"] += "-\uFFFD"self.state = self.commentStateelif data is EOF:self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"eof-in-comment-end-dash"})self.tokenQueue.append(self.currentToken)self.state = self.dataStateelse:self.currentToken["data"] += "-" + dataself.state = self.commentStatereturn Truedef commentEndState(self):data = 
self.stream.char()if data == ">":self.tokenQueue.append(self.currentToken)self.state = self.dataStateelif data == "\u0000":self.tokenQueue.append({"type": tokenTypes["ParseError"],"data": "invalid-codepoint"})self.currentToken["data"] += "--\uFFFD"self.state = self.commentStateelif data == "!":self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"unexpected-bang-after-double-dash-in-comment"})self.state = self.commentEndBangStateelif data == "-":self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"unexpected-dash-after-double-dash-in-comment"})self.currentToken["data"] += dataelif data is EOF:self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"eof-in-comment-double-dash"})self.tokenQueue.append(self.currentToken)self.state = self.dataStateelse:# XXXself.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"unexpected-char-in-comment"})self.currentToken["data"] += "--" + dataself.state = self.commentStatereturn Truedef commentEndBangState(self):data = self.stream.char()if data == ">":self.tokenQueue.append(self.currentToken)self.state = self.dataStateelif data == "-":self.currentToken["data"] += "--!"self.state = self.commentEndDashStateelif data == "\u0000":self.tokenQueue.append({"type": tokenTypes["ParseError"],"data": "invalid-codepoint"})self.currentToken["data"] += "--!\uFFFD"self.state = self.commentStateelif data is EOF:self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"eof-in-comment-end-bang-state"})self.tokenQueue.append(self.currentToken)self.state = self.dataStateelse:self.currentToken["data"] += "--!" 
+ dataself.state = self.commentStatereturn Truedef doctypeState(self):data = self.stream.char()if data in spaceCharacters:self.state = self.beforeDoctypeNameStateelif data is EOF:self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"expected-doctype-name-but-got-eof"})self.currentToken["correct"] = Falseself.tokenQueue.append(self.currentToken)self.state = self.dataStateelse:self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"need-space-after-doctype"})self.stream.unget(data)self.state = self.beforeDoctypeNameStatereturn Truedef beforeDoctypeNameState(self):data = self.stream.char()if data in spaceCharacters:passelif data == ">":self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"expected-doctype-name-but-got-right-bracket"})self.currentToken["correct"] = Falseself.tokenQueue.append(self.currentToken)self.state = self.dataStateelif data == "\u0000":self.tokenQueue.append({"type": tokenTypes["ParseError"],"data": "invalid-codepoint"})self.currentToken["name"] = "\uFFFD"self.state = self.doctypeNameStateelif data is EOF:self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"expected-doctype-name-but-got-eof"})self.currentToken["correct"] = Falseself.tokenQueue.append(self.currentToken)self.state = self.dataStateelse:self.currentToken["name"] = dataself.state = self.doctypeNameStatereturn Truedef doctypeNameState(self):data = self.stream.char()if data in spaceCharacters:self.currentToken["name"] = self.currentToken["name"].translate(asciiUpper2Lower)self.state = self.afterDoctypeNameStateelif data == ">":self.currentToken["name"] = self.currentToken["name"].translate(asciiUpper2Lower)self.tokenQueue.append(self.currentToken)self.state = self.dataStateelif data == "\u0000":self.tokenQueue.append({"type": tokenTypes["ParseError"],"data": "invalid-codepoint"})self.currentToken["name"] += "\uFFFD"self.state = self.doctypeNameStateelif data is EOF:self.tokenQueue.append({"type": tokenTypes["ParseError"], 
"data":"eof-in-doctype-name"})self.currentToken["correct"] = Falseself.currentToken["name"] = self.currentToken["name"].translate(asciiUpper2Lower)self.tokenQueue.append(self.currentToken)self.state = self.dataStateelse:self.currentToken["name"] += datareturn Truedef afterDoctypeNameState(self):data = self.stream.char()if data in spaceCharacters:passelif data == ">":self.tokenQueue.append(self.currentToken)self.state = self.dataStateelif data is EOF:self.currentToken["correct"] = Falseself.stream.unget(data)self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"eof-in-doctype"})self.tokenQueue.append(self.currentToken)self.state = self.dataStateelse:if data in ("p", "P"):matched = Truefor expected in (("u", "U"), ("b", "B"), ("l", "L"),("i", "I"), ("c", "C")):data = self.stream.char()if data not in expected:matched = Falsebreakif matched:self.state = self.afterDoctypePublicKeywordStatereturn Trueelif data in ("s", "S"):matched = Truefor expected in (("y", "Y"), ("s", "S"), ("t", "T"),("e", "E"), ("m", "M")):data = self.stream.char()if data not in expected:matched = Falsebreakif matched:self.state = self.afterDoctypeSystemKeywordStatereturn True# All the characters read before the current 'data' will be# [a-zA-Z], so they're garbage in the bogus doctype and can be# discarded; only the latest character might be '>' or EOF# and needs to be ungettedself.stream.unget(data)self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"expected-space-or-right-bracket-in-doctype", "datavars":{"data": data}})self.currentToken["correct"] = Falseself.state = self.bogusDoctypeStatereturn Truedef afterDoctypePublicKeywordState(self):data = self.stream.char()if data in spaceCharacters:self.state = self.beforeDoctypePublicIdentifierStateelif data in ("'", '"'):self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"unexpected-char-in-doctype"})self.stream.unget(data)self.state = self.beforeDoctypePublicIdentifierStateelif data is 
EOF:self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"eof-in-doctype"})self.currentToken["correct"] = Falseself.tokenQueue.append(self.currentToken)self.state = self.dataStateelse:self.stream.unget(data)self.state = self.beforeDoctypePublicIdentifierStatereturn Truedef beforeDoctypePublicIdentifierState(self):data = self.stream.char()if data in spaceCharacters:passelif data == "\"":self.currentToken["publicId"] = ""self.state = self.doctypePublicIdentifierDoubleQuotedStateelif data == "'":self.currentToken["publicId"] = ""self.state = self.doctypePublicIdentifierSingleQuotedStateelif data == ">":self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"unexpected-end-of-doctype"})self.currentToken["correct"] = Falseself.tokenQueue.append(self.currentToken)self.state = self.dataStateelif data is EOF:self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"eof-in-doctype"})self.currentToken["correct"] = Falseself.tokenQueue.append(self.currentToken)self.state = self.dataStateelse:self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"unexpected-char-in-doctype"})self.currentToken["correct"] = Falseself.state = self.bogusDoctypeStatereturn Truedef doctypePublicIdentifierDoubleQuotedState(self):data = self.stream.char()if data == "\"":self.state = self.afterDoctypePublicIdentifierStateelif data == "\u0000":self.tokenQueue.append({"type": tokenTypes["ParseError"],"data": "invalid-codepoint"})self.currentToken["publicId"] += "\uFFFD"elif data == ">":self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"unexpected-end-of-doctype"})self.currentToken["correct"] = Falseself.tokenQueue.append(self.currentToken)self.state = self.dataStateelif data is EOF:self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"eof-in-doctype"})self.currentToken["correct"] = Falseself.tokenQueue.append(self.currentToken)self.state = self.dataStateelse:self.currentToken["publicId"] += datareturn Truedef 
doctypePublicIdentifierSingleQuotedState(self):data = self.stream.char()if data == "'":self.state = self.afterDoctypePublicIdentifierStateelif data == "\u0000":self.tokenQueue.append({"type": tokenTypes["ParseError"],"data": "invalid-codepoint"})self.currentToken["publicId"] += "\uFFFD"elif data == ">":self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"unexpected-end-of-doctype"})self.currentToken["correct"] = Falseself.tokenQueue.append(self.currentToken)self.state = self.dataStateelif data is EOF:self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"eof-in-doctype"})self.currentToken["correct"] = Falseself.tokenQueue.append(self.currentToken)self.state = self.dataStateelse:self.currentToken["publicId"] += datareturn Truedef afterDoctypePublicIdentifierState(self):data = self.stream.char()if data in spaceCharacters:self.state = self.betweenDoctypePublicAndSystemIdentifiersStateelif data == ">":self.tokenQueue.append(self.currentToken)self.state = self.dataStateelif data == '"':self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"unexpected-char-in-doctype"})self.currentToken["systemId"] = ""self.state = self.doctypeSystemIdentifierDoubleQuotedStateelif data == "'":self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"unexpected-char-in-doctype"})self.currentToken["systemId"] = ""self.state = self.doctypeSystemIdentifierSingleQuotedStateelif data is EOF:self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"eof-in-doctype"})self.currentToken["correct"] = Falseself.tokenQueue.append(self.currentToken)self.state = self.dataStateelse:self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"unexpected-char-in-doctype"})self.currentToken["correct"] = Falseself.state = self.bogusDoctypeStatereturn Truedef betweenDoctypePublicAndSystemIdentifiersState(self):data = self.stream.char()if data in spaceCharacters:passelif data == ">":self.tokenQueue.append(self.currentToken)self.state = self.dataStateelif 
data == '"':self.currentToken["systemId"] = ""self.state = self.doctypeSystemIdentifierDoubleQuotedStateelif data == "'":self.currentToken["systemId"] = ""self.state = self.doctypeSystemIdentifierSingleQuotedStateelif data == EOF:self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"eof-in-doctype"})self.currentToken["correct"] = Falseself.tokenQueue.append(self.currentToken)self.state = self.dataStateelse:self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"unexpected-char-in-doctype"})self.currentToken["correct"] = Falseself.state = self.bogusDoctypeStatereturn Truedef afterDoctypeSystemKeywordState(self):data = self.stream.char()if data in spaceCharacters:self.state = self.beforeDoctypeSystemIdentifierStateelif data in ("'", '"'):self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"unexpected-char-in-doctype"})self.stream.unget(data)self.state = self.beforeDoctypeSystemIdentifierStateelif data is EOF:self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"eof-in-doctype"})self.currentToken["correct"] = Falseself.tokenQueue.append(self.currentToken)self.state = self.dataStateelse:self.stream.unget(data)self.state = self.beforeDoctypeSystemIdentifierStatereturn Truedef beforeDoctypeSystemIdentifierState(self):data = self.stream.char()if data in spaceCharacters:passelif data == "\"":self.currentToken["systemId"] = ""self.state = self.doctypeSystemIdentifierDoubleQuotedStateelif data == "'":self.currentToken["systemId"] = ""self.state = self.doctypeSystemIdentifierSingleQuotedStateelif data == ">":self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"unexpected-char-in-doctype"})self.currentToken["correct"] = Falseself.tokenQueue.append(self.currentToken)self.state = self.dataStateelif data is EOF:self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"eof-in-doctype"})self.currentToken["correct"] = Falseself.tokenQueue.append(self.currentToken)self.state = 
self.dataStateelse:self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"unexpected-char-in-doctype"})self.currentToken["correct"] = Falseself.state = self.bogusDoctypeStatereturn Truedef doctypeSystemIdentifierDoubleQuotedState(self):data = self.stream.char()if data == "\"":self.state = self.afterDoctypeSystemIdentifierStateelif data == "\u0000":self.tokenQueue.append({"type": tokenTypes["ParseError"],"data": "invalid-codepoint"})self.currentToken["systemId"] += "\uFFFD"elif data == ">":self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"unexpected-end-of-doctype"})self.currentToken["correct"] = Falseself.tokenQueue.append(self.currentToken)self.state = self.dataStateelif data is EOF:self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"eof-in-doctype"})self.currentToken["correct"] = Falseself.tokenQueue.append(self.currentToken)self.state = self.dataStateelse:self.currentToken["systemId"] += datareturn Truedef doctypeSystemIdentifierSingleQuotedState(self):data = self.stream.char()if data == "'":self.state = self.afterDoctypeSystemIdentifierStateelif data == "\u0000":self.tokenQueue.append({"type": tokenTypes["ParseError"],"data": "invalid-codepoint"})self.currentToken["systemId"] += "\uFFFD"elif data == ">":self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"unexpected-end-of-doctype"})self.currentToken["correct"] = Falseself.tokenQueue.append(self.currentToken)self.state = self.dataStateelif data is EOF:self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"eof-in-doctype"})self.currentToken["correct"] = Falseself.tokenQueue.append(self.currentToken)self.state = self.dataStateelse:self.currentToken["systemId"] += datareturn Truedef afterDoctypeSystemIdentifierState(self):data = self.stream.char()if data in spaceCharacters:passelif data == ">":self.tokenQueue.append(self.currentToken)self.state = self.dataStateelif data is EOF:self.tokenQueue.append({"type": tokenTypes["ParseError"], 
"data":"eof-in-doctype"})self.currentToken["correct"] = Falseself.tokenQueue.append(self.currentToken)self.state = self.dataStateelse:self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":"unexpected-char-in-doctype"})self.state = self.bogusDoctypeStatereturn Truedef bogusDoctypeState(self):data = self.stream.char()if data == ">":self.tokenQueue.append(self.currentToken)self.state = self.dataStateelif data is EOF:# XXX EMITself.stream.unget(data)self.tokenQueue.append(self.currentToken)self.state = self.dataStateelse:passreturn Truedef cdataSectionState(self):data = []while True:data.append(self.stream.charsUntil("]"))data.append(self.stream.charsUntil(">"))char = self.stream.char()if char == EOF:breakelse:assert char == ">"if data[-1][-2:] == "]]":data[-1] = data[-1][:-2]breakelse:data.append(char)data = "".join(data) # pylint:disable=redefined-variable-type# Deal with null here rather than in the parsernullCount = data.count("\u0000")if nullCount > 0:for _ in range(nullCount):self.tokenQueue.append({"type": tokenTypes["ParseError"],"data": "invalid-codepoint"})data = data.replace("\u0000", "\uFFFD")if data:self.tokenQueue.append({"type": tokenTypes["Characters"],"data": data})self.state = self.dataStatereturn True
from __future__ import absolute_import, division, unicode_literalsfrom pip._vendor.six import text_type, binary_typefrom pip._vendor.six.moves import http_client, urllibimport codecsimport refrom pip._vendor import webencodingsfrom .constants import EOF, spaceCharacters, asciiLetters, asciiUppercasefrom .constants import ReparseExceptionfrom . import _utilsfrom io import StringIOtry:from io import BytesIOexcept ImportError:BytesIO = StringIO# Non-unicode versions of constants for use in the pre-parserspaceCharactersBytes = frozenset([item.encode("ascii") for item in spaceCharacters])asciiLettersBytes = frozenset([item.encode("ascii") for item in asciiLetters])asciiUppercaseBytes = frozenset([item.encode("ascii") for item in asciiUppercase])spacesAngleBrackets = spaceCharactersBytes | frozenset([b">", b"<"])invalid_unicode_no_surrogate = "[\u0001-\u0008\u000B\u000E-\u001F\u007F-\u009F\uFDD0-\uFDEF\uFFFE\uFFFF\U0001FFFE\U0001FFFF\U0002FFFE\U0002FFFF\U0003FFFE\U0003FFFF\U0004FFFE\U0004FFFF\U0005FFFE\U0005FFFF\U0006FFFE\U0006FFFF\U0007FFFE\U0007FFFF\U0008FFFE\U0008FFFF\U0009FFFE\U0009FFFF\U000AFFFE\U000AFFFF\U000BFFFE\U000BFFFF\U000CFFFE\U000CFFFF\U000DFFFE\U000DFFFF\U000EFFFE\U000EFFFF\U000FFFFE\U000FFFFF\U0010FFFE\U0010FFFF]" # noqaif _utils.supports_lone_surrogates:# Use one extra step of indirection and create surrogates with# eval. 
Not using this indirection would introduce an illegal# unicode literal on platforms not supporting such lone# surrogates.assert invalid_unicode_no_surrogate[-1] == "]" and invalid_unicode_no_surrogate.count("]") == 1invalid_unicode_re = re.compile(invalid_unicode_no_surrogate[:-1] +eval('"\\uD800-\\uDFFF"') + # pylint:disable=eval-used"]")else:invalid_unicode_re = re.compile(invalid_unicode_no_surrogate)non_bmp_invalid_codepoints = set([0x1FFFE, 0x1FFFF, 0x2FFFE, 0x2FFFF, 0x3FFFE,0x3FFFF, 0x4FFFE, 0x4FFFF, 0x5FFFE, 0x5FFFF,0x6FFFE, 0x6FFFF, 0x7FFFE, 0x7FFFF, 0x8FFFE,0x8FFFF, 0x9FFFE, 0x9FFFF, 0xAFFFE, 0xAFFFF,0xBFFFE, 0xBFFFF, 0xCFFFE, 0xCFFFF, 0xDFFFE,0xDFFFF, 0xEFFFE, 0xEFFFF, 0xFFFFE, 0xFFFFF,0x10FFFE, 0x10FFFF])ascii_punctuation_re = re.compile("[\u0009-\u000D\u0020-\u002F\u003A-\u0040\u005B-\u0060\u007B-\u007E]")# Cache for charsUntil()charsUntilRegEx = {}class BufferedStream(object):"""Buffering for streams that do not have buffering of their ownThe buffer is implemented as a list of chunks on the assumption thatjoining many strings will be slow since it is O(n**2)"""def __init__(self, stream):self.stream = streamself.buffer = []self.position = [-1, 0] # chunk number, offsetdef tell(self):pos = 0for chunk in self.buffer[:self.position[0]]:pos += len(chunk)pos += self.position[1]return posdef seek(self, pos):assert pos <= self._bufferedBytes()offset = posi = 0while len(self.buffer[i]) < offset:offset -= len(self.buffer[i])i += 1self.position = [i, offset]def read(self, bytes):if not self.buffer:return self._readStream(bytes)elif (self.position[0] == len(self.buffer) andself.position[1] == len(self.buffer[-1])):return self._readStream(bytes)else:return self._readFromBuffer(bytes)def _bufferedBytes(self):return sum([len(item) for item in self.buffer])def _readStream(self, bytes):data = self.stream.read(bytes)self.buffer.append(data)self.position[0] += 1self.position[1] = len(data)return datadef _readFromBuffer(self, bytes):remainingBytes = bytesrv = 
[]bufferIndex = self.position[0]bufferOffset = self.position[1]while bufferIndex < len(self.buffer) and remainingBytes != 0:assert remainingBytes > 0bufferedData = self.buffer[bufferIndex]if remainingBytes <= len(bufferedData) - bufferOffset:bytesToRead = remainingBytesself.position = [bufferIndex, bufferOffset + bytesToRead]else:bytesToRead = len(bufferedData) - bufferOffsetself.position = [bufferIndex, len(bufferedData)]bufferIndex += 1rv.append(bufferedData[bufferOffset:bufferOffset + bytesToRead])remainingBytes -= bytesToReadbufferOffset = 0if remainingBytes:rv.append(self._readStream(remainingBytes))return b"".join(rv)def HTMLInputStream(source, **kwargs):# Work around Python bug #20007: read(0) closes the connection.# http://bugs.python.org/issue20007if (isinstance(source, http_client.HTTPResponse) or# Also check for addinfourl wrapping HTTPResponse(isinstance(source, urllib.response.addbase) andisinstance(source.fp, http_client.HTTPResponse))):isUnicode = Falseelif hasattr(source, "read"):isUnicode = isinstance(source.read(0), text_type)else:isUnicode = isinstance(source, text_type)if isUnicode:encodings = [x for x in kwargs if x.endswith("_encoding")]if encodings:raise TypeError("Cannot set an encoding with a unicode input, set %r" % encodings)return HTMLUnicodeInputStream(source, **kwargs)else:return HTMLBinaryInputStream(source, **kwargs)class HTMLUnicodeInputStream(object):"""Provides a unicode stream of characters to the HTMLTokenizer.This class takes care of character encoding and removing or replacingincorrect byte-sequences and also provides column and line tracking."""_defaultChunkSize = 10240def __init__(self, source):"""Initialises the HTMLInputStream.HTMLInputStream(source, [encoding]) -> Normalized stream from sourcefor use by html5lib.source can be either a file-object, local filename or a string.The optional encoding parameter must be a string that indicatesthe encoding. 
If specified, that encoding will be used,regardless of any BOM or later declaration (such as in a metaelement)"""if not _utils.supports_lone_surrogates:# Such platforms will have already checked for such# surrogate errors, so no need to do this checking.self.reportCharacterErrors = Noneelif len("\U0010FFFF") == 1:self.reportCharacterErrors = self.characterErrorsUCS4else:self.reportCharacterErrors = self.characterErrorsUCS2# List of where new lines occurself.newLines = [0]self.charEncoding = (lookupEncoding("utf-8"), "certain")self.dataStream = self.openStream(source)self.reset()def reset(self):self.chunk = ""self.chunkSize = 0self.chunkOffset = 0self.errors = []# number of (complete) lines in previous chunksself.prevNumLines = 0# number of columns in the last line of the previous chunkself.prevNumCols = 0# Deal with CR LF and surrogates split over chunk boundariesself._bufferedCharacter = Nonedef openStream(self, source):"""Produces a file object from source.source can be either a file object, local filename or a string."""# Already a file objectif hasattr(source, 'read'):stream = sourceelse:stream = StringIO(source)return streamdef _position(self, offset):chunk = self.chunknLines = chunk.count('\n', 0, offset)positionLine = self.prevNumLines + nLineslastLinePos = chunk.rfind('\n', 0, offset)if lastLinePos == -1:positionColumn = self.prevNumCols + offsetelse:positionColumn = offset - (lastLinePos + 1)return (positionLine, positionColumn)def position(self):"""Returns (line, col) of the current position in the stream."""line, col = self._position(self.chunkOffset)return (line + 1, col)def char(self):""" Read one character from the stream or queue if available. 
ReturnEOF when EOF is reached."""# Read a new chunk from the input stream if necessaryif self.chunkOffset >= self.chunkSize:if not self.readChunk():return EOFchunkOffset = self.chunkOffsetchar = self.chunk[chunkOffset]self.chunkOffset = chunkOffset + 1return chardef readChunk(self, chunkSize=None):if chunkSize is None:chunkSize = self._defaultChunkSizeself.prevNumLines, self.prevNumCols = self._position(self.chunkSize)self.chunk = ""self.chunkSize = 0self.chunkOffset = 0data = self.dataStream.read(chunkSize)# Deal with CR LF and surrogates broken across chunksif self._bufferedCharacter:data = self._bufferedCharacter + dataself._bufferedCharacter = Noneelif not data:# We have no more data, bye-bye streamreturn Falseif len(data) > 1:lastv = ord(data[-1])if lastv == 0x0D or 0xD800 <= lastv <= 0xDBFF:self._bufferedCharacter = data[-1]data = data[:-1]if self.reportCharacterErrors:self.reportCharacterErrors(data)# Replace invalid charactersdata = data.replace("\r\n", "\n")data = data.replace("\r", "\n")self.chunk = dataself.chunkSize = len(data)return Truedef characterErrorsUCS4(self, data):for _ in range(len(invalid_unicode_re.findall(data))):self.errors.append("invalid-codepoint")def characterErrorsUCS2(self, data):# Someone picked the wrong compile option# You loseskip = Falsefor match in invalid_unicode_re.finditer(data):if skip:continuecodepoint = ord(match.group())pos = match.start()# Pretty sure there should be endianness issues hereif _utils.isSurrogatePair(data[pos:pos + 2]):# We have a surrogate pair!char_val = _utils.surrogatePairToCodepoint(data[pos:pos + 2])if char_val in non_bmp_invalid_codepoints:self.errors.append("invalid-codepoint")skip = Trueelif (codepoint >= 0xD800 and codepoint <= 0xDFFF andpos == len(data) - 1):self.errors.append("invalid-codepoint")else:skip = Falseself.errors.append("invalid-codepoint")def charsUntil(self, characters, opposite=False):""" Returns a string of characters from the stream up to but notincluding any character in 
'characters' or EOF. 'characters' must bea container that supports the 'in' method and iteration over itscharacters."""# Use a cache of regexps to find the required characterstry:chars = charsUntilRegEx[(characters, opposite)]except KeyError:if __debug__:for c in characters:assert(ord(c) < 128)regex = "".join(["\\x%02x" % ord(c) for c in characters])if not opposite:regex = "^%s" % regexchars = charsUntilRegEx[(characters, opposite)] = re.compile("[%s]+" % regex)rv = []while True:# Find the longest matching prefixm = chars.match(self.chunk, self.chunkOffset)if m is None:# If nothing matched, and it wasn't because we ran out of chunk,# then stopif self.chunkOffset != self.chunkSize:breakelse:end = m.end()# If not the whole chunk matched, return everything# up to the part that didn't matchif end != self.chunkSize:rv.append(self.chunk[self.chunkOffset:end])self.chunkOffset = endbreak# If the whole remainder of the chunk matched,# use it all and read the next chunkrv.append(self.chunk[self.chunkOffset:])if not self.readChunk():# Reached EOFbreakr = "".join(rv)return rdef unget(self, char):# Only one character is allowed to be ungotten at once - it must# be consumed again before any further call to ungetif char is not None:if self.chunkOffset == 0:# unget is called quite rarely, so it's a good idea to do# more work here if it saves a bit of work in the frequently# called char and charsUntil.# So, just prepend the ungotten character onto the current# chunk:self.chunk = char + self.chunkself.chunkSize += 1else:self.chunkOffset -= 1assert self.chunk[self.chunkOffset] == charclass HTMLBinaryInputStream(HTMLUnicodeInputStream):"""Provides a unicode stream of characters to the HTMLTokenizer.This class takes care of character encoding and removing or replacingincorrect byte-sequences and also provides column and line tracking."""def __init__(self, source, override_encoding=None, transport_encoding=None,same_origin_parent_encoding=None, 
likely_encoding=None,default_encoding="windows-1252", useChardet=True):"""Initialises the HTMLInputStream.HTMLInputStream(source, [encoding]) -> Normalized stream from sourcefor use by html5lib.source can be either a file-object, local filename or a string.The optional encoding parameter must be a string that indicatesthe encoding. If specified, that encoding will be used,regardless of any BOM or later declaration (such as in a metaelement)"""# Raw Stream - for unicode objects this will encode to utf-8 and set# self.charEncoding as appropriateself.rawStream = self.openStream(source)HTMLUnicodeInputStream.__init__(self, self.rawStream)# Encoding Information# Number of bytes to use when looking for a meta element with# encoding informationself.numBytesMeta = 1024# Number of bytes to use when using detecting encoding using chardetself.numBytesChardet = 100# Things from argsself.override_encoding = override_encodingself.transport_encoding = transport_encodingself.same_origin_parent_encoding = same_origin_parent_encodingself.likely_encoding = likely_encodingself.default_encoding = default_encoding# Determine encodingself.charEncoding = self.determineEncoding(useChardet)assert self.charEncoding[0] is not None# Call superclassself.reset()def reset(self):self.dataStream = self.charEncoding[0].codec_info.streamreader(self.rawStream, 'replace')HTMLUnicodeInputStream.reset(self)def openStream(self, source):"""Produces a file object from source.source can be either a file object, local filename or a string."""# Already a file objectif hasattr(source, 'read'):stream = sourceelse:stream = BytesIO(source)try:stream.seek(stream.tell())except: # pylint:disable=bare-exceptstream = BufferedStream(stream)return streamdef determineEncoding(self, chardet=True):# BOMs take precedence over everything# This will also read past the BOM if presentcharEncoding = self.detectBOM(), "certain"if charEncoding[0] is not None:return charEncoding# If we've been overriden, we've been 
overridencharEncoding = lookupEncoding(self.override_encoding), "certain"if charEncoding[0] is not None:return charEncoding# Now check the transport layercharEncoding = lookupEncoding(self.transport_encoding), "certain"if charEncoding[0] is not None:return charEncoding# Look for meta elements with encoding informationcharEncoding = self.detectEncodingMeta(), "tentative"if charEncoding[0] is not None:return charEncoding# Parent document encodingcharEncoding = lookupEncoding(self.same_origin_parent_encoding), "tentative"if charEncoding[0] is not None and not charEncoding[0].name.startswith("utf-16"):return charEncoding# "likely" encodingcharEncoding = lookupEncoding(self.likely_encoding), "tentative"if charEncoding[0] is not None:return charEncoding# Guess with chardet, if availableif chardet:try:from chardet.universaldetector import UniversalDetectorexcept ImportError:passelse:buffers = []detector = UniversalDetector()while not detector.done:buffer = self.rawStream.read(self.numBytesChardet)assert isinstance(buffer, bytes)if not buffer:breakbuffers.append(buffer)detector.feed(buffer)detector.close()encoding = lookupEncoding(detector.result['encoding'])self.rawStream.seek(0)if encoding is not None:return encoding, "tentative"# Try the default encodingcharEncoding = lookupEncoding(self.default_encoding), "tentative"if charEncoding[0] is not None:return charEncoding# Fallback to html5lib's default if even that hasn't workedreturn lookupEncoding("windows-1252"), "tentative"def changeEncoding(self, newEncoding):assert self.charEncoding[1] != "certain"newEncoding = lookupEncoding(newEncoding)if newEncoding is None:returnif newEncoding.name in ("utf-16be", "utf-16le"):newEncoding = lookupEncoding("utf-8")assert newEncoding is not Noneelif newEncoding == self.charEncoding[0]:self.charEncoding = (self.charEncoding[0], "certain")else:self.rawStream.seek(0)self.charEncoding = (newEncoding, "certain")self.reset()raise ReparseException("Encoding changed from %s to %s" % 
(self.charEncoding[0], newEncoding))def detectBOM(self):"""Attempts to detect at BOM at the start of the stream. Ifan encoding can be determined from the BOM return the name of theencoding otherwise return None"""bomDict = {codecs.BOM_UTF8: 'utf-8',codecs.BOM_UTF16_LE: 'utf-16le', codecs.BOM_UTF16_BE: 'utf-16be',codecs.BOM_UTF32_LE: 'utf-32le', codecs.BOM_UTF32_BE: 'utf-32be'}# Go to beginning of file and read in 4 bytesstring = self.rawStream.read(4)assert isinstance(string, bytes)# Try detecting the BOM using bytes from the stringencoding = bomDict.get(string[:3]) # UTF-8seek = 3if not encoding:# Need to detect UTF-32 before UTF-16encoding = bomDict.get(string) # UTF-32seek = 4if not encoding:encoding = bomDict.get(string[:2]) # UTF-16seek = 2# Set the read position past the BOM if one was found, otherwise# set it to the start of the streamif encoding:self.rawStream.seek(seek)return lookupEncoding(encoding)else:self.rawStream.seek(0)return Nonedef detectEncodingMeta(self):"""Report the encoding declared by the meta element"""buffer = self.rawStream.read(self.numBytesMeta)assert isinstance(buffer, bytes)parser = EncodingParser(buffer)self.rawStream.seek(0)encoding = parser.getEncoding()if encoding is not None and encoding.name in ("utf-16be", "utf-16le"):encoding = lookupEncoding("utf-8")return encodingclass EncodingBytes(bytes):"""String-like object with an associated position and various extra methodsIf the position is ever greater than the string length then an exception israised"""def __new__(self, value):assert isinstance(value, bytes)return bytes.__new__(self, value.lower())def __init__(self, value):# pylint:disable=unused-argumentself._position = -1def __iter__(self):return selfdef __next__(self):p = self._position = self._position + 1if p >= len(self):raise StopIterationelif p < 0:raise TypeErrorreturn self[p:p + 1]def next(self):# Py2 compatreturn self.__next__()def previous(self):p = self._positionif p >= len(self):raise StopIterationelif p < 0:raise 
TypeErrorself._position = p = p - 1return self[p:p + 1]def setPosition(self, position):if self._position >= len(self):raise StopIterationself._position = positiondef getPosition(self):if self._position >= len(self):raise StopIterationif self._position >= 0:return self._positionelse:return Noneposition = property(getPosition, setPosition)def getCurrentByte(self):return self[self.position:self.position + 1]currentByte = property(getCurrentByte)def skip(self, chars=spaceCharactersBytes):"""Skip past a list of characters"""p = self.position # use property for the error-checkingwhile p < len(self):c = self[p:p + 1]if c not in chars:self._position = preturn cp += 1self._position = preturn Nonedef skipUntil(self, chars):p = self.positionwhile p < len(self):c = self[p:p + 1]if c in chars:self._position = preturn cp += 1self._position = preturn Nonedef matchBytes(self, bytes):"""Look for a sequence of bytes at the start of a string. If the bytesare found return True and advance the position to the byte after thematch. Otherwise return False and leave the position alone"""p = self.positiondata = self[p:p + len(bytes)]rv = data.startswith(bytes)if rv:self.position += len(bytes)return rvdef jumpTo(self, bytes):"""Look for the next sequence of bytes matching a given sequence. 
Ifa match is found advance the position to the last byte of the match"""newPosition = self[self.position:].find(bytes)if newPosition > -1:# XXX: This is ugly, but I can't see a nicer way to fix this.if self._position == -1:self._position = 0self._position += (newPosition + len(bytes) - 1)return Trueelse:raise StopIterationclass EncodingParser(object):"""Mini parser for detecting character encoding from meta elements"""def __init__(self, data):"""string - the data to work on for encoding detection"""self.data = EncodingBytes(data)self.encoding = Nonedef getEncoding(self):methodDispatch = ((b"<!--", self.handleComment),(b"<meta", self.handleMeta),(b"</", self.handlePossibleEndTag),(b"<!", self.handleOther),(b"<?", self.handleOther),(b"<", self.handlePossibleStartTag))for _ in self.data:keepParsing = Truefor key, method in methodDispatch:if self.data.matchBytes(key):try:keepParsing = method()breakexcept StopIteration:keepParsing = Falsebreakif not keepParsing:breakreturn self.encodingdef handleComment(self):"""Skip over comments"""return self.data.jumpTo(b"-->")def handleMeta(self):if self.data.currentByte not in spaceCharactersBytes:# if we have <meta not followed by a space so just keep goingreturn True# We have a valid meta element we want to search for attributeshasPragma = FalsependingEncoding = Nonewhile True:# Try to find the next attribute after the current positionattr = self.getAttribute()if attr is None:return Trueelse:if attr[0] == b"http-equiv":hasPragma = attr[1] == b"content-type"if hasPragma and pendingEncoding is not None:self.encoding = pendingEncodingreturn Falseelif attr[0] == b"charset":tentativeEncoding = attr[1]codec = lookupEncoding(tentativeEncoding)if codec is not None:self.encoding = codecreturn Falseelif attr[0] == b"content":contentParser = ContentAttrParser(EncodingBytes(attr[1]))tentativeEncoding = contentParser.parse()if tentativeEncoding is not None:codec = lookupEncoding(tentativeEncoding)if codec is not None:if 
hasPragma:self.encoding = codecreturn Falseelse:pendingEncoding = codecdef handlePossibleStartTag(self):return self.handlePossibleTag(False)def handlePossibleEndTag(self):next(self.data)return self.handlePossibleTag(True)def handlePossibleTag(self, endTag):data = self.dataif data.currentByte not in asciiLettersBytes:# If the next byte is not an ascii letter either ignore this# fragment (possible start tag case) or treat it according to# handleOtherif endTag:data.previous()self.handleOther()return Truec = data.skipUntil(spacesAngleBrackets)if c == b"<":# return to the first step in the overall "two step" algorithm# reprocessing the < bytedata.previous()else:# Read all attributesattr = self.getAttribute()while attr is not None:attr = self.getAttribute()return Truedef handleOther(self):return self.data.jumpTo(b">")def getAttribute(self):"""Return a name,value pair for the next attribute in the stream,if one is found, or None"""data = self.data# Step 1 (skip chars)c = data.skip(spaceCharactersBytes | frozenset([b"/"]))assert c is None or len(c) == 1# Step 2if c in (b">", None):return None# Step 3attrName = []attrValue = []# Step 4 attribute namewhile True:if c == b"=" and attrName:breakelif c in spaceCharactersBytes:# Step 6!c = data.skip()breakelif c in (b"/", b">"):return b"".join(attrName), b""elif c in asciiUppercaseBytes:attrName.append(c.lower())elif c is None:return Noneelse:attrName.append(c)# Step 5c = next(data)# Step 7if c != b"=":data.previous()return b"".join(attrName), b""# Step 8next(data)# Step 9c = data.skip()# Step 10if c in (b"'", b'"'):# 10.1quoteChar = cwhile True:# 10.2c = next(data)# 10.3if c == quoteChar:next(data)return b"".join(attrName), b"".join(attrValue)# 10.4elif c in asciiUppercaseBytes:attrValue.append(c.lower())# 10.5else:attrValue.append(c)elif c == b">":return b"".join(attrName), b""elif c in asciiUppercaseBytes:attrValue.append(c.lower())elif c is None:return Noneelse:attrValue.append(c)# Step 11while True:c = next(data)if c in 
spacesAngleBrackets:return b"".join(attrName), b"".join(attrValue)elif c in asciiUppercaseBytes:attrValue.append(c.lower())elif c is None:return Noneelse:attrValue.append(c)class ContentAttrParser(object):def __init__(self, data):assert isinstance(data, bytes)self.data = datadef parse(self):try:# Check if the attr name is charset# otherwise returnself.data.jumpTo(b"charset")self.data.position += 1self.data.skip()if not self.data.currentByte == b"=":# If there is no = sign keep looking for attrsreturn Noneself.data.position += 1self.data.skip()# Look for an encoding between matching quote marksif self.data.currentByte in (b'"', b"'"):quoteMark = self.data.currentByteself.data.position += 1oldPosition = self.data.positionif self.data.jumpTo(quoteMark):return self.data[oldPosition:self.data.position]else:return Noneelse:# Unquoted valueoldPosition = self.data.positiontry:self.data.skipUntil(spaceCharactersBytes)return self.data[oldPosition:self.data.position]except StopIteration:# Return the whole remaining valuereturn self.data[oldPosition:]except StopIteration:return Nonedef lookupEncoding(encoding):"""Return the python codec name corresponding to an encoding or None if thestring doesn't correspond to a valid encoding."""if isinstance(encoding, binary_type):try:encoding = encoding.decode("ascii")except UnicodeDecodeError:return Noneif encoding is not None:try:return webencodings.lookup(encoding)except AttributeError:return Noneelse:return None
"""Coercion of HTML names/data to legal XML 1.0 names/data.

Builds (at import time, from the XML 1.0 character-class productions spelled
out below) regular expressions matching characters that are NOT legal in XML
names, and provides InfosetFilter, which rewrites offending names, comments,
pubids and text, emitting DataLossWarning whenever information is changed.
"""
from __future__ import absolute_import, division, unicode_literals

import re
import warnings

from .constants import DataLossWarning

# The following strings are the character classes from the XML 1.0
# specification (productions BaseChar, Ideographic, CombiningChar, Digit,
# Extender), kept in the spec's own "#xNNNN | [#xNNNN-#xNNNN]" notation and
# parsed by charStringToList below.
baseChar = """[#x0041-#x005A] | [#x0061-#x007A] | [#x00C0-#x00D6] | [#x00D8-#x00F6] |
[#x00F8-#x00FF] | [#x0100-#x0131] | [#x0134-#x013E] | [#x0141-#x0148] |
[#x014A-#x017E] | [#x0180-#x01C3] | [#x01CD-#x01F0] | [#x01F4-#x01F5] |
[#x01FA-#x0217] | [#x0250-#x02A8] | [#x02BB-#x02C1] | #x0386 |
[#x0388-#x038A] | #x038C | [#x038E-#x03A1] | [#x03A3-#x03CE] |
[#x03D0-#x03D6] | #x03DA | #x03DC | #x03DE | #x03E0 | [#x03E2-#x03F3] |
[#x0401-#x040C] | [#x040E-#x044F] | [#x0451-#x045C] | [#x045E-#x0481] |
[#x0490-#x04C4] | [#x04C7-#x04C8] | [#x04CB-#x04CC] | [#x04D0-#x04EB] |
[#x04EE-#x04F5] | [#x04F8-#x04F9] | [#x0531-#x0556] | #x0559 |
[#x0561-#x0586] | [#x05D0-#x05EA] | [#x05F0-#x05F2] | [#x0621-#x063A] |
[#x0641-#x064A] | [#x0671-#x06B7] | [#x06BA-#x06BE] | [#x06C0-#x06CE] |
[#x06D0-#x06D3] | #x06D5 | [#x06E5-#x06E6] | [#x0905-#x0939] | #x093D |
[#x0958-#x0961] | [#x0985-#x098C] | [#x098F-#x0990] | [#x0993-#x09A8] |
[#x09AA-#x09B0] | #x09B2 | [#x09B6-#x09B9] | [#x09DC-#x09DD] |
[#x09DF-#x09E1] | [#x09F0-#x09F1] | [#x0A05-#x0A0A] | [#x0A0F-#x0A10] |
[#x0A13-#x0A28] | [#x0A2A-#x0A30] | [#x0A32-#x0A33] | [#x0A35-#x0A36] |
[#x0A38-#x0A39] | [#x0A59-#x0A5C] | #x0A5E | [#x0A72-#x0A74] |
[#x0A85-#x0A8B] | #x0A8D | [#x0A8F-#x0A91] | [#x0A93-#x0AA8] |
[#x0AAA-#x0AB0] | [#x0AB2-#x0AB3] | [#x0AB5-#x0AB9] | #x0ABD | #x0AE0 |
[#x0B05-#x0B0C] | [#x0B0F-#x0B10] | [#x0B13-#x0B28] | [#x0B2A-#x0B30] |
[#x0B32-#x0B33] | [#x0B36-#x0B39] | #x0B3D | [#x0B5C-#x0B5D] |
[#x0B5F-#x0B61] | [#x0B85-#x0B8A] | [#x0B8E-#x0B90] | [#x0B92-#x0B95] |
[#x0B99-#x0B9A] | #x0B9C | [#x0B9E-#x0B9F] | [#x0BA3-#x0BA4] |
[#x0BA8-#x0BAA] | [#x0BAE-#x0BB5] | [#x0BB7-#x0BB9] | [#x0C05-#x0C0C] |
[#x0C0E-#x0C10] | [#x0C12-#x0C28] | [#x0C2A-#x0C33] | [#x0C35-#x0C39] |
[#x0C60-#x0C61] | [#x0C85-#x0C8C] | [#x0C8E-#x0C90] | [#x0C92-#x0CA8] |
[#x0CAA-#x0CB3] | [#x0CB5-#x0CB9] | #x0CDE | [#x0CE0-#x0CE1] |
[#x0D05-#x0D0C] | [#x0D0E-#x0D10] | [#x0D12-#x0D28] | [#x0D2A-#x0D39] |
[#x0D60-#x0D61] | [#x0E01-#x0E2E] | #x0E30 | [#x0E32-#x0E33] |
[#x0E40-#x0E45] | [#x0E81-#x0E82] | #x0E84 | [#x0E87-#x0E88] | #x0E8A |
#x0E8D | [#x0E94-#x0E97] | [#x0E99-#x0E9F] | [#x0EA1-#x0EA3] | #x0EA5 |
#x0EA7 | [#x0EAA-#x0EAB] | [#x0EAD-#x0EAE] | #x0EB0 | [#x0EB2-#x0EB3] |
#x0EBD | [#x0EC0-#x0EC4] | [#x0F40-#x0F47] | [#x0F49-#x0F69] |
[#x10A0-#x10C5] | [#x10D0-#x10F6] | #x1100 | [#x1102-#x1103] |
[#x1105-#x1107] | #x1109 | [#x110B-#x110C] | [#x110E-#x1112] | #x113C |
#x113E | #x1140 | #x114C | #x114E | #x1150 | [#x1154-#x1155] | #x1159 |
[#x115F-#x1161] | #x1163 | #x1165 | #x1167 | #x1169 | [#x116D-#x116E] |
[#x1172-#x1173] | #x1175 | #x119E | #x11A8 | #x11AB | [#x11AE-#x11AF] |
[#x11B7-#x11B8] | #x11BA | [#x11BC-#x11C2] | #x11EB | #x11F0 | #x11F9 |
[#x1E00-#x1E9B] | [#x1EA0-#x1EF9] | [#x1F00-#x1F15] | [#x1F18-#x1F1D] |
[#x1F20-#x1F45] | [#x1F48-#x1F4D] | [#x1F50-#x1F57] | #x1F59 | #x1F5B |
#x1F5D | [#x1F5F-#x1F7D] | [#x1F80-#x1FB4] | [#x1FB6-#x1FBC] | #x1FBE |
[#x1FC2-#x1FC4] | [#x1FC6-#x1FCC] | [#x1FD0-#x1FD3] | [#x1FD6-#x1FDB] |
[#x1FE0-#x1FEC] | [#x1FF2-#x1FF4] | [#x1FF6-#x1FFC] | #x2126 |
[#x212A-#x212B] | #x212E | [#x2180-#x2182] | [#x3041-#x3094] |
[#x30A1-#x30FA] | [#x3105-#x312C] | [#xAC00-#xD7A3]"""

ideographic = """[#x4E00-#x9FA5] | #x3007 | [#x3021-#x3029]"""

combiningCharacter = """[#x0300-#x0345] | [#x0360-#x0361] | [#x0483-#x0486] | [#x0591-#x05A1] |
[#x05A3-#x05B9] | [#x05BB-#x05BD] | #x05BF | [#x05C1-#x05C2] | #x05C4 |
[#x064B-#x0652] | #x0670 | [#x06D6-#x06DC] | [#x06DD-#x06DF] |
[#x06E0-#x06E4] | [#x06E7-#x06E8] | [#x06EA-#x06ED] | [#x0901-#x0903] |
#x093C | [#x093E-#x094C] | #x094D | [#x0951-#x0954] | [#x0962-#x0963] |
[#x0981-#x0983] | #x09BC | #x09BE | #x09BF | [#x09C0-#x09C4] |
[#x09C7-#x09C8] | [#x09CB-#x09CD] | #x09D7 | [#x09E2-#x09E3] | #x0A02 |
#x0A3C | #x0A3E | #x0A3F | [#x0A40-#x0A42] | [#x0A47-#x0A48] |
[#x0A4B-#x0A4D] | [#x0A70-#x0A71] | [#x0A81-#x0A83] | #x0ABC |
[#x0ABE-#x0AC5] | [#x0AC7-#x0AC9] | [#x0ACB-#x0ACD] | [#x0B01-#x0B03] |
#x0B3C | [#x0B3E-#x0B43] | [#x0B47-#x0B48] | [#x0B4B-#x0B4D] |
[#x0B56-#x0B57] | [#x0B82-#x0B83] | [#x0BBE-#x0BC2] | [#x0BC6-#x0BC8] |
[#x0BCA-#x0BCD] | #x0BD7 | [#x0C01-#x0C03] | [#x0C3E-#x0C44] |
[#x0C46-#x0C48] | [#x0C4A-#x0C4D] | [#x0C55-#x0C56] | [#x0C82-#x0C83] |
[#x0CBE-#x0CC4] | [#x0CC6-#x0CC8] | [#x0CCA-#x0CCD] | [#x0CD5-#x0CD6] |
[#x0D02-#x0D03] | [#x0D3E-#x0D43] | [#x0D46-#x0D48] | [#x0D4A-#x0D4D] |
#x0D57 | #x0E31 | [#x0E34-#x0E3A] | [#x0E47-#x0E4E] | #x0EB1 |
[#x0EB4-#x0EB9] | [#x0EBB-#x0EBC] | [#x0EC8-#x0ECD] | [#x0F18-#x0F19] |
#x0F35 | #x0F37 | #x0F39 | #x0F3E | #x0F3F | [#x0F71-#x0F84] |
[#x0F86-#x0F8B] | [#x0F90-#x0F95] | #x0F97 | [#x0F99-#x0FAD] |
[#x0FB1-#x0FB7] | #x0FB9 | [#x20D0-#x20DC] | #x20E1 | [#x302A-#x302F] |
#x3099 | #x309A"""

digit = """[#x0030-#x0039] | [#x0660-#x0669] | [#x06F0-#x06F9] | [#x0966-#x096F] |
[#x09E6-#x09EF] | [#x0A66-#x0A6F] | [#x0AE6-#x0AEF] | [#x0B66-#x0B6F] |
[#x0BE7-#x0BEF] | [#x0C66-#x0C6F] | [#x0CE6-#x0CEF] | [#x0D66-#x0D6F] |
[#x0E50-#x0E59] | [#x0ED0-#x0ED9] | [#x0F20-#x0F29]"""

# NOTE(review): the stray "#" before [#x3031-#x3035] is present in upstream
# html5lib as well; charStringToList still matches the range via re.match's
# prefix behaviour, so it is preserved here — confirm against upstream before
# "fixing".
extender = """#x00B7 | #x02D0 | #x02D1 | #x0387 | #x0640 | #x0E46 | #x0EC6 | #x3005 |
#[#x3031-#x3035] | [#x309D-#x309E] | [#x30FC-#x30FE]"""

letter = " | ".join([baseChar, ideographic])

# Without the
name = " | ".join([letter, digit, ".", "-", "_", combiningCharacter,
                   extender])
nameFirst = " | ".join([letter, "_"])

# NOTE(review): "[\d|A-F]" also matches a literal "|" — harmless here because
# the spec strings never put "|" inside a code point; same quirk exists
# upstream.
reChar = re.compile(r"#x([\d|A-F]{4,4})")
reCharRange = re.compile(r"\[#x([\d|A-F]{4,4})-#x([\d|A-F]{4,4})\]")


def charStringToList(chars):
    # Parse a spec-notation character-class string into a normalised list of
    # inclusive [start, end] code point ranges.
    charRanges = [item.strip() for item in chars.split(" | ")]
    rv = []
    for item in charRanges:
        foundMatch = False
        for regexp in (reChar, reCharRange):
            match = regexp.match(item)
            if match is not None:
                rv.append([hexToInt(item) for item in match.groups()])
                if len(rv[-1]) == 1:
                    # Single code point becomes a degenerate [n, n] range.
                    rv[-1] = rv[-1] * 2
                foundMatch = True
                break
        if not foundMatch:
            # Literal single characters such as "." or "_".
            assert len(item) == 1
            rv.append([ord(item)] * 2)
    rv = normaliseCharList(rv)
    return rv


def normaliseCharList(charList):
    # Sort and merge overlapping/adjacent [start, end] ranges.
    charList = sorted(charList)
    for item in charList:
        assert item[1] >= item[0]
    rv = []
    i = 0
    while i < len(charList):
        j = 1
        rv.append(charList[i])
        # Absorb every following range that touches or overlaps the current one.
        while i + j < len(charList) and charList[i + j][0] <= rv[-1][1] + 1:
            rv[-1][1] = charList[i + j][1]
            j += 1
        i += j
    return rv


# We don't really support characters above the BMP :(
max_unicode = int("FFFF", 16)


def missingRanges(charList):
    # Complement of a normalised range list over [0, max_unicode].
    rv = []
    if charList[0] != 0:
        rv.append([0, charList[0][0] - 1])
    for i, item in enumerate(charList[:-1]):
        rv.append([item[1] + 1, charList[i + 1][0] - 1])
    if charList[-1][1] != max_unicode:
        rv.append([charList[-1][1] + 1, max_unicode])
    return rv


def listToRegexpStr(charList):
    # Render a range list as a regex character class, e.g. "[a-cx]".
    rv = []
    for item in charList:
        if item[0] == item[1]:
            rv.append(escapeRegexp(chr(item[0])))
        else:
            rv.append(escapeRegexp(chr(item[0])) + "-" +
                      escapeRegexp(chr(item[1])))
    return "[%s]" % "".join(rv)


def hexToInt(hex_str):
    return int(hex_str, 16)


def escapeRegexp(string):
    # Backslash-escape regex metacharacters in a single-character string.
    specialCharacters = (".", "^", "$", "*", "+", "?", "{", "}",
                         "[", "]", "|", "(", ")", "-")
    for char in specialCharacters:
        string = string.replace(char, "\\" + char)
    return string

# output from the above
nonXmlNameBMPRegexp = re.compile('[\x00-,/:-@\\[-\\^`\\{-\xb6\xb8-\xbf\xd7\xf7\u0132-\u0133\u013f-\u0140\u0149\u017f\u01c4-\u01cc\u01f1-\u01f3\u01f6-\u01f9\u0218-\u024f\u02a9-\u02ba\u02c2-\u02cf\u02d2-\u02ff\u0346-\u035f\u0362-\u0385\u038b\u038d\u03a2\u03cf\u03d7-\u03d9\u03db\u03dd\u03df\u03e1\u03f4-\u0400\u040d\u0450\u045d\u0482\u0487-\u048f\u04c5-\u04c6\u04c9-\u04ca\u04cd-\u04cf\u04ec-\u04ed\u04f6-\u04f7\u04fa-\u0530\u0557-\u0558\u055a-\u0560\u0587-\u0590\u05a2\u05ba\u05be\u05c0\u05c3\u05c5-\u05cf\u05eb-\u05ef\u05f3-\u0620\u063b-\u063f\u0653-\u065f\u066a-\u066f\u06b8-\u06b9\u06bf\u06cf\u06d4\u06e9\u06ee-\u06ef\u06fa-\u0900\u0904\u093a-\u093b\u094e-\u0950\u0955-\u0957\u0964-\u0965\u0970-\u0980\u0984\u098d-\u098e\u0991-\u0992\u09a9\u09b1\u09b3-\u09b5\u09ba-\u09bb\u09bd\u09c5-\u09c6\u09c9-\u09ca\u09ce-\u09d6\u09d8-\u09db\u09de\u09e4-\u09e5\u09f2-\u0a01\u0a03-\u0a04\u0a0b-\u0a0e\u0a11-\u0a12\u0a29\u0a31\u0a34\u0a37\u0a3a-\u0a3b\u0a3d\u0a43-\u0a46\u0a49-\u0a4a\u0a4e-\u0a58\u0a5d\u0a5f-\u0a65\u0a75-\u0a80\u0a84\u0a8c\u0a8e\u0a92\u0aa9\u0ab1\u0ab4\u0aba-\u0abb\u0ac6\u0aca\u0ace-\u0adf\u0ae1-\u0ae5\u0af0-\u0b00\u0b04\u0b0d-\u0b0e\u0b11-\u0b12\u0b29\u0b31\u0b34-\u0b35\u0b3a-\u0b3b\u0b44-\u0b46\u0b49-\u0b4a\u0b4e-\u0b55\u0b58-\u0b5b\u0b5e\u0b62-\u0b65\u0b70-\u0b81\u0b84\u0b8b-\u0b8d\u0b91\u0b96-\u0b98\u0b9b\u0b9d\u0ba0-\u0ba2\u0ba5-\u0ba7\u0bab-\u0bad\u0bb6\u0bba-\u0bbd\u0bc3-\u0bc5\u0bc9\u0bce-\u0bd6\u0bd8-\u0be6\u0bf0-\u0c00\u0c04\u0c0d\u0c11\u0c29\u0c34\u0c3a-\u0c3d\u0c45\u0c49\u0c4e-\u0c54\u0c57-\u0c5f\u0c62-\u0c65\u0c70-\u0c81\u0c84\u0c8d\u0c91\u0ca9\u0cb4\u0cba-\u0cbd\u0cc5\u0cc9\u0cce-\u0cd4\u0cd7-\u0cdd\u0cdf\u0ce2-\u0ce5\u0cf0-\u0d01\u0d04\u0d0d\u0d11\u0d29\u0d3a-\u0d3d\u0d44-\u0d45\u0d49\u0d4e-\u0d56\u0d58-\u0d5f\u0d62-\u0d65\u0d70-\u0e00\u0e2f\u0e3b-\u0e3f\u0e4f\u0e5a-\u0e80\u0e83\u0e85-\u0e86\u0e89\u0e8b-\u0e8c\u0e8e-\u0e93\u0e98\u0ea0\u0ea4\u0ea6\u0ea8-\u0ea9\u0eac\u0eaf\u0eba\u0ebe-\u0ebf\u0ec5\u0ec7\u0ece-\u0ecf\u0eda-\u0f17\u0f1a-\u0f1f\u0f2a-\u0f34\u0f36\u0f38\u0f3a-\u0f3d\u0f48\u0f6a-\u0f70\u0f85\u0f8c-\u0f8f\u0f96\u0f98\u0fae-\u0fb0\u0fb8\u0fba-\u109f\u10c6-\u10cf\u10f7-\u10ff\u1101\u1104\u1108\u110a\u110d\u1113-\u113b\u113d\u113f\u1141-\u114b\u114d\u114f\u1151-\u1153\u1156-\u1158\u115a-\u115e\u1162\u1164\u1166\u1168\u116a-\u116c\u116f-\u1171\u1174\u1176-\u119d\u119f-\u11a7\u11a9-\u11aa\u11ac-\u11ad\u11b0-\u11b6\u11b9\u11bb\u11c3-\u11ea\u11ec-\u11ef\u11f1-\u11f8\u11fa-\u1dff\u1e9c-\u1e9f\u1efa-\u1eff\u1f16-\u1f17\u1f1e-\u1f1f\u1f46-\u1f47\u1f4e-\u1f4f\u1f58\u1f5a\u1f5c\u1f5e\u1f7e-\u1f7f\u1fb5\u1fbd\u1fbf-\u1fc1\u1fc5\u1fcd-\u1fcf\u1fd4-\u1fd5\u1fdc-\u1fdf\u1fed-\u1ff1\u1ff5\u1ffd-\u20cf\u20dd-\u20e0\u20e2-\u2125\u2127-\u2129\u212c-\u212d\u212f-\u217f\u2183-\u3004\u3006\u3008-\u3020\u3030\u3036-\u3040\u3095-\u3098\u309b-\u309c\u309f-\u30a0\u30fb\u30ff-\u3104\u312d-\u4dff\u9fa6-\uabff\ud7a4-\uffff]')  # noqa
nonXmlNameFirstBMPRegexp = re.compile('[\x00-@\\[-\\^`\\{-\xbf\xd7\xf7\u0132-\u0133\u013f-\u0140\u0149\u017f\u01c4-\u01cc\u01f1-\u01f3\u01f6-\u01f9\u0218-\u024f\u02a9-\u02ba\u02c2-\u0385\u0387\u038b\u038d\u03a2\u03cf\u03d7-\u03d9\u03db\u03dd\u03df\u03e1\u03f4-\u0400\u040d\u0450\u045d\u0482-\u048f\u04c5-\u04c6\u04c9-\u04ca\u04cd-\u04cf\u04ec-\u04ed\u04f6-\u04f7\u04fa-\u0530\u0557-\u0558\u055a-\u0560\u0587-\u05cf\u05eb-\u05ef\u05f3-\u0620\u063b-\u0640\u064b-\u0670\u06b8-\u06b9\u06bf\u06cf\u06d4\u06d6-\u06e4\u06e7-\u0904\u093a-\u093c\u093e-\u0957\u0962-\u0984\u098d-\u098e\u0991-\u0992\u09a9\u09b1\u09b3-\u09b5\u09ba-\u09db\u09de\u09e2-\u09ef\u09f2-\u0a04\u0a0b-\u0a0e\u0a11-\u0a12\u0a29\u0a31\u0a34\u0a37\u0a3a-\u0a58\u0a5d\u0a5f-\u0a71\u0a75-\u0a84\u0a8c\u0a8e\u0a92\u0aa9\u0ab1\u0ab4\u0aba-\u0abc\u0abe-\u0adf\u0ae1-\u0b04\u0b0d-\u0b0e\u0b11-\u0b12\u0b29\u0b31\u0b34-\u0b35\u0b3a-\u0b3c\u0b3e-\u0b5b\u0b5e\u0b62-\u0b84\u0b8b-\u0b8d\u0b91\u0b96-\u0b98\u0b9b\u0b9d\u0ba0-\u0ba2\u0ba5-\u0ba7\u0bab-\u0bad\u0bb6\u0bba-\u0c04\u0c0d\u0c11\u0c29\u0c34\u0c3a-\u0c5f\u0c62-\u0c84\u0c8d\u0c91\u0ca9\u0cb4\u0cba-\u0cdd\u0cdf\u0ce2-\u0d04\u0d0d\u0d11\u0d29\u0d3a-\u0d5f\u0d62-\u0e00\u0e2f\u0e31\u0e34-\u0e3f\u0e46-\u0e80\u0e83\u0e85-\u0e86\u0e89\u0e8b-\u0e8c\u0e8e-\u0e93\u0e98\u0ea0\u0ea4\u0ea6\u0ea8-\u0ea9\u0eac\u0eaf\u0eb1\u0eb4-\u0ebc\u0ebe-\u0ebf\u0ec5-\u0f3f\u0f48\u0f6a-\u109f\u10c6-\u10cf\u10f7-\u10ff\u1101\u1104\u1108\u110a\u110d\u1113-\u113b\u113d\u113f\u1141-\u114b\u114d\u114f\u1151-\u1153\u1156-\u1158\u115a-\u115e\u1162\u1164\u1166\u1168\u116a-\u116c\u116f-\u1171\u1174\u1176-\u119d\u119f-\u11a7\u11a9-\u11aa\u11ac-\u11ad\u11b0-\u11b6\u11b9\u11bb\u11c3-\u11ea\u11ec-\u11ef\u11f1-\u11f8\u11fa-\u1dff\u1e9c-\u1e9f\u1efa-\u1eff\u1f16-\u1f17\u1f1e-\u1f1f\u1f46-\u1f47\u1f4e-\u1f4f\u1f58\u1f5a\u1f5c\u1f5e\u1f7e-\u1f7f\u1fb5\u1fbd\u1fbf-\u1fc1\u1fc5\u1fcd-\u1fcf\u1fd4-\u1fd5\u1fdc-\u1fdf\u1fed-\u1ff1\u1ff5\u1ffd-\u2125\u2127-\u2129\u212c-\u212d\u212f-\u217f\u2183-\u3006\u3008-\u3020\u302a-\u3040\u3095-\u30a0\u30fb-\u3104\u312d-\u4dff\u9fa6-\uabff\ud7a4-\uffff]')  # noqa

# Simpler things
# NOTE(review): non-raw string with "\-" and "\'" — these are invalid escape
# sequences that newer CPython turns into SyntaxWarning; preserved as-is
# because this is a token-faithful reconstruction (same text as upstream).
nonPubidCharRegexp = re.compile("[^\x20\x0D\x0Aa-zA-Z0-9\-\'()+,./:=?;!*#@$_%]")


class InfosetFilter(object):
    """Coerce HTML infoset items (names, comments, text, pubids) into forms
    legal in XML 1.0, warning with DataLossWarning on every change."""

    # Matches the "UXXXXX" escape produced by escapeChar, for round-tripping.
    replacementRegexp = re.compile(r"U[\dA-F]{5,5}")

    def __init__(self,
                 dropXmlnsLocalName=False,
                 dropXmlnsAttrNs=False,
                 preventDoubleDashComments=False,
                 preventDashAtCommentEnd=False,
                 replaceFormFeedCharacters=True,
                 preventSingleQuotePubid=False):
        # Each flag enables one coercion below; defaults mirror upstream.
        self.dropXmlnsLocalName = dropXmlnsLocalName
        self.dropXmlnsAttrNs = dropXmlnsAttrNs
        self.preventDoubleDashComments = preventDoubleDashComments
        self.preventDashAtCommentEnd = preventDashAtCommentEnd
        self.replaceFormFeedCharacters = replaceFormFeedCharacters
        self.preventSingleQuotePubid = preventSingleQuotePubid
        # Cache of char -> "UXXXXX" replacement, filled by escapeChar.
        self.replaceCache = {}

    def coerceAttribute(self, name, namespace=None):
        # Returns the coerced attribute name, or None if it must be dropped.
        if self.dropXmlnsLocalName and name.startswith("xmlns:"):
            warnings.warn("Attributes cannot begin with xmlns", DataLossWarning)
            return None
        elif (self.dropXmlnsAttrNs and
              namespace == "http://www.w3.org/2000/xmlns/"):
            warnings.warn("Attributes cannot be in the xml namespace", DataLossWarning)
            return None
        else:
            return self.toXmlName(name)

    def coerceElement(self, name):
        return self.toXmlName(name)

    def coerceComment(self, data):
        # "--" is forbidden inside XML comments; a trailing "-" would produce
        # the illegal "--->" terminator.
        if self.preventDoubleDashComments:
            while "--" in data:
                warnings.warn("Comments cannot contain adjacent dashes", DataLossWarning)
                data = data.replace("--", "- -")
            if data.endswith("-"):
                warnings.warn("Comments cannot end in a dash", DataLossWarning)
                data += " "
        return data

    def coerceCharacters(self, data):
        if self.replaceFormFeedCharacters:
            # One warning per form feed, then replace them all at once.
            for _ in range(data.count("\x0C")):
                warnings.warn("Text cannot contain U+000C", DataLossWarning)
            data = data.replace("\x0C", " ")
        # Other non-xml characters
        return data

    def coercePubid(self, data):
        # Replace every character outside the XML PubidChar production.
        dataOutput = data
        for char in nonPubidCharRegexp.findall(data):
            warnings.warn("Coercing non-XML pubid", DataLossWarning)
            replacement = self.getReplacementCharacter(char)
            dataOutput = dataOutput.replace(char, replacement)
        if self.preventSingleQuotePubid and dataOutput.find("'") >= 0:
            warnings.warn("Pubid cannot contain single quote", DataLossWarning)
            dataOutput = dataOutput.replace("'", self.getReplacementCharacter("'"))
        return dataOutput

    def toXmlName(self, name):
        # First character uses the stricter NameStartChar class; the rest use
        # NameChar. Offending characters become "UXXXXX" escapes.
        nameFirst = name[0]
        nameRest = name[1:]
        m = nonXmlNameFirstBMPRegexp.match(nameFirst)
        if m:
            warnings.warn("Coercing non-XML name", DataLossWarning)
            nameFirstOutput = self.getReplacementCharacter(nameFirst)
        else:
            nameFirstOutput = nameFirst

        nameRestOutput = nameRest
        replaceChars = set(nonXmlNameBMPRegexp.findall(nameRest))
        for char in replaceChars:
            warnings.warn("Coercing non-XML name", DataLossWarning)
            replacement = self.getReplacementCharacter(char)
            nameRestOutput = nameRestOutput.replace(char, replacement)
        return nameFirstOutput + nameRestOutput

    def getReplacementCharacter(self, char):
        # Memoised lookup of the "UXXXXX" escape for char.
        if char in self.replaceCache:
            replacement = self.replaceCache[char]
        else:
            replacement = self.escapeChar(char)
        return replacement

    def fromXmlName(self, name):
        # Inverse of toXmlName: expand every "UXXXXX" escape back to its char.
        for item in set(self.replacementRegexp.findall(name)):
            name = name.replace(item, self.unescapeChar(item))
        return name

    def escapeChar(self, char):
        replacement = "U%05X" % ord(char)
        self.replaceCache[char] = replacement
        return replacement

    def unescapeChar(self, charcode):
        return chr(int(charcode[1:], 16))
"""HTML parsing library based on the WHATWG "HTML5"specification. The parser is designed to be compatible with existingHTML found in the wild and implements well-defined error recovery thatis largely compatible with modern desktop web browsers.Example usage:import html5libf = open("my_document.html")tree = html5lib.parse(f)"""from __future__ import absolute_import, division, unicode_literalsfrom .html5parser import HTMLParser, parse, parseFragmentfrom .treebuilders import getTreeBuilderfrom .treewalkers import getTreeWalkerfrom .serializer import serialize__all__ = ["HTMLParser", "parse", "parseFragment", "getTreeBuilder","getTreeWalker", "serialize"]# this has to be at the top level, see how setup.py parses this__version__ = "1.0b10"
# Copyright 2015,2016 Nir Cohen
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
The ``distro`` package (``distro`` stands for Linux Distribution) provides
information about the Linux distribution it runs on, such as a reliable
machine-readable distro ID, or version information.

It is a renewed alternative implementation for Python's original
:py:func:`platform.linux_distribution` function, but it provides much more
functionality. An alternative implementation became necessary because Python
3.5 deprecated this function, and Python 3.7 is expected to remove it
altogether. Its predecessor function :py:func:`platform.dist` was already
deprecated since Python 2.6 and is also expected to be removed in Python 3.7.
Still, there are many cases in which access to Linux distribution information
is needed. See `Python issue 1322 <https://bugs.python.org/issue1322>`_ for
more information.
"""

import os
import re
import sys
import json
import shlex
import logging
import subprocess


# This module only makes sense on Linux; fail fast at import time elsewhere.
if not sys.platform.startswith('linux'):
    raise ImportError('Unsupported platform: {0}'.format(sys.platform))

_UNIXCONFDIR = '/etc'
_OS_RELEASE_BASENAME = 'os-release'

#: Translation table for normalizing the "ID" attribute defined in os-release
#: files, for use by the :func:`distro.id` method.
#:
#: * Key: Value as defined in the os-release file, translated to lower case,
#:   with blanks translated to underscores.
#:
#: * Value: Normalized value.
NORMALIZED_OS_ID = {}

#: Translation table for normalizing the "Distributor ID" attribute returned by
#: the lsb_release command, for use by the :func:`distro.id` method.
#:
#: * Key: Value as returned by the lsb_release command, translated to lower
#:   case, with blanks translated to underscores.
#:
#: * Value: Normalized value.
NORMALIZED_LSB_ID = {
    'enterpriseenterprise': 'oracle',  # Oracle Enterprise Linux
    'redhatenterpriseworkstation': 'rhel',  # RHEL 6.7
}

#: Translation table for normalizing the distro ID derived from the file name
#: of distro release files, for use by the :func:`distro.id` method.
#:
#: * Key: Value as derived from the file name of a distro release file,
#:   translated to lower case, with blanks translated to underscores.
#:
#: * Value: Normalized value.
NORMALIZED_DISTRO_ID = {
    'redhat': 'rhel',  # RHEL 6.x, 7.x
}

# Pattern for content of distro release file (reversed)
# The line is matched right-to-left, hence "STL" (= reversed "LTS") and
# "esaeler" (= reversed "release").
_DISTRO_RELEASE_CONTENT_REVERSED_PATTERN = re.compile(
    r'(?:[^)]*\)(.*)\()? *(?:STL )?([\d.+\-a-z]*\d) *(?:esaeler *)?(.+)')

# Pattern for base file name of distro release file
_DISTRO_RELEASE_BASENAME_PATTERN = re.compile(
    r'(\w+)[-_](release|version)$')

# Base file names to be ignored when searching for distro release file
_DISTRO_RELEASE_IGNORE_BASENAMES = (
    'debian_version',
    'lsb-release',
    'oem-release',
    _OS_RELEASE_BASENAME,
    'system-release'
)


def linux_distribution(full_distribution_name=True):
    """
    Return information about the current Linux distribution as a tuple
    ``(id_name, version, codename)`` with items as follows:

    * ``id_name``: If *full_distribution_name* is false, the result of
      :func:`distro.id`. Otherwise, the result of :func:`distro.name`.
    * ``version``: The result of :func:`distro.version`.
    * ``codename``: The result of :func:`distro.codename`.

    The interface of this function is compatible with the original
    :py:func:`platform.linux_distribution` function, supporting a subset of
    its parameters. The returned data may differ because this implementation
    consults more data sources and normalizes the distro ID.
    """
    return _distro.linux_distribution(full_distribution_name)


def id():
    """
    Return the distro ID of the current Linux distribution, as a
    machine-readable string.

    For many popular distributions the returned value is *reliable* —
    documented and stable across releases — e.g. "ubuntu", "debian", "rhel",
    "centos", "fedora", "sles", "opensuse", "amazon", "arch", "cloudlinux",
    "exherbo", "gentoo", "ibm_powerkvm", "kvmibm", "linuxmint", "mageia",
    "mandriva", "parallels", "pidora", "raspbian", "oracle", "scientific",
    "slackware", "xenserver". If you need further distros added (or find a
    mismatch), please create an issue in the `distro issue tracker`_.

    Lookup hierarchy (first available, non-empty value wins):

    * the "ID" attribute of the os-release file,
    * the "Distributor ID" attribute returned by the lsb_release command,
    * the first part of the file name of the distro release file.

    The value is then lower-cased, blanks are translated to underscores, and
    a normalization is applied based upon the `normalization tables`_ so that
    the ID stays reliable even across incompatible changes in the
    distributions (e.g. a newly added os-release file).
    """
    return _distro.id()


def name(pretty=False):
    """
    Return the name of the current Linux distribution, as a human-readable
    string.

    If *pretty* is false, the name alone is returned (e.g. "CentOS Linux");
    if true, version and codename are appended (e.g.
    "CentOS Linux 7.1.1503 (Core)").

    Lookup hierarchy (first available, non-empty value wins): the
    "NAME"/"PRETTY_NAME" attribute of the os-release file, the
    "Distributor ID"/"Description" attribute returned by the lsb_release
    command, the "<name>" field of the distro release file (with pretty
    version appended when *pretty* is true and available).
    """
    return _distro.name(pretty)


def version(pretty=False, best=False):
    """
    Return the version of the current Linux distribution, as a human-readable
    string.

    If *pretty* is false, the version alone is returned (e.g. "7.0"); if
    true, the codename in parenthesis is appended when non-empty (e.g.
    "7.0 (Maipo)").

    Distributions provide version numbers with different precisions in the
    different sources. If *best* is false, the first non-empty version in
    priority order of the examined sources is returned; if true, the most
    precise version out of all examined sources is returned.

    Sources (priority order when *best* is false): the "VERSION_ID" attribute
    of the os-release file, the "Release" attribute returned by lsb_release,
    the "<version_id>" field of the distro release file, then versions parsed
    from the "PRETTY_NAME" os-release attribute and the lsb_release
    "Description" attribute when they follow the distro release file format.
    """
    return _distro.version(pretty, best)


def version_parts(best=False):
    """
    Return the version of the current Linux distribution as a tuple
    ``(major, minor, build_number)`` — the results of
    :func:`distro.major_version`, :func:`distro.minor_version` and
    :func:`distro.build_number`.

    For a description of the *best* parameter, see the :func:`distro.version`
    method.
    """
    return _distro.version_parts(best)


def major_version(best=False):
    """
    Return the major version (first part of the dot-separated version string)
    of the current Linux distribution, as a string, if provided; otherwise
    the empty string.

    For a description of the *best* parameter, see the :func:`distro.version`
    method.
    """
    return _distro.major_version(best)


def minor_version(best=False):
    """
    Return the minor version (second part of the dot-separated version
    string) of the current Linux distribution, as a string, if provided;
    otherwise the empty string.

    For a description of the *best* parameter, see the :func:`distro.version`
    method.
    """
    return _distro.minor_version(best)


def build_number(best=False):
    """
    Return the build number (third part of the dot-separated version string)
    of the current Linux distribution, as a string, if provided; otherwise
    the empty string.

    For a description of the *best* parameter, see the :func:`distro.version`
    method.
    """
    return _distro.build_number(best)


def like():
    """
    Return a space-separated list of distro IDs of distributions that are
    closely related to the current Linux distribution (e.g. distributions it
    derives from), in regards to packaging and programming interfaces.

    This information item is only provided by the os-release file; see the
    "ID_LIKE" attribute in the `os-release man page
    <http://www.freedesktop.org/software/systemd/man/os-release.html>`_.
    """
    return _distro.like()


def codename():
    """
    Return the codename for the release of the current Linux distribution,
    as a string, or the empty string if the distribution has none.

    Note the returned value is not always really a codename (openSUSE, for
    example, returns "x86_64"); the string found is returned as-is.

    Lookup hierarchy: the codename within the "VERSION" attribute of the
    os-release file, the "Codename" attribute returned by the lsb_release
    command, the "<codename>" field of the distro release file.
    """
    return _distro.codename()


def info(pretty=False, best=False):
    """
    Return certain machine-readable information items about the current Linux
    distribution in a dictionary, e.g.::

        {
            'id': 'rhel', 'version': '7.0',
            'version_parts': {'major': '7', 'minor': '0', 'build_number': ''},
            'like': 'fedora', 'codename': 'Maipo'
        }

    The structure and keys are always the same; values come from
    :func:`distro.id`, :func:`distro.version`, :func:`distro.major_version`,
    :func:`distro.minor_version`, :func:`distro.build_number`,
    :func:`distro.like` and :func:`distro.codename`.

    For a description of the *pretty* and *best* parameters, see the
    :func:`distro.version` method.
    """
    return _distro.info(pretty, best)


def os_release_info():
    """
    Return a dictionary containing key-value pairs for the information items
    from the os-release file data source of the current Linux distribution.

    See `os-release file`_ for details about these information items.
    """
    return _distro.os_release_info()


def lsb_release_info():
    """
    Return a dictionary containing key-value pairs for the information items
    from the lsb_release command data source of the current Linux
    distribution.

    See `lsb_release command output`_ for details about these information
    items.
    """
    return _distro.lsb_release_info()


def distro_release_info():
    """
    Return a dictionary containing key-value pairs for the information items
    from the distro release file data source of the current Linux
    distribution.

    See `distro release file`_ for details about these information items.
    """
    return _distro.distro_release_info()


def os_release_attr(attribute):
    """
    Return a single named information item from the os-release file data
    source of the current Linux distribution.

    * ``attribute`` (string): Key of the information item.

    Returns the item's value as a string, or the empty string if the item
    does not exist. See `os-release file`_ for details.
    """
    return _distro.os_release_attr(attribute)


def lsb_release_attr(attribute):
    """
    Return a single named information item from the lsb_release command
    output data source of the current Linux distribution.

    * ``attribute`` (string): Key of the information item.

    Returns the item's value as a string, or the empty string if the item
    does not exist. See `lsb_release command output`_ for details.
    """
    return _distro.lsb_release_attr(attribute)


def distro_release_attr(attribute):
    """
    Return a single named information item from the distro release file data
    source of the current Linux distribution.

    * ``attribute`` (string): Key of the information item.

    Returns the item's value as a string, or the empty string if the item
    does not exist. See `distro release file`_ for details.
    """
    return _distro.distro_release_attr(attribute)


class LinuxDistribution(object):
    """
    Provides information about a Linux distribution.

    This package creates a private module-global instance of this class with
    default initialization arguments, that is used by the
    `consolidated accessor functions`_ and `single source accessor
    functions`_. With default arguments, that instance returns data about
    the distribution the package runs on, so creating additional instances
    is normally unnecessary — do so only when control is needed over the
    exact data sources (a specific distro release file or os-release file,
    or skipping the lsb_release command).
    """

    def __init__(self,
                 include_lsb=True,
                 os_release_file='',
                 distro_release_file=''):
        """
        Gather information from the available data sources and store it in
        private instance attributes, so that each data source is read only
        once.

        Parameters:

        * ``include_lsb`` (bool): Whether `lsb_release command output`_ is
          included as a data source. If the command is not available on the
          program execution path, its data source is empty.

        * ``os_release_file`` (string): Path of the `os-release file`_ to use
          as a data source. Empty (default) selects the default path; a
          missing file yields an empty data source.

        * ``distro_release_file`` (string): Path of the `distro release
          file`_ to use as a data source. Empty (default) triggers the
          default search algorithm; a missing file yields an empty data
          source.

        Public instance attributes:

        * ``os_release_file`` (string): Path of the `os-release file`_
          actually used as a data source; empty if unused.

        * ``distro_release_file`` (string): Path of the `distro release
          file`_ actually used as a data source; empty if unused.

        Raises:

        * :py:exc:`IOError`: Some I/O issue with an os-release file or
          distro release file.

        * :py:exc:`subprocess.CalledProcessError`: The lsb_release command
          had some issue (other than not being available on the program
          execution path).

        * :py:exc:`UnicodeError`: A data source has unexpected characters or
          uses an unexpected encoding.
        """
        self.os_release_file = os_release_file or \
            os.path.join(_UNIXCONFDIR, _OS_RELEASE_BASENAME)
        self.distro_release_file = distro_release_file or ''  # updated later
        self._os_release_info = self._get_os_release_info()
        self._lsb_release_info = self._get_lsb_release_info() \
            if include_lsb else {}
        self._distro_release_info = self._get_distro_release_info()

    def __repr__(self):
        """Return repr of all info
        """
        return \
            "LinuxDistribution(" \
            "os_release_file={0!r}, " \
            "distro_release_file={1!r}, " \
            "_os_release_info={2!r}, " \
            "_lsb_release_info={3!r}, " \
            "_distro_release_info={4!r})".format(
                self.os_release_file,
                self.distro_release_file,
                self._os_release_info,
                self._lsb_release_info,
                self._distro_release_info)

    def linux_distribution(self, full_distribution_name=True):
        """
        Return information about the Linux distribution that is compatible
        with Python's :func:`platform.linux_distribution`, supporting a
        subset of its parameters.

        For details, see :func:`distro.linux_distribution`.
        """
        return (
            self.name() if full_distribution_name else self.id(),
            self.version(),
            self.codename()
        )

    def id(self):
        """Return the distro ID of the Linux distribution, as a string.

        For details, see :func:`distro.id`.
        """
        def normalize(distro_id, table):
            # Lower-case, blanks to underscores, then table-based mapping.
            distro_id = distro_id.lower().replace(' ', '_')
            return table.get(distro_id, distro_id)

        distro_id = self.os_release_attr('id')
        if distro_id:
            return normalize(distro_id, NORMALIZED_OS_ID)

        distro_id = self.lsb_release_attr('distributor_id')
        if distro_id:
            return normalize(distro_id, NORMALIZED_LSB_ID)

        distro_id = self.distro_release_attr('id')
        if distro_id:
            return normalize(distro_id, NORMALIZED_DISTRO_ID)

        return  # NOTE(review): chunk truncated here; the fall-through return value continues in the next chunk
''def name(self, pretty=False):"""Return the name of the Linux distribution, as a string.For details, see :func:`distro.name`."""name = self.os_release_attr('name') \or self.lsb_release_attr('distributor_id') \or self.distro_release_attr('name')if pretty:name = self.os_release_attr('pretty_name') \or self.lsb_release_attr('description')if not name:name = self.distro_release_attr('name')version = self.version(pretty=True)if version:name = name + ' ' + versionreturn name or ''def version(self, pretty=False, best=False):"""Return the version of the Linux distribution, as a string.For details, see :func:`distro.version`."""versions = [self.os_release_attr('version_id'),self.lsb_release_attr('release'),self.distro_release_attr('version_id'),self._parse_distro_release_content(self.os_release_attr('pretty_name')).get('version_id', ''),self._parse_distro_release_content(self.lsb_release_attr('description')).get('version_id', '')]version = ''if best:# This algorithm uses the last version in priority order that has# the best precision. 
If the versions are not in conflict, that# does not matter; otherwise, using the last one instead of the# first one might be considered a surprise.for v in versions:if v.count(".") > version.count(".") or version == '':version = velse:for v in versions:if v != '':version = vbreakif pretty and version and self.codename():version = u'{0} ({1})'.format(version, self.codename())return versiondef version_parts(self, best=False):"""Return the version of the Linux distribution, as a tuple of versionnumbers.For details, see :func:`distro.version_parts`."""version_str = self.version(best=best)if version_str:version_regex = re.compile(r'(\d+)\.?(\d+)?\.?(\d+)?')matches = version_regex.match(version_str)if matches:major, minor, build_number = matches.groups()return major, minor or '', build_number or ''return '', '', ''def major_version(self, best=False):"""Return the major version number of the current distribution.For details, see :func:`distro.major_version`."""return self.version_parts(best)[0]def minor_version(self, best=False):"""Return the minor version number of the Linux distribution.For details, see :func:`distro.minor_version`."""return self.version_parts(best)[1]def build_number(self, best=False):"""Return the build number of the Linux distribution.For details, see :func:`distro.build_number`."""return self.version_parts(best)[2]def like(self):"""Return the IDs of distributions that are like the Linux distribution.For details, see :func:`distro.like`."""return self.os_release_attr('id_like') or ''def codename(self):"""Return the codename of the Linux distribution.For details, see :func:`distro.codename`."""return self.os_release_attr('codename') \or self.lsb_release_attr('codename') \or self.distro_release_attr('codename') \or ''def info(self, pretty=False, best=False):"""Return certain machine-readable information about the Linuxdistribution.For details, see :func:`distro.info`."""return dict(id=self.id(),version=self.version(pretty, 
best),version_parts=dict(major=self.major_version(best),minor=self.minor_version(best),build_number=self.build_number(best)),like=self.like(),codename=self.codename(),)def os_release_info(self):"""Return a dictionary containing key-value pairs for the informationitems from the os-release file data source of the Linux distribution.For details, see :func:`distro.os_release_info`."""return self._os_release_infodef lsb_release_info(self):"""Return a dictionary containing key-value pairs for the informationitems from the lsb_release command data source of the Linuxdistribution.For details, see :func:`distro.lsb_release_info`."""return self._lsb_release_infodef distro_release_info(self):"""Return a dictionary containing key-value pairs for the informationitems from the distro release file data source of the Linuxdistribution.For details, see :func:`distro.distro_release_info`."""return self._distro_release_infodef os_release_attr(self, attribute):"""Return a single named information item from the os-release file datasource of the Linux distribution.For details, see :func:`distro.os_release_attr`."""return self._os_release_info.get(attribute, '')def lsb_release_attr(self, attribute):"""Return a single named information item from the lsb_release commandoutput data source of the Linux distribution.For details, see :func:`distro.lsb_release_attr`."""return self._lsb_release_info.get(attribute, '')def distro_release_attr(self, attribute):"""Return a single named information item from the distro release filedata source of the Linux distribution.For details, see :func:`distro.distro_release_attr`."""return self._distro_release_info.get(attribute, '')def _get_os_release_info(self):"""Get the information items from the specified os-release file.Returns:A dictionary containing all information items."""if os.path.isfile(self.os_release_file):with open(self.os_release_file) as release_file:return self._parse_os_release_content(release_file)return {}@staticmethoddef 
_parse_os_release_content(lines):"""Parse the lines of an os-release file.Parameters:* lines: Iterable through the lines in the os-release file.Each line must be a unicode string or a UTF-8 encoded bytestring.Returns:A dictionary containing all information items."""props = {}lexer = shlex.shlex(lines, posix=True)lexer.whitespace_split = True# The shlex module defines its `wordchars` variable using literals,# making it dependent on the encoding of the Python source file.# In Python 2.6 and 2.7, the shlex source file is encoded in# 'iso-8859-1', and the `wordchars` variable is defined as a byte# string. This causes a UnicodeDecodeError to be raised when the# parsed content is a unicode object. The following fix resolves that# (... but it should be fixed in shlex...):if sys.version_info[0] == 2 and isinstance(lexer.wordchars, bytes):lexer.wordchars = lexer.wordchars.decode('iso-8859-1')tokens = list(lexer)for token in tokens:# At this point, all shell-like parsing has been done (i.e.# comments processed, quotes and backslash escape sequences# processed, multi-line values assembled, trailing newlines# stripped, etc.), so the tokens are now either:# * variable assignments: var=value# * commands or their arguments (not allowed in os-release)if '=' in token:k, v = token.split('=', 1)if isinstance(v, bytes):v = v.decode('utf-8')props[k.lower()] = vif k == 'VERSION':# this handles cases in which the codename is in# the `(CODENAME)` (rhel, centos, fedora) format# or in the `, CODENAME` format (Ubuntu).codename = re.search(r'(\(\D+\))|,(\s+)?\D+', v)if codename:codename = codename.group()codename = codename.strip('()')codename = codename.strip(',')codename = codename.strip()# codename appears within paranthese.props['codename'] = codenameelse:props['codename'] = ''else:# Ignore any tokens that are not variable assignmentspassreturn propsdef _get_lsb_release_info(self):"""Get the information items from the lsb_release command output.Returns:A dictionary containing all 
information items."""cmd = 'lsb_release -a'process = subprocess.Popen(cmd,shell=True,stdout=subprocess.PIPE,stderr=subprocess.PIPE)stdout, stderr = process.communicate()stdout, stderr = stdout.decode('utf-8'), stderr.decode('utf-8')code = process.returncodeif code == 0:content = stdout.splitlines()return self._parse_lsb_release_content(content)elif code == 127: # Command not foundreturn {}else:if sys.version_info[:2] >= (3, 5):raise subprocess.CalledProcessError(code, cmd, stdout, stderr)elif sys.version_info[:2] >= (2, 7):raise subprocess.CalledProcessError(code, cmd, stdout)elif sys.version_info[:2] == (2, 6):raise subprocess.CalledProcessError(code, cmd)@staticmethoddef _parse_lsb_release_content(lines):"""Parse the output of the lsb_release command.Parameters:* lines: Iterable through the lines of the lsb_release output.Each line must be a unicode string or a UTF-8 encoded bytestring.Returns:A dictionary containing all information items."""props = {}for line in lines:line = line.decode('utf-8') if isinstance(line, bytes) else linekv = line.strip('\n').split(':', 1)if len(kv) != 2:# Ignore lines without colon.continuek, v = kvprops.update({k.replace(' ', '_').lower(): v.strip()})return propsdef _get_distro_release_info(self):"""Get the information items from the specified distro release file.Returns:A dictionary containing all information items."""if self.distro_release_file:# If it was specified, we use it and parse what we can, even if# its file name or content does not match the expected pattern.distro_info = self._parse_distro_release_file(self.distro_release_file)basename = os.path.basename(self.distro_release_file)# The file name pattern for user-specified distro release files# is somewhat more tolerant (compared to when searching for the# file), because we want to use what was specified as best as# possible.match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)if match:distro_info['id'] = match.group(1)return distro_infoelse:basenames = 
os.listdir(_UNIXCONFDIR)# We sort for repeatability in cases where there are multiple# distro specific files; e.g. CentOS, Oracle, Enterprise all# containing `redhat-release` on top of their own.basenames.sort()for basename in basenames:if basename in _DISTRO_RELEASE_IGNORE_BASENAMES:continuematch = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)if match:filepath = os.path.join(_UNIXCONFDIR, basename)distro_info = self._parse_distro_release_file(filepath)if 'name' in distro_info:# The name is always present if the pattern matchesself.distro_release_file = filepathdistro_info['id'] = match.group(1)return distro_inforeturn {}def _parse_distro_release_file(self, filepath):"""Parse a distro release file.Parameters:* filepath: Path name of the distro release file.Returns:A dictionary containing all information items."""if os.path.isfile(filepath):with open(filepath) as fp:# Only parse the first line. For instance, on SLES there# are multiple lines. We don't want them...return self._parse_distro_release_content(fp.readline())return {}@staticmethoddef _parse_distro_release_content(line):"""Parse a line from a distro release file.Parameters:* line: Line from the distro release file. 
Must be a unicode stringor a UTF-8 encoded byte string.Returns:A dictionary containing all information items."""if isinstance(line, bytes):line = line.decode('utf-8')matches = _DISTRO_RELEASE_CONTENT_REVERSED_PATTERN.match(line.strip()[::-1])distro_info = {}if matches:# regexp ensures non-Nonedistro_info['name'] = matches.group(3)[::-1]if matches.group(2):distro_info['version_id'] = matches.group(2)[::-1]if matches.group(1):distro_info['codename'] = matches.group(1)[::-1]elif line:distro_info['name'] = line.strip()return distro_info_distro = LinuxDistribution()def main():import argparselogger = logging.getLogger(__name__)logger.setLevel(logging.DEBUG)logger.addHandler(logging.StreamHandler(sys.stdout))parser = argparse.ArgumentParser(description="Linux distro info tool")parser.add_argument('--json','-j',help="Output in machine readable format",action="store_true")args = parser.parse_args()if args.json:logger.info(json.dumps(info(), indent=4, sort_keys=True))else:logger.info('Name: %s', name(pretty=True))distribution_version = version(pretty=True)if distribution_version:logger.info('Version: %s', distribution_version)distribution_codename = codename()if distribution_codename:logger.info('Codename: %s', distribution_codename)if __name__ == '__main__':main()
# -*- coding: utf-8 -*-## Copyright (C) 2013-2016 Vinay Sajip.# Licensed to the Python Software Foundation under a contributor agreement.# See LICENSE.txt and CONTRIBUTORS.txt.#from __future__ import unicode_literalsimport base64import codecsimport datetimeimport distutils.utilfrom email import message_from_fileimport hashlibimport impimport jsonimport loggingimport osimport posixpathimport reimport shutilimport sysimport tempfileimport zipfilefrom . import __version__, DistlibExceptionfrom .compat import sysconfig, ZipFile, fsdecode, text_type, filterfrom .database import InstalledDistributionfrom .metadata import Metadata, METADATA_FILENAMEfrom .util import (FileOperator, convert_path, CSVReader, CSVWriter, Cache,cached_property, get_cache_base, read_exports, tempdir)from .version import NormalizedVersion, UnsupportedVersionErrorlogger = logging.getLogger(__name__)cache = None # created when neededif hasattr(sys, 'pypy_version_info'):IMP_PREFIX = 'pp'elif sys.platform.startswith('java'):IMP_PREFIX = 'jy'elif sys.platform == 'cli':IMP_PREFIX = 'ip'else:IMP_PREFIX = 'cp'VER_SUFFIX = sysconfig.get_config_var('py_version_nodot')if not VER_SUFFIX: # pragma: no coverVER_SUFFIX = '%s%s' % sys.version_info[:2]PYVER = 'py' + VER_SUFFIXIMPVER = IMP_PREFIX + VER_SUFFIXARCH = distutils.util.get_platform().replace('-', '_').replace('.', '_')ABI = sysconfig.get_config_var('SOABI')if ABI and ABI.startswith('cpython-'):ABI = ABI.replace('cpython-', 'cp')else:def _derive_abi():parts = ['cp', VER_SUFFIX]if sysconfig.get_config_var('Py_DEBUG'):parts.append('d')if sysconfig.get_config_var('WITH_PYMALLOC'):parts.append('m')if sysconfig.get_config_var('Py_UNICODE_SIZE') == 4:parts.append('u')return ''.join(parts)ABI = _derive_abi()del _derive_abiFILENAME_RE = re.compile(r'''(?P<nm>[^-]+)-(?P<vn>\d+[^-]*)(-(?P<bn>\d+[^-]*))?-(?P<py>\w+\d+(\.\w+\d+)*)-(?P<bi>\w+)-(?P<ar>\w+(\.\w+)*)\.whl$''', re.IGNORECASE | re.VERBOSE)NAME_VERSION_RE = 
re.compile(r'''(?P<nm>[^-]+)-(?P<vn>\d+[^-]*)(-(?P<bn>\d+[^-]*))?$''', re.IGNORECASE | re.VERBOSE)SHEBANG_RE = re.compile(br'\s*#![^\r\n]*')SHEBANG_DETAIL_RE = re.compile(br'^(\s*#!("[^"]+"|\S+))\s+(.*)$')SHEBANG_PYTHON = b'#!python'SHEBANG_PYTHONW = b'#!pythonw'if os.sep == '/':to_posix = lambda o: oelse:to_posix = lambda o: o.replace(os.sep, '/')class Mounter(object):def __init__(self):self.impure_wheels = {}self.libs = {}def add(self, pathname, extensions):self.impure_wheels[pathname] = extensionsself.libs.update(extensions)def remove(self, pathname):extensions = self.impure_wheels.pop(pathname)for k, v in extensions:if k in self.libs:del self.libs[k]def find_module(self, fullname, path=None):if fullname in self.libs:result = selfelse:result = Nonereturn resultdef load_module(self, fullname):if fullname in sys.modules:result = sys.modules[fullname]else:if fullname not in self.libs:raise ImportError('unable to find extension for %s' % fullname)result = imp.load_dynamic(fullname, self.libs[fullname])result.__loader__ = selfparts = fullname.rsplit('.', 1)if len(parts) > 1:result.__package__ = parts[0]return result_hook = Mounter()class Wheel(object):"""Class to build and install from Wheel files (PEP 427)."""wheel_version = (1, 1)hash_kind = 'sha256'def __init__(self, filename=None, sign=False, verify=False):"""Initialise an instance using a (valid) filename."""self.sign = signself.should_verify = verifyself.buildver = ''self.pyver = [PYVER]self.abi = ['none']self.arch = ['any']self.dirname = os.getcwd()if filename is None:self.name = 'dummy'self.version = '0.1'self._filename = self.filenameelse:m = NAME_VERSION_RE.match(filename)if m:info = m.groupdict('')self.name = info['nm']# Reinstate the local version separatorself.version = info['vn'].replace('_', '-')self.buildver = info['bn']self._filename = self.filenameelse:dirname, filename = os.path.split(filename)m = FILENAME_RE.match(filename)if not m:raise DistlibException('Invalid name or ''filename: %r' % 
filename)if dirname:self.dirname = os.path.abspath(dirname)self._filename = filenameinfo = m.groupdict('')self.name = info['nm']self.version = info['vn']self.buildver = info['bn']self.pyver = info['py'].split('.')self.abi = info['bi'].split('.')self.arch = info['ar'].split('.')@propertydef filename(self):"""Build and return a filename from the various components."""if self.buildver:buildver = '-' + self.buildverelse:buildver = ''pyver = '.'.join(self.pyver)abi = '.'.join(self.abi)arch = '.'.join(self.arch)# replace - with _ as a local version separatorversion = self.version.replace('-', '_')return '%s-%s%s-%s-%s-%s.whl' % (self.name, version, buildver,pyver, abi, arch)@propertydef exists(self):path = os.path.join(self.dirname, self.filename)return os.path.isfile(path)@propertydef tags(self):for pyver in self.pyver:for abi in self.abi:for arch in self.arch:yield pyver, abi, arch@cached_propertydef metadata(self):pathname = os.path.join(self.dirname, self.filename)name_ver = '%s-%s' % (self.name, self.version)info_dir = '%s.dist-info' % name_verwrapper = codecs.getreader('utf-8')with ZipFile(pathname, 'r') as zf:wheel_metadata = self.get_wheel_metadata(zf)wv = wheel_metadata['Wheel-Version'].split('.', 1)file_version = tuple([int(i) for i in wv])if file_version < (1, 1):fn = 'METADATA'else:fn = METADATA_FILENAMEtry:metadata_filename = posixpath.join(info_dir, fn)with zf.open(metadata_filename) as bf:wf = wrapper(bf)result = Metadata(fileobj=wf)except KeyError:raise ValueError('Invalid wheel, because %s is ''missing' % fn)return resultdef get_wheel_metadata(self, zf):name_ver = '%s-%s' % (self.name, self.version)info_dir = '%s.dist-info' % name_vermetadata_filename = posixpath.join(info_dir, 'WHEEL')with zf.open(metadata_filename) as bf:wf = codecs.getreader('utf-8')(bf)message = message_from_file(wf)return dict(message)@cached_propertydef info(self):pathname = os.path.join(self.dirname, self.filename)with ZipFile(pathname, 'r') as zf:result = 
self.get_wheel_metadata(zf)return resultdef process_shebang(self, data):m = SHEBANG_RE.match(data)if m:end = m.end()shebang, data_after_shebang = data[:end], data[end:]# Preserve any arguments after the interpreterif b'pythonw' in shebang.lower():shebang_python = SHEBANG_PYTHONWelse:shebang_python = SHEBANG_PYTHONm = SHEBANG_DETAIL_RE.match(shebang)if m:args = b' ' + m.groups()[-1]else:args = b''shebang = shebang_python + argsdata = shebang + data_after_shebangelse:cr = data.find(b'\r')lf = data.find(b'\n')if cr < 0 or cr > lf:term = b'\n'else:if data[cr:cr + 2] == b'\r\n':term = b'\r\n'else:term = b'\r'data = SHEBANG_PYTHON + term + datareturn datadef get_hash(self, data, hash_kind=None):if hash_kind is None:hash_kind = self.hash_kindtry:hasher = getattr(hashlib, hash_kind)except AttributeError:raise DistlibException('Unsupported hash algorithm: %r' % hash_kind)result = hasher(data).digest()result = base64.urlsafe_b64encode(result).rstrip(b'=').decode('ascii')return hash_kind, resultdef write_record(self, records, record_path, base):records = list(records) # make a copy for sortingp = to_posix(os.path.relpath(record_path, base))records.append((p, '', ''))records.sort()with CSVWriter(record_path) as writer:for row in records:writer.writerow(row)def write_records(self, info, libdir, archive_paths):records = []distinfo, info_dir = infohasher = getattr(hashlib, self.hash_kind)for ap, p in archive_paths:with open(p, 'rb') as f:data = f.read()digest = '%s=%s' % self.get_hash(data)size = os.path.getsize(p)records.append((ap, digest, size))p = os.path.join(distinfo, 'RECORD')self.write_record(records, p, libdir)ap = to_posix(os.path.join(info_dir, 'RECORD'))archive_paths.append((ap, p))def build_zip(self, pathname, archive_paths):with ZipFile(pathname, 'w', zipfile.ZIP_DEFLATED) as zf:for ap, p in archive_paths:logger.debug('Wrote %s to %s in wheel', p, ap)zf.write(p, ap)def build(self, paths, tags=None, wheel_version=None):"""Build a wheel from files in specified paths, 
and use any specified tagswhen determining the name of the wheel."""if tags is None:tags = {}libkey = list(filter(lambda o: o in paths, ('purelib', 'platlib')))[0]if libkey == 'platlib':is_pure = 'false'default_pyver = [IMPVER]default_abi = [ABI]default_arch = [ARCH]else:is_pure = 'true'default_pyver = [PYVER]default_abi = ['none']default_arch = ['any']self.pyver = tags.get('pyver', default_pyver)self.abi = tags.get('abi', default_abi)self.arch = tags.get('arch', default_arch)libdir = paths[libkey]name_ver = '%s-%s' % (self.name, self.version)data_dir = '%s.data' % name_verinfo_dir = '%s.dist-info' % name_verarchive_paths = []# First, stuff which is not in site-packagesfor key in ('data', 'headers', 'scripts'):if key not in paths:continuepath = paths[key]if os.path.isdir(path):for root, dirs, files in os.walk(path):for fn in files:p = fsdecode(os.path.join(root, fn))rp = os.path.relpath(p, path)ap = to_posix(os.path.join(data_dir, key, rp))archive_paths.append((ap, p))if key == 'scripts' and not p.endswith('.exe'):with open(p, 'rb') as f:data = f.read()data = self.process_shebang(data)with open(p, 'wb') as f:f.write(data)# Now, stuff which is in site-packages, other than the# distinfo stuff.path = libdirdistinfo = Nonefor root, dirs, files in os.walk(path):if root == path:# At the top level only, save distinfo for later# and skip it for nowfor i, dn in enumerate(dirs):dn = fsdecode(dn)if dn.endswith('.dist-info'):distinfo = os.path.join(root, dn)del dirs[i]breakassert distinfo, '.dist-info directory expected, not found'for fn in files:# comment out next suite to leave .pyc files inif fsdecode(fn).endswith(('.pyc', '.pyo')):continuep = os.path.join(root, fn)rp = to_posix(os.path.relpath(p, path))archive_paths.append((rp, p))# Now distinfo. Assumed to be flat, i.e. 
os.listdir is enough.files = os.listdir(distinfo)for fn in files:if fn not in ('RECORD', 'INSTALLER', 'SHARED', 'WHEEL'):p = fsdecode(os.path.join(distinfo, fn))ap = to_posix(os.path.join(info_dir, fn))archive_paths.append((ap, p))wheel_metadata = ['Wheel-Version: %d.%d' % (wheel_version or self.wheel_version),'Generator: distlib %s' % __version__,'Root-Is-Purelib: %s' % is_pure,]for pyver, abi, arch in self.tags:wheel_metadata.append('Tag: %s-%s-%s' % (pyver, abi, arch))p = os.path.join(distinfo, 'WHEEL')with open(p, 'w') as f:f.write('\n'.join(wheel_metadata))ap = to_posix(os.path.join(info_dir, 'WHEEL'))archive_paths.append((ap, p))# Now, at last, RECORD.# Paths in here are archive paths - nothing else makes sense.self.write_records((distinfo, info_dir), libdir, archive_paths)# Now, ready to build the zip filepathname = os.path.join(self.dirname, self.filename)self.build_zip(pathname, archive_paths)return pathnamedef install(self, paths, maker, **kwargs):"""Install a wheel to the specified paths. 
If kwarg ``warner`` isspecified, it should be a callable, which will be called with twotuples indicating the wheel version of this software and the wheelversion in the file, if there is a discrepancy in the versions.This can be used to issue any warnings to raise any exceptions.If kwarg ``lib_only`` is True, only the purelib/platlib files areinstalled, and the headers, scripts, data and dist-info metadata arenot written.The return value is a :class:`InstalledDistribution` instance unless``options.lib_only`` is True, in which case the return value is ``None``."""dry_run = maker.dry_runwarner = kwargs.get('warner')lib_only = kwargs.get('lib_only', False)pathname = os.path.join(self.dirname, self.filename)name_ver = '%s-%s' % (self.name, self.version)data_dir = '%s.data' % name_verinfo_dir = '%s.dist-info' % name_vermetadata_name = posixpath.join(info_dir, METADATA_FILENAME)wheel_metadata_name = posixpath.join(info_dir, 'WHEEL')record_name = posixpath.join(info_dir, 'RECORD')wrapper = codecs.getreader('utf-8')with ZipFile(pathname, 'r') as zf:with zf.open(wheel_metadata_name) as bwf:wf = wrapper(bwf)message = message_from_file(wf)wv = message['Wheel-Version'].split('.', 1)file_version = tuple([int(i) for i in wv])if (file_version != self.wheel_version) and warner:warner(self.wheel_version, file_version)if message['Root-Is-Purelib'] == 'true':libdir = paths['purelib']else:libdir = paths['platlib']records = {}with zf.open(record_name) as bf:with CSVReader(stream=bf) as reader:for row in reader:p = row[0]records[p] = rowdata_pfx = posixpath.join(data_dir, '')info_pfx = posixpath.join(info_dir, '')script_pfx = posixpath.join(data_dir, 'scripts', '')# make a new instance rather than a copy of maker's,# as we mutate itfileop = FileOperator(dry_run=dry_run)fileop.record = True # so we can rollback if neededbc = not sys.dont_write_bytecode # Double negatives. 
Lovely!outfiles = [] # for RECORD writing# for script copying/shebang processingworkdir = tempfile.mkdtemp()# set target dir later# we default add_launchers to False, as the# Python Launcher should be used insteadmaker.source_dir = workdirmaker.target_dir = Nonetry:for zinfo in zf.infolist():arcname = zinfo.filenameif isinstance(arcname, text_type):u_arcname = arcnameelse:u_arcname = arcname.decode('utf-8')# The signature file won't be in RECORD,# and we don't currently don't do anything with itif u_arcname.endswith('/RECORD.jws'):continuerow = records[u_arcname]if row[2] and str(zinfo.file_size) != row[2]:raise DistlibException('size mismatch for ''%s' % u_arcname)if row[1]:kind, value = row[1].split('=', 1)with zf.open(arcname) as bf:data = bf.read()_, digest = self.get_hash(data, kind)if digest != value:raise DistlibException('digest mismatch for ''%s' % arcname)if lib_only and u_arcname.startswith((info_pfx, data_pfx)):logger.debug('lib_only: skipping %s', u_arcname)continueis_script = (u_arcname.startswith(script_pfx)and not u_arcname.endswith('.exe'))if u_arcname.startswith(data_pfx):_, where, rp = u_arcname.split('/', 2)outfile = os.path.join(paths[where], convert_path(rp))else:# meant for site-packages.if u_arcname in (wheel_metadata_name, record_name):continueoutfile = os.path.join(libdir, convert_path(u_arcname))if not is_script:with zf.open(arcname) as bf:fileop.copy_stream(bf, outfile)outfiles.append(outfile)# Double check the digest of the written fileif not dry_run and row[1]:with open(outfile, 'rb') as bf:data = bf.read()_, newdigest = self.get_hash(data, kind)if newdigest != digest:raise DistlibException('digest mismatch ''on write for ''%s' % outfile)if bc and outfile.endswith('.py'):try:pyc = fileop.byte_compile(outfile)outfiles.append(pyc)except Exception:# Don't give up if byte-compilation fails,# but log it and perhaps warn the userlogger.warning('Byte-compilation failed',exc_info=True)else:fn = os.path.basename(convert_path(arcname))workname = 
os.path.join(workdir, fn)with zf.open(arcname) as bf:fileop.copy_stream(bf, workname)dn, fn = os.path.split(outfile)maker.target_dir = dnfilenames = maker.make(fn)fileop.set_executable_mode(filenames)outfiles.extend(filenames)if lib_only:logger.debug('lib_only: returning None')dist = Noneelse:# Generate scripts# Try to get pydist.json so we can see if there are# any commands to generate. If this fails (e.g. because# of a legacy wheel), log a warning but don't give up.commands = Nonefile_version = self.info['Wheel-Version']if file_version == '1.0':# Use legacy infoep = posixpath.join(info_dir, 'entry_points.txt')try:with zf.open(ep) as bwf:epdata = read_exports(bwf)commands = {}for key in ('console', 'gui'):k = '%s_scripts' % keyif k in epdata:commands['wrap_%s' % key] = d = {}for v in epdata[k].values():s = '%s:%s' % (v.prefix, v.suffix)if v.flags:s += ' %s' % v.flagsd[v.name] = sexcept Exception:logger.warning('Unable to read legacy script ''metadata, so cannot generate ''scripts')else:try:with zf.open(metadata_name) as bwf:wf = wrapper(bwf)commands = json.load(wf).get('extensions')if commands:commands = commands.get('python.commands')except Exception:logger.warning('Unable to read JSON metadata, so ''cannot generate scripts')if commands:console_scripts = commands.get('wrap_console', {})gui_scripts = commands.get('wrap_gui', {})if console_scripts or gui_scripts:script_dir = paths.get('scripts', '')if not os.path.isdir(script_dir):raise ValueError('Valid script path not ''specified')maker.target_dir = script_dirfor k, v in console_scripts.items():script = '%s = %s' % (k, v)filenames = maker.make(script)fileop.set_executable_mode(filenames)if gui_scripts:options = {'gui': True }for k, v in gui_scripts.items():script = '%s = %s' % (k, v)filenames = maker.make(script, options)fileop.set_executable_mode(filenames)p = os.path.join(libdir, info_dir)dist = InstalledDistribution(p)# Write SHAREDpaths = dict(paths) # don't change passed in dictdel paths['purelib']del 
paths['platlib']paths['lib'] = libdirp = dist.write_shared_locations(paths, dry_run)if p:outfiles.append(p)# Write RECORDdist.write_installed_files(outfiles, paths['prefix'],dry_run)return distexcept Exception: # pragma: no coverlogger.exception('installation failed.')fileop.rollback()raisefinally:shutil.rmtree(workdir)def _get_dylib_cache(self):global cacheif cache is None:# Use native string to avoid issues on 2.x: see Python #20140.base = os.path.join(get_cache_base(), str('dylib-cache'),sys.version[:3])cache = Cache(base)return cachedef _get_extensions(self):pathname = os.path.join(self.dirname, self.filename)name_ver = '%s-%s' % (self.name, self.version)info_dir = '%s.dist-info' % name_verarcname = posixpath.join(info_dir, 'EXTENSIONS')wrapper = codecs.getreader('utf-8')result = []with ZipFile(pathname, 'r') as zf:try:with zf.open(arcname) as bf:wf = wrapper(bf)extensions = json.load(wf)cache = self._get_dylib_cache()prefix = cache.prefix_to_dir(pathname)cache_base = os.path.join(cache.base, prefix)if not os.path.isdir(cache_base):os.makedirs(cache_base)for name, relpath in extensions.items():dest = os.path.join(cache_base, convert_path(relpath))if not os.path.exists(dest):extract = Trueelse:file_time = os.stat(dest).st_mtimefile_time = datetime.datetime.fromtimestamp(file_time)info = zf.getinfo(relpath)wheel_time = datetime.datetime(*info.date_time)extract = wheel_time > file_timeif extract:zf.extract(relpath, cache_base)result.append((name, dest))except KeyError:passreturn resultdef is_compatible(self):"""Determine if a wheel is compatible with the running system."""return is_compatible(self)def is_mountable(self):"""Determine if a wheel is asserted as mountable by its metadata."""return True # for now - metadata details TBDdef mount(self, append=False):pathname = os.path.abspath(os.path.join(self.dirname, self.filename))if not self.is_compatible():msg = 'Wheel %s not compatible with this Python.' 
% pathnameraise DistlibException(msg)if not self.is_mountable():msg = 'Wheel %s is marked as not mountable.' % pathnameraise DistlibException(msg)if pathname in sys.path:logger.debug('%s already in path', pathname)else:if append:sys.path.append(pathname)else:sys.path.insert(0, pathname)extensions = self._get_extensions()if extensions:if _hook not in sys.meta_path:sys.meta_path.append(_hook)_hook.add(pathname, extensions)def unmount(self):pathname = os.path.abspath(os.path.join(self.dirname, self.filename))if pathname not in sys.path:logger.debug('%s not in path', pathname)else:sys.path.remove(pathname)if pathname in _hook.impure_wheels:_hook.remove(pathname)if not _hook.impure_wheels:if _hook in sys.meta_path:sys.meta_path.remove(_hook)def verify(self):pathname = os.path.join(self.dirname, self.filename)name_ver = '%s-%s' % (self.name, self.version)data_dir = '%s.data' % name_verinfo_dir = '%s.dist-info' % name_vermetadata_name = posixpath.join(info_dir, METADATA_FILENAME)wheel_metadata_name = posixpath.join(info_dir, 'WHEEL')record_name = posixpath.join(info_dir, 'RECORD')wrapper = codecs.getreader('utf-8')with ZipFile(pathname, 'r') as zf:with zf.open(wheel_metadata_name) as bwf:wf = wrapper(bwf)message = message_from_file(wf)wv = message['Wheel-Version'].split('.', 1)file_version = tuple([int(i) for i in wv])# TODO version verificationrecords = {}with zf.open(record_name) as bf:with CSVReader(stream=bf) as reader:for row in reader:p = row[0]records[p] = rowfor zinfo in zf.infolist():arcname = zinfo.filenameif isinstance(arcname, text_type):u_arcname = arcnameelse:u_arcname = arcname.decode('utf-8')if '..' 
in u_arcname:raise DistlibException('invalid entry in ''wheel: %r' % u_arcname)# The signature file won't be in RECORD,# and we don't currently don't do anything with itif u_arcname.endswith('/RECORD.jws'):continuerow = records[u_arcname]if row[2] and str(zinfo.file_size) != row[2]:raise DistlibException('size mismatch for ''%s' % u_arcname)if row[1]:kind, value = row[1].split('=', 1)with zf.open(arcname) as bf:data = bf.read()_, digest = self.get_hash(data, kind)if digest != value:raise DistlibException('digest mismatch for ''%s' % arcname)def update(self, modifier, dest_dir=None, **kwargs):"""Update the contents of a wheel in a generic way. The modifier shouldbe a callable which expects a dictionary argument: its keys arearchive-entry paths, and its values are absolute filesystem pathswhere the contents the corresponding archive entries can be found. Themodifier is free to change the contents of the files pointed to, addnew entries and remove entries, before returning. This method willextract the entire contents of the wheel to a temporary location, callthe modifier, and then use the passed (and possibly updated)dictionary to write a new wheel. 
If ``dest_dir`` is specified, the newwheel is written there -- otherwise, the original wheel is overwritten.The modifier should return True if it updated the wheel, else False.This method returns the same value the modifier returns."""def get_version(path_map, info_dir):version = path = Nonekey = '%s/%s' % (info_dir, METADATA_FILENAME)if key not in path_map:key = '%s/PKG-INFO' % info_dirif key in path_map:path = path_map[key]version = Metadata(path=path).versionreturn version, pathdef update_version(version, path):updated = Nonetry:v = NormalizedVersion(version)i = version.find('-')if i < 0:updated = '%s+1' % versionelse:parts = [int(s) for s in version[i + 1:].split('.')]parts[-1] += 1updated = '%s+%s' % (version[:i],'.'.join(str(i) for i in parts))except UnsupportedVersionError:logger.debug('Cannot update non-compliant (PEP-440) ''version %r', version)if updated:md = Metadata(path=path)md.version = updatedlegacy = not path.endswith(METADATA_FILENAME)md.write(path=path, legacy=legacy)logger.debug('Version updated from %r to %r', version,updated)pathname = os.path.join(self.dirname, self.filename)name_ver = '%s-%s' % (self.name, self.version)info_dir = '%s.dist-info' % name_verrecord_name = posixpath.join(info_dir, 'RECORD')with tempdir() as workdir:with ZipFile(pathname, 'r') as zf:path_map = {}for zinfo in zf.infolist():arcname = zinfo.filenameif isinstance(arcname, text_type):u_arcname = arcnameelse:u_arcname = arcname.decode('utf-8')if u_arcname == record_name:continueif '..' in u_arcname:raise DistlibException('invalid entry in ''wheel: %r' % u_arcname)zf.extract(zinfo, workdir)path = os.path.join(workdir, convert_path(u_arcname))path_map[u_arcname] = path# Remember the version.original_version, _ = get_version(path_map, info_dir)# Files extracted. 
Call the modifier.modified = modifier(path_map, **kwargs)if modified:# Something changed - need to build a new wheel.current_version, path = get_version(path_map, info_dir)if current_version and (current_version == original_version):# Add or update local version to signify changes.update_version(current_version, path)# Decide where the new wheel goes.if dest_dir is None:fd, newpath = tempfile.mkstemp(suffix='.whl',prefix='wheel-update-',dir=workdir)os.close(fd)else:if not os.path.isdir(dest_dir):raise DistlibException('Not a directory: %r' % dest_dir)newpath = os.path.join(dest_dir, self.filename)archive_paths = list(path_map.items())distinfo = os.path.join(workdir, info_dir)info = distinfo, info_dirself.write_records(info, workdir, archive_paths)self.build_zip(newpath, archive_paths)if dest_dir is None:shutil.copyfile(newpath, pathname)return modifieddef compatible_tags():"""Return (pyver, abi, arch) tuples compatible with this Python."""versions = [VER_SUFFIX]major = VER_SUFFIX[0]for minor in range(sys.version_info[1] - 1, - 1, -1):versions.append(''.join([major, str(minor)]))abis = []for suffix, _, _ in imp.get_suffixes():if suffix.startswith('.abi'):abis.append(suffix.split('.', 2)[1])abis.sort()if ABI != 'none':abis.insert(0, ABI)abis.append('none')result = []arches = [ARCH]if sys.platform == 'darwin':m = re.match('(\w+)_(\d+)_(\d+)_(\w+)$', ARCH)if m:name, major, minor, arch = m.groups()minor = int(minor)matches = [arch]if arch in ('i386', 'ppc'):matches.append('fat')if arch in ('i386', 'ppc', 'x86_64'):matches.append('fat3')if arch in ('ppc64', 'x86_64'):matches.append('fat64')if arch in ('i386', 'x86_64'):matches.append('intel')if arch in ('i386', 'x86_64', 'intel', 'ppc', 'ppc64'):matches.append('universal')while minor >= 0:for match in matches:s = '%s_%s_%s_%s' % (name, major, minor, match)if s != ARCH: # already therearches.append(s)minor -= 1# Most specific - our Python version, ABI and archfor abi in abis:for arch in 
arches:result.append((''.join((IMP_PREFIX, versions[0])), abi, arch))# where no ABI / arch dependency, but IMP_PREFIX dependencyfor i, version in enumerate(versions):result.append((''.join((IMP_PREFIX, version)), 'none', 'any'))if i == 0:result.append((''.join((IMP_PREFIX, version[0])), 'none', 'any'))# no IMP_PREFIX, ABI or arch dependencyfor i, version in enumerate(versions):result.append((''.join(('py', version)), 'none', 'any'))if i == 0:result.append((''.join(('py', version[0])), 'none', 'any'))return set(result)COMPATIBLE_TAGS = compatible_tags()del compatible_tagsdef is_compatible(wheel, tags=None):if not isinstance(wheel, Wheel):wheel = Wheel(wheel) # assume it's a filenameresult = Falseif tags is None:tags = COMPATIBLE_TAGSfor ver, abi, arch in tags:if ver in wheel.pyver and abi in wheel.abi and arch in wheel.arch:result = Truebreakreturn result
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012-2016 The Python Software Foundation.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""
Implementation of a flexible versioning scheme providing support for PEP-440,
setuptools-compatible and semantic versioning.
"""

import logging
import re

from .compat import string_types

__all__ = ['NormalizedVersion', 'NormalizedMatcher',
           'LegacyVersion', 'LegacyMatcher',
           'SemanticVersion', 'SemanticMatcher',
           'UnsupportedVersionError', 'get_scheme']

logger = logging.getLogger(__name__)


class UnsupportedVersionError(ValueError):
    """This is an unsupported version."""
    pass


class Version(object):
    """Base class for a parsed version string.

    Subclasses implement :meth:`parse`, which must return a non-empty tuple
    of mutually comparable parts; all rich comparisons and hashing are
    defined in terms of that tuple, so versions of the same scheme sort
    correctly with the built-in comparison operators.
    """

    def __init__(self, s):
        self._string = s = s.strip()
        self._parts = parts = self.parse(s)
        assert isinstance(parts, tuple)
        assert len(parts) > 0

    def parse(self, s):
        """Parse *s* into a comparison key tuple; scheme-specific."""
        raise NotImplementedError('please implement in a subclass')

    def _check_compatible(self, other):
        # Versions from different schemes must never be ordered against
        # each other - their part tuples aren't comparable.
        if type(self) != type(other):
            raise TypeError('cannot compare %r and %r' % (self, other))

    def __eq__(self, other):
        self._check_compatible(other)
        return self._parts == other._parts

    def __ne__(self, other):
        return not self.__eq__(other)

    def __lt__(self, other):
        self._check_compatible(other)
        return self._parts < other._parts

    def __gt__(self, other):
        return not (self.__lt__(other) or self.__eq__(other))

    def __le__(self, other):
        return self.__lt__(other) or self.__eq__(other)

    def __ge__(self, other):
        return self.__gt__(other) or self.__eq__(other)

    # See http://docs.python.org/reference/datamodel#object.__hash__
    def __hash__(self):
        return hash(self._parts)

    def __repr__(self):
        return "%s('%s')" % (self.__class__.__name__, self._string)

    def __str__(self):
        return self._string

    @property
    def is_prerelease(self):
        raise NotImplementedError('Please implement in subclasses.')


class Matcher(object):
    """Parse and evaluate a requirement such as ``foo (>= 1.2, < 2.0)``.

    Subclasses set :attr:`version_class` to the :class:`Version` subclass
    used for the constraint values, and may override entries of
    :attr:`_operators` with method names for scheme-specific semantics.
    """

    version_class = None

    # name, optionally followed by parenthesised constraints
    dist_re = re.compile(r"^(\w[\s\w'.-]*)(\((.*)\))?")
    # a single constraint: optional operator plus a version spec
    comp_re = re.compile(r'^(<=|>=|<|>|!=|={2,3}|~=)?\s*([^\s,]+)$')
    # purely numeric dotted version (used to validate '.*' prefixes)
    num_re = re.compile(r'^\d+(\.\d+)*$')

    # value is either a callable or the name of a method
    _operators = {
        '<': lambda v, c, p: v < c,
        '>': lambda v, c, p: v > c,
        '<=': lambda v, c, p: v == c or v < c,
        '>=': lambda v, c, p: v == c or v > c,
        '==': lambda v, c, p: v == c,
        '===': lambda v, c, p: v == c,
        # by default, compatible => >=.
        '~=': lambda v, c, p: v == c or v > c,
        '!=': lambda v, c, p: v != c,
    }

    def __init__(self, s):
        """Parse requirement string *s* into name, key and constraint parts.

        Raises ValueError if *s* is not a valid requirement, or if '.*'
        is used with an operator other than '==' / '!='.
        """
        if self.version_class is None:
            raise ValueError('Please specify a version class')
        self._string = s = s.strip()
        m = self.dist_re.match(s)
        if not m:
            raise ValueError('Not valid: %r' % s)
        groups = m.groups('')
        self.name = groups[0].strip()
        self.key = self.name.lower()    # for case-insensitive comparisons
        clist = []
        if groups[2]:
            constraints = [c.strip() for c in groups[2].split(',')]
            for c in constraints:
                m = self.comp_re.match(c)
                if not m:
                    raise ValueError('Invalid %r in %r' % (c, s))
                groups = m.groups()
                # a bare version with no operator means 'compatible with'
                op = groups[0] or '~='
                s = groups[1]
                if s.endswith('.*'):
                    if op not in ('==', '!='):
                        raise ValueError('\'.*\' not allowed for '
                                         '%r constraints' % op)
                    # Could be a partial version (e.g. for '2.*') which
                    # won't parse as a version, so keep it as a string
                    vn, prefix = s[:-2], True
                    if not self.num_re.match(vn):
                        # Just to check that vn is a valid version
                        self.version_class(vn)
                else:
                    # Should parse as a version, so we can create an
                    # instance for the comparison
                    vn, prefix = self.version_class(s), False
                clist.append((op, vn, prefix))
        self._parts = tuple(clist)

    def match(self, version):
        """
        Check if the provided version matches the constraints.

        :param version: The version to match against this instance.
        :type version: String or :class:`Version` instance.
        """
        if isinstance(version, string_types):
            version = self.version_class(version)
        for operator, constraint, prefix in self._parts:
            f = self._operators.get(operator)
            if isinstance(f, string_types):
                # string entries name a method on this instance
                f = getattr(self, f)
            if not f:
                msg = ('%r not implemented '
                       'for %s' % (operator, self.__class__.__name__))
                raise NotImplementedError(msg)
            if not f(version, constraint, prefix):
                return False
        return True

    @property
    def exact_version(self):
        """The pinned version if the matcher is a single ==/=== constraint,
        else None."""
        result = None
        if len(self._parts) == 1 and self._parts[0][0] in ('==', '==='):
            result = self._parts[0][1]
        return result

    def _check_compatible(self, other):
        if type(self) != type(other) or self.name != other.name:
            raise TypeError('cannot compare %s and %s' % (self, other))

    def __eq__(self, other):
        self._check_compatible(other)
        return self.key == other.key and self._parts == other._parts

    def __ne__(self, other):
        return not self.__eq__(other)

    # See http://docs.python.org/reference/datamodel#object.__hash__
    def __hash__(self):
        return hash(self.key) + hash(self._parts)

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self._string)

    def __str__(self):
        return self._string


PEP440_VERSION_RE = re.compile(r'^v?(\d+!)?(\d+(\.\d+)*)((a|b|c|rc)(\d+))?'
                               r'(\.(post)(\d+))?(\.(dev)(\d+))?'
                               r'(\+([a-zA-Z\d]+(\.[a-zA-Z\d]+)?))?$')


def _pep_440_key(s):
    """Return a sort key tuple for a PEP 440 version string.

    The key is ``(epoch, release, pre, post, dev, local)`` where each
    component is encoded so that plain tuple comparison yields PEP 440
    ordering (e.g. pre-releases before finals, dev before post).

    Raises UnsupportedVersionError if *s* is not PEP 440-compliant.
    """
    s = s.strip()
    m = PEP440_VERSION_RE.match(s)
    if not m:
        raise UnsupportedVersionError('Not a valid version: %s' % s)
    groups = m.groups()
    nums = tuple(int(v) for v in groups[1].split('.'))
    # drop trailing zeroes so that 1.0 == 1 == 1.0.0
    while len(nums) > 1 and nums[-1] == 0:
        nums = nums[:-1]

    if not groups[0]:
        epoch = 0
    else:
        # groups[0] includes the trailing '!' separator (e.g. '1!'),
        # which must be stripped before integer conversion - int('1!')
        # would raise ValueError.
        epoch = int(groups[0][:-1])
    pre = groups[4:6]
    post = groups[7:9]
    dev = groups[10:12]
    local = groups[13]
    if pre == (None, None):
        pre = ()
    else:
        pre = pre[0], int(pre[1])
    if post == (None, None):
        post = ()
    else:
        post = post[0], int(post[1])
    if dev == (None, None):
        dev = ()
    else:
        dev = dev[0], int(dev[1])
    if local is None:
        local = ()
    else:
        parts = []
        for part in local.split('.'):
            # to ensure that numeric compares as > lexicographic, avoid
            # comparing them directly, but encode a tuple which ensures
            # correct sorting
            if part.isdigit():
                part = (1, int(part))
            else:
                part = (0, part)
            parts.append(part)
        local = tuple(parts)
    if not pre:
        # either before pre-release, or final release and after
        if not post and dev:
            # before pre-release
            pre = ('a', -1)     # to sort before a0
        else:
            pre = ('z',)        # to sort after all pre-releases
    # now look at the state of post and dev.
    if not post:
        post = ('_',)   # sort before 'a'
    if not dev:
        dev = ('final',)

    return epoch, nums, pre, post, dev, local


_normalized_key = _pep_440_key


class NormalizedVersion(Version):
    """A rational version.

    Good:
        1.2         # equivalent to "1.2.0"
        1.2.0
        1.2a1
        1.2.3a2
        1.2.3b1
        1.2.3c1
        1.2.3.4
        TODO: fill this out

    Bad:
        1           # minimum two numbers
        1.2a        # release level must have a release serial
        1.2.3b
    """
    def parse(self, s):
        result = _normalized_key(s)
        # _normalized_key loses trailing zeroes in the release
        # clause, since that's needed to ensure that X.Y == X.Y.0 == X.Y.0.0
        # However, PEP 440 prefix matching needs it: for example,
        # (~= 1.4.5.0) matches differently to (~= 1.4.5.0.0).
        m = PEP440_VERSION_RE.match(s)      # must succeed
        groups = m.groups()
        self._release_clause = tuple(int(v) for v in groups[1].split('.'))
        return result

    PREREL_TAGS = set(['a', 'b', 'c', 'rc', 'dev'])

    @property
    def is_prerelease(self):
        return any(t[0] in self.PREREL_TAGS for t in self._parts if t)


def _match_prefix(x, y):
    """Return True if version *x* equals *y* or lies within the *y* release
    prefix (i.e. str(x) starts with str(y) at a '.' boundary)."""
    x = str(x)
    y = str(y)
    if x == y:
        return True
    if not x.startswith(y):
        return False
    n = len(y)
    return x[n] == '.'


class NormalizedMatcher(Matcher):
    version_class = NormalizedVersion

    # value is either a callable or the name of a method
    _operators = {
        '~=': '_match_compatible',
        '<': '_match_lt',
        '>': '_match_gt',
        '<=': '_match_le',
        '>=': '_match_ge',
        '==': '_match_eq',
        '===': '_match_arbitrary',
        '!=': '_match_ne',
    }

    def _adjust_local(self, version, constraint, prefix):
        """Strip the local version segment ('+...') from *version* when the
        constraint doesn't mention one, per PEP 440 comparison rules."""
        if prefix:
            strip_local = '+' not in constraint and version._parts[-1]
        else:
            # both constraint and version are
            # NormalizedVersion instances.
            # If constraint does not have a local component,
            # ensure the version doesn't, either.
            strip_local = not constraint._parts[-1] and version._parts[-1]
        if strip_local:
            s = version._string.split('+', 1)[0]
            version = self.version_class(s)
        return version, constraint

    def _match_lt(self, version, constraint, prefix):
        version, constraint = self._adjust_local(version, constraint, prefix)
        if version >= constraint:
            return False
        # PEP 440: '< V' excludes pre-releases of V itself
        release_clause = constraint._release_clause
        pfx = '.'.join([str(i) for i in release_clause])
        return not _match_prefix(version, pfx)

    def _match_gt(self, version, constraint, prefix):
        version, constraint = self._adjust_local(version, constraint, prefix)
        if version <= constraint:
            return False
        # PEP 440: '> V' excludes post-releases of V itself
        release_clause = constraint._release_clause
        pfx = '.'.join([str(i) for i in release_clause])
        return not _match_prefix(version, pfx)

    def _match_le(self, version, constraint, prefix):
        version, constraint = self._adjust_local(version, constraint, prefix)
        return version <= constraint

    def _match_ge(self, version, constraint, prefix):
        version, constraint = self._adjust_local(version, constraint, prefix)
        return version >= constraint

    def _match_eq(self, version, constraint, prefix):
        version, constraint = self._adjust_local(version, constraint, prefix)
        if not prefix:
            result = (version == constraint)
        else:
            # '== V.*' - prefix match
            result = _match_prefix(version, constraint)
        return result

    def _match_arbitrary(self, version, constraint, prefix):
        # '===' is a plain string comparison
        return str(version) == str(constraint)

    def _match_ne(self, version, constraint, prefix):
        version, constraint = self._adjust_local(version, constraint, prefix)
        if not prefix:
            result = (version != constraint)
        else:
            result = not _match_prefix(version, constraint)
        return result

    def _match_compatible(self, version, constraint, prefix):
        # '~= X.Y.Z' means '>= X.Y.Z' and '== X.Y.*'
        version, constraint = self._adjust_local(version, constraint, prefix)
        if version == constraint:
            return True
        if version < constraint:
            return False
        release_clause = constraint._release_clause
        if len(release_clause) > 1:
            release_clause = release_clause[:-1]
        pfx = '.'.join([str(i) for i in release_clause])
        return _match_prefix(version, pfx)


_REPLACEMENTS = (
    (re.compile('[.+-]$'), ''),                     # remove trailing puncts
    (re.compile(r'^[.](\d)'), r'0.\1'),             # .N -> 0.N at start
    (re.compile('^[.-]'), ''),                      # remove leading puncts
    (re.compile(r'^\((.*)\)$'), r'\1'),             # remove parentheses
    (re.compile(r'^v(ersion)?\s*(\d+)'), r'\2'),    # remove leading v(ersion)
    (re.compile(r'^r(ev)?\s*(\d+)'), r'\2'),        # remove leading v(ersion)
    (re.compile('[.]{2,}'), '.'),                   # multiple runs of '.'
    (re.compile(r'\b(alfa|apha)\b'), 'alpha'),      # misspelt alpha
    (re.compile(r'\b(pre-alpha|prealpha)\b'),
     'pre.alpha'),                                  # standardise
    (re.compile(r'\(beta\)$'), 'beta'),             # remove parentheses
)

_SUFFIX_REPLACEMENTS = (
    (re.compile('^[:~._+-]+'), ''),                 # remove leading puncts
    (re.compile('[,*")([\]]'), ''),                 # remove unwanted chars
    (re.compile('[~:+_ -]'), '.'),                  # replace illegal chars
    (re.compile('[.]{2,}'), '.'),                   # multiple runs of '.'
    (re.compile(r'\.$'), ''),                       # trailing '.'
)

_NUMERIC_PREFIX = re.compile(r'(\d+(\.\d+)*)')


def _suggest_semantic_version(s):
    """
    Try to suggest a semantic form for a version for which
    _suggest_normalized_version couldn't come up with anything.
    """
    result = s.strip().lower()
    for pat, repl in _REPLACEMENTS:
        result = pat.sub(repl, result)
    if not result:
        result = '0.0.0'

    # Now look for numeric prefix, and separate it out from
    # the rest.
    m = _NUMERIC_PREFIX.match(result)
    if not m:
        prefix = '0.0.0'
        suffix = result
    else:
        prefix = m.groups()[0].split('.')
        prefix = [int(i) for i in prefix]
        # semver needs exactly three numeric components: pad with
        # zeroes or fold the excess into the suffix
        while len(prefix) < 3:
            prefix.append(0)
        if len(prefix) == 3:
            suffix = result[m.end():]
        else:
            suffix = '.'.join([str(i) for i in prefix[3:]]) + result[m.end():]
            prefix = prefix[:3]
        prefix = '.'.join([str(i) for i in prefix])
        suffix = suffix.strip()
    if suffix:
        # massage the suffix.
        for pat, repl in _SUFFIX_REPLACEMENTS:
            suffix = pat.sub(repl, suffix)

    if not suffix:
        result = prefix
    else:
        sep = '-' if 'dev' in suffix else '+'
        result = prefix + sep + suffix
    if not is_semver(result):
        result = None
    return result


def _suggest_normalized_version(s):
    """Suggest a normalized version close to the given version string.

    If you have a version string that isn't rational (i.e. NormalizedVersion
    doesn't like it) then you might be able to get an equivalent (or close)
    rational version from this function.

    This does a number of simple normalizations to the given string, based
    on observation of versions currently in use on PyPI. Given a dump of
    those version during PyCon 2009, 4287 of them:
    - 2312 (53.93%) match NormalizedVersion without change
      with the automatic suggestion
    - 3474 (81.04%) match when using this suggestion method

    @param s {str} An irrational version string.
    @returns A rational version string, or None, if couldn't determine one.
    """
    try:
        _normalized_key(s)
        return s   # already rational
    except UnsupportedVersionError:
        pass

    rs = s.lower()

    # part of this could use maketrans
    for orig, repl in (('-alpha', 'a'), ('-beta', 'b'), ('alpha', 'a'),
                       ('beta', 'b'), ('rc', 'c'), ('-final', ''),
                       ('-pre', 'c'),
                       ('-release', ''), ('.release', ''), ('-stable', ''),
                       ('+', '.'), ('_', '.'), (' ', ''), ('.final', ''),
                       ('final', '')):
        rs = rs.replace(orig, repl)

    # if something ends with dev or pre, we add a 0
    rs = re.sub(r"pre$", r"pre0", rs)
    rs = re.sub(r"dev$", r"dev0", rs)

    # if we have something like "b-2" or "a.2" at the end of the
    # version, that is probably beta, alpha, etc
    # let's remove the dash or dot
    rs = re.sub(r"([abc]|rc)[\-\.](\d+)$", r"\1\2", rs)

    # 1.0-dev-r371 -> 1.0.dev371
    # 0.1-dev-r79 -> 0.1.dev79
    rs = re.sub(r"[\-\.](dev)[\-\.]?r?(\d+)$", r".\1\2", rs)

    # Clean: 2.0.a.3, 2.0.b1, 0.9.0~c1
    rs = re.sub(r"[.~]?([abc])\.?", r"\1", rs)

    # Clean: v0.3, v1.0
    if rs.startswith('v'):
        rs = rs[1:]

    # Clean leading '0's on numbers.
    #TODO: unintended side-effect on, e.g., "2003.05.09"
    # PyPI stats: 77 (~2%) better
    rs = re.sub(r"\b0+(\d+)(?!\d)", r"\1", rs)

    # Clean a/b/c with no version. E.g. "1.0a" -> "1.0a0". Setuptools infers
    # zero.
    # PyPI stats: 245 (7.56%) better
    rs = re.sub(r"(\d+[abc])$", r"\g<1>0", rs)

    # the 'dev-rNNN' tag is a dev tag
    rs = re.sub(r"\.?(dev-r|dev\.r)\.?(\d+)$", r".dev\2", rs)

    # clean the - when used as a pre delimiter
    rs = re.sub(r"-(a|b|c)(\d+)$", r"\1\2", rs)

    # a terminal "dev" or "devel" can be changed into ".dev0"
    rs = re.sub(r"[\.\-](dev|devel)$", r".dev0", rs)

    # a terminal "dev" can be changed into ".dev0"
    rs = re.sub(r"(?![\.\-])dev$", r".dev0", rs)

    # a terminal "final" or "stable" can be removed
    rs = re.sub(r"(final|stable)$", "", rs)

    # The 'r' and the '-' tags are post release tags
    #   0.4a1.r10       ->  0.4a1.post10
    #   0.9.33-17222    ->  0.9.33.post17222
    #   0.9.33-r17222   ->  0.9.33.post17222
    rs = re.sub(r"\.?(r|-|-r)\.?(\d+)$", r".post\2", rs)

    # Clean 'r' instead of 'dev' usage:
    #   0.9.33+r17222   ->  0.9.33.dev17222
    #   1.0dev123       ->  1.0.dev123
    #   1.0.git123      ->  1.0.dev123
    #   1.0.bzr123      ->  1.0.dev123
    #   0.1a0dev.123    ->  0.1a0.dev123
    # PyPI stats:  ~150 (~4%) better
    rs = re.sub(r"\.?(dev|git|bzr)\.?(\d+)$", r".dev\2", rs)

    # Clean '.pre' (normalized from '-pre' above) instead of 'c' usage:
    #   0.2.pre1        ->  0.2c1
    #   0.2-c1          ->  0.2c1
    #   1.0preview123   ->  1.0c123
    # PyPI stats: ~21 (0.62%) better
    rs = re.sub(r"\.?(pre|preview|-c)(\d+)$", r"c\g<2>", rs)

    # Tcl/Tk uses "px" for their post release markers
    rs = re.sub(r"p(\d+)$", r".post\1", rs)

    try:
        _normalized_key(rs)
    except UnsupportedVersionError:
        rs = None
    return rs

#
#   Legacy version processing (distribute-compatible)
#

_VERSION_PART = re.compile(r'([a-z]+|\d+|[\.-])', re.I)
_VERSION_REPLACE = {
    'pre': 'c',
    'preview': 'c',
    '-': 'final-',
    'rc': 'c',
    'dev': '@',
    '': None,
    '.': None,
}


def _legacy_key(s):
    """Return a setuptools-compatible sort key tuple for *s*.

    Numeric parts are zero-padded to 8 digits so they compare as strings;
    alphabetic parts are prefixed with '*' so they sort before numerics.
    """
    def get_parts(s):
        result = []
        for p in _VERSION_PART.split(s.lower()):
            p = _VERSION_REPLACE.get(p, p)
            if p:
                if '0' <= p[:1] <= '9':
                    p = p.zfill(8)
                else:
                    p = '*' + p
                result.append(p)
        result.append('*final')
        return result

    result = []
    for p in get_parts(s):
        if p.startswith('*'):
            if p < '*final':
                # drop any '*final-' markers before a pre-release tag
                while result and result[-1] == '*final-':
                    result.pop()
            # remove trailing zeros from each series of numeric parts
            while result and result[-1] == '00000000':
                result.pop()
        result.append(p)
    return tuple(result)


class LegacyVersion(Version):
    def parse(self, s):
        return _legacy_key(s)

    @property
    def is_prerelease(self):
        result = False
        for x in self._parts:
            if (isinstance(x, string_types) and x.startswith('*') and
                x < '*final'):
                result = True
                break
        return result


class LegacyMatcher(Matcher):
    version_class = LegacyVersion

    _operators = dict(Matcher._operators)
    _operators['~='] = '_match_compatible'

    numeric_re = re.compile('^(\d+(\.\d+)*)')

    def _match_compatible(self, version, constraint, prefix):
        if version < constraint:
            return False
        m = self.numeric_re.match(str(constraint))
        if not m:
            logger.warning('Cannot compute compatible match for version %s '
                           ' and constraint %s', version, constraint)
            return True
        s = m.groups()[0]
        if '.' in s:
            s = s.rsplit('.', 1)[0]
        return _match_prefix(version, s)

#
#   Semantic versioning
#

_SEMVER_RE = re.compile(r'^(\d+)\.(\d+)\.(\d+)'
                        r'(-[a-z0-9]+(\.[a-z0-9-]+)*)?'
                        r'(\+[a-z0-9]+(\.[a-z0-9-]+)*)?$', re.I)


def is_semver(s):
    """Return the regex match if *s* is a valid semantic version, else None."""
    return _SEMVER_RE.match(s)


def _semantic_key(s):
    """Return a sort key ``((major, minor, patch), pre, build)`` for a
    semantic version string.

    Raises UnsupportedVersionError if *s* is not valid semver.
    """
    def make_tuple(s, absent):
        if s is None:
            result = (absent,)
        else:
            parts = s[1:].split('.')
            # We can't compare ints and strings on Python 3, so fudge it
            # by zero-filling numeric values so simulate a numeric comparison
            result = tuple([p.zfill(8) if p.isdigit() else p for p in parts])
        return result

    m = is_semver(s)
    if not m:
        raise UnsupportedVersionError(s)
    groups = m.groups()
    major, minor, patch = [int(i) for i in groups[:3]]
    # choose the '|' and '*' so that versions sort correctly
    pre, build = make_tuple(groups[3], '|'), make_tuple(groups[5], '*')
    return (major, minor, patch), pre, build


class SemanticVersion(Version):
    def parse(self, s):
        return _semantic_key(s)

    @property
    def is_prerelease(self):
        # '|' is the sentinel for "no pre-release component"
        return self._parts[1][0] != '|'


class SemanticMatcher(Matcher):
    version_class = SemanticVersion


class VersionScheme(object):
    """Bundle of a sort-key function, a matcher class and an optional
    suggester callable for one versioning scheme."""

    def __init__(self, key, matcher, suggester=None):
        self.key = key
        self.matcher = matcher
        self.suggester = suggester

    def is_valid_version(self, s):
        try:
            self.matcher.version_class(s)
            result = True
        except UnsupportedVersionError:
            result = False
        return result

    def is_valid_matcher(self, s):
        try:
            self.matcher(s)
            result = True
        except UnsupportedVersionError:
            result = False
        return result

    def is_valid_constraint_list(self, s):
        """
        Used for processing some metadata fields
        """
        return self.is_valid_matcher('dummy_name (%s)' % s)

    def suggest(self, s):
        """Return a scheme-compliant suggestion for *s*, or None if no
        suggester is configured or nothing suitable can be derived."""
        if self.suggester is None:
            result = None
        else:
            result = self.suggester(s)
        return result

_SCHEMES = {
    'normalized': VersionScheme(_normalized_key, NormalizedMatcher,
                                _suggest_normalized_version),
    # the legacy scheme accepts any string, so the best suggestion is the
    # input itself.  The suggester is called as self.suggester(s), i.e.
    # with a single argument - a two-argument lambda here would raise
    # TypeError whenever suggest() was invoked for this scheme.
    'legacy': VersionScheme(_legacy_key, LegacyMatcher, lambda s: s),
    'semantic': VersionScheme(_semantic_key, SemanticMatcher,
                              _suggest_semantic_version),
}

_SCHEMES['default'] = _SCHEMES['normalized']


def get_scheme(name):
    """Return the :class:`VersionScheme` registered under *name*.

    Raises ValueError for an unknown scheme name.
    """
    if name not in _SCHEMES:
        raise ValueError('unknown scheme name: %r' % name)
    return _SCHEMES[name]
## Copyright (C) 2012-2016 The Python Software Foundation.# See LICENSE.txt and CONTRIBUTORS.txt.#import codecsfrom collections import dequeimport contextlibimport csvfrom glob import iglob as std_iglobimport ioimport jsonimport loggingimport osimport py_compileimport reimport shutilimport sockettry:import sslexcept ImportError: # pragma: no coverssl = Noneimport subprocessimport sysimport tarfileimport tempfileimport textwraptry:import threadingexcept ImportError: # pragma: no coverimport dummy_threading as threadingimport timefrom . import DistlibExceptionfrom .compat import (string_types, text_type, shutil, raw_input, StringIO,cache_from_source, urlopen, urljoin, httplib, xmlrpclib,splittype, HTTPHandler, BaseConfigurator, valid_ident,Container, configparser, URLError, ZipFile, fsdecode,unquote)logger = logging.getLogger(__name__)## Requirement parsing code for name + optional constraints + optional extras## e.g. 'foo >= 1.2, < 2.0 [bar, baz]'## The regex can seem a bit hairy, so we build it up out of smaller pieces# which are manageable.#COMMA = r'\s*,\s*'COMMA_RE = re.compile(COMMA)IDENT = r'(\w|[.-])+'EXTRA_IDENT = r'(\*|:(\*|\w+):|' + IDENT + ')'VERSPEC = IDENT + r'\*?'RELOP = '([<>=!~]=)|[<>]'## The first relop is optional - if absent, will be taken as '~='#BARE_CONSTRAINTS = ('(' + RELOP + r')?\s*(' + VERSPEC + ')(' + COMMA + '(' +RELOP + r')\s*(' + VERSPEC + '))*')DIRECT_REF = '(from\s+(?P<diref>.*))'## Either the bare constraints or the bare constraints in parentheses#CONSTRAINTS = (r'\(\s*(?P<c1>' + BARE_CONSTRAINTS + '|' + DIRECT_REF +r')\s*\)|(?P<c2>' + BARE_CONSTRAINTS + '\s*)')EXTRA_LIST = EXTRA_IDENT + '(' + COMMA + EXTRA_IDENT + ')*'EXTRAS = r'\[\s*(?P<ex>' + EXTRA_LIST + r')?\s*\]'REQUIREMENT = ('(?P<dn>' + IDENT + r')\s*(' + EXTRAS + r'\s*)?(\s*' +CONSTRAINTS + ')?$')REQUIREMENT_RE = re.compile(REQUIREMENT)## Used to scan through the constraints#RELOP_IDENT = '(?P<op>' + RELOP + r')\s*(?P<vn>' + VERSPEC + ')'RELOP_IDENT_RE = 
re.compile(RELOP_IDENT)def parse_requirement(s):def get_constraint(m):d = m.groupdict()return d['op'], d['vn']result = Nonem = REQUIREMENT_RE.match(s)if m:d = m.groupdict()name = d['dn']cons = d['c1'] or d['c2']if not d['diref']:url = Noneelse:# direct referencecons = Noneurl = d['diref'].strip()if not cons:cons = Noneconstr = ''rs = d['dn']else:if cons[0] not in '<>!=':cons = '~=' + consiterator = RELOP_IDENT_RE.finditer(cons)cons = [get_constraint(m) for m in iterator]rs = '%s (%s)' % (name, ', '.join(['%s %s' % con for con in cons]))if not d['ex']:extras = Noneelse:extras = COMMA_RE.split(d['ex'])result = Container(name=name, constraints=cons, extras=extras,requirement=rs, source=s, url=url)return resultdef get_resources_dests(resources_root, rules):"""Find destinations for resources files"""def get_rel_path(base, path):# normalizes and returns a lstripped-/-separated pathbase = base.replace(os.path.sep, '/')path = path.replace(os.path.sep, '/')assert path.startswith(base)return path[len(base):].lstrip('/')destinations = {}for base, suffix, dest in rules:prefix = os.path.join(resources_root, base)for abs_base in iglob(prefix):abs_glob = os.path.join(abs_base, suffix)for abs_path in iglob(abs_glob):resource_file = get_rel_path(resources_root, abs_path)if dest is None: # remove the entry if it was heredestinations.pop(resource_file, None)else:rel_path = get_rel_path(abs_base, abs_path)rel_dest = dest.replace(os.path.sep, '/').rstrip('/')destinations[resource_file] = rel_dest + '/' + rel_pathreturn destinationsdef in_venv():if hasattr(sys, 'real_prefix'):# virtualenv venvsresult = Trueelse:# PEP 405 venvsresult = sys.prefix != getattr(sys, 'base_prefix', sys.prefix)return resultdef get_executable():# The __PYVENV_LAUNCHER__ dance is apparently no longer needed, as# changes to the stub launcher mean that sys.executable always points# to the stub on macOS# if sys.platform == 'darwin' and ('__PYVENV_LAUNCHER__'# in os.environ):# result = 
os.environ['__PYVENV_LAUNCHER__']# else:# result = sys.executable# return resultresult = os.path.normcase(sys.executable)if not isinstance(result, text_type):result = fsdecode(result)return resultdef proceed(prompt, allowed_chars, error_prompt=None, default=None):p = promptwhile True:s = raw_input(p)p = promptif not s and default:s = defaultif s:c = s[0].lower()if c in allowed_chars:breakif error_prompt:p = '%c: %s\n%s' % (c, error_prompt, prompt)return cdef extract_by_key(d, keys):if isinstance(keys, string_types):keys = keys.split()result = {}for key in keys:if key in d:result[key] = d[key]return resultdef read_exports(stream):if sys.version_info[0] >= 3:# needs to be a text streamstream = codecs.getreader('utf-8')(stream)# Try to load as JSON, falling back on legacy formatdata = stream.read()stream = StringIO(data)try:jdata = json.load(stream)result = jdata['extensions']['python.exports']['exports']for group, entries in result.items():for k, v in entries.items():s = '%s = %s' % (k, v)entry = get_export_entry(s)assert entry is not Noneentries[k] = entryreturn resultexcept Exception:stream.seek(0, 0)def read_stream(cp, stream):if hasattr(cp, 'read_file'):cp.read_file(stream)else:cp.readfp(stream)cp = configparser.ConfigParser()try:read_stream(cp, stream)except configparser.MissingSectionHeaderError:stream.close()data = textwrap.dedent(data)stream = StringIO(data)read_stream(cp, stream)result = {}for key in cp.sections():result[key] = entries = {}for name, value in cp.items(key):s = '%s = %s' % (name, value)entry = get_export_entry(s)assert entry is not None#entry.dist = selfentries[name] = entryreturn resultdef write_exports(exports, stream):if sys.version_info[0] >= 3:# needs to be a text streamstream = codecs.getwriter('utf-8')(stream)cp = configparser.ConfigParser()for k, v in exports.items():# TODO check k, v for valid valuescp.add_section(k)for entry in v.values():if entry.suffix is None:s = entry.prefixelse:s = '%s:%s' % (entry.prefix, entry.suffix)if 
entry.flags:s = '%s [%s]' % (s, ', '.join(entry.flags))cp.set(k, entry.name, s)cp.write(stream)@contextlib.contextmanagerdef tempdir():td = tempfile.mkdtemp()try:yield tdfinally:shutil.rmtree(td)@contextlib.contextmanagerdef chdir(d):cwd = os.getcwd()try:os.chdir(d)yieldfinally:os.chdir(cwd)@contextlib.contextmanagerdef socket_timeout(seconds=15):cto = socket.getdefaulttimeout()try:socket.setdefaulttimeout(seconds)yieldfinally:socket.setdefaulttimeout(cto)class cached_property(object):def __init__(self, func):self.func = func#for attr in ('__name__', '__module__', '__doc__'):# setattr(self, attr, getattr(func, attr, None))def __get__(self, obj, cls=None):if obj is None:return selfvalue = self.func(obj)object.__setattr__(obj, self.func.__name__, value)#obj.__dict__[self.func.__name__] = value = self.func(obj)return valuedef convert_path(pathname):"""Return 'pathname' as a name that will work on the native filesystem.The path is split on '/' and put back together again using the currentdirectory separator. Needed because filenames in the setup script arealways supplied in Unix style, and have to be converted to the localconvention before we can actually use them in the filesystem. 
RaisesValueError on non-Unix-ish systems if 'pathname' either starts orends with a slash."""if os.sep == '/':return pathnameif not pathname:return pathnameif pathname[0] == '/':raise ValueError("path '%s' cannot be absolute" % pathname)if pathname[-1] == '/':raise ValueError("path '%s' cannot end with '/'" % pathname)paths = pathname.split('/')while os.curdir in paths:paths.remove(os.curdir)if not paths:return os.curdirreturn os.path.join(*paths)class FileOperator(object):def __init__(self, dry_run=False):self.dry_run = dry_runself.ensured = set()self._init_record()def _init_record(self):self.record = Falseself.files_written = set()self.dirs_created = set()def record_as_written(self, path):if self.record:self.files_written.add(path)def newer(self, source, target):"""Tell if the target is newer than the source.Returns true if 'source' exists and is more recently modified than'target', or if 'source' exists and 'target' doesn't.Returns false if both exist and 'target' is the same age or youngerthan 'source'. 
Raise PackagingFileError if 'source' does not exist.Note that this test is not very accurate: files created in the samesecond will have the same "age"."""if not os.path.exists(source):raise DistlibException("file '%r' does not exist" %os.path.abspath(source))if not os.path.exists(target):return Truereturn os.stat(source).st_mtime > os.stat(target).st_mtimedef copy_file(self, infile, outfile, check=True):"""Copy a file respecting dry-run and force flags."""self.ensure_dir(os.path.dirname(outfile))logger.info('Copying %s to %s', infile, outfile)if not self.dry_run:msg = Noneif check:if os.path.islink(outfile):msg = '%s is a symlink' % outfileelif os.path.exists(outfile) and not os.path.isfile(outfile):msg = '%s is a non-regular file' % outfileif msg:raise ValueError(msg + ' which would be overwritten')shutil.copyfile(infile, outfile)self.record_as_written(outfile)def copy_stream(self, instream, outfile, encoding=None):assert not os.path.isdir(outfile)self.ensure_dir(os.path.dirname(outfile))logger.info('Copying stream %s to %s', instream, outfile)if not self.dry_run:if encoding is None:outstream = open(outfile, 'wb')else:outstream = codecs.open(outfile, 'w', encoding=encoding)try:shutil.copyfileobj(instream, outstream)finally:outstream.close()self.record_as_written(outfile)def write_binary_file(self, path, data):self.ensure_dir(os.path.dirname(path))if not self.dry_run:with open(path, 'wb') as f:f.write(data)self.record_as_written(path)def write_text_file(self, path, data, encoding):self.ensure_dir(os.path.dirname(path))if not self.dry_run:with open(path, 'wb') as f:f.write(data.encode(encoding))self.record_as_written(path)def set_mode(self, bits, mask, files):if os.name == 'posix' or (os.name == 'java' and os._name == 'posix'):# Set the executable bits (owner, group, and world) on# all the files specified.for f in files:if self.dry_run:logger.info("changing mode of %s", f)else:mode = (os.stat(f).st_mode | bits) & masklogger.info("changing mode of %s to %o", f, 
mode)os.chmod(f, mode)set_executable_mode = lambda s, f: s.set_mode(0o555, 0o7777, f)def ensure_dir(self, path):path = os.path.abspath(path)if path not in self.ensured and not os.path.exists(path):self.ensured.add(path)d, f = os.path.split(path)self.ensure_dir(d)logger.info('Creating %s' % path)if not self.dry_run:os.mkdir(path)if self.record:self.dirs_created.add(path)def byte_compile(self, path, optimize=False, force=False, prefix=None):dpath = cache_from_source(path, not optimize)logger.info('Byte-compiling %s to %s', path, dpath)if not self.dry_run:if force or self.newer(path, dpath):if not prefix:diagpath = Noneelse:assert path.startswith(prefix)diagpath = path[len(prefix):]py_compile.compile(path, dpath, diagpath, True) # raise errorself.record_as_written(dpath)return dpathdef ensure_removed(self, path):if os.path.exists(path):if os.path.isdir(path) and not os.path.islink(path):logger.debug('Removing directory tree at %s', path)if not self.dry_run:shutil.rmtree(path)if self.record:if path in self.dirs_created:self.dirs_created.remove(path)else:if os.path.islink(path):s = 'link'else:s = 'file'logger.debug('Removing %s %s', s, path)if not self.dry_run:os.remove(path)if self.record:if path in self.files_written:self.files_written.remove(path)def is_writable(self, path):result = Falsewhile not result:if os.path.exists(path):result = os.access(path, os.W_OK)breakparent = os.path.dirname(path)if parent == path:breakpath = parentreturn resultdef commit(self):"""Commit recorded changes, turn off recording, returnchanges."""assert self.recordresult = self.files_written, self.dirs_createdself._init_record()return resultdef rollback(self):if not self.dry_run:for f in list(self.files_written):if os.path.exists(f):os.remove(f)# dirs should all be empty now, except perhaps for# __pycache__ subdirs# reverse so that subdirs appear before their parentsdirs = sorted(self.dirs_created, reverse=True)for d in dirs:flist = os.listdir(d)if flist:assert flist == ['__pycache__']sd = 
os.path.join(d, flist[0])os.rmdir(sd)os.rmdir(d) # should fail if non-emptyself._init_record()def resolve(module_name, dotted_path):if module_name in sys.modules:mod = sys.modules[module_name]else:mod = __import__(module_name)if dotted_path is None:result = modelse:parts = dotted_path.split('.')result = getattr(mod, parts.pop(0))for p in parts:result = getattr(result, p)return resultclass ExportEntry(object):def __init__(self, name, prefix, suffix, flags):self.name = nameself.prefix = prefixself.suffix = suffixself.flags = flags@cached_propertydef value(self):return resolve(self.prefix, self.suffix)def __repr__(self): # pragma: no coverreturn '<ExportEntry %s = %s:%s %s>' % (self.name, self.prefix,self.suffix, self.flags)def __eq__(self, other):if not isinstance(other, ExportEntry):result = Falseelse:result = (self.name == other.name andself.prefix == other.prefix andself.suffix == other.suffix andself.flags == other.flags)return result__hash__ = object.__hash__ENTRY_RE = re.compile(r'''(?P<name>(\w|[-.+])+)\s*=\s*(?P<callable>(\w+)([:\.]\w+)*)\s*(\[\s*(?P<flags>\w+(=\w+)?(,\s*\w+(=\w+)?)*)\s*\])?''', re.VERBOSE)def get_export_entry(specification):m = ENTRY_RE.search(specification)if not m:result = Noneif '[' in specification or ']' in specification:raise DistlibException("Invalid specification ""'%s'" % specification)else:d = m.groupdict()name = d['name']path = d['callable']colons = path.count(':')if colons == 0:prefix, suffix = path, Noneelse:if colons != 1:raise DistlibException("Invalid specification ""'%s'" % specification)prefix, suffix = path.split(':')flags = d['flags']if flags is None:if '[' in specification or ']' in specification:raise DistlibException("Invalid specification ""'%s'" % specification)flags = []else:flags = [f.strip() for f in flags.split(',')]result = ExportEntry(name, prefix, suffix, flags)return resultdef get_cache_base(suffix=None):"""Return the default base location for distlib caches. If the directory doesnot exist, it is created. 
Use the suffix provided for the base directory,and default to '.distlib' if it isn't provided.On Windows, if LOCALAPPDATA is defined in the environment, then it isassumed to be a directory, and will be the parent directory of the result.On POSIX, and on Windows if LOCALAPPDATA is not defined, the user's homedirectory - using os.expanduser('~') - will be the parent directory ofthe result.The result is just the directory '.distlib' in the parent directory asdetermined above, or with the name specified with ``suffix``."""if suffix is None:suffix = '.distlib'if os.name == 'nt' and 'LOCALAPPDATA' in os.environ:result = os.path.expandvars('$localappdata')else:# Assume posix, or old Windowsresult = os.path.expanduser('~')# we use 'isdir' instead of 'exists', because we want to# fail if there's a file with that nameif os.path.isdir(result):usable = os.access(result, os.W_OK)if not usable:logger.warning('Directory exists but is not writable: %s', result)else:try:os.makedirs(result)usable = Trueexcept OSError:logger.warning('Unable to create %s', result, exc_info=True)usable = Falseif not usable:result = tempfile.mkdtemp()logger.warning('Default location unusable, using %s', result)return os.path.join(result, suffix)def path_to_cache_dir(path):"""Convert an absolute path to a directory name for use in a cache.The algorithm used is:#. On Windows, any ``':'`` in the drive is replaced with ``'---'``.#. Any occurrence of ``os.sep`` is replaced with ``'--'``.#. 
``'.cache'`` is appended."""d, p = os.path.splitdrive(os.path.abspath(path))if d:d = d.replace(':', '---')p = p.replace(os.sep, '--')return d + p + '.cache'def ensure_slash(s):if not s.endswith('/'):return s + '/'return sdef parse_credentials(netloc):username = password = Noneif '@' in netloc:prefix, netloc = netloc.split('@', 1)if ':' not in prefix:username = prefixelse:username, password = prefix.split(':', 1)return username, password, netlocdef get_process_umask():result = os.umask(0o22)os.umask(result)return resultdef is_string_sequence(seq):result = Truei = Nonefor i, s in enumerate(seq):if not isinstance(s, string_types):result = Falsebreakassert i is not Nonereturn resultPROJECT_NAME_AND_VERSION = re.compile('([a-z0-9_]+([.-][a-z_][a-z0-9_]*)*)-''([a-z0-9_.+-]+)', re.I)PYTHON_VERSION = re.compile(r'-py(\d\.?\d?)')def split_filename(filename, project_name=None):"""Extract name, version, python version from a filename (no extension)Return name, version, pyver or None"""result = Nonepyver = Nonefilename = unquote(filename).replace(' ', '-')m = PYTHON_VERSION.search(filename)if m:pyver = m.group(1)filename = filename[:m.start()]if project_name and len(filename) > len(project_name) + 1:m = re.match(re.escape(project_name) + r'\b', filename)if m:n = m.end()result = filename[:n], filename[n + 1:], pyverif result is None:m = PROJECT_NAME_AND_VERSION.match(filename)if m:result = m.group(1), m.group(3), pyverreturn result# Allow spaces in name because of legacy dists like "Twisted Core"NAME_VERSION_RE = re.compile(r'(?P<name>[\w .-]+)\s*'r'\(\s*(?P<ver>[^\s)]+)\)$')def parse_name_and_version(p):"""A utility method used to get name and version from a string.From e.g. 
a Provides-Dist value.:param p: A value in a form 'foo (1.0)':return: The name and version as a tuple."""m = NAME_VERSION_RE.match(p)if not m:raise DistlibException('Ill-formed name/version string: \'%s\'' % p)d = m.groupdict()return d['name'].strip().lower(), d['ver']def get_extras(requested, available):result = set()requested = set(requested or [])available = set(available or [])if '*' in requested:requested.remove('*')result |= availablefor r in requested:if r == '-':result.add(r)elif r.startswith('-'):unwanted = r[1:]if unwanted not in available:logger.warning('undeclared extra: %s' % unwanted)if unwanted in result:result.remove(unwanted)else:if r not in available:logger.warning('undeclared extra: %s' % r)result.add(r)return result## Extended metadata functionality#def _get_external_data(url):result = {}try:# urlopen might fail if it runs into redirections,# because of Python issue #13696. Fixed in locators# using a custom redirect handler.resp = urlopen(url)headers = resp.info()ct = headers.get('Content-Type')if not ct.startswith('application/json'):logger.debug('Unexpected response for JSON request: %s', ct)else:reader = codecs.getreader('utf-8')(resp)#data = reader.read().decode('utf-8')#result = json.loads(data)result = json.load(reader)except Exception as e:logger.exception('Failed to get external data for %s: %s', url, e)return result_external_data_base_url = 'https://www.red-dove.com/pypi/projects/'def get_project_data(name):url = '%s/%s/project.json' % (name[0].upper(), name)url = urljoin(_external_data_base_url, url)result = _get_external_data(url)return resultdef get_package_data(name, version):url = '%s/%s/package-%s.json' % (name[0].upper(), name, version)url = urljoin(_external_data_base_url, url)return _get_external_data(url)class Cache(object):"""A class implementing a cache for resources that need to live in the file systeme.g. shared libraries. This class was moved from resources to here because itcould be used by other modules, e.g. 
the wheel module."""def __init__(self, base):"""Initialise an instance.:param base: The base directory where the cache should be located."""# we use 'isdir' instead of 'exists', because we want to# fail if there's a file with that nameif not os.path.isdir(base): # pragma: no coveros.makedirs(base)if (os.stat(base).st_mode & 0o77) != 0:logger.warning('Directory \'%s\' is not private', base)self.base = os.path.abspath(os.path.normpath(base))def prefix_to_dir(self, prefix):"""Converts a resource prefix to a directory name in the cache."""return path_to_cache_dir(prefix)def clear(self):"""Clear the cache."""not_removed = []for fn in os.listdir(self.base):fn = os.path.join(self.base, fn)try:if os.path.islink(fn) or os.path.isfile(fn):os.remove(fn)elif os.path.isdir(fn):shutil.rmtree(fn)except Exception:not_removed.append(fn)return not_removedclass EventMixin(object):"""A very simple publish/subscribe system."""def __init__(self):self._subscribers = {}def add(self, event, subscriber, append=True):"""Add a subscriber for an event.:param event: The name of an event.:param subscriber: The subscriber to be added (and called when theevent is published).:param append: Whether to append or prepend the subscriber to anexisting subscriber list for the event."""subs = self._subscribersif event not in subs:subs[event] = deque([subscriber])else:sq = subs[event]if append:sq.append(subscriber)else:sq.appendleft(subscriber)def remove(self, event, subscriber):"""Remove a subscriber for an event.:param event: The name of an event.:param subscriber: The subscriber to be removed."""subs = self._subscribersif event not in subs:raise ValueError('No subscribers: %r' % event)subs[event].remove(subscriber)def get_subscribers(self, event):"""Return an iterator for the subscribers for an event.:param event: The event to return subscribers for."""return iter(self._subscribers.get(event, ()))def publish(self, event, *args, **kwargs):"""Publish a event and return a list of values returned by 
itssubscribers.:param event: The event to publish.:param args: The positional arguments to pass to the event'ssubscribers.:param kwargs: The keyword arguments to pass to the event'ssubscribers."""result = []for subscriber in self.get_subscribers(event):try:value = subscriber(event, *args, **kwargs)except Exception:logger.exception('Exception during event publication')value = Noneresult.append(value)logger.debug('publish %s: args = %s, kwargs = %s, result = %s',event, args, kwargs, result)return result## Simple sequencing#class Sequencer(object):def __init__(self):self._preds = {}self._succs = {}self._nodes = set() # nodes with no preds/succsdef add_node(self, node):self._nodes.add(node)def remove_node(self, node, edges=False):if node in self._nodes:self._nodes.remove(node)if edges:for p in set(self._preds.get(node, ())):self.remove(p, node)for s in set(self._succs.get(node, ())):self.remove(node, s)# Remove emptiesfor k, v in list(self._preds.items()):if not v:del self._preds[k]for k, v in list(self._succs.items()):if not v:del self._succs[k]def add(self, pred, succ):assert pred != succself._preds.setdefault(succ, set()).add(pred)self._succs.setdefault(pred, set()).add(succ)def remove(self, pred, succ):assert pred != succtry:preds = self._preds[succ]succs = self._succs[pred]except KeyError: # pragma: no coverraise ValueError('%r not a successor of anything' % succ)try:preds.remove(pred)succs.remove(succ)except KeyError: # pragma: no coverraise ValueError('%r not a successor of %r' % (succ, pred))def is_step(self, step):return (step in self._preds or step in self._succs orstep in self._nodes)def get_steps(self, final):if not self.is_step(final):raise ValueError('Unknown: %r' % final)result = []todo = []seen = set()todo.append(final)while todo:step = todo.pop(0)if step in seen:# if a step was already seen,# move it to the end (so it will appear earlier# when reversed on return) ... 
but not for the# final step, as that would be confusing for# usersif step != final:result.remove(step)result.append(step)else:seen.add(step)result.append(step)preds = self._preds.get(step, ())todo.extend(preds)return reversed(result)@propertydef strong_connections(self):#http://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithmindex_counter = [0]stack = []lowlinks = {}index = {}result = []graph = self._succsdef strongconnect(node):# set the depth index for this node to the smallest unused indexindex[node] = index_counter[0]lowlinks[node] = index_counter[0]index_counter[0] += 1stack.append(node)# Consider successorstry:successors = graph[node]except Exception:successors = []for successor in successors:if successor not in lowlinks:# Successor has not yet been visitedstrongconnect(successor)lowlinks[node] = min(lowlinks[node],lowlinks[successor])elif successor in stack:# the successor is in the stack and hence in the current# strongly connected component (SCC)lowlinks[node] = min(lowlinks[node],index[successor])# If `node` is a root node, pop the stack and generate an SCCif lowlinks[node] == index[node]:connected_component = []while True:successor = stack.pop()connected_component.append(successor)if successor == node: breakcomponent = tuple(connected_component)# storing the resultresult.append(component)for node in graph:if node not in lowlinks:strongconnect(node)return result@propertydef dot(self):result = ['digraph G {']for succ in self._preds:preds = self._preds[succ]for pred in preds:result.append(' %s -> %s;' % (pred, succ))for node in self._nodes:result.append(' %s;' % node)result.append('}')return '\n'.join(result)## Unarchiving functionality for zip, tar, tgz, tbz, whl#ARCHIVE_EXTENSIONS = ('.tar.gz', '.tar.bz2', '.tar', '.zip','.tgz', '.tbz', '.whl')def unarchive(archive_filename, dest_dir, format=None, check=True):def check_path(path):if not isinstance(path, text_type):path = path.decode('utf-8')p = 
os.path.abspath(os.path.join(dest_dir, path))if not p.startswith(dest_dir) or p[plen] != os.sep:raise ValueError('path outside destination: %r' % p)dest_dir = os.path.abspath(dest_dir)plen = len(dest_dir)archive = Noneif format is None:if archive_filename.endswith(('.zip', '.whl')):format = 'zip'elif archive_filename.endswith(('.tar.gz', '.tgz')):format = 'tgz'mode = 'r:gz'elif archive_filename.endswith(('.tar.bz2', '.tbz')):format = 'tbz'mode = 'r:bz2'elif archive_filename.endswith('.tar'):format = 'tar'mode = 'r'else: # pragma: no coverraise ValueError('Unknown format for %r' % archive_filename)try:if format == 'zip':archive = ZipFile(archive_filename, 'r')if check:names = archive.namelist()for name in names:check_path(name)else:archive = tarfile.open(archive_filename, mode)if check:names = archive.getnames()for name in names:check_path(name)if format != 'zip' and sys.version_info[0] < 3:# See Python issue 17153. If the dest path contains Unicode,# tarfile extraction fails on Python 2.x if a member path name# contains non-ASCII characters - it leads to an implicit# bytes -> unicode conversion using ASCII to decode.for tarinfo in archive.getmembers():if not isinstance(tarinfo.name, text_type):tarinfo.name = tarinfo.name.decode('utf-8')archive.extractall(dest_dir)finally:if archive:archive.close()def zip_dir(directory):"""zip a directory tree into a BytesIO object"""result = io.BytesIO()dlen = len(directory)with ZipFile(result, "w") as zf:for root, dirs, files in os.walk(directory):for name in files:full = os.path.join(root, name)rel = root[dlen:]dest = os.path.join(rel, name)zf.write(full, dest)return result## Simple progress bar#UNITS = ('', 'K', 'M', 'G','T','P')class Progress(object):unknown = 'UNKNOWN'def __init__(self, minval=0, maxval=100):assert maxval is None or maxval >= minvalself.min = self.cur = minvalself.max = maxvalself.started = Noneself.elapsed = 0self.done = Falsedef update(self, curval):assert self.min <= curvalassert self.max is None or curval 
<= self.maxself.cur = curvalnow = time.time()if self.started is None:self.started = nowelse:self.elapsed = now - self.starteddef increment(self, incr):assert incr >= 0self.update(self.cur + incr)def start(self):self.update(self.min)return selfdef stop(self):if self.max is not None:self.update(self.max)self.done = True@propertydef maximum(self):return self.unknown if self.max is None else self.max@propertydef percentage(self):if self.done:result = '100 %'elif self.max is None:result = ' ?? %'else:v = 100.0 * (self.cur - self.min) / (self.max - self.min)result = '%3d %%' % vreturn resultdef format_duration(self, duration):if (duration <= 0) and self.max is None or self.cur == self.min:result = '??:??:??'#elif duration < 1:# result = '--:--:--'else:result = time.strftime('%H:%M:%S', time.gmtime(duration))return result@propertydef ETA(self):if self.done:prefix = 'Done't = self.elapsed#import pdb; pdb.set_trace()else:prefix = 'ETA 'if self.max is None:t = -1elif self.elapsed == 0 or (self.cur == self.min):t = 0else:#import pdb; pdb.set_trace()t = float(self.max - self.min)t /= self.cur - self.mint = (t - 1) * self.elapsedreturn '%s: %s' % (prefix, self.format_duration(t))@propertydef speed(self):if self.elapsed == 0:result = 0.0else:result = (self.cur - self.min) / self.elapsedfor unit in UNITS:if result < 1000:breakresult /= 1000.0return '%d %sB/s' % (result, unit)## Glob functionality#RICH_GLOB = re.compile(r'\{([^}]*)\}')_CHECK_RECURSIVE_GLOB = re.compile(r'[^/\\,{]\*\*|\*\*[^/\\,}]')_CHECK_MISMATCH_SET = re.compile(r'^[^{]*\}|\{[^}]*$')def iglob(path_glob):"""Extended globbing function that supports ** and {opt1,opt2,opt3}."""if _CHECK_RECURSIVE_GLOB.search(path_glob):msg = """invalid glob %r: recursive glob "**" must be used alone"""raise ValueError(msg % path_glob)if _CHECK_MISMATCH_SET.search(path_glob):msg = """invalid glob %r: mismatching set marker '{' or '}'"""raise ValueError(msg % path_glob)return _iglob(path_glob)def _iglob(path_glob):rich_path_glob = 
RICH_GLOB.split(path_glob, 1)if len(rich_path_glob) > 1:assert len(rich_path_glob) == 3, rich_path_globprefix, set, suffix = rich_path_globfor item in set.split(','):for path in _iglob(''.join((prefix, item, suffix))):yield pathelse:if '**' not in path_glob:for item in std_iglob(path_glob):yield itemelse:prefix, radical = path_glob.split('**', 1)if prefix == '':prefix = '.'if radical == '':radical = '*'else:# we support bothradical = radical.lstrip('/')radical = radical.lstrip('\\')for path, dir, files in os.walk(prefix):path = os.path.normpath(path)for fn in _iglob(os.path.join(path, radical)):yield fnif ssl:from .compat import (HTTPSHandler as BaseHTTPSHandler, match_hostname,CertificateError)## HTTPSConnection which verifies certificates/matches domains#class HTTPSConnection(httplib.HTTPSConnection):ca_certs = None # set this to the path to the certs file (.pem)check_domain = True # only used if ca_certs is not None# noinspection PyPropertyAccessdef connect(self):sock = socket.create_connection((self.host, self.port), self.timeout)if getattr(self, '_tunnel_host', False):self.sock = sockself._tunnel()if not hasattr(ssl, 'SSLContext'):# For 2.xif self.ca_certs:cert_reqs = ssl.CERT_REQUIREDelse:cert_reqs = ssl.CERT_NONEself.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file,cert_reqs=cert_reqs,ssl_version=ssl.PROTOCOL_SSLv23,ca_certs=self.ca_certs)else: # pragma: no covercontext = ssl.SSLContext(ssl.PROTOCOL_SSLv23)context.options |= ssl.OP_NO_SSLv2if self.cert_file:context.load_cert_chain(self.cert_file, self.key_file)kwargs = {}if self.ca_certs:context.verify_mode = ssl.CERT_REQUIREDcontext.load_verify_locations(cafile=self.ca_certs)if getattr(ssl, 'HAS_SNI', False):kwargs['server_hostname'] = self.hostself.sock = context.wrap_socket(sock, **kwargs)if self.ca_certs and self.check_domain:try:match_hostname(self.sock.getpeercert(), self.host)logger.debug('Host verified: %s', self.host)except CertificateError: # pragma: no 
coverself.sock.shutdown(socket.SHUT_RDWR)self.sock.close()raiseclass HTTPSHandler(BaseHTTPSHandler):def __init__(self, ca_certs, check_domain=True):BaseHTTPSHandler.__init__(self)self.ca_certs = ca_certsself.check_domain = check_domaindef _conn_maker(self, *args, **kwargs):"""This is called to create a connection instance. Normally you'dpass a connection class to do_open, but it doesn't actually check fora class, and just expects a callable. As long as we behave just as aconstructor would have, we should be OK. If it ever changes so thatwe *must* pass a class, we'll create an UnsafeHTTPSConnection classwhich just sets check_domain to False in the class definition, andchoose which one to pass to do_open."""result = HTTPSConnection(*args, **kwargs)if self.ca_certs:result.ca_certs = self.ca_certsresult.check_domain = self.check_domainreturn resultdef https_open(self, req):try:return self.do_open(self._conn_maker, req)except URLError as e:if 'certificate verify failed' in str(e.reason):raise CertificateError('Unable to verify server certificate ''for %s' % req.host)else:raise## To prevent against mixing HTTP traffic with HTTPS (examples: A Man-In-The-# Middle proxy using HTTP listens on port 443, or an index mistakenly serves# HTML containing a http://xyz link when it should be https://xyz),# you can use the following handler class, which does not allow HTTP traffic.## It works by inheriting from HTTPHandler - so build_opener won't add a# handler for HTTP itself.#class HTTPSOnlyHandler(HTTPSHandler, HTTPHandler):def http_open(self, req):raise URLError('Unexpected HTTP request on what should be a secure ''connection: %s' % req)## XML-RPC with timeouts#_ver_info = sys.version_info[:2]if _ver_info == (2, 6):class HTTP(httplib.HTTP):def __init__(self, host='', port=None, **kwargs):if port == 0: # 0 means use port 0, not the default portport = Noneself._setup(self._connection_class(host, port, **kwargs))if ssl:class HTTPS(httplib.HTTPS):def __init__(self, host='', 
port=None, **kwargs):if port == 0: # 0 means use port 0, not the default portport = Noneself._setup(self._connection_class(host, port, **kwargs))class Transport(xmlrpclib.Transport):def __init__(self, timeout, use_datetime=0):self.timeout = timeoutxmlrpclib.Transport.__init__(self, use_datetime)def make_connection(self, host):h, eh, x509 = self.get_host_info(host)if _ver_info == (2, 6):result = HTTP(h, timeout=self.timeout)else:if not self._connection or host != self._connection[0]:self._extra_headers = ehself._connection = host, httplib.HTTPConnection(h)result = self._connection[1]return resultif ssl:class SafeTransport(xmlrpclib.SafeTransport):def __init__(self, timeout, use_datetime=0):self.timeout = timeoutxmlrpclib.SafeTransport.__init__(self, use_datetime)def make_connection(self, host):h, eh, kwargs = self.get_host_info(host)if not kwargs:kwargs = {}kwargs['timeout'] = self.timeoutif _ver_info == (2, 6):result = HTTPS(host, None, **kwargs)else:if not self._connection or host != self._connection[0]:self._extra_headers = ehself._connection = host, httplib.HTTPSConnection(h, None,**kwargs)result = self._connection[1]return resultclass ServerProxy(xmlrpclib.ServerProxy):def __init__(self, uri, **kwargs):self.timeout = timeout = kwargs.pop('timeout', None)# The above classes only come into play if a timeout# is specifiedif timeout is not None:scheme, _ = splittype(uri)use_datetime = kwargs.get('use_datetime', 0)if scheme == 'https':tcls = SafeTransportelse:tcls = Transportkwargs['transport'] = t = tcls(timeout, use_datetime=use_datetime)self.transport = txmlrpclib.ServerProxy.__init__(self, uri, **kwargs)## CSV functionality. This is provided because on 2.x, the csv module can't# handle Unicode. However, we need to deal with Unicode in e.g. 
RECORD files.#def _csv_open(fn, mode, **kwargs):if sys.version_info[0] < 3:mode += 'b'else:kwargs['newline'] = ''return open(fn, mode, **kwargs)class CSVBase(object):defaults = {'delimiter': str(','), # The strs are used because we need native'quotechar': str('"'), # str in the csv API (2.x won't take'lineterminator': str('\n') # Unicode)}def __enter__(self):return selfdef __exit__(self, *exc_info):self.stream.close()class CSVReader(CSVBase):def __init__(self, **kwargs):if 'stream' in kwargs:stream = kwargs['stream']if sys.version_info[0] >= 3:# needs to be a text streamstream = codecs.getreader('utf-8')(stream)self.stream = streamelse:self.stream = _csv_open(kwargs['path'], 'r')self.reader = csv.reader(self.stream, **self.defaults)def __iter__(self):return selfdef next(self):result = next(self.reader)if sys.version_info[0] < 3:for i, item in enumerate(result):if not isinstance(item, text_type):result[i] = item.decode('utf-8')return result__next__ = nextclass CSVWriter(CSVBase):def __init__(self, fn, **kwargs):self.stream = _csv_open(fn, 'w')self.writer = csv.writer(self.stream, **self.defaults)def writerow(self, row):if sys.version_info[0] < 3:r = []for item in row:if isinstance(item, text_type):item = item.encode('utf-8')r.append(item)row = rself.writer.writerow(row)## Configurator functionality#class Configurator(BaseConfigurator):value_converters = dict(BaseConfigurator.value_converters)value_converters['inc'] = 'inc_convert'def __init__(self, config, base=None):super(Configurator, self).__init__(config)self.base = base or os.getcwd()def configure_custom(self, config):def convert(o):if isinstance(o, (list, tuple)):result = type(o)([convert(i) for i in o])elif isinstance(o, dict):if '()' in o:result = self.configure_custom(o)else:result = {}for k in o:result[k] = convert(o[k])else:result = self.convert(o)return resultc = config.pop('()')if not callable(c):c = self.resolve(c)props = config.pop('.', None)# Check for valid identifiersargs = config.pop('[]', ())if 
args:args = tuple([convert(o) for o in args])items = [(k, convert(config[k])) for k in config if valid_ident(k)]kwargs = dict(items)result = c(*args, **kwargs)if props:for n, v in props.items():setattr(result, n, convert(v))return resultdef __getitem__(self, key):result = self.config[key]if isinstance(result, dict) and '()' in result:self.config[key] = result = self.configure_custom(result)return resultdef inc_convert(self, value):"""Default converter for the inc:// protocol."""if not os.path.isabs(value):value = os.path.join(self.base, value)with codecs.open(value, 'r', encoding='utf-8') as f:result = json.load(f)return result## Mixin for running subprocesses and capturing their output#class SubprocessMixin(object):def __init__(self, verbose=False, progress=None):self.verbose = verboseself.progress = progressdef reader(self, stream, context):"""Read lines from a subprocess' output stream and either pass to a progresscallable (if specified) or write progress information to sys.stderr."""progress = self.progressverbose = self.verbosewhile True:s = stream.readline()if not s:breakif progress is not None:progress(s, context)else:if not verbose:sys.stderr.write('.')else:sys.stderr.write(s.decode('utf-8'))sys.stderr.flush()stream.close()def run_command(self, cmd, **kwargs):p = subprocess.Popen(cmd, stdout=subprocess.PIPE,stderr=subprocess.PIPE, **kwargs)t1 = threading.Thread(target=self.reader, args=(p.stdout, 'stdout'))t1.start()t2 = threading.Thread(target=self.reader, args=(p.stderr, 'stderr'))t2.start()p.wait()t1.join()t2.join()if self.progress is not None:self.progress('done.', 'main')elif self.verbose:sys.stderr.write('done.\n')return pdef normalize_name(name):"""Normalize a python package name a la PEP 503"""# https://www.python.org/dev/peps/pep-0503/#normalized-namesreturn re.sub('[-_.]+', '-', name).lower()
# -*- coding: utf-8 -*-
#
# Copyright (C) 2013-2015 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
from io import BytesIO
import logging
import os
import re
import struct
import sys

from .compat import sysconfig, detect_encoding, ZipFile
from .resources import finder
from .util import (FileOperator, get_export_entry, convert_path,
                   get_executable, in_venv)

logger = logging.getLogger(__name__)

_DEFAULT_MANIFEST = '''
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
 <assemblyIdentity version="1.0.0.0"
 processorArchitecture="X86"
 name="%s"
 type="win32"/>

 <!-- Identify the application security requirements. -->
 <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
  <security>
   <requestedPrivileges>
    <requestedExecutionLevel level="asInvoker" uiAccess="false"/>
   </requestedPrivileges>
  </security>
 </trustInfo>
</assembly>'''.strip()

# check if Python is called on the first line with this expression
FIRST_LINE_RE = re.compile(b'^#!.*pythonw?[0-9.]*([ \t].*)?$')

# Template for generated wrapper scripts: resolves '%(module)s.%(func)s'
# at runtime and calls it, mapping exceptions to exit code 1.
SCRIPT_TEMPLATE = '''# -*- coding: utf-8 -*-
if __name__ == '__main__':
    import sys, re

    def _resolve(module, func):
        __import__(module)
        mod = sys.modules[module]
        parts = func.split('.')
        result = getattr(mod, parts.pop(0))
        for p in parts:
            result = getattr(result, p)
        return result

    try:
        sys.argv[0] = re.sub(r'(-script\\.pyw?|\\.exe)?$', '', sys.argv[0])

        func = _resolve('%(module)s', '%(func)s')
        rc = func() # None interpreted as 0
    except Exception as e:  # only supporting Python >= 2.6
        sys.stderr.write('%%s\\n' %% e)
        rc = 1
    sys.exit(rc)
'''


def _enquote_executable(executable):
    """Quote *executable* for use in a shebang line if it contains spaces."""
    if ' ' in executable:
        # make sure we quote only the executable in case of env
        # for example /usr/bin/env "/dir with spaces/bin/jython"
        # instead of "/usr/bin/env /dir with spaces/bin/jython"
        # otherwise whole
        if executable.startswith('/usr/bin/env '):
            env, _executable = executable.split(' ', 1)
            if ' ' in _executable and not _executable.startswith('"'):
                executable = '%s "%s"' % (env, _executable)
        else:
            if not executable.startswith('"'):
                executable = '"%s"' % executable
    return executable


class ScriptMaker(object):
    """
    A class to copy or create scripts from source scripts or callable
    specifications.
    """
    script_template = SCRIPT_TEMPLATE

    executable = None  # for shebangs

    def __init__(self, source_dir, target_dir, add_launchers=True,
                 dry_run=False, fileop=None):
        self.source_dir = source_dir
        self.target_dir = target_dir
        self.add_launchers = add_launchers
        self.force = False
        self.clobber = False
        # It only makes sense to set mode bits on POSIX.
        self.set_mode = (os.name == 'posix') or (os.name == 'java' and
                                                 os._name == 'posix')
        # Which name variants to create for each script: '' (plain),
        # 'X' (major version suffix) and/or 'X.Y' (major.minor suffix).
        self.variants = set(('', 'X.Y'))
        self._fileop = fileop or FileOperator(dry_run)

        self._is_nt = os.name == 'nt' or (
            os.name == 'java' and os._name == 'nt')

    def _get_alternate_executable(self, executable, options):
        # For GUI scripts on Windows, use pythonw so no console is shown.
        if options.get('gui', False) and self._is_nt:  # pragma: no cover
            dn, fn = os.path.split(executable)
            fn = fn.replace('python', 'pythonw')
            executable = os.path.join(dn, fn)
        return executable

    if sys.platform.startswith('java'):  # pragma: no cover
        def _is_shell(self, executable):
            """
            Determine if the specified executable is a script
            (contains a #! line)
            """
            try:
                with open(executable) as fp:
                    return fp.read(2) == '#!'
            except (OSError, IOError):
                logger.warning('Failed to open %s', executable)
                return False

        def _fix_jython_executable(self, executable):
            if self._is_shell(executable):
                # Workaround for Jython is not needed on Linux systems.
                import java

                if java.lang.System.getProperty('os.name') == 'Linux':
                    return executable
            elif executable.lower().endswith('jython.exe'):
                # Use wrapper exe for Jython on Windows
                return executable
            return '/usr/bin/env %s' % executable

    def _get_shebang(self, encoding, post_interp=b'', options=None):
        """
        Build the shebang line (as bytes) for generated/adjusted scripts,
        validating that it is decodable as UTF-8 and as *encoding*.
        """
        enquote = True
        if self.executable:
            executable = self.executable
            enquote = False     # assume this will be taken care of
        elif not sysconfig.is_python_build():
            executable = get_executable()
        elif in_venv():  # pragma: no cover
            executable = os.path.join(sysconfig.get_path('scripts'),
                            'python%s' % sysconfig.get_config_var('EXE'))
        else:  # pragma: no cover
            executable = os.path.join(
                sysconfig.get_config_var('BINDIR'),
                'python%s%s' % (sysconfig.get_config_var('VERSION'),
                                sysconfig.get_config_var('EXE')))
        if options:
            executable = self._get_alternate_executable(executable, options)

        if sys.platform.startswith('java'):  # pragma: no cover
            executable = self._fix_jython_executable(executable)
        # Normalise case for Windows
        executable = os.path.normcase(executable)
        # If the user didn't specify an executable, it may be necessary to
        # cater for executable paths with spaces (not uncommon on Windows)
        if enquote:
            executable = _enquote_executable(executable)
        # Issue #51: don't use fsencode, since we later try to
        # check that the shebang is decodable using utf-8.
        executable = executable.encode('utf-8')
        # in case of IronPython, play safe and enable frames support
        if (sys.platform == 'cli' and '-X:Frames' not in post_interp
            and '-X:FullFrames' not in post_interp):  # pragma: no cover
            post_interp += b' -X:Frames'
        shebang = b'#!' + executable + post_interp + b'\n'
        # Python parser starts to read a script using UTF-8 until
        # it gets a #coding:xxx cookie. The shebang has to be the
        # first line of a file, the #coding:xxx cookie cannot be
        # written before. So the shebang has to be decodable from
        # UTF-8.
        try:
            shebang.decode('utf-8')
        except UnicodeDecodeError:  # pragma: no cover
            raise ValueError(
                'The shebang (%r) is not decodable from utf-8' % shebang)
        # If the script is encoded to a custom encoding (use a
        # #coding:xxx cookie), the shebang has to be decodable from
        # the script encoding too.
        if encoding != 'utf-8':
            try:
                shebang.decode(encoding)
            except UnicodeDecodeError:  # pragma: no cover
                raise ValueError(
                    'The shebang (%r) is not decodable '
                    'from the script encoding (%r)' % (shebang, encoding))
        return shebang

    def _get_script_text(self, entry):
        return self.script_template % dict(module=entry.prefix,
                                           func=entry.suffix)

    manifest = _DEFAULT_MANIFEST

    def get_manifest(self, exename):
        base = os.path.basename(exename)
        return self.manifest % base

    def _write_script(self, names, shebang, script_bytes, filenames, ext):
        """
        Write *script_bytes* (prefixed with *shebang*) to each name in
        *names* under target_dir, optionally wrapping in a Windows launcher.
        Written paths are appended to *filenames*.
        """
        use_launcher = self.add_launchers and self._is_nt
        linesep = os.linesep.encode('utf-8')
        if not use_launcher:
            script_bytes = shebang + linesep + script_bytes
        else:  # pragma: no cover
            if ext == 'py':
                launcher = self._get_launcher('t')
            else:
                launcher = self._get_launcher('w')
            # The script is stored as __main__.py in a zip appended to the
            # launcher executable, which Python can run directly.
            stream = BytesIO()
            with ZipFile(stream, 'w') as zf:
                zf.writestr('__main__.py', script_bytes)
            zip_data = stream.getvalue()
            script_bytes = launcher + shebang + linesep + zip_data
        for name in names:
            outname = os.path.join(self.target_dir, name)
            if use_launcher:  # pragma: no cover
                n, e = os.path.splitext(outname)
                if e.startswith('.py'):
                    outname = n
                outname = '%s.exe' % outname
                try:
                    self._fileop.write_binary_file(outname, script_bytes)
                except Exception:
                    # Failed writing an executable - it might be in use.
                    logger.warning('Failed to write executable - trying to '
                                   'use .deleteme logic')
                    dfname = '%s.deleteme' % outname
                    if os.path.exists(dfname):
                        os.remove(dfname)       # Not allowed to fail here
                    os.rename(outname, dfname)  # nor here
                    self._fileop.write_binary_file(outname, script_bytes)
                    logger.debug('Able to replace executable using '
                                 '.deleteme logic')
                    try:
                        os.remove(dfname)
                    except Exception:
                        pass    # still in use - ignore error
            else:
                if self._is_nt and not outname.endswith('.' + ext):  # pragma: no cover
                    outname = '%s.%s' % (outname, ext)
                if os.path.exists(outname) and not self.clobber:
                    logger.warning('Skipping existing file %s', outname)
                    continue
                self._fileop.write_binary_file(outname, script_bytes)
                if self.set_mode:
                    self._fileop.set_executable_mode([outname])
                filenames.append(outname)

    def _make_script(self, entry, filenames, options=None):
        """Generate a wrapper script for an export entry (callable spec)."""
        post_interp = b''
        if options:
            args = options.get('interpreter_args', [])
            if args:
                args = ' %s' % ' '.join(args)
                post_interp = args.encode('utf-8')
        shebang = self._get_shebang('utf-8', post_interp, options=options)
        script = self._get_script_text(entry).encode('utf-8')
        name = entry.name
        scriptnames = set()
        if '' in self.variants:
            scriptnames.add(name)
        # FIX: use sys.version_info instead of slicing sys.version —
        # sys.version[:3] yields '3.1' on Python 3.10+ and similar
        # two-digit minor versions, producing wrongly-named scripts.
        if 'X' in self.variants:
            scriptnames.add('%s%s' % (name, sys.version_info[0]))
        if 'X.Y' in self.variants:
            scriptnames.add('%s-%s.%s' % (name, sys.version_info[0],
                                          sys.version_info[1]))
        if options and options.get('gui', False):
            ext = 'pyw'
        else:
            ext = 'py'
        self._write_script(scriptnames, shebang, script, filenames, ext)

    def _copy_script(self, script, filenames):
        """Copy an existing script, adjusting its shebang if it has one."""
        adjust = False
        script = os.path.join(self.source_dir, convert_path(script))
        outname = os.path.join(self.target_dir, os.path.basename(script))
        if not self.force and not self._fileop.newer(script, outname):
            logger.debug('not copying %s (up-to-date)', script)
            return

        # Always open the file, but ignore failures in dry-run mode --
        # that way, we'll get accurate feedback if we can read the
        # script.
        try:
            f = open(script, 'rb')
        except IOError:  # pragma: no cover
            if not self.dry_run:
                raise
            f = None
        else:
            first_line = f.readline()
            if not first_line:  # pragma: no cover
                # NOTE(review): get_command_name is not defined on this
                # class — presumably supplied by a distutils-style subclass;
                # this path would raise AttributeError otherwise. Verify.
                logger.warning('%s: %s is an empty file (skipping)',
                               self.get_command_name(), script)
                return

            match = FIRST_LINE_RE.match(first_line.replace(b'\r\n', b'\n'))
            if match:
                adjust = True
                post_interp = match.group(1) or b''

        if not adjust:
            if f:
                f.close()
            self._fileop.copy_file(script, outname)
            if self.set_mode:
                self._fileop.set_executable_mode([outname])
            filenames.append(outname)
        else:
            logger.info('copying and adjusting %s -> %s', script,
                        self.target_dir)
            if not self._fileop.dry_run:
                encoding, lines = detect_encoding(f.readline)
                f.seek(0)
                shebang = self._get_shebang(encoding, post_interp)
                if b'pythonw' in first_line:  # pragma: no cover
                    ext = 'pyw'
                else:
                    ext = 'py'
                n = os.path.basename(outname)
                self._write_script([n], shebang, f.read(), filenames, ext)
            if f:
                f.close()

    @property
    def dry_run(self):
        return self._fileop.dry_run

    @dry_run.setter
    def dry_run(self, value):
        self._fileop.dry_run = value

    if os.name == 'nt' or (os.name == 'java' and os._name == 'nt'):  # pragma: no cover
        # Executable launcher support.
        # Launchers are from https://bitbucket.org/vinay.sajip/simple_launcher/

        def _get_launcher(self, kind):
            if struct.calcsize('P') == 8:   # 64-bit
                bits = '64'
            else:
                bits = '32'
            name = '%s%s.exe' % (kind, bits)
            # Issue 31: don't hardcode an absolute package name, but
            # determine it relative to the current package
            distlib_package = __name__.rsplit('.', 1)[0]
            result = finder(distlib_package).find(name).bytes
            return result

    # Public API follows

    def make(self, specification, options=None):
        """
        Make a script.

        :param specification: The specification, which is either a valid export
                              entry specification (to make a script from a
                              callable) or a filename (to make a script by
                              copying from a source location).
        :param options: A dictionary of options controlling script generation.
        :return: A list of all absolute pathnames written to.
        """
        filenames = []
        entry = get_export_entry(specification)
        if entry is None:
            self._copy_script(specification, filenames)
        else:
            self._make_script(entry, filenames, options=options)
        return filenames

    def make_multiple(self, specifications, options=None):
        """
        Take a list of specifications and make scripts from them,
        :param specifications: A list of specifications.
        :return: A list of all absolute pathnames written to,
        """
        filenames = []
        for specification in specifications:
            filenames.extend(self.make(specification, options))
        return filenames
# -*- coding: utf-8 -*-
#
# Copyright (C) 2013-2016 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
from __future__ import unicode_literals

import bisect
import io
import logging
import os
import pkgutil
import shutil
import sys
import types
import zipimport

from . import DistlibException
from .util import cached_property, get_cache_base, path_to_cache_dir, Cache

logger = logging.getLogger(__name__)


# Module-level singleton ResourceCache, created lazily by Resource.file_path.
cache = None    # created when needed


class ResourceCache(Cache):
    """On-disk cache for resources that have no real filesystem path
    (e.g. resources inside zip archives)."""

    def __init__(self, base=None):
        if base is None:
            # Use native string to avoid issues on 2.x: see Python #20140.
            base = os.path.join(get_cache_base(), str('resource-cache'))
        super(ResourceCache, self).__init__(base)

    def is_stale(self, resource, path):
        """
        Is the cache stale for the given resource?

        :param resource: The :class:`Resource` being cached.
        :param path: The path of the resource in the cache.
        :return: True if the cache is stale.
        """
        # Cache invalidation is a hard problem :-)
        return True

    def get(self, resource):
        """
        Get a resource into the cache,

        :param resource: A :class:`Resource` instance.
        :return: The pathname of the resource in the cache.
        """
        prefix, path = resource.finder.get_cache_info(resource)
        if prefix is None:
            # Resource already has a usable filesystem path; no caching.
            result = path
        else:
            result = os.path.join(self.base, self.prefix_to_dir(prefix), path)
            dirname = os.path.dirname(result)
            if not os.path.isdir(dirname):
                os.makedirs(dirname)
            if not os.path.exists(result):
                stale = True
            else:
                stale = self.is_stale(resource, path)
            if stale:
                # write the bytes of the resource to the cache location
                with open(result, 'wb') as f:
                    f.write(resource.bytes)
        return result


class ResourceBase(object):
    # Common state shared by Resource and ResourceContainer.
    def __init__(self, finder, name):
        self.finder = finder
        self.name = name


class Resource(ResourceBase):
    """
    A class representing an in-package resource, such as a data file. This is
    not normally instantiated by user code, but rather by a
    :class:`ResourceFinder` which manages the resource.
    """
    is_container = False        # Backwards compatibility

    def as_stream(self):
        """
        Get the resource as a stream.

        This is not a property to make it obvious that it returns a new stream
        each time.
        """
        return self.finder.get_stream(self)

    @cached_property
    def file_path(self):
        # Lazily create the module-level cache singleton on first use.
        global cache
        if cache is None:
            cache = ResourceCache()
        return cache.get(self)

    @cached_property
    def bytes(self):
        return self.finder.get_bytes(self)

    @cached_property
    def size(self):
        return self.finder.get_size(self)


class ResourceContainer(ResourceBase):
    is_container = True     # Backwards compatibility

    @cached_property
    def resources(self):
        return self.finder.get_resources(self)


class ResourceFinder(object):
    """
    Resource finder for file system resources.
    """

    if sys.platform.startswith('java'):
        skipped_extensions = ('.pyc', '.pyo', '.class')
    else:
        skipped_extensions = ('.pyc', '.pyo')

    def __init__(self, module):
        self.module = module
        self.loader = getattr(module, '__loader__', None)
        self.base = os.path.dirname(getattr(module, '__file__', ''))

    def _adjust_path(self, path):
        return os.path.realpath(path)

    def _make_path(self, resource_name):
        # Issue #50: need to preserve type of path on Python 2.x
        # like os.path._get_sep
        if isinstance(resource_name, bytes):    # should only happen on 2.x
            sep = b'/'
        else:
            sep = '/'
        parts = resource_name.split(sep)
        parts.insert(0, self.base)
        result = os.path.join(*parts)
        return self._adjust_path(result)

    def _find(self, path):
        return os.path.exists(path)

    def get_cache_info(self, resource):
        # (None, path) means the resource is directly usable on disk.
        return None, resource.path

    def find(self, resource_name):
        """Return a Resource or ResourceContainer for *resource_name*,
        or None if it does not exist."""
        path = self._make_path(resource_name)
        if not self._find(path):
            result = None
        else:
            if self._is_directory(path):
                result = ResourceContainer(self, resource_name)
            else:
                result = Resource(self, resource_name)
            result.path = path
        return result

    def get_stream(self, resource):
        return open(resource.path, 'rb')

    def get_bytes(self, resource):
        with open(resource.path, 'rb') as f:
            return f.read()

    def get_size(self, resource):
        return os.path.getsize(resource.path)

    def get_resources(self, resource):
        # Names of entries in the container, skipping compiled artifacts.
        def allowed(f):
            return (f != '__pycache__' and not
                    f.endswith(self.skipped_extensions))
        return set([f for f in os.listdir(resource.path) if allowed(f)])

    def is_container(self, resource):
        return self._is_directory(resource.path)

    _is_directory = staticmethod(os.path.isdir)

    def iterator(self, resource_name):
        """Yield the named resource and, breadth-first, every resource
        beneath it (containers are yielded before their contents)."""
        resource = self.find(resource_name)
        if resource is not None:
            todo = [resource]
            while todo:
                resource = todo.pop(0)
                yield resource
                if resource.is_container:
                    rname = resource.name
                    for name in resource.resources:
                        if not rname:
                            new_name = name
                        else:
                            new_name = '/'.join([rname, name])
                        child = self.find(new_name)
                        if child.is_container:
                            todo.append(child)
                        else:
                            yield child


class ZipResourceFinder(ResourceFinder):
    """
    Resource finder for resources in .zip files.
    """
    def __init__(self, module):
        super(ZipResourceFinder, self).__init__(module)
        archive = self.loader.archive
        # Length of 'archive' plus the path separator that follows it;
        # used to strip the archive prefix off resource paths.
        self.prefix_len = 1 + len(archive)
        # PyPy doesn't have a _files attr on zipimporter, and you can't set one
        if hasattr(self.loader, '_files'):
            self._files = self.loader._files
        else:
            self._files = zipimport._zip_directory_cache[archive]
        # Sorted name index, so existence/prefix tests can use bisect.
        self.index = sorted(self._files)

    def _adjust_path(self, path):
        # No realpath for zip members — the joined path is already canonical.
        return path

    def _find(self, path):
        path = path[self.prefix_len:]
        if path in self._files:
            result = True
        else:
            # Not a file entry: treat as a directory and check whether any
            # archive member sorts immediately after 'path + os.sep'.
            if path and path[-1] != os.sep:
                path = path + os.sep
            i = bisect.bisect(self.index, path)
            try:
                result = self.index[i].startswith(path)
            except IndexError:
                result = False
        if not result:
            logger.debug('_find failed: %r %r', path, self.loader.prefix)
        else:
            logger.debug('_find worked: %r %r', path, self.loader.prefix)
        return result

    def get_cache_info(self, resource):
        prefix = self.loader.archive
        path = resource.path[1 + len(prefix):]
        return prefix, path

    def get_bytes(self, resource):
        return self.loader.get_data(resource.path)

    def get_stream(self, resource):
        return io.BytesIO(self.get_bytes(resource))

    def get_size(self, resource):
        path = resource.path[self.prefix_len:]
        # NOTE(review): index 3 of the zipimport TOC entry looks like the
        # uncompressed size — confirm against zipimport internals.
        return self._files[path][3]

    def get_resources(self, resource):
        path = resource.path[self.prefix_len:]
        if path and path[-1] != os.sep:
            path += os.sep
        plen = len(path)
        result = set()
        # Walk the sorted index from the first entry under 'path'.
        i = bisect.bisect(self.index, path)
        while i < len(self.index):
            if not self.index[i].startswith(path):
                break
            s = self.index[i][plen:]
            result.add(s.split(os.sep, 1)[0])   # only immediate children
            i += 1
        return result

    def _is_directory(self, path):
        path = path[self.prefix_len:]
        if path and path[-1] != os.sep:
            path += os.sep
        i = bisect.bisect(self.index, path)
        try:
            result = self.index[i].startswith(path)
        except IndexError:
            result = False
        return result

# Maps loader type -> finder factory. type(None) covers modules with no
# __loader__ attribute (plain filesystem packages).
_finder_registry = {
    type(None): ResourceFinder,
    zipimport.zipimporter: ZipResourceFinder
    }

try:
    # In Python 3.6, _frozen_importlib -> _frozen_importlib_external
    try:
        import _frozen_importlib_external as _fi
    except ImportError:
        import _frozen_importlib as _fi
    _finder_registry[_fi.SourceFileLoader] = ResourceFinder
    _finder_registry[_fi.FileFinder] = ResourceFinder
    del _fi
except (ImportError, AttributeError):
    pass


def register_finder(loader, finder_maker):
    # Register a finder factory for a custom loader type.
    _finder_registry[type(loader)] = finder_maker

# Cache of package name -> ResourceFinder instance.
_finder_cache = {}


def finder(package):
    """
    Return a resource finder for a package.
    :param package: The name of the package.
    :return: A :class:`ResourceFinder` instance for the package.
    """
    if package in _finder_cache:
        result = _finder_cache[package]
    else:
        if package not in sys.modules:
            __import__(package)
        module = sys.modules[package]
        path = getattr(module, '__path__', None)
        if path is None:
            raise DistlibException('You cannot get a finder for a module, '
                                   'only for a package')
        loader = getattr(module, '__loader__', None)
        finder_maker = _finder_registry.get(type(loader))
        if finder_maker is None:
            raise DistlibException('Unable to locate finder for %r' % package)
        result = finder_maker(module)
        _finder_cache[package] = result
    return result


# Shared stand-in module used by finder_for_path; note that its __file__ and
# __loader__ are overwritten on every call (not thread-safe by design).
_dummy_module = types.ModuleType(str('__dummy__'))


def finder_for_path(path):
    """
    Return a resource finder for a path, which should represent a container.

    :param path: The path.
    :return: A :class:`ResourceFinder` instance for the path.
    """
    result = None
    # calls any path hooks, gets importer into cache
    pkgutil.get_importer(path)
    loader = sys.path_importer_cache.get(path)
    finder = _finder_registry.get(type(loader))
    if finder:
        module = _dummy_module
        module.__file__ = os.path.join(path, '')
        module.__loader__ = loader
        result = finder(module)
    return result
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012 The Python Software Foundation.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""Implementation of the Metadata for Python packages PEPs.

Supports all metadata formats (1.0, 1.1, 1.2, and 2.0 experimental).
"""
from __future__ import unicode_literals

import codecs
from email import message_from_file
import json
import logging
import re


from . import DistlibException, __version__
from .compat import StringIO, string_types, text_type
from .markers import interpret
from .util import extract_by_key, get_extras
from .version import get_scheme, PEP440_VERSION_RE

logger = logging.getLogger(__name__)


class MetadataMissingError(DistlibException):
    """A required metadata is missing"""


class MetadataConflictError(DistlibException):
    """Attempt to read or write metadata fields that are conflictual."""


class MetadataUnrecognizedVersionError(DistlibException):
    """Unknown metadata version number."""


class MetadataInvalidError(DistlibException):
    """A metadata value is invalid"""

# public API of this module
__all__ = ['Metadata', 'PKG_INFO_ENCODING', 'PKG_INFO_PREFERRED_VERSION']

# Encoding used for the PKG-INFO files
PKG_INFO_ENCODING = 'utf-8'

# preferred version. Hopefully will be changed
# to 1.2 once PEP 345 is supported everywhere
PKG_INFO_PREFERRED_VERSION = '1.1'

# Continuation-line prefixes used when folding multi-line Description values.
# NOTE(review): inner whitespace of these patterns was destroyed by the
# line-collapse in this copy; widths below follow upstream distlib — verify.
_LINE_PREFIX_1_2 = re.compile('\n       \|')
_LINE_PREFIX_PRE_1_2 = re.compile('\n        ')

# Field sets for each metadata version (PEP 241, PEP 314, PEP 345, PEP 426).
_241_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
               'Summary', 'Description',
               'Keywords', 'Home-page', 'Author', 'Author-email',
               'License')

_314_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
               'Supported-Platform', 'Summary', 'Description',
               'Keywords', 'Home-page', 'Author', 'Author-email',
               'License', 'Classifier', 'Download-URL', 'Obsoletes',
               'Provides', 'Requires')

# Fields whose presence marks a document as (at least) the given version.
_314_MARKERS = ('Obsoletes', 'Provides', 'Requires', 'Classifier',
                'Download-URL')

_345_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
               'Supported-Platform', 'Summary', 'Description',
               'Keywords', 'Home-page', 'Author', 'Author-email',
               'Maintainer', 'Maintainer-email', 'License',
               'Classifier', 'Download-URL', 'Obsoletes-Dist',
               'Project-URL', 'Provides-Dist', 'Requires-Dist',
               'Requires-Python', 'Requires-External')

_345_MARKERS = ('Provides-Dist', 'Requires-Dist', 'Requires-Python',
                'Obsoletes-Dist', 'Requires-External', 'Maintainer',
                'Maintainer-email', 'Project-URL')

_426_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
               'Supported-Platform', 'Summary', 'Description',
               'Keywords', 'Home-page', 'Author', 'Author-email',
               'Maintainer', 'Maintainer-email', 'License',
               'Classifier', 'Download-URL', 'Obsoletes-Dist',
               'Project-URL', 'Provides-Dist', 'Requires-Dist',
               'Requires-Python', 'Requires-External', 'Private-Version',
               'Obsoleted-By', 'Setup-Requires-Dist', 'Extension',
               'Provides-Extra')

_426_MARKERS = ('Private-Version', 'Provides-Extra', 'Obsoleted-By',
                'Setup-Requires-Dist', 'Extension')

_ALL_FIELDS = set()
_ALL_FIELDS.update(_241_FIELDS)
_ALL_FIELDS.update(_314_FIELDS)
_ALL_FIELDS.update(_345_FIELDS)
_ALL_FIELDS.update(_426_FIELDS)

EXTRA_RE = re.compile(r'''extra\s*==\s*("([^"]+)"|'([^']+)')''')


def _version2fieldlist(version):
    """Return the tuple of valid field names for a metadata *version*."""
    if version == '1.0':
        return _241_FIELDS
    elif version == '1.1':
        return _314_FIELDS
    elif version == '1.2':
        return _345_FIELDS
    elif version == '2.0':
        return _426_FIELDS
    raise MetadataUnrecognizedVersionError(version)


def _best_version(fields):
    """Detect the best version depending on the fields used."""
    def _has_marker(keys, markers):
        for marker in markers:
            if marker in keys:
                return True
        return False

    # Only consider fields with a meaningful value.
    keys = []
    for key, value in fields.items():
        if value in ([], 'UNKNOWN', None):
            continue
        keys.append(key)

    possible_versions = ['1.0', '1.1', '1.2', '2.0']

    # first let's try to see if a field is not part of one of the version
    for key in keys:
        if key not in _241_FIELDS and '1.0' in possible_versions:
            possible_versions.remove('1.0')
        if key not in _314_FIELDS and '1.1' in possible_versions:
            possible_versions.remove('1.1')
        if key not in _345_FIELDS and '1.2' in possible_versions:
            possible_versions.remove('1.2')
        if key not in _426_FIELDS and '2.0' in possible_versions:
            possible_versions.remove('2.0')

    # possible_version contains qualified versions
    if len(possible_versions) == 1:
        return possible_versions[0]   # found !
    elif len(possible_versions) == 0:
        raise MetadataConflictError('Unknown metadata set')

    # let's see if one unique marker is found
    is_1_1 = '1.1' in possible_versions and _has_marker(keys, _314_MARKERS)
    is_1_2 = '1.2' in possible_versions and _has_marker(keys, _345_MARKERS)
    is_2_0 = '2.0' in possible_versions and _has_marker(keys, _426_MARKERS)
    if int(is_1_1) + int(is_1_2) + int(is_2_0) > 1:
        raise MetadataConflictError('You used incompatible 1.1/1.2/2.0 fields')

    # we have the choice, 1.0, or 1.2, or 2.0
    #   - 1.0 has a broken Summary field but works with all tools
    #   - 1.1 is to avoid
    #   - 1.2 fixes Summary but has little adoption
    #   - 2.0 adds more features and is very new
    if not is_1_1 and not is_1_2 and not is_2_0:
        # we couldn't find any specific marker
        if PKG_INFO_PREFERRED_VERSION in possible_versions:
            return PKG_INFO_PREFERRED_VERSION
    if is_1_1:
        return '1.1'
    if is_1_2:
        return '1.2'

    return '2.0'

# attribute-style (underscore) name -> canonical field name.
_ATTR2FIELD = {
    'metadata_version': 'Metadata-Version',
    'name': 'Name',
    'version': 'Version',
    'platform': 'Platform',
    'supported_platform': 'Supported-Platform',
    'summary': 'Summary',
    'description': 'Description',
    'keywords': 'Keywords',
    'home_page': 'Home-page',
    'author': 'Author',
    'author_email': 'Author-email',
    'maintainer': 'Maintainer',
    'maintainer_email': 'Maintainer-email',
    'license': 'License',
    'classifier': 'Classifier',
    'download_url': 'Download-URL',
    'obsoletes_dist': 'Obsoletes-Dist',
    'provides_dist': 'Provides-Dist',
    'requires_dist': 'Requires-Dist',
    'setup_requires_dist': 'Setup-Requires-Dist',
    'requires_python': 'Requires-Python',
    'requires_external': 'Requires-External',
    'requires': 'Requires',
    'provides': 'Provides',
    'obsoletes': 'Obsoletes',
    'project_url': 'Project-URL',
    'private_version': 'Private-Version',
    'obsoleted_by': 'Obsoleted-By',
    'extension': 'Extension',
    'provides_extra': 'Provides-Extra',
}

_PREDICATE_FIELDS = ('Requires-Dist', 'Obsoletes-Dist', 'Provides-Dist')
_VERSIONS_FIELDS = ('Requires-Python',)
_VERSION_FIELDS = ('Version',)
_LISTFIELDS = ('Platform', 'Classifier', 'Obsoletes',
               'Requires', 'Provides', 'Obsoletes-Dist',
               'Provides-Dist', 'Requires-Dist', 'Requires-External',
               'Project-URL', 'Supported-Platform', 'Setup-Requires-Dist',
               'Provides-Extra', 'Extension')
_LISTTUPLEFIELDS = ('Project-URL',)
_ELEMENTSFIELD = ('Keywords',)
_UNICODEFIELDS = ('Author', 'Maintainer', 'Summary', 'Description')

# Sentinel distinguishing "no default supplied" from an explicit None.
_MISSING = object()

_FILESAFE = re.compile('[^A-Za-z0-9.]+')


def _get_name_and_version(name, version, for_filename=False):
    """Return the distribution name with version.

    If for_filename is true, return a filename-escaped form."""
    if for_filename:
        # For both name and version any runs of non-alphanumeric or '.'
        # characters are replaced with a single '-'.  Additionally any
        # spaces in the version string become '.'
        name = _FILESAFE.sub('-', name)
        version = _FILESAFE.sub('-', version.replace(' ', '.'))
    return '%s-%s' % (name, version)


class LegacyMetadata(object):
    """The legacy metadata of a release.

    Supports versions 1.0, 1.1 and 1.2 (auto-detected). You can
    instantiate the class with one of these arguments (or none):
    - *path*, the path to a metadata file
    - *fileobj* give a file-like object with metadata as content
    - *mapping* is a dict-like object
    - *scheme* is a version scheme name
    """
    # TODO document the mapping API and UNKNOWN default key

    def __init__(self, path=None, fileobj=None, mapping=None,
                 scheme='default'):
        # At most one of path/fileobj/mapping may be given.
        if [path, fileobj, mapping].count(None) < 2:
            raise TypeError('path, fileobj and mapping are exclusive')
        self._fields = {}
        self.requires_files = []
        self._dependencies = None
        self.scheme = scheme
        if path is not None:
            self.read(path)
        elif fileobj is not None:
            self.read_file(fileobj)
        elif mapping is not None:
            self.update(mapping)
            self.set_metadata_version()

    def set_metadata_version(self):
        # Recompute Metadata-Version from the fields currently present.
        self._fields['Metadata-Version'] = _best_version(self._fields)

    def _write_field(self, fileobj, name, value):
        fileobj.write('%s: %s\n' % (name, value))

    def __getitem__(self, name):
        return self.get(name)

    def __setitem__(self, name, value):
        return self.set(name, value)

    def __delitem__(self, name):
        field_name = self._convert_name(name)
        try:
            del self._fields[field_name]
        except KeyError:
            # Report the caller's original key, not the converted one.
            raise KeyError(name)

    def __contains__(self, name):
        return (name in self._fields or
                self._convert_name(name) in self._fields)

    def _convert_name(self, name):
        # Accept either canonical field names or attribute-style names.
        if name in _ALL_FIELDS:
            return name
        name = name.replace('-', '_').lower()
        return _ATTR2FIELD.get(name, name)

    def _default_value(self, name):
        if name in _LISTFIELDS or name in _ELEMENTSFIELD:
            return []
        return 'UNKNOWN'

    def _remove_line_prefix(self, value):
        # Strip the continuation-line prefix used by the on-disk format.
        if self.metadata_version in ('1.0', '1.1'):
            return _LINE_PREFIX_PRE_1_2.sub('\n', value)
        else:
            return _LINE_PREFIX_1_2.sub('\n', value)

    def __getattr__(self, name):
        # Attribute access falls through to field lookup (e.g. md.version).
        if name in _ATTR2FIELD:
            return self[name]
        raise AttributeError(name)

    #
    # Public API
    #

#    dependencies = property(_get_dependencies, _set_dependencies)

    def get_fullname(self, filesafe=False):
        """Return the distribution name with version.

        If filesafe is true, return a filename-escaped form."""
        return _get_name_and_version(self['Name'], self['Version'], filesafe)

    def is_field(self, name):
        """return True if name is a valid metadata key"""
        name = self._convert_name(name)
        return name in _ALL_FIELDS

    def is_multi_field(self, name):
        name = self._convert_name(name)
        return name in _LISTFIELDS

    def read(self, filepath):
        """Read the metadata values from a file path."""
        fp = codecs.open(filepath, 'r', encoding='utf-8')
        try:
            self.read_file(fp)
        finally:
            fp.close()

    def read_file(self, fileob):
        """Read the metadata values from a file object."""
        # The on-disk format is RFC 822-style, hence the email parser.
        msg = message_from_file(fileob)
        self._fields['Metadata-Version'] = msg['metadata-version']

        # When reading, get all the fields we can
        for field in _ALL_FIELDS:
            if field not in msg:
                continue
            if field in _LISTFIELDS:
                # we can have multiple lines
                values = msg.get_all(field)
                if field in _LISTTUPLEFIELDS and values is not None:
                    values = [tuple(value.split(',')) for value in values]
                self.set(field, values)
            else:
                # single line
                value = msg[field]
                if value is not None and value != 'UNKNOWN':
                    self.set(field, value)
        self.set_metadata_version()

    def write(self, filepath, skip_unknown=False):
        """Write the metadata fields to filepath."""
        fp = codecs.open(filepath, 'w', encoding='utf-8')
        try:
            self.write_file(fp, skip_unknown)
        finally:
            fp.close()

    def write_file(self, fileobject, skip_unknown=False):
        """Write the PKG-INFO format data to a file object."""
        self.set_metadata_version()

        for field in _version2fieldlist(self['Metadata-Version']):
            values = self.get(field)
            if skip_unknown and values in ('UNKNOWN', [], ['UNKNOWN']):
                continue
            if field in _ELEMENTSFIELD:
                self._write_field(fileobject, field, ','.join(values))
                continue
            if field not in _LISTFIELDS:
                if field == 'Description':
                    # Fold multi-line descriptions with the version-specific
                    # continuation prefix (see _LINE_PREFIX_* above).
                    if self.metadata_version in ('1.0', '1.1'):
                        values = values.replace('\n', '\n        ')
                    else:
                        values = values.replace('\n', '\n       |')
                values = [values]

            if field in _LISTTUPLEFIELDS:
                values = [','.join(value) for value in values]

            for value in values:
                self._write_field(fileobject, field, value)

    def update(self, other=None, **kwargs):
        """Set metadata values from the given iterable `other` and kwargs.

        Behavior is like `dict.update`: If `other` has a ``keys`` method,
        they are looped over and ``self[key]`` is assigned ``other[key]``.
        Else, ``other`` is an iterable of ``(key, value)`` iterables.

        Keys that don't match a metadata field or that have an empty value are
        dropped.
        """
        def _set(key, value):
            if key in _ATTR2FIELD and value:
                self.set(self._convert_name(key), value)

        if not other:
            # other is None or empty container
            pass
        elif hasattr(other, 'keys'):
            for k in other.keys():
                _set(k, other[k])
        else:
            for k, v in other:
                _set(k, v)

        if kwargs:
            for k, v in kwargs.items():
                _set(k, v)

    def set(self, name, value):
        """Control then set a metadata field."""
        name = self._convert_name(name)

        # Normalise scalar input into lists for list-valued fields.
        if ((name in _ELEMENTSFIELD or name == 'Platform') and
            not isinstance(value, (list, tuple))):
            if isinstance(value, string_types):
                value = [v.strip() for v in value.split(',')]
            else:
                value = []
        elif (name in _LISTFIELDS and
              not isinstance(value, (list, tuple))):
            if isinstance(value, string_types):
                value = [value]
            else:
                value = []

        # Validation only emits warnings, and only if warnings are enabled.
        if logger.isEnabledFor(logging.WARNING):
            project_name = self['Name']

            scheme = get_scheme(self.scheme)
            if name in _PREDICATE_FIELDS and value is not None:
                for v in value:
                    # check that the values are valid
                    if not scheme.is_valid_matcher(v.split(';')[0]):
                        logger.warning(
                            "'%s': '%s' is not valid (field '%s')",
                            project_name, v, name)
            # FIXME this rejects UNKNOWN, is that right?
            elif name in _VERSIONS_FIELDS and value is not None:
                if not scheme.is_valid_constraint_list(value):
                    logger.warning("'%s': '%s' is not a valid version (field '%s')",
                                   project_name, value, name)
            elif name in _VERSION_FIELDS and value is not None:
                if not scheme.is_valid_version(value):
                    logger.warning("'%s': '%s' is not a valid version (field '%s')",
                                   project_name, value, name)

        if name in _UNICODEFIELDS:
            if name == 'Description':
                value = self._remove_line_prefix(value)

        self._fields[name] = value

    def get(self, name, default=_MISSING):
        """Get a metadata field."""
        name = self._convert_name(name)
        if name not in self._fields:
            if default is _MISSING:
                default = self._default_value(name)
            return default
        if name in _UNICODEFIELDS:
            value = self._fields[name]
            return value
        elif name in _LISTFIELDS:
            value = self._fields[name]
            if value is None:
                return []
            res = []
            for val in value:
                if name not in _LISTTUPLEFIELDS:
                    res.append(val)
                else:
                    # That's for Project-URL
                    res.append((val[0], val[1]))
            return res

        elif name in _ELEMENTSFIELD:
            value = self._fields[name]
            if isinstance(value, string_types):
                return value.split(',')
        return self._fields[name]

    def check(self, strict=False):
        """Check if the metadata is compliant. If strict is True then raise if
        no Name or Version are provided"""
        self.set_metadata_version()

        # XXX should check the versions (if the file was loaded)
        missing, warnings = [], []

        for attr in ('Name', 'Version'):  # required by PEP 345
            if attr not in self:
                missing.append(attr)

        if strict and missing != []:
            msg = 'missing required metadata: %s' % ', '.join(missing)
            raise MetadataMissingError(msg)

        # Home-page/Author are merely recommended; collected after the
        # strict check so their absence never raises.
        for attr in ('Home-page', 'Author'):
            if attr not in self:
                missing.append(attr)

        # checking metadata 1.2 (XXX needs to check 1.1, 1.0)
        if self['Metadata-Version'] != '1.2':
            return missing, warnings

        scheme = get_scheme(self.scheme)

        def are_valid_constraints(value):
            for v in value:
                if not scheme.is_valid_matcher(v.split(';')[0]):
                    return False
            return True

        for fields, controller in ((_PREDICATE_FIELDS, are_valid_constraints),
                                   (_VERSIONS_FIELDS,
                                    scheme.is_valid_constraint_list),
                                   (_VERSION_FIELDS,
                                    scheme.is_valid_version)):
            for field in fields:
                value = self.get(field, None)
                if value is not None and not controller(value):
                    warnings.append("Wrong value for '%s': %s" % (field, value))

        return missing, warnings

    def todict(self, skip_missing=False):
        """Return fields as a dict.

        Field names will be converted to use the underscore-lowercase style
        instead of hyphen-mixed case (i.e. home_page instead of Home-page).
        """
        self.set_metadata_version()

        mapping_1_0 = (
            ('metadata_version', 'Metadata-Version'),
            ('name', 'Name'),
            ('version', 'Version'),
            ('summary', 'Summary'),
            ('home_page', 'Home-page'),
            ('author', 'Author'),
            ('author_email', 'Author-email'),
            ('license', 'License'),
            ('description', 'Description'),
            ('keywords', 'Keywords'),
            ('platform', 'Platform'),
            ('classifiers', 'Classifier'),
            ('download_url', 'Download-URL'),
        )

        data = {}
        for key, field_name in mapping_1_0:
            if not skip_missing or field_name in self._fields:
                data[key] = self[field_name]

        if self['Metadata-Version'] == '1.2':
            mapping_1_2 = (
                ('requires_dist', 'Requires-Dist'),
                ('requires_python', 'Requires-Python'),
                ('requires_external', 'Requires-External'),
                ('provides_dist', 'Provides-Dist'),
                ('obsoletes_dist', 'Obsoletes-Dist'),
                ('project_url', 'Project-URL'),
                ('maintainer', 'Maintainer'),
                ('maintainer_email', 'Maintainer-email'),
            )
            for key, field_name in mapping_1_2:
                if not skip_missing or field_name in self._fields:
                    if key != 'project_url':
                        data[key] = self[field_name]
                    else:
                        # Project-URL entries are (label, url) tuples.
                        data[key] = [','.join(u) for u in self[field_name]]

        elif self['Metadata-Version'] == '1.1':
            mapping_1_1 = (
                ('provides', 'Provides'),
                ('requires', 'Requires'),
                ('obsoletes', 'Obsoletes'),
            )
            for key, field_name in mapping_1_1:
                if not skip_missing or field_name in self._fields:
                    data[key] = self[field_name]

        return data

    def add_requirements(self, requirements):
        if self['Metadata-Version'] == '1.1':
            # we can't have 1.1 metadata *and* Setuptools requires
            for field in ('Obsoletes', 'Requires', 'Provides'):
                if field in self:
                    del self[field]
        self['Requires-Dist'] += requirements

    # Mapping API
    # TODO could add iter* variants

    def keys(self):
        return list(_version2fieldlist(self['Metadata-Version']))

    def __iter__(self):
        for key in self.keys():
            yield key

    def values(self):
        return [self[key] for key in self.keys()]

    def items(self):
        return [(key, self[key]) for key in self.keys()]

    def __repr__(self):
        return '<%s %s %s>' % (self.__class__.__name__, self.name,
                               self.version)


# NOTE(review): the assignment below is truncated by this chunk's boundary;
# its value continues on the following line of the file.
METADATA_FILENAME =
METADATA_FILENAME = 'pydist.json'
WHEEL_METADATA_FILENAME = 'metadata.json'


class Metadata(object):
    """
    The metadata of a release.  This implementation uses 2.0 (JSON)
    metadata where possible. If not possible, it wraps a LegacyMetadata
    instance which handles the key-value metadata format.
    """

    # Raw string: '\d' in a plain string literal is an invalid escape
    # (DeprecationWarning, SyntaxError in newer Pythons).
    METADATA_VERSION_MATCHER = re.compile(r'^\d+(\.\d+)*$')

    NAME_MATCHER = re.compile('^[0-9A-Z]([0-9A-Z_.-]*[0-9A-Z])?$', re.I)

    VERSION_MATCHER = PEP440_VERSION_RE

    SUMMARY_MATCHER = re.compile('.{1,2047}')

    METADATA_VERSION = '2.0'
    GENERATOR = 'distlib (%s)' % __version__

    # Keys that must be present, mapped to the schemes exempt from the rule.
    MANDATORY_KEYS = {
        'name': (),
        'version': (),
        'summary': ('legacy',),
    }

    INDEX_KEYS = ('name version license summary description author '
                  'author_email keywords platform home_page classifiers '
                  'download_url')

    DEPENDENCY_KEYS = ('extras run_requires test_requires build_requires '
                       'dev_requires provides meta_requires obsoleted_by '
                       'supports_environments')

    # key -> (compiled pattern, schemes exempt from validation)
    SYNTAX_VALIDATORS = {
        'metadata_version': (METADATA_VERSION_MATCHER, ()),
        'name': (NAME_MATCHER, ('legacy',)),
        'version': (VERSION_MATCHER, ('legacy',)),
        'summary': (SUMMARY_MATCHER, ('legacy',)),
    }

    __slots__ = ('_legacy', '_data', 'scheme')

    def __init__(self, path=None, fileobj=None, mapping=None,
                 scheme='default'):
        """Initialise from at most one of ``path``, ``fileobj`` or
        ``mapping``; with none given, start with minimal 2.0 metadata."""
        if [path, fileobj, mapping].count(None) < 2:
            raise TypeError('path, fileobj and mapping are exclusive')
        self._legacy = None
        self._data = None
        self.scheme = scheme
        if mapping is not None:
            try:
                self._validate_mapping(mapping, scheme)
                self._data = mapping
            except MetadataUnrecognizedVersionError:
                self._legacy = LegacyMetadata(mapping=mapping, scheme=scheme)
                self.validate()
        else:
            data = None
            if path:
                with open(path, 'rb') as f:
                    data = f.read()
            elif fileobj:
                data = fileobj.read()
            if data is None:
                # Initialised with no args - to be added
                self._data = {
                    'metadata_version': self.METADATA_VERSION,
                    'generator': self.GENERATOR,
                }
            else:
                if not isinstance(data, text_type):
                    data = data.decode('utf-8')
                try:
                    self._data = json.loads(data)
                    self._validate_mapping(self._data, scheme)
                except ValueError:
                    # Note: MetadataUnrecognizedVersionError does not
                    # inherit from ValueError (it's a DistlibException,
                    # which should not inherit from ValueError).
                    # The ValueError comes from the json.load - if that
                    # succeeds and we get a validation error, we want
                    # that to propagate
                    self._legacy = LegacyMetadata(fileobj=StringIO(data),
                                                  scheme=scheme)
                    self.validate()

    common_keys = set(('name', 'version', 'license', 'keywords', 'summary'))

    none_list = (None, list)
    none_dict = (None, dict)

    # attribute -> (legacy field name or None, default factory or None)
    mapped_keys = {
        'run_requires': ('Requires-Dist', list),
        'build_requires': ('Setup-Requires-Dist', list),
        'dev_requires': none_list,
        'test_requires': none_list,
        'meta_requires': none_list,
        'extras': ('Provides-Extra', list),
        'modules': none_list,
        'namespaces': none_list,
        'exports': none_dict,
        'commands': none_dict,
        'classifiers': ('Classifier', list),
        'source_url': ('Download-URL', None),
        'metadata_version': ('Metadata-Version', None),
    }

    del none_list, none_dict

    def __getattribute__(self, key):
        """Route attribute reads for mapped/common keys to the underlying
        legacy or JSON metadata; everything else is normal attribute access."""
        common = object.__getattribute__(self, 'common_keys')
        mapped = object.__getattribute__(self, 'mapped_keys')
        if key in mapped:
            lk, maker = mapped[key]
            if self._legacy:
                if lk is None:
                    result = None if maker is None else maker()
                else:
                    result = self._legacy.get(lk)
            else:
                value = None if maker is None else maker()
                if key not in ('commands', 'exports', 'modules', 'namespaces',
                               'classifiers'):
                    result = self._data.get(key, value)
                else:
                    # special cases for PEP 459
                    sentinel = object()
                    result = sentinel
                    d = self._data.get('extensions')
                    if d:
                        if key == 'commands':
                            result = d.get('python.commands', value)
                        elif key == 'classifiers':
                            d = d.get('python.details')
                            if d:
                                result = d.get(key, value)
                        else:
                            d = d.get('python.exports')
                            if not d:
                                d = self._data.get('python.exports')
                            if d:
                                result = d.get(key, value)
                    if result is sentinel:
                        result = value
        elif key not in common:
            result = object.__getattribute__(self, key)
        elif self._legacy:
            result = self._legacy.get(key)
        else:
            result = self._data.get(key)
        return result

    def _validate_value(self, key, value, scheme=None):
        """Raise MetadataInvalidError if ``value`` fails the syntax pattern
        registered for ``key`` (unless the scheme is exempt)."""
        if key in self.SYNTAX_VALIDATORS:
            pattern, exclusions = self.SYNTAX_VALIDATORS[key]
            if (scheme or self.scheme) not in exclusions:
                m = pattern.match(value)
                if not m:
                    raise MetadataInvalidError("'%s' is an invalid value for "
                                               "the '%s' property" % (value,
                                                                      key))

    def __setattr__(self, key, value):
        """Route attribute writes symmetrically to ``__getattribute__``."""
        self._validate_value(key, value)
        common = object.__getattribute__(self, 'common_keys')
        mapped = object.__getattribute__(self, 'mapped_keys')
        if key in mapped:
            lk, _ = mapped[key]
            if self._legacy:
                if lk is None:
                    raise NotImplementedError
                self._legacy[lk] = value
            elif key not in ('commands', 'exports', 'modules', 'namespaces',
                             'classifiers'):
                self._data[key] = value
            else:
                # special cases for PEP 459
                d = self._data.setdefault('extensions', {})
                if key == 'commands':
                    d['python.commands'] = value
                elif key == 'classifiers':
                    d = d.setdefault('python.details', {})
                    d[key] = value
                else:
                    d = d.setdefault('python.exports', {})
                    d[key] = value
        elif key not in common:
            object.__setattr__(self, key, value)
        else:
            if key == 'keywords':
                # Accept a whitespace-separated string as well as a list.
                if isinstance(value, string_types):
                    value = value.strip()
                    if value:
                        value = value.split()
                    else:
                        value = []
            if self._legacy:
                self._legacy[key] = value
            else:
                self._data[key] = value

    @property
    def name_and_version(self):
        return _get_name_and_version(self.name, self.version, True)

    @property
    def provides(self):
        """The 'provides' list, guaranteed to include this release itself."""
        if self._legacy:
            result = self._legacy['Provides-Dist']
        else:
            result = self._data.setdefault('provides', [])
        s = '%s (%s)' % (self.name, self.version)
        if s not in result:
            result.append(s)
        return result

    @provides.setter
    def provides(self, value):
        if self._legacy:
            self._legacy['Provides-Dist'] = value
        else:
            self._data['provides'] = value

    def get_requirements(self, reqts, extras=None, env=None):
        """
        Base method to get dependencies, given a set of extras
        to satisfy and an optional environment context.

        :param reqts: A list of sometimes-wanted dependencies,
                      perhaps dependent on extras and environment.
        :param extras: A list of optional components being requested.
        :param env: An optional environment for marker evaluation.
        """
        if self._legacy:
            result = reqts
        else:
            result = []
            extras = get_extras(extras or [], self.extras)
            for d in reqts:
                if 'extra' not in d and 'environment' not in d:
                    # unconditional
                    include = True
                else:
                    if 'extra' not in d:
                        # Not extra-dependent - only environment-dependent
                        include = True
                    else:
                        include = d.get('extra') in extras
                    if include:
                        # Not excluded because of extras, check environment
                        marker = d.get('environment')
                        if marker:
                            include = interpret(marker, env)
                if include:
                    result.extend(d['requires'])
            for key in ('build', 'dev', 'test'):
                e = ':%s:' % key
                if e in extras:
                    extras.remove(e)
                    # A recursive call, but it should terminate since 'test'
                    # has been removed from the extras
                    reqts = self._data.get('%s_requires' % key, [])
                    result.extend(self.get_requirements(reqts, extras=extras,
                                                        env=env))
        return result

    @property
    def dictionary(self):
        if self._legacy:
            return self._from_legacy()
        return self._data

    @property
    def dependencies(self):
        if self._legacy:
            raise NotImplementedError
        else:
            return extract_by_key(self._data, self.DEPENDENCY_KEYS)

    @dependencies.setter
    def dependencies(self, value):
        if self._legacy:
            raise NotImplementedError
        else:
            self._data.update(value)

    def _validate_mapping(self, mapping, scheme):
        """Check metadata version, mandatory keys and per-key syntax."""
        if mapping.get('metadata_version') != self.METADATA_VERSION:
            raise MetadataUnrecognizedVersionError()
        missing = []
        for key, exclusions in self.MANDATORY_KEYS.items():
            if key not in mapping:
                if scheme not in exclusions:
                    missing.append(key)
        if missing:
            msg = 'Missing metadata items: %s' % ', '.join(missing)
            raise MetadataMissingError(msg)
        for k, v in mapping.items():
            self._validate_value(k, v, scheme)

    def validate(self):
        if self._legacy:
            missing, warnings = self._legacy.check(True)
            if missing or warnings:
                logger.warning('Metadata: missing: %s, warnings: %s',
                               missing, warnings)
        else:
            self._validate_mapping(self._data, self.scheme)

    def todict(self):
        if self._legacy:
            return self._legacy.todict(True)
        else:
            result = extract_by_key(self._data, self.INDEX_KEYS)
            return result

    def _from_legacy(self):
        """Convert wrapped legacy (key-value) metadata to a 2.0 dict."""
        assert self._legacy and not self._data
        result = {
            'metadata_version': self.METADATA_VERSION,
            'generator': self.GENERATOR,
        }
        lmd = self._legacy.todict(True)     # skip missing ones
        for k in ('name', 'version', 'license', 'summary', 'description',
                  'classifier'):
            if k in lmd:
                if k == 'classifier':
                    nk = 'classifiers'
                else:
                    nk = k
                result[nk] = lmd[k]
        kw = lmd.get('Keywords', [])
        if kw == ['']:
            kw = []
        result['keywords'] = kw
        keys = (('requires_dist', 'run_requires'),
                ('setup_requires_dist', 'build_requires'))
        for ok, nk in keys:
            if ok in lmd and lmd[ok]:
                result[nk] = [{'requires': lmd[ok]}]
        result['provides'] = self.provides
        # NOTE(review): the original assigned empty, unused ``author`` and
        # ``maintainer`` dicts here — presumably placeholders for contact
        # conversion; removed as dead locals.
        return result

    LEGACY_MAPPING = {
        'name': 'Name',
        'version': 'Version',
        'license': 'License',
        'summary': 'Summary',
        'description': 'Description',
        'classifiers': 'Classifier',
    }

    def _to_legacy(self):
        """Convert 2.0 JSON metadata to a LegacyMetadata instance."""
        def process_entries(entries):
            # Flatten requirement entries into 'req;marker' strings.
            reqts = set()
            for e in entries:
                extra = e.get('extra')
                env = e.get('environment')
                rlist = e['requires']
                for r in rlist:
                    if not env and not extra:
                        reqts.add(r)
                    else:
                        marker = ''
                        if extra:
                            marker = 'extra == "%s"' % extra
                        if env:
                            if marker:
                                marker = '(%s) and %s' % (env, marker)
                            else:
                                marker = env
                        reqts.add(';'.join((r, marker)))
            return reqts

        assert self._data and not self._legacy
        result = LegacyMetadata()
        nmd = self._data
        for nk, ok in self.LEGACY_MAPPING.items():
            if nk in nmd:
                result[ok] = nmd[nk]
        r1 = process_entries(self.run_requires + self.meta_requires)
        r2 = process_entries(self.build_requires + self.dev_requires)
        if self.extras:
            result['Provides-Extra'] = sorted(self.extras)
        result['Requires-Dist'] = sorted(r1)
        result['Setup-Requires-Dist'] = sorted(r2)
        # TODO: other fields such as contacts
        return result

    def write(self, path=None, fileobj=None, legacy=False, skip_unknown=True):
        """Write metadata to exactly one of ``path``/``fileobj``, either in
        legacy key-value format or as JSON."""
        if [path, fileobj].count(None) != 1:
            raise ValueError('Exactly one of path and fileobj is needed')
        self.validate()
        if legacy:
            if self._legacy:
                legacy_md = self._legacy
            else:
                legacy_md = self._to_legacy()
            if path:
                legacy_md.write(path, skip_unknown=skip_unknown)
            else:
                legacy_md.write_file(fileobj, skip_unknown=skip_unknown)
        else:
            if self._legacy:
                d = self._from_legacy()
            else:
                d = self._data
            if fileobj:
                json.dump(d, fileobj, ensure_ascii=True, indent=2,
                          sort_keys=True)
            else:
                with codecs.open(path, 'w', 'utf-8') as f:
                    json.dump(d, f, ensure_ascii=True, indent=2,
                              sort_keys=True)

    def add_requirements(self, requirements):
        """Merge ``requirements`` into the unconditional run_requires entry
        (creating one if none exists)."""
        if self._legacy:
            self._legacy.add_requirements(requirements)
        else:
            run_requires = self._data.setdefault('run_requires', [])
            always = None
            for entry in run_requires:
                if 'environment' not in entry and 'extra' not in entry:
                    always = entry
                    break
            if always is None:
                always = {'requires': requirements}
                run_requires.insert(0, always)
            else:
                rset = set(always['requires']) | set(requirements)
                always['requires'] = sorted(rset)

    def __repr__(self):
        name = self.name or '(no name)'
        version = self.version or 'no version'
        return '<%s %s %s (%s)>' % (self.__class__.__name__,
                                    self.metadata_version, name, version)
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012-2013 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""Parser for the environment markers micro-language defined in PEP 345."""

import ast
import os
import sys
import platform

from .compat import python_implementation, string_types
from .util import in_venv

__all__ = ['interpret']


class Evaluator(object):
    """
    A limited evaluator for Python expressions.
    """

    # Maps lowercased ast comparison-node names to their implementations.
    operators = {
        'eq': lambda x, y: x == y,
        'gt': lambda x, y: x > y,
        'gte': lambda x, y: x >= y,
        'in': lambda x, y: x in y,
        'lt': lambda x, y: x < y,
        'lte': lambda x, y: x <= y,
        'not': lambda x: not x,
        'noteq': lambda x, y: x != y,
        'notin': lambda x, y: x not in y,
    }

    # Names resolvable in marker expressions, captured at import time.
    allowed_values = {
        'sys_platform': sys.platform,
        'python_version': '%s.%s' % sys.version_info[:2],
        # parsing sys.platform is not reliable, but there is no other
        # way to get e.g. 2.7.2+, and the PEP is defined with sys.version
        'python_full_version': sys.version.split(' ', 1)[0],
        'os_name': os.name,
        'platform_in_venv': str(in_venv()),
        'platform_release': platform.release(),
        'platform_version': platform.version(),
        'platform_machine': platform.machine(),
        'platform_python_implementation': python_implementation(),
    }

    def __init__(self, context=None):
        """
        Initialise an instance.

        :param context: If specified, names are looked up in this mapping.
        """
        self.context = context or {}
        self.source = None

    def get_fragment(self, offset):
        """
        Get the part of the source which is causing a problem.
        """
        fragment_len = 10
        s = '%r' % (self.source[offset:offset + fragment_len])
        if offset + fragment_len < len(self.source):
            s += '...'
        return s

    def get_handler(self, node_type):
        """
        Get a handler for the specified AST node type.
        """
        return getattr(self, 'do_%s' % node_type, None)

    def evaluate(self, node, filename=None):
        """
        Evaluate a source string or node, using ``filename`` when
        displaying errors.
        """
        if isinstance(node, string_types):
            self.source = node
            kwargs = {'mode': 'eval'}
            if filename:
                kwargs['filename'] = filename
            try:
                node = ast.parse(node, **kwargs)
            except SyntaxError as e:
                s = self.get_fragment(e.offset)
                raise SyntaxError('syntax error %s' % s)
        node_type = node.__class__.__name__.lower()
        handler = self.get_handler(node_type)
        if handler is None:
            if self.source is None:
                s = '(source not available)'
            else:
                s = self.get_fragment(node.col_offset)
            raise SyntaxError("don't know how to evaluate %r %s" % (
                node_type, s))
        return handler(node)

    def get_attr_key(self, node):
        assert isinstance(node, ast.Attribute), 'attribute node expected'
        return '%s.%s' % (node.value.id, node.attr)

    def do_attribute(self, node):
        if not isinstance(node.value, ast.Name):
            # Bug fix: ``key`` used to be referenced below while unbound in
            # this branch, turning the intended SyntaxError into a NameError.
            key = None
            valid = False
        else:
            key = self.get_attr_key(node)
            valid = key in self.context or key in self.allowed_values
        if not valid:
            raise SyntaxError('invalid expression: %s' % key)
        if key in self.context:
            result = self.context[key]
        else:
            result = self.allowed_values[key]
        return result

    def do_boolop(self, node):
        # Short-circuit evaluation matching Python's and/or semantics.
        result = self.evaluate(node.values[0])
        is_or = node.op.__class__ is ast.Or
        is_and = node.op.__class__ is ast.And
        assert is_or or is_and
        if (is_and and result) or (is_or and not result):
            for n in node.values[1:]:
                result = self.evaluate(n)
                if (is_or and result) or (is_and and not result):
                    break
        return result

    def do_compare(self, node):
        def sanity_check(lhsnode, rhsnode):
            # Comparing two literals is meaningless in a marker.
            valid = True
            if isinstance(lhsnode, ast.Str) and isinstance(rhsnode, ast.Str):
                valid = False
            #elif (isinstance(lhsnode, ast.Attribute)
            #      and isinstance(rhsnode, ast.Attribute)):
            #    klhs = self.get_attr_key(lhsnode)
            #    krhs = self.get_attr_key(rhsnode)
            #    valid = klhs != krhs
            if not valid:
                s = self.get_fragment(node.col_offset)
                raise SyntaxError('Invalid comparison: %s' % s)

        lhsnode = node.left
        lhs = self.evaluate(lhsnode)
        result = True
        for op, rhsnode in zip(node.ops, node.comparators):
            sanity_check(lhsnode, rhsnode)
            op = op.__class__.__name__.lower()
            if op not in self.operators:
                raise SyntaxError('unsupported operation: %r' % op)
            rhs = self.evaluate(rhsnode)
            result = self.operators[op](lhs, rhs)
            if not result:
                break
            # Chained comparisons: the right operand becomes the next left.
            lhs = rhs
            lhsnode = rhsnode
        return result

    def do_expression(self, node):
        return self.evaluate(node.body)

    def do_name(self, node):
        valid = False
        if node.id in self.context:
            valid = True
            result = self.context[node.id]
        elif node.id in self.allowed_values:
            valid = True
            result = self.allowed_values[node.id]
        if not valid:
            raise SyntaxError('invalid expression: %s' % node.id)
        return result

    def do_str(self, node):
        return node.s


def interpret(marker, execution_context=None):
    """
    Interpret a marker and return a result depending on environment.

    :param marker: The marker to interpret.
    :type marker: str
    :param execution_context: The context used for name lookup.
    :type execution_context: mapping
    """
    return Evaluator(execution_context).evaluate(marker.strip())
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012-2013 Python Software Foundation.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""
Class representing the list of files in a distribution.

Equivalent to distutils.filelist, but fixes some problems.
"""
import fnmatch
import logging
import os
import re
import sys

from . import DistlibException
from .compat import fsdecode
from .util import convert_path


__all__ = ['Manifest']

logger = logging.getLogger(__name__)

# a \ followed by some spaces + EOL
# NOTE(review): the pattern actually matches a backslash followed by *word*
# characters (\w) and a newline, not spaces — kept as-is to preserve
# behaviour; raw string avoids the invalid '\w' escape in a plain literal.
_COLLAPSE_PATTERN = re.compile(r'\\\w*\n', re.M)
_COMMENTED_LINE = re.compile('#.*?(?=\n)|\n(?=$)', re.M | re.S)

#
# Due to the different results returned by fnmatch.translate, we need
# to do slightly different processing for Python 2.7 and 3.2 ... this needed
# to be brought in for Python 3.6 onwards.
#
_PYTHON_VERSION = sys.version_info[:2]


class Manifest(object):
    """
    A list of files built by on exploring the filesystem and filtered by
    applying various patterns to what we find there.
    """

    def __init__(self, base=None):
        """
        Initialise an instance.

        :param base: The base directory to explore under.
        """
        self.base = os.path.abspath(os.path.normpath(base or os.getcwd()))
        self.prefix = self.base + os.sep
        self.allfiles = None
        self.files = set()

    #
    # Public API
    #

    def findall(self):
        """Find all files under the base and set ``allfiles`` to the absolute
        pathnames of files found.
        """
        from stat import S_ISREG, S_ISDIR, S_ISLNK

        self.allfiles = allfiles = []
        root = self.base
        stack = [root]
        pop = stack.pop
        push = stack.append

        while stack:
            root = pop()
            names = os.listdir(root)

            for name in names:
                fullname = os.path.join(root, name)

                # Avoid excess stat calls -- just one will do, thank you!
                stat = os.stat(fullname)
                mode = stat.st_mode
                if S_ISREG(mode):
                    allfiles.append(fsdecode(fullname))
                elif S_ISDIR(mode) and not S_ISLNK(mode):
                    # NOTE(review): os.stat follows symlinks, so S_ISLNK on
                    # its result is presumably never true here — the guard
                    # looks ineffective; confirm intent before changing.
                    push(fullname)

    def add(self, item):
        """
        Add a file to the manifest.

        :param item: The pathname to add. This can be relative to the base.
        """
        if not item.startswith(self.prefix):
            item = os.path.join(self.base, item)
        self.files.add(os.path.normpath(item))

    def add_many(self, items):
        """
        Add a list of files to the manifest.

        :param items: The pathnames to add. These can be relative to the
                      base.
        """
        for item in items:
            self.add(item)

    def sorted(self, wantdirs=False):
        """
        Return sorted files in directory order
        """
        def add_dir(dirs, d):
            # Recursively record d and each of its ancestors up to the base.
            dirs.add(d)
            logger.debug('add_dir added %s', d)
            if d != self.base:
                parent, _ = os.path.split(d)
                assert parent not in ('', '/')
                add_dir(dirs, parent)

        result = set(self.files)    # make a copy!
        if wantdirs:
            dirs = set()
            for f in result:
                add_dir(dirs, os.path.dirname(f))
            result |= dirs
        return [os.path.join(*path_tuple) for path_tuple in
                sorted(os.path.split(path) for path in result)]

    def clear(self):
        """Clear all collected files."""
        self.files = set()
        self.allfiles = []

    def process_directive(self, directive):
        """
        Process a directive which either adds some files from ``allfiles`` to
        ``files``, or removes some files from ``files``.

        :param directive: The directive to process. This should be in a format
                     compatible with distutils ``MANIFEST.in`` files:

                     http://docs.python.org/distutils/sourcedist.html#commands
        """
        # Parse the line: split it up, make sure the right number of words
        # is there, and return the relevant words.  'action' is always
        # defined: it's the first word of the line.  Which of the other
        # three are defined depends on the action; it'll be either
        # patterns, (dir and patterns), or (dirpattern).
        action, patterns, thedir, dirpattern = self._parse_directive(directive)

        # OK, now we know that the action is valid and we have the
        # right number of words on the line for that action -- so we
        # can proceed with minimal error-checking.
        if action == 'include':
            for pattern in patterns:
                if not self._include_pattern(pattern, anchor=True):
                    logger.warning('no files found matching %r', pattern)

        elif action == 'exclude':
            for pattern in patterns:
                found = self._exclude_pattern(pattern, anchor=True)
                #if not found:
                #    logger.warning('no previously-included files '
                #                   'found matching %r', pattern)

        elif action == 'global-include':
            for pattern in patterns:
                if not self._include_pattern(pattern, anchor=False):
                    logger.warning('no files found matching %r '
                                   'anywhere in distribution', pattern)

        elif action == 'global-exclude':
            for pattern in patterns:
                found = self._exclude_pattern(pattern, anchor=False)
                #if not found:
                #    logger.warning('no previously-included files '
                #                   'matching %r found anywhere in '
                #                   'distribution', pattern)

        elif action == 'recursive-include':
            for pattern in patterns:
                if not self._include_pattern(pattern, prefix=thedir):
                    logger.warning('no files found matching %r '
                                   'under directory %r', pattern, thedir)

        elif action == 'recursive-exclude':
            for pattern in patterns:
                found = self._exclude_pattern(pattern, prefix=thedir)
                #if not found:
                #    logger.warning('no previously-included files '
                #                   'matching %r found under directory %r',
                #                   pattern, thedir)

        elif action == 'graft':
            if not self._include_pattern(None, prefix=dirpattern):
                logger.warning('no directories found matching %r',
                               dirpattern)

        elif action == 'prune':
            if not self._exclude_pattern(None, prefix=dirpattern):
                logger.warning('no previously-included directories found '
                               'matching %r', dirpattern)
        else:   # pragma: no cover
            # This should never happen, as it should be caught in
            # _parse_template_line
            raise DistlibException('invalid action %r' % action)

    #
    # Private API
    #

    def _parse_directive(self, directive):
        """
        Validate a directive.

        :param directive: The directive to validate.
        :return: A tuple of action, patterns, thedir, dir_patterns
        """
        words = directive.split()
        if len(words) == 1 and words[0] not in ('include', 'exclude',
                                                'global-include',
                                                'global-exclude',
                                                'recursive-include',
                                                'recursive-exclude',
                                                'graft', 'prune'):
            # no action given, let's use the default 'include'
            words.insert(0, 'include')

        action = words[0]
        patterns = thedir = dir_pattern = None

        if action in ('include', 'exclude',
                      'global-include', 'global-exclude'):
            if len(words) < 2:
                raise DistlibException(
                    '%r expects <pattern1> <pattern2> ...' % action)

            patterns = [convert_path(word) for word in words[1:]]

        elif action in ('recursive-include', 'recursive-exclude'):
            if len(words) < 3:
                raise DistlibException(
                    '%r expects <dir> <pattern1> <pattern2> ...' % action)

            thedir = convert_path(words[1])
            patterns = [convert_path(word) for word in words[2:]]

        elif action in ('graft', 'prune'):
            if len(words) != 2:
                raise DistlibException(
                    '%r expects a single <dir_pattern>' % action)

            dir_pattern = convert_path(words[1])

        else:
            raise DistlibException('unknown action %r' % action)

        return action, patterns, thedir, dir_pattern

    def _include_pattern(self, pattern, anchor=True, prefix=None,
                         is_regex=False):
        """Select strings (presumably filenames) from 'self.files' that
        match 'pattern', a Unix-style wildcard (glob) pattern.

        Patterns are not quite the same as implemented by the 'fnmatch'
        module: '*' and '?'  match non-special characters, where "special"
        is platform-dependent: slash on Unix; colon, slash, and backslash on
        DOS/Windows; and colon on Mac OS.

        If 'anchor' is true (the default), then the pattern match is more
        stringent: "*.py" will match "foo.py" but not "foo/bar.py".  If
        'anchor' is false, both of these will match.

        If 'prefix' is supplied, then only filenames starting with 'prefix'
        (itself a pattern) and ending with 'pattern', with anything in between
        them, will match.  'anchor' is ignored in this case.

        If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and
        'pattern' is assumed to be either a string containing a regex or a
        regex object -- no translation is done, the regex is just compiled
        and used as-is.

        Selected strings will be added to self.files.

        Return True if files are found.
        """
        # XXX docstring lying about what the special chars are?
        found = False
        pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex)

        # delayed loading of allfiles list
        if self.allfiles is None:
            self.findall()

        for name in self.allfiles:
            if pattern_re.search(name):
                self.files.add(name)
                found = True
        return found

    def _exclude_pattern(self, pattern, anchor=True, prefix=None,
                         is_regex=False):
        """Remove strings (presumably filenames) from 'files' that match
        'pattern'.

        Other parameters are the same as for 'include_pattern()', above.
        The list 'self.files' is modified in place. Return True if files are
        found.

        This API is public to allow e.g. exclusion of SCM subdirs, e.g. when
        packaging source distributions
        """
        found = False
        pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex)
        for f in list(self.files):
            if pattern_re.search(f):
                self.files.remove(f)
                found = True
        return found

    def _translate_pattern(self, pattern, anchor=True, prefix=None,
                           is_regex=False):
        """Translate a shell-like wildcard pattern to a compiled regular
        expression.

        Return the compiled regex.  If 'is_regex' true,
        then 'pattern' is directly compiled to a regex (if it's a string)
        or just returned as-is (assumes it's a regex object).
        """
        if is_regex:
            if isinstance(pattern, str):
                return re.compile(pattern)
            else:
                return pattern

        if _PYTHON_VERSION > (3, 2):
            # ditch start and end characters
            start, _, end = self._glob_to_re('_').partition('_')

        if pattern:
            pattern_re = self._glob_to_re(pattern)
            if _PYTHON_VERSION > (3, 2):
                assert pattern_re.startswith(start) and pattern_re.endswith(end)
        else:
            pattern_re = ''

        base = re.escape(os.path.join(self.base, ''))
        if prefix is not None:
            # ditch end of pattern character
            if _PYTHON_VERSION <= (3, 2):
                empty_pattern = self._glob_to_re('')
                prefix_re = self._glob_to_re(prefix)[:-len(empty_pattern)]
            else:
                prefix_re = self._glob_to_re(prefix)
                assert prefix_re.startswith(start) and prefix_re.endswith(end)
                prefix_re = prefix_re[len(start): len(prefix_re) - len(end)]
            sep = os.sep
            if os.sep == '\\':
                sep = r'\\'
            if _PYTHON_VERSION <= (3, 2):
                pattern_re = '^' + base + sep.join((prefix_re,
                                                    '.*' + pattern_re))
            else:
                pattern_re = pattern_re[len(start): len(pattern_re) - len(end)]
                pattern_re = r'%s%s%s%s.*%s%s' % (start, base, prefix_re, sep,
                                                  pattern_re, end)
        else:   # no prefix -- respect anchor flag
            if anchor:
                if _PYTHON_VERSION <= (3, 2):
                    pattern_re = '^' + base + pattern_re
                else:
                    pattern_re = r'%s%s%s' % (start, base,
                                              pattern_re[len(start):])

        return re.compile(pattern_re)

    def _glob_to_re(self, pattern):
        """Translate a shell-like glob pattern to a regular expression.

        Return a string containing the regex.  Differs from
        'fnmatch.translate()' in that '*' does not match "special characters"
        (which are platform-specific).
        """
        pattern_re = fnmatch.translate(pattern)

        # '?' and '*' in the glob pattern become '.' and '.*' in the RE, which
        # IMHO is wrong -- '?' and '*' aren't supposed to match slash in Unix,
        # and by extension they shouldn't match such "special characters" under
        # any OS.  So change all non-escaped dots in the RE to match any
        # character except the special characters (currently: just os.sep).
        sep = os.sep
        if os.sep == '\\':
            # we're using a regex to manipulate a regex, so we need
            # to escape the backslash twice
            sep = r'\\\\'
        escaped = r'\1[^%s]' % sep
        pattern_re = re.sub(r'((?<!\\)(\\\\)*)\.', escaped, pattern_re)
        return pattern_re
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012-2015 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#

import gzip
from io import BytesIO
import json
import logging
import os
import posixpath
import re
try:
    import threading
except ImportError:  # pragma: no cover
    import dummy_threading as threading
import zlib

from . import DistlibException
from .compat import (urljoin, urlparse, urlunparse, url2pathname, pathname2url,
                     queue, quote, unescape, string_types, build_opener,
                     HTTPRedirectHandler as BaseRedirectHandler, text_type,
                     Request, HTTPError, URLError)
from .database import Distribution, DistributionPath, make_dist
from .metadata import Metadata
from .util import (cached_property, parse_credentials, ensure_slash,
                   split_filename, get_project_data, parse_requirement,
                   parse_name_and_version, ServerProxy, normalize_name)
from .version import get_scheme, UnsupportedVersionError
from .wheel import Wheel, is_compatible

logger = logging.getLogger(__name__)

# Raw string: '\w' in a plain literal is an invalid escape sequence.
HASHER_HASH = re.compile(r'^(\w+)=([a-f0-9]+)')
CHARSET = re.compile(r';\s*charset\s*=\s*(.*)\s*$', re.I)
HTML_CONTENT_TYPE = re.compile('text/html|application/x(ht)?ml')
DEFAULT_INDEX = 'https://pypi.python.org/pypi'


def get_all_distribution_names(url=None):
    """
    Return all distribution names known by an index.
    :param url: The URL of the index.
    :return: A list of all known distribution names.
    """
    if url is None:
        url = DEFAULT_INDEX
    client = ServerProxy(url, timeout=3.0)
    return client.list_packages()


class RedirectHandler(BaseRedirectHandler):
    """
    A class to work around a bug in some Python 3.2.x releases.
    """
    # There's a bug in the base version for some 3.2.x
    # (e.g. 3.2.2 on Ubuntu Oneiric). If a Location header
    # returns e.g. /abc, it bails because it says the scheme ''
    # is bogus, when actually it should use the request's
    # URL for the scheme. See Python issue #13696.
    def http_error_302(self, req, fp, code, msg, headers):
        # Some servers (incorrectly) return multiple Location headers
        # (so probably same goes for URI). Use first header.
        newurl = None
        for key in ('location', 'uri'):
            if key in headers:
                newurl = headers[key]
                break
        if newurl is None:
            return
        urlparts = urlparse(newurl)
        if urlparts.scheme == '':
            newurl = urljoin(req.get_full_url(), newurl)
            if hasattr(headers, 'replace_header'):
                headers.replace_header(key, newurl)
            else:
                headers[key] = newurl
        return BaseRedirectHandler.http_error_302(self, req, fp, code, msg,
                                                  headers)

    http_error_301 = http_error_303 = http_error_307 = http_error_302


class Locator(object):
    """
    A base class for locators - things that locate distributions.
    """
    source_extensions = ('.tar.gz', '.tar.bz2', '.tar', '.zip', '.tgz', '.tbz')
    binary_extensions = ('.egg', '.exe', '.whl')
    excluded_extensions = ('.pdf',)

    # A list of tags indicating which wheels you want to match. The default
    # value of None matches against the tags compatible with the running
    # Python. If you want to match other values, set wheel_tags on a locator
    # instance to a list of tuples (pyver, abi, arch) which you want to match.
    wheel_tags = None

    downloadable_extensions = source_extensions + ('.whl',)

    def __init__(self, scheme='default'):
        """
        Initialise an instance.
        :param scheme: Because locators look for most recent versions, they
                       need to know the version scheme to use. This specifies
                       the current PEP-recommended scheme - use ``'legacy'``
                       if you need to support existing distributions on PyPI.
        """
        self._cache = {}
        self.scheme = scheme
        # Because of bugs in some of the handlers on some of the platforms,
        # we use our own opener rather than just using urlopen.
        self.opener = build_opener(RedirectHandler())
        # If get_project() is called from locate(), the matcher instance
        # is set from the requirement passed to locate(). See issue #18 for
        # why this can be useful to know.
        self.matcher = None
        self.errors = queue.Queue()

    def get_errors(self):
        """
        Return any errors which have occurred.
        """
        result = []
        while not self.errors.empty():  # pragma: no cover
            try:
                e = self.errors.get(False)
                result.append(e)
            except queue.Empty:
                # Bug fix: this previously caught ``self.errors.Empty`` —
                # Queue *instances* have no Empty attribute, so the except
                # clause itself raised AttributeError. The exception class
                # lives at module level on ``queue``.
                continue
            self.errors.task_done()
        return result

    def clear_errors(self):
        """
        Clear any errors which may have been logged.
        """
        # Just get the errors and throw them away
        self.get_errors()

    def clear_cache(self):
        self._cache.clear()

    def _get_scheme(self):
        return self._scheme

    def _set_scheme(self, value):
        self._scheme = value

    scheme = property(_get_scheme, _set_scheme)

    def _get_project(self, name):
        """
        For a given project, get a dictionary mapping available versions to
        Distribution instances.

        This should be implemented in subclasses.

        If called from a locate() request, self.matcher will be set to a
        matcher for the requirement to satisfy, otherwise it will be None.
        """
        raise NotImplementedError('Please implement in the subclass')

    def get_distribution_names(self):
        """
        Return all the distribution names known to this locator.
        """
        raise NotImplementedError('Please implement in the subclass')

    def get_project(self, name):
        """
        For a given project, get a dictionary mapping available versions to
        Distribution instances.

        This calls _get_project to do all the work, and just implements a
        caching layer on top.
        """
        if self._cache is None:
            result = self._get_project(name)
        elif name in self._cache:
            result = self._cache[name]
        else:
            self.clear_errors()
            result = self._get_project(name)
            self._cache[name] = result
        return result

    def score_url(self, url):
        """
        Give an url a score which can be used to choose preferred URLs
        for a given project release.
        """
        t = urlparse(url)
        basename = posixpath.basename(t.path)
        compatible = True
        is_wheel = basename.endswith('.whl')
        if is_wheel:
            compatible = is_compatible(Wheel(basename), self.wheel_tags)
        return (t.scheme != 'https', 'pypi.python.org' in t.netloc,
                is_wheel, compatible, basename)

    def prefer_url(self, url1, url2):
        """
        Choose one of two URLs where both are candidates for distribution
        archives for the same version of a distribution (for example,
        .tar.gz vs. zip).

        The current implementation favours https:// URLs over http://, archives
        from PyPI over those from other locations, wheel compatibility (if a
        wheel) and then the archive name.
        """
        result = url2
        if url1:
            s1 = self.score_url(url1)
            s2 = self.score_url(url2)
            if s1 > s2:
                result = url1
            if result != url2:
                logger.debug('Not replacing %r with %r', url1, url2)
            else:
                logger.debug('Replacing %r with %r', url1, url2)
        return result

    def split_filename(self, filename, project_name):
        """
        Attempt to split a filename in project name, version and Python
        version.
        """
        return split_filename(filename, project_name)

    def convert_url_to_download_info(self, url, project_name):
        """
        See if a URL is a candidate for a download URL for a project (the URL
        has typically been scraped from an HTML page).

        If it is, a dictionary is returned with keys "name", "version",
        "filename" and "url"; otherwise, None is returned.
        """
        def same_project(name1, name2):
            return normalize_name(name1) == normalize_name(name2)

        result = None
        scheme, netloc, path, params, query, frag = urlparse(url)
        if frag.lower().startswith('egg='):
            logger.debug('%s: version hint in fragment: %r',
                         project_name, frag)
        m = HASHER_HASH.match(frag)
        if m:
            algo, digest = m.groups()
        else:
            algo, digest = None, None
        origpath = path
        if path and path[-1] == '/':
            path = path[:-1]
        if path.endswith('.whl'):
            try:
                wheel = Wheel(path)
                if is_compatible(wheel, self.wheel_tags):
                    if project_name is None:
                        include = True
                    else:
                        include = same_project(wheel.name, project_name)
                    if include:
                        result = {
                            'name': wheel.name,
                            'version': wheel.version,
                            'filename': wheel.filename,
                            'url': urlunparse((scheme, netloc, origpath,
                                               params, query, '')),
                            'python-version': ', '.join(
                                ['.'.join(list(v[2:])) for v in wheel.pyver]),
                        }
            except Exception:   # pragma: no cover
                logger.warning('invalid path for wheel: %s', path)
        elif path.endswith(self.downloadable_extensions):
            path = filename = posixpath.basename(path)
            for ext in self.downloadable_extensions:
                if path.endswith(ext):
                    path = path[:-len(ext)]
                    t = self.split_filename(path, project_name)
                    if not t:
                        logger.debug('No match for project/version: %s', path)
                    else:
                        name, version, pyver = t
                        if not project_name or same_project(project_name,
                                                            name):
                            result = {
                                'name': name,
                                'version': version,
                                'filename': filename,
                                'url': urlunparse((scheme, netloc, origpath,
                                                   params, query, '')),
                                #'packagetype': 'sdist',
                            }
                            if pyver:
                                result['python-version'] = pyver
                    break
        if result and algo:
            result['%s_digest' % algo] = digest
        return result

    def _get_digest(self, info):
        """
        Get a digest from a dictionary by looking at keys of the form
        'algo_digest'.

        Returns a 2-tuple (algo, digest) if found, else None. Currently
        looks only for SHA256, then MD5.
        """
        result = None
        for algo in ('sha256', 'md5'):
            key = '%s_digest' % algo
            if key in info:
                result = (algo, info[key])
                break
        return result

    def _update_version_data(self, result, info):
        """
        Update a result dictionary (the final result from _get_project) with a
        dictionary for a specific version, which typically holds information
        gleaned from a filename or URL for an archive for the distribution.
        """
        name = info.pop('name')
        version = info.pop('version')
        if version in result:
            dist = result[version]
            md = dist.metadata
        else:
            dist = make_dist(name, version, scheme=self.scheme)
            md = dist.metadata
        dist.digest = digest = self._get_digest(info)
        url = info['url']
        result['digests'][url] = digest
        if md.source_url != info['url']:
            md.source_url = self.prefer_url(md.source_url, url)
        result['urls'].setdefault(version, set()).add(url)
        dist.locator = self
        result[version] = dist

    # NOTE(review): the original file continues here with
    # ``Locator.locate(self, requirement, prereleases=False)``, whose body
    # extends beyond this source chunk; it is not reconstructed here.
Otherwise, pre-release versionsare not returned.:return: A :class:`Distribution` instance, or ``None`` if no suchdistribution could be located."""result = Noner = parse_requirement(requirement)if r is None:raise DistlibException('Not a valid requirement: %r' % requirement)scheme = get_scheme(self.scheme)self.matcher = matcher = scheme.matcher(r.requirement)logger.debug('matcher: %s (%s)', matcher, type(matcher).__name__)versions = self.get_project(r.name)if len(versions) > 2: # urls and digests keys are present# sometimes, versions are invalidslist = []vcls = matcher.version_classfor k in versions:if k in ('urls', 'digests'):continuetry:if not matcher.match(k):logger.debug('%s did not match %r', matcher, k)else:if prereleases or not vcls(k).is_prerelease:slist.append(k)else:logger.debug('skipping pre-release ''version %s of %s', k, matcher.name)except Exception: # pragma: no coverlogger.warning('error matching %s with %r', matcher, k)pass # slist.append(k)if len(slist) > 1:slist = sorted(slist, key=scheme.key)if slist:logger.debug('sorted list: %s', slist)version = slist[-1]result = versions[version]if result:if r.extras:result.extras = r.extrasresult.download_urls = versions.get('urls', {}).get(version, set())d = {}sd = versions.get('digests', {})for url in result.download_urls:if url in sd:d[url] = sd[url]result.digests = dself.matcher = Nonereturn resultclass PyPIRPCLocator(Locator):"""This locator uses XML-RPC to locate distributions. 
class PyPIRPCLocator(Locator):
    """
    This locator uses XML-RPC to locate distributions. It therefore
    cannot be used with simple mirrors (that only mirror file content).
    """
    def __init__(self, url, **kwargs):
        """
        Initialise an instance.

        :param url: The URL to use for XML-RPC.
        :param kwargs: Passed to the superclass constructor.
        """
        super(PyPIRPCLocator, self).__init__(**kwargs)
        self.base_url = url
        self.client = ServerProxy(url, timeout=3.0)

    def get_distribution_names(self):
        """
        Return all the distribution names known to this locator.
        """
        return set(self.client.list_packages())

    def _get_project(self, name):
        result = {'urls': {}, 'digests': {}}
        for version in self.client.package_releases(name, True):
            release_urls = self.client.release_urls(name, version)
            data = self.client.release_data(name, version)
            metadata = Metadata(scheme=self.scheme)
            metadata.name = data['name']
            metadata.version = data['version']
            metadata.license = data.get('license')
            metadata.keywords = data.get('keywords', [])
            metadata.summary = data.get('summary')
            dist = Distribution(metadata)
            if release_urls:
                # Use the first URL as the canonical source for the dist.
                first = release_urls[0]
                metadata.source_url = first['url']
                dist.digest = self._get_digest(first)
            dist.locator = self
            result[version] = dist
            for info in release_urls:
                url = info['url']
                digest = self._get_digest(info)
                result['urls'].setdefault(version, set()).add(url)
                result['digests'][url] = digest
        return result
class PyPIJSONLocator(Locator):
    """
    This locator uses PyPI's JSON interface. It's very limited in functionality
    and probably not worth using.
    """
    def __init__(self, url, **kwargs):
        super(PyPIJSONLocator, self).__init__(**kwargs)
        self.base_url = ensure_slash(url)

    def get_distribution_names(self):
        """
        Return all the distribution names known to this locator.
        """
        raise NotImplementedError('Not available from this locator')

    def _get_project(self, name):
        result = {'urls': {}, 'digests': {}}
        url = urljoin(self.base_url, '%s/json' % quote(name))
        try:
            resp = self.opener.open(url)
            data = resp.read().decode()  # for now
            payload = json.loads(data)
            metadata = Metadata(scheme=self.scheme)
            info = payload['info']
            metadata.name = info['name']
            metadata.version = info['version']
            metadata.license = info.get('license')
            metadata.keywords = info.get('keywords', [])
            metadata.summary = info.get('summary')
            dist = Distribution(metadata)
            dist.locator = self
            urls = payload['urls']
            result[metadata.version] = dist
            for url_info in payload['urls']:
                dl_url = url_info['url']
                dist.download_urls.add(dl_url)
                dist.digests[dl_url] = self._get_digest(url_info)
                result['urls'].setdefault(metadata.version, set()).add(dl_url)
                result['digests'][dl_url] = self._get_digest(url_info)
            # Now pick up the remaining releases.
            for version, infos in payload['releases'].items():
                if version == metadata.version:
                    continue    # already done
                other_md = Metadata(scheme=self.scheme)
                other_md.name = metadata.name
                other_md.version = version
                other_dist = Distribution(other_md)
                other_dist.locator = self
                result[version] = other_dist
                for url_info in infos:
                    dl_url = url_info['url']
                    other_dist.download_urls.add(dl_url)
                    other_dist.digests[dl_url] = self._get_digest(url_info)
                    result['urls'].setdefault(version, set()).add(dl_url)
                    result['digests'][dl_url] = self._get_digest(url_info)
        except Exception as e:
            # Record the failure rather than propagating it; callers can
            # inspect it via get_errors().
            self.errors.put(text_type(e))
            logger.exception('JSON fetch failed: %s', e)
        return result
class Page(object):
    """
    This class represents a scraped HTML page.
    """
    # The following slightly hairy-looking regex just looks for the contents of
    # an anchor link, which has an attribute "href" either immediately preceded
    # or immediately followed by a "rel" attribute. The attribute values can be
    # declared with double quotes, single quotes or no quotes - which leads to
    # the length of the expression.
    _href = re.compile("""
(rel\s*=\s*(?:"(?P<rel1>[^"]*)"|'(?P<rel2>[^']*)'|(?P<rel3>[^>\s\n]*))\s+)?
href\s*=\s*(?:"(?P<url1>[^"]*)"|'(?P<url2>[^']*)'|(?P<url3>[^>\s\n]*))
(\s+rel\s*=\s*(?:"(?P<rel4>[^"]*)"|'(?P<rel5>[^']*)'|(?P<rel6>[^>\s\n]*)))?
""", re.I | re.S | re.X)
    _base = re.compile(r"""<base\s+href\s*=\s*['"]?([^'">]+)""", re.I | re.S)

    def __init__(self, data, url):
        """
        Initialise an instance with the Unicode page contents and the URL they
        came from.
        """
        self.data = data
        self.base_url = self.url = url
        # Honour a <base href="..."> if the page declares one.
        m = self._base.search(self.data)
        if m:
            self.base_url = m.group(1)

    _clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I)

    @cached_property
    def links(self):
        """
        Return the URLs of all the links on a page together with information
        about their "rel" attribute, for determining which ones to treat as
        downloads and which ones to queue for further scraping.
        """
        def clean(url):
            "Tidy up an URL."
            scheme, netloc, path, params, query, frag = urlparse(url)
            return urlunparse((scheme, netloc, quote(path),
                               params, query, frag))

        result = set()
        for match in self._href.finditer(self.data):
            groups = match.groupdict('')
            rel = (groups['rel1'] or groups['rel2'] or groups['rel3'] or
                   groups['rel4'] or groups['rel5'] or groups['rel6'])
            url = groups['url1'] or groups['url2'] or groups['url3']
            url = urljoin(self.base_url, url)
            url = unescape(url)
            url = self._clean_re.sub(lambda m: '%%%2x' % ord(m.group(0)), url)
            result.add((url, rel))
        # We sort the result, hoping to bring the most recent versions
        # to the front
        result = sorted(result, key=lambda t: t[0], reverse=True)
        return result
These are used to deal with various Content-Encoding schemes.decoders = {'deflate': zlib.decompress,'gzip': lambda b: gzip.GzipFile(fileobj=BytesIO(d)).read(),'none': lambda b: b,}def __init__(self, url, timeout=None, num_workers=10, **kwargs):"""Initialise an instance.:param url: The root URL to use for scraping.:param timeout: The timeout, in seconds, to be applied to requests.This defaults to ``None`` (no timeout specified).:param num_workers: The number of worker threads you want to do I/O,This defaults to 10.:param kwargs: Passed to the superclass."""super(SimpleScrapingLocator, self).__init__(**kwargs)self.base_url = ensure_slash(url)self.timeout = timeoutself._page_cache = {}self._seen = set()self._to_fetch = queue.Queue()self._bad_hosts = set()self.skip_externals = Falseself.num_workers = num_workersself._lock = threading.RLock()# See issue #45: we need to be resilient when the locator is used# in a thread, e.g. with concurrent.futures. We can't use self._lock# as it is for coordinating our internal threads - the ones created# in _prepare_threads.self._gplock = threading.RLock()def _prepare_threads(self):"""Threads are created only when get_project is called, and terminatebefore it returns. 
They are there primarily to parallelise I/O (i.e.fetching web pages)."""self._threads = []for i in range(self.num_workers):t = threading.Thread(target=self._fetch)t.setDaemon(True)t.start()self._threads.append(t)def _wait_threads(self):"""Tell all the threads to terminate (by sending a sentinel value) andwait for them to do so."""# Note that you need two loops, since you can't say which# thread will get each sentinelfor t in self._threads:self._to_fetch.put(None) # sentinelfor t in self._threads:t.join()self._threads = []def _get_project(self, name):result = {'urls': {}, 'digests': {}}with self._gplock:self.result = resultself.project_name = nameurl = urljoin(self.base_url, '%s/' % quote(name))self._seen.clear()self._page_cache.clear()self._prepare_threads()try:logger.debug('Queueing %s', url)self._to_fetch.put(url)self._to_fetch.join()finally:self._wait_threads()del self.resultreturn resultplatform_dependent = re.compile(r'\b(linux-(i\d86|x86_64|arm\w+)|'r'win(32|-amd64)|macosx-?\d+)\b', re.I)def _is_platform_dependent(self, url):"""Does an URL refer to a platform-specific download?"""return self.platform_dependent.search(url)def _process_download(self, url):"""See if an URL is a suitable download for a project.If it is, register information in the result dictionary (for_get_project) about the specific version it's for.Note that the return value isn't actually used other than as a booleanvalue."""if self._is_platform_dependent(url):info = Noneelse:info = self.convert_url_to_download_info(url, self.project_name)logger.debug('process_download: %s -> %s', url, info)if info:with self._lock: # needed because self.result is sharedself._update_version_data(self.result, info)return infodef _should_queue(self, link, referrer, rel):"""Determine whether a link URL from a referring page and with aparticular "rel" attribute should be queued for scraping."""scheme, netloc, path, _, _, _ = urlparse(link)if path.endswith(self.source_extensions + self.binary_extensions 
+self.excluded_extensions):result = Falseelif self.skip_externals and not link.startswith(self.base_url):result = Falseelif not referrer.startswith(self.base_url):result = Falseelif rel not in ('homepage', 'download'):result = Falseelif scheme not in ('http', 'https', 'ftp'):result = Falseelif self._is_platform_dependent(link):result = Falseelse:host = netloc.split(':', 1)[0]if host.lower() == 'localhost':result = Falseelse:result = Truelogger.debug('should_queue: %s (%s) from %s -> %s', link, rel,referrer, result)return resultdef _fetch(self):"""Get a URL to fetch from the work queue, get the HTML page, examine itslinks for download candidates and candidates for further scraping.This is a handy method to run in a thread."""while True:url = self._to_fetch.get()try:if url:page = self.get_page(url)if page is None: # e.g. after an errorcontinuefor link, rel in page.links:if link not in self._seen:self._seen.add(link)if (not self._process_download(link) andself._should_queue(link, url, rel)):logger.debug('Queueing %s from %s', link, url)self._to_fetch.put(link)except Exception as e: # pragma: no coverself.errors.put(text_type(e))finally:# always do this, to avoid hangs :-)self._to_fetch.task_done()if not url:#logger.debug('Sentinel seen, quitting.')breakdef get_page(self, url):"""Get the HTML for an URL, possibly from an in-memory cache.XXX TODO Note: this cache is never actually cleared. 
It's assumed thatthe data won't get stale over the lifetime of a locator instance (notnecessarily true for the default_locator)."""# http://peak.telecommunity.com/DevCenter/EasyInstall#package-index-apischeme, netloc, path, _, _, _ = urlparse(url)if scheme == 'file' and os.path.isdir(url2pathname(path)):url = urljoin(ensure_slash(url), 'index.html')if url in self._page_cache:result = self._page_cache[url]logger.debug('Returning %s from cache: %s', url, result)else:host = netloc.split(':', 1)[0]result = Noneif host in self._bad_hosts:logger.debug('Skipping %s due to bad host %s', url, host)else:req = Request(url, headers={'Accept-encoding': 'identity'})try:logger.debug('Fetching %s', url)resp = self.opener.open(req, timeout=self.timeout)logger.debug('Fetched %s', url)headers = resp.info()content_type = headers.get('Content-Type', '')if HTML_CONTENT_TYPE.match(content_type):final_url = resp.geturl()data = resp.read()encoding = headers.get('Content-Encoding')if encoding:decoder = self.decoders[encoding] # fail if not founddata = decoder(data)encoding = 'utf-8'm = CHARSET.search(content_type)if m:encoding = m.group(1)try:data = data.decode(encoding)except UnicodeError: # pragma: no coverdata = data.decode('latin-1') # fallbackresult = Page(data, final_url)self._page_cache[final_url] = resultexcept HTTPError as e:if e.code != 404:logger.exception('Fetch failed: %s: %s', url, e)except URLError as e: # pragma: no coverlogger.exception('Fetch failed: %s: %s', url, e)with self._lock:self._bad_hosts.add(host)except Exception as e: # pragma: no coverlogger.exception('Fetch failed: %s: %s', url, e)finally:self._page_cache[url] = result # even if None (failure)return result_distname_re = re.compile('<a href=[^>]*>([^<]+)<')def get_distribution_names(self):"""Return all the distribution names known to this locator."""result = set()page = self.get_page(self.base_url)if not page:raise DistlibException('Unable to get %s' % self.base_url)for match in 
self._distname_re.finditer(page.data):result.add(match.group(1))return resultclass DirectoryLocator(Locator):"""This class locates distributions in a directory tree."""def __init__(self, path, **kwargs):"""Initialise an instance.:param path: The root of the directory tree to search.:param kwargs: Passed to the superclass constructor,except for:* recursive - if True (the default), subdirectories arerecursed into. If False, only the top-level directoryis searched,"""self.recursive = kwargs.pop('recursive', True)super(DirectoryLocator, self).__init__(**kwargs)path = os.path.abspath(path)if not os.path.isdir(path): # pragma: no coverraise DistlibException('Not a directory: %r' % path)self.base_dir = pathdef should_include(self, filename, parent):"""Should a filename be considered as a candidate for a distributionarchive? As well as the filename, the directory which contains itis provided, though not used by the current implementation."""return filename.endswith(self.downloadable_extensions)def _get_project(self, name):result = {'urls': {}, 'digests': {}}for root, dirs, files in os.walk(self.base_dir):for fn in files:if self.should_include(fn, root):fn = os.path.join(root, fn)url = urlunparse(('file', '',pathname2url(os.path.abspath(fn)),'', '', ''))info = self.convert_url_to_download_info(url, name)if info:self._update_version_data(result, info)if not self.recursive:breakreturn resultdef get_distribution_names(self):"""Return all the distribution names known to this locator."""result = set()for root, dirs, files in os.walk(self.base_dir):for fn in files:if self.should_include(fn, root):fn = os.path.join(root, fn)url = urlunparse(('file', '',pathname2url(os.path.abspath(fn)),'', '', ''))info = self.convert_url_to_download_info(url, None)if info:result.add(info['name'])if not self.recursive:breakreturn resultclass JSONLocator(Locator):"""This locator uses special extended metadata (not available on PyPI) and isthe basis of performant dependency resolution in distlib. 
Other locatorsrequire archive downloads before dependencies can be determined! As youmight imagine, that can be slow."""def get_distribution_names(self):"""Return all the distribution names known to this locator."""raise NotImplementedError('Not available from this locator')def _get_project(self, name):result = {'urls': {}, 'digests': {}}data = get_project_data(name)if data:for info in data.get('files', []):if info['ptype'] != 'sdist' or info['pyversion'] != 'source':continue# We don't store summary in project metadata as it makes# the data bigger for no benefit during dependency# resolutiondist = make_dist(data['name'], info['version'],summary=data.get('summary','Placeholder for summary'),scheme=self.scheme)md = dist.metadatamd.source_url = info['url']# TODO SHA256 digestif 'digest' in info and info['digest']:dist.digest = ('md5', info['digest'])md.dependencies = info.get('requirements', {})dist.exports = info.get('exports', {})result[dist.version] = distresult['urls'].setdefault(dist.version, set()).add(info['url'])return resultclass DistPathLocator(Locator):"""This locator finds installed distributions in a path. 
It can be useful foradding to an :class:`AggregatingLocator`."""def __init__(self, distpath, **kwargs):"""Initialise an instance.:param distpath: A :class:`DistributionPath` instance to search."""super(DistPathLocator, self).__init__(**kwargs)assert isinstance(distpath, DistributionPath)self.distpath = distpathdef _get_project(self, name):dist = self.distpath.get_distribution(name)if dist is None:result = {'urls': {}, 'digests': {}}else:result = {dist.version: dist,'urls': {dist.version: set([dist.source_url])},'digests': {dist.version: set([None])}}return resultclass AggregatingLocator(Locator):"""This class allows you to chain and/or merge a list of locators."""def __init__(self, *locators, **kwargs):"""Initialise an instance.:param locators: The list of locators to search.:param kwargs: Passed to the superclass constructor,except for:* merge - if False (the default), the first successfulsearch from any of the locators is returned. If True,the results from all locators are merged (this can beslow)."""self.merge = kwargs.pop('merge', False)self.locators = locatorssuper(AggregatingLocator, self).__init__(**kwargs)def clear_cache(self):super(AggregatingLocator, self).clear_cache()for locator in self.locators:locator.clear_cache()def _set_scheme(self, value):self._scheme = valuefor locator in self.locators:locator.scheme = valuescheme = property(Locator.scheme.fget, _set_scheme)def _get_project(self, name):result = {}for locator in self.locators:d = locator.get_project(name)if d:if self.merge:files = result.get('urls', {})digests = result.get('digests', {})# next line could overwrite result['urls'], result['digests']result.update(d)df = result.get('urls')if files and df:for k, v in files.items():if k in df:df[k] |= velse:df[k] = vdd = result.get('digests')if digests and dd:dd.update(digests)else:# See issue #18. If any dists are found and we're looking# for specific constraints, we only return something if# a match is found. 
For example, if a DirectoryLocator# returns just foo (1.0) while we're looking for# foo (>= 2.0), we'll pretend there was nothing there so# that subsequent locators can be queried. Otherwise we# would just return foo (1.0) which would then lead to a# failure to find foo (>= 2.0), because other locators# weren't searched. Note that this only matters when# merge=False.if self.matcher is None:found = Trueelse:found = Falsefor k in d:if self.matcher.match(k):found = Truebreakif found:result = dbreakreturn resultdef get_distribution_names(self):"""Return all the distribution names known to this locator."""result = set()for locator in self.locators:try:result |= locator.get_distribution_names()except NotImplementedError:passreturn result# We use a legacy scheme simply because most of the dists on PyPI use legacy# versions which don't conform to PEP 426 / PEP 440.default_locator = AggregatingLocator(JSONLocator(),SimpleScrapingLocator('https://pypi.python.org/simple/',timeout=3.0),scheme='legacy')locate = default_locator.locateNAME_VERSION_RE = re.compile(r'(?P<name>[\w-]+)\s*'r'\(\s*(==\s*)?(?P<ver>[^)]+)\)$')class DependencyFinder(object):"""Locate dependencies for distributions."""def __init__(self, locator=None):"""Initialise an instance, using the specified locatorto locate distributions."""self.locator = locator or default_locatorself.scheme = get_scheme(self.locator.scheme)def add_distribution(self, dist):"""Add a distribution to the finder. This will update internal informationabout who provides what.:param dist: The distribution to add."""logger.debug('adding distribution %s', dist)name = dist.keyself.dists_by_name[name] = distself.dists[(name, dist.version)] = distfor p in dist.provides:name, version = parse_name_and_version(p)logger.debug('Add to provided: %s, %s, %s', name, version, dist)self.provided.setdefault(name, set()).add((version, dist))def remove_distribution(self, dist):"""Remove a distribution from the finder. 
This will update internalinformation about who provides what.:param dist: The distribution to remove."""logger.debug('removing distribution %s', dist)name = dist.keydel self.dists_by_name[name]del self.dists[(name, dist.version)]for p in dist.provides:name, version = parse_name_and_version(p)logger.debug('Remove from provided: %s, %s, %s', name, version, dist)s = self.provided[name]s.remove((version, dist))if not s:del self.provided[name]def get_matcher(self, reqt):"""Get a version matcher for a requirement.:param reqt: The requirement:type reqt: str:return: A version matcher (an instance of:class:`distlib.version.Matcher`)."""try:matcher = self.scheme.matcher(reqt)except UnsupportedVersionError: # pragma: no cover# XXX compat-mode if cannot read the versionname = reqt.split()[0]matcher = self.scheme.matcher(name)return matcherdef find_providers(self, reqt):"""Find the distributions which can fulfill a requirement.:param reqt: The requirement.:type reqt: str:return: A set of distribution which can fulfill the requirement."""matcher = self.get_matcher(reqt)name = matcher.key # case-insensitiveresult = set()provided = self.providedif name in provided:for version, provider in provided[name]:try:match = matcher.match(version)except UnsupportedVersionError:match = Falseif match:result.add(provider)breakreturn resultdef try_to_replace(self, provider, other, problems):"""Attempt to replace one provider with another. This is typically usedwhen resolving dependencies from multiple sources, e.g. A requires(B >= 1.0) while C requires (B >= 1.1).For successful replacement, ``provider`` must meet all the requirementswhich ``other`` fulfills.:param provider: The provider we are trying to replace with.:param other: The provider we're trying to replace.:param problems: If False is returned, this will contain whatproblems prevented replacement. 
This is currentlya tuple of the literal string 'cantreplace',``provider``, ``other`` and the set of requirementsthat ``provider`` couldn't fulfill.:return: True if we can replace ``other`` with ``provider``, elseFalse."""rlist = self.reqts[other]unmatched = set()for s in rlist:matcher = self.get_matcher(s)if not matcher.match(provider.version):unmatched.add(s)if unmatched:# can't replace other with providerproblems.add(('cantreplace', provider, other,frozenset(unmatched)))result = Falseelse:# can replace other with providerself.remove_distribution(other)del self.reqts[other]for s in rlist:self.reqts.setdefault(provider, set()).add(s)self.add_distribution(provider)result = Truereturn resultdef find(self, requirement, meta_extras=None, prereleases=False):"""Find a distribution and all distributions it depends on.:param requirement: The requirement specifying the distribution tofind, or a Distribution instance.:param meta_extras: A list of meta extras such as :test:, :build: andso on.:param prereleases: If ``True``, allow pre-release versions to bereturned - otherwise, don't return prereleasesunless they're all that's available.Return a set of :class:`Distribution` instances and a set ofproblems.The distributions returned should be such that they have the:attr:`required` attribute set to ``True`` if they werefrom the ``requirement`` passed to ``find()``, and they have the:attr:`build_time_dependency` attribute set to ``True`` unless theyare post-installation dependencies of the ``requirement``.The problems should be a tuple consisting of the string``'unsatisfied'`` and the requirement which couldn't be satisfiedby any distribution known to the locator."""self.provided = {}self.dists = {}self.dists_by_name = {}self.reqts = {}meta_extras = set(meta_extras or [])if ':*:' in meta_extras:meta_extras.remove(':*:')# :meta: and :run: are implicitly includedmeta_extras |= set([':test:', ':build:', ':dev:'])if isinstance(requirement, Distribution):dist = odist = 
requirementlogger.debug('passed %s as requirement', odist)else:dist = odist = self.locator.locate(requirement,prereleases=prereleases)if dist is None:raise DistlibException('Unable to locate %r' % requirement)logger.debug('located %s', odist)dist.requested = Trueproblems = set()todo = set([dist])install_dists = set([odist])while todo:dist = todo.pop()name = dist.key # case-insensitiveif name not in self.dists_by_name:self.add_distribution(dist)else:#import pdb; pdb.set_trace()other = self.dists_by_name[name]if other != dist:self.try_to_replace(dist, other, problems)ireqts = dist.run_requires | dist.meta_requiressreqts = dist.build_requiresereqts = set()if dist in install_dists:for key in ('test', 'build', 'dev'):e = ':%s:' % keyif e in meta_extras:ereqts |= getattr(dist, '%s_requires' % key)all_reqts = ireqts | sreqts | ereqtsfor r in all_reqts:providers = self.find_providers(r)if not providers:logger.debug('No providers found for %r', r)provider = self.locator.locate(r, prereleases=prereleases)# If no provider is found and we didn't consider# prereleases, consider them now.if provider is None and not prereleases:provider = self.locator.locate(r, prereleases=True)if provider is None:logger.debug('Cannot satisfy %r', r)problems.add(('unsatisfied', r))else:n, v = provider.key, provider.versionif (n, v) not in self.dists:todo.add(provider)providers.add(provider)if r in ireqts and dist in install_dists:install_dists.add(provider)logger.debug('Adding %s to install_dists',provider.name_and_version)for p in providers:name = p.keyif name not in self.dists_by_name:self.reqts.setdefault(p, set()).add(r)else:other = self.dists_by_name[name]if other != p:# see if other can be replaced by pself.try_to_replace(p, other, problems)dists = set(self.dists.values())for dist in dists:dist.build_time_dependency = dist not in install_distsif dist.build_time_dependency:logger.debug('%s is a build-time dependency only.',dist.name_and_version)logger.debug('find done for %s', odist)return 
dists, problems
# -*- coding: utf-8 -*-
#
# Copyright (C) 2013 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
import hashlib
import logging
import os
import shutil
import subprocess
import tempfile
try:
    from threading import Thread
except ImportError:
    from dummy_threading import Thread

from . import DistlibException
from .compat import (HTTPBasicAuthHandler, Request, HTTPPasswordMgr,
                     urlparse, build_opener, string_types)
from .util import cached_property, zip_dir, ServerProxy

logger = logging.getLogger(__name__)

DEFAULT_INDEX = 'https://pypi.python.org/pypi'
DEFAULT_REALM = 'pypi'


class PackageIndex(object):
    """
    This class represents a package index compatible with PyPI, the Python
    Package Index.
    """
    boundary = b'----------ThIs_Is_tHe_distlib_index_bouNdaRY_$'

    def __init__(self, url=None):
        """
        Initialise an instance.

        :param url: The URL of the index. If not specified, the URL for PyPI is
                    used.
        """
        self.url = url or DEFAULT_INDEX
        self.read_configuration()
        scheme, netloc, path, params, query, frag = urlparse(self.url)
        if params or query or frag or scheme not in ('http', 'https'):
            raise DistlibException('invalid repository: %s' % self.url)
        self.password_handler = None
        self.ssl_verifier = None
        self.gpg = None
        self.gpg_home = None
        self.rpc_proxy = None
        with open(os.devnull, 'w') as sink:
            # Use gpg by default rather than gpg2, as gpg2 insists on
            # prompting for passwords
            for s in ('gpg', 'gpg2'):
                try:
                    rc = subprocess.check_call([s, '--version'], stdout=sink,
                                               stderr=sink)
                    if rc == 0:
                        self.gpg = s
                        break
                # Fix: check_call raises CalledProcessError on a non-zero
                # exit status; previously only OSError was caught, so a
                # failing ``gpg --version`` crashed the constructor instead
                # of just skipping that binary.
                except (OSError, subprocess.CalledProcessError):
                    pass

    def _get_pypirc_command(self):
        """
        Get the distutils command for interacting with PyPI configurations.
        :return: the command.
        """
        from distutils.core import Distribution
        from distutils.config import PyPIRCCommand
        d = Distribution()
        return PyPIRCCommand(d)
This populates ``username``, ``password``,``realm`` and ``url`` attributes from the configuration."""# get distutils to do the workc = self._get_pypirc_command()c.repository = self.urlcfg = c._read_pypirc()self.username = cfg.get('username')self.password = cfg.get('password')self.realm = cfg.get('realm', 'pypi')self.url = cfg.get('repository', self.url)def save_configuration(self):"""Save the PyPI access configuration. You must have set ``username`` and``password`` attributes before calling this method.Again, distutils is used to do the actual work."""self.check_credentials()# get distutils to do the workc = self._get_pypirc_command()c._store_pypirc(self.username, self.password)def check_credentials(self):"""Check that ``username`` and ``password`` have been set, and raise anexception if not."""if self.username is None or self.password is None:raise DistlibException('username and password must be set')pm = HTTPPasswordMgr()_, netloc, _, _, _, _ = urlparse(self.url)pm.add_password(self.realm, netloc, self.username, self.password)self.password_handler = HTTPBasicAuthHandler(pm)def register(self, metadata):"""Register a distribution on PyPI, using the provided metadata.:param metadata: A :class:`Metadata` instance defining at least a nameand version number for the distribution to beregistered.:return: The HTTP response received from PyPI upon submission of therequest."""self.check_credentials()metadata.validate()d = metadata.todict()d[':action'] = 'verify'request = self.encode_request(d.items(), [])response = self.send_request(request)d[':action'] = 'submit'request = self.encode_request(d.items(), [])return self.send_request(request)def _reader(self, name, stream, outbuf):"""Thread runner for reading lines of from a subprocess into a buffer.:param name: The logical name of the stream (used for logging only).:param stream: The stream to read from. 
This will typically a pipeconnected to the output stream of a subprocess.:param outbuf: The list to append the read lines to."""while True:s = stream.readline()if not s:breaks = s.decode('utf-8').rstrip()outbuf.append(s)logger.debug('%s: %s' % (name, s))stream.close()def get_sign_command(self, filename, signer, sign_password,keystore=None):"""Return a suitable command for signing a file.:param filename: The pathname to the file to be signed.:param signer: The identifier of the signer of the file.:param sign_password: The passphrase for the signer'sprivate key used for signing.:param keystore: The path to a directory which contains the keysused in verification. If not specified, theinstance's ``gpg_home`` attribute is used instead.:return: The signing command as a list suitable to bepassed to :class:`subprocess.Popen`."""cmd = [self.gpg, '--status-fd', '2', '--no-tty']if keystore is None:keystore = self.gpg_homeif keystore:cmd.extend(['--homedir', keystore])if sign_password is not None:cmd.extend(['--batch', '--passphrase-fd', '0'])td = tempfile.mkdtemp()sf = os.path.join(td, os.path.basename(filename) + '.asc')cmd.extend(['--detach-sign', '--armor', '--local-user',signer, '--output', sf, filename])logger.debug('invoking: %s', ' '.join(cmd))return cmd, sfdef run_command(self, cmd, input_data=None):"""Run a command in a child process , passing it any input data specified.:param cmd: The command to run.:param input_data: If specified, this must be a byte string containingdata to be sent to the child process.:return: A tuple consisting of the subprocess' exit code, a list oflines read from the subprocess' ``stdout``, and a list oflines read from the subprocess' ``stderr``."""kwargs = {'stdout': subprocess.PIPE,'stderr': subprocess.PIPE,}if input_data is not None:kwargs['stdin'] = subprocess.PIPEstdout = []stderr = []p = subprocess.Popen(cmd, **kwargs)# We don't use communicate() here because we may need to# get clever with interacting with the commandt1 = 
Thread(target=self._reader, args=('stdout', p.stdout, stdout))t1.start()t2 = Thread(target=self._reader, args=('stderr', p.stderr, stderr))t2.start()if input_data is not None:p.stdin.write(input_data)p.stdin.close()p.wait()t1.join()t2.join()return p.returncode, stdout, stderrdef sign_file(self, filename, signer, sign_password, keystore=None):"""Sign a file.:param filename: The pathname to the file to be signed.:param signer: The identifier of the signer of the file.:param sign_password: The passphrase for the signer'sprivate key used for signing.:param keystore: The path to a directory which contains the keysused in signing. If not specified, the instance's``gpg_home`` attribute is used instead.:return: The absolute pathname of the file where the signature isstored."""cmd, sig_file = self.get_sign_command(filename, signer, sign_password,keystore)rc, stdout, stderr = self.run_command(cmd,sign_password.encode('utf-8'))if rc != 0:raise DistlibException('sign command failed with error ''code %s' % rc)return sig_filedef upload_file(self, metadata, filename, signer=None, sign_password=None,filetype='sdist', pyversion='source', keystore=None):"""Upload a release file to the index.:param metadata: A :class:`Metadata` instance defining at least a nameand version number for the file to be uploaded.:param filename: The pathname of the file to be uploaded.:param signer: The identifier of the signer of the file.:param sign_password: The passphrase for the signer'sprivate key used for signing.:param filetype: The type of the file being uploaded. This is thedistutils command which produced that file, e.g.``sdist`` or ``bdist_wheel``.:param pyversion: The version of Python which the release relatesto. For code compatible with any Python, this wouldbe ``source``, otherwise it would be e.g. ``3.2``.:param keystore: The path to a directory which contains the keysused in signing. 
If not specified, the instance's``gpg_home`` attribute is used instead.:return: The HTTP response received from PyPI upon submission of therequest."""self.check_credentials()if not os.path.exists(filename):raise DistlibException('not found: %s' % filename)metadata.validate()d = metadata.todict()sig_file = Noneif signer:if not self.gpg:logger.warning('no signing program available - not signed')else:sig_file = self.sign_file(filename, signer, sign_password,keystore)with open(filename, 'rb') as f:file_data = f.read()md5_digest = hashlib.md5(file_data).hexdigest()sha256_digest = hashlib.sha256(file_data).hexdigest()d.update({':action': 'file_upload','protocol_version': '1','filetype': filetype,'pyversion': pyversion,'md5_digest': md5_digest,'sha256_digest': sha256_digest,})files = [('content', os.path.basename(filename), file_data)]if sig_file:with open(sig_file, 'rb') as f:sig_data = f.read()files.append(('gpg_signature', os.path.basename(sig_file),sig_data))shutil.rmtree(os.path.dirname(sig_file))request = self.encode_request(d.items(), files)return self.send_request(request)def upload_documentation(self, metadata, doc_dir):"""Upload documentation to the index.:param metadata: A :class:`Metadata` instance defining at least a nameand version number for the documentation to beuploaded.:param doc_dir: The pathname of the directory which contains thedocumentation. 
This should be the directory thatcontains the ``index.html`` for the documentation.:return: The HTTP response received from PyPI upon submission of therequest."""self.check_credentials()if not os.path.isdir(doc_dir):raise DistlibException('not a directory: %r' % doc_dir)fn = os.path.join(doc_dir, 'index.html')if not os.path.exists(fn):raise DistlibException('not found: %r' % fn)metadata.validate()name, version = metadata.name, metadata.versionzip_data = zip_dir(doc_dir).getvalue()fields = [(':action', 'doc_upload'),('name', name), ('version', version)]files = [('content', name, zip_data)]request = self.encode_request(fields, files)return self.send_request(request)def get_verify_command(self, signature_filename, data_filename,keystore=None):"""Return a suitable command for verifying a file.:param signature_filename: The pathname to the file containing thesignature.:param data_filename: The pathname to the file containing thesigned data.:param keystore: The path to a directory which contains the keysused in verification. If not specified, theinstance's ``gpg_home`` attribute is used instead.:return: The verifying command as a list suitable to bepassed to :class:`subprocess.Popen`."""cmd = [self.gpg, '--status-fd', '2', '--no-tty']if keystore is None:keystore = self.gpg_homeif keystore:cmd.extend(['--homedir', keystore])cmd.extend(['--verify', signature_filename, data_filename])logger.debug('invoking: %s', ' '.join(cmd))return cmddef verify_signature(self, signature_filename, data_filename,keystore=None):"""Verify a signature for a file.:param signature_filename: The pathname to the file containing thesignature.:param data_filename: The pathname to the file containing thesigned data.:param keystore: The path to a directory which contains the keysused in verification. 
If not specified, theinstance's ``gpg_home`` attribute is used instead.:return: True if the signature was verified, else False."""if not self.gpg:raise DistlibException('verification unavailable because gpg ''unavailable')cmd = self.get_verify_command(signature_filename, data_filename,keystore)rc, stdout, stderr = self.run_command(cmd)if rc not in (0, 1):raise DistlibException('verify command failed with error ''code %s' % rc)return rc == 0def download_file(self, url, destfile, digest=None, reporthook=None):"""This is a convenience method for downloading a file from an URL.Normally, this will be a file from the index, though currentlyno check is made for this (i.e. a file can be downloaded fromanywhere).The method is just like the :func:`urlretrieve` function in thestandard library, except that it allows digest computation to bedone during download and checking that the downloaded datamatched any expected value.:param url: The URL of the file to be downloaded (assumed to beavailable via an HTTP GET request).:param destfile: The pathname where the downloaded file is to besaved.:param digest: If specified, this must be a (hasher, value)tuple, where hasher is the algorithm used (e.g.``'md5'``) and ``value`` is the expected value.:param reporthook: The same as for :func:`urlretrieve` in thestandard library."""if digest is None:digester = Nonelogger.debug('No digest specified')else:if isinstance(digest, (list, tuple)):hasher, digest = digestelse:hasher = 'md5'digester = getattr(hashlib, hasher)()logger.debug('Digest specified: %s' % digest)# The following code is equivalent to urlretrieve.# We need to do it this way so that we can compute the# digest of the file as we go.with open(destfile, 'wb') as dfp:# addinfourl is not a context manager on 2.x# so we have to use try/finallysfp = self.send_request(Request(url))try:headers = sfp.info()blocksize = 8192size = -1read = 0blocknum = 0if "content-length" in headers:size = int(headers["Content-Length"])if 
reporthook:reporthook(blocknum, blocksize, size)while True:block = sfp.read(blocksize)if not block:breakread += len(block)dfp.write(block)if digester:digester.update(block)blocknum += 1if reporthook:reporthook(blocknum, blocksize, size)finally:sfp.close()# check that we got the whole file, if we canif size >= 0 and read < size:raise DistlibException('retrieval incomplete: got only %d out of %d bytes'% (read, size))# if we have a digest, it must match.if digester:actual = digester.hexdigest()if digest != actual:raise DistlibException('%s digest mismatch for %s: expected ''%s, got %s' % (hasher, destfile,digest, actual))logger.debug('Digest verified: %s', digest)def send_request(self, req):"""Send a standard library :class:`Request` to PyPI and return itsresponse.:param req: The request to send.:return: The HTTP response from PyPI (a standard library HTTPResponse)."""handlers = []if self.password_handler:handlers.append(self.password_handler)if self.ssl_verifier:handlers.append(self.ssl_verifier)opener = build_opener(*handlers)return opener.open(req)def encode_request(self, fields, files):"""Encode fields and files for posting to an HTTP server.:param fields: The fields to send as a list of (fieldname, value)tuples.:param files: The files to send as a list of (fieldname, filename,file_bytes) tuple."""# Adapted from packaging, which in turn was adapted from# http://code.activestate.com/recipes/146306parts = []boundary = self.boundaryfor k, values in fields:if not isinstance(values, (list, tuple)):values = [values]for v in values:parts.extend((b'--' + boundary,('Content-Disposition: form-data; name="%s"' %k).encode('utf-8'),b'',v.encode('utf-8')))for key, filename, value in files:parts.extend((b'--' + boundary,('Content-Disposition: form-data; name="%s"; filename="%s"' %(key, filename)).encode('utf-8'),b'',value))parts.extend((b'--' + boundary + b'--', b''))body = b'\r\n'.join(parts)ct = b'multipart/form-data; boundary=' + boundaryheaders = {'Content-type': 
ct,'Content-length': str(len(body))}return Request(self.url, body, headers)def search(self, terms, operator=None):if isinstance(terms, string_types):terms = {'name': terms}if self.rpc_proxy is None:self.rpc_proxy = ServerProxy(self.url, timeout=3.0)return self.rpc_proxy.search(terms, operator or 'and')
# -*- coding: utf-8 -*-## Copyright (C) 2012-2016 The Python Software Foundation.# See LICENSE.txt and CONTRIBUTORS.txt.#"""PEP 376 implementation."""from __future__ import unicode_literalsimport base64import codecsimport contextlibimport hashlibimport loggingimport osimport posixpathimport sysimport zipimportfrom . import DistlibException, resourcesfrom .compat import StringIOfrom .version import get_scheme, UnsupportedVersionErrorfrom .metadata import Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAMEfrom .util import (parse_requirement, cached_property, parse_name_and_version,read_exports, write_exports, CSVReader, CSVWriter)__all__ = ['Distribution', 'BaseInstalledDistribution','InstalledDistribution', 'EggInfoDistribution','DistributionPath']logger = logging.getLogger(__name__)EXPORTS_FILENAME = 'pydist-exports.json'COMMANDS_FILENAME = 'pydist-commands.json'DIST_FILES = ('INSTALLER', METADATA_FILENAME, 'RECORD', 'REQUESTED','RESOURCES', EXPORTS_FILENAME, 'SHARED')DISTINFO_EXT = '.dist-info'class _Cache(object):"""A simple cache mapping names and .dist-info paths to distributions"""def __init__(self):"""Initialise an instance. There is normally one for each DistributionPath."""self.name = {}self.path = {}self.generated = Falsedef clear(self):"""Clear the cache, setting it to its initial state."""self.name.clear()self.path.clear()self.generated = Falsedef add(self, dist):"""Add a distribution to the cache.:param dist: The distribution to add."""if dist.path not in self.path:self.path[dist.path] = distself.name.setdefault(dist.key, []).append(dist)class DistributionPath(object):"""Represents a set of distributions installed on a path (typically sys.path)."""def __init__(self, path=None, include_egg=False):"""Create an instance from a path, optionally including legacy (distutils/setuptools/distribute) distributions.:param path: The path to use, as a list of directories. 
If not specified,sys.path is used.:param include_egg: If True, this instance will look for and return legacydistributions as well as those based on PEP 376."""if path is None:path = sys.pathself.path = pathself._include_dist = Trueself._include_egg = include_eggself._cache = _Cache()self._cache_egg = _Cache()self._cache_enabled = Trueself._scheme = get_scheme('default')def _get_cache_enabled(self):return self._cache_enableddef _set_cache_enabled(self, value):self._cache_enabled = valuecache_enabled = property(_get_cache_enabled, _set_cache_enabled)def clear_cache(self):"""Clears the internal cache."""self._cache.clear()self._cache_egg.clear()def _yield_distributions(self):"""Yield .dist-info and/or .egg(-info) distributions."""# We need to check if we've seen some resources already, because on# some Linux systems (e.g. some Debian/Ubuntu variants) there are# symlinks which alias other files in the environment.seen = set()for path in self.path:finder = resources.finder_for_path(path)if finder is None:continuer = finder.find('')if not r or not r.is_container:continuerset = sorted(r.resources)for entry in rset:r = finder.find(entry)if not r or r.path in seen:continueif self._include_dist and entry.endswith(DISTINFO_EXT):possible_filenames = [METADATA_FILENAME, WHEEL_METADATA_FILENAME]for metadata_filename in possible_filenames:metadata_path = posixpath.join(entry, metadata_filename)pydist = finder.find(metadata_path)if pydist:breakelse:continuewith contextlib.closing(pydist.as_stream()) as stream:metadata = Metadata(fileobj=stream, scheme='legacy')logger.debug('Found %s', r.path)seen.add(r.path)yield new_dist_class(r.path, metadata=metadata,env=self)elif self._include_egg and entry.endswith(('.egg-info','.egg')):logger.debug('Found %s', r.path)seen.add(r.path)yield old_dist_class(r.path, self)def _generate_cache(self):"""Scan the path for distributions and populate the cache withthose that are found."""gen_dist = not self._cache.generatedgen_egg = self._include_egg 
and not self._cache_egg.generatedif gen_dist or gen_egg:for dist in self._yield_distributions():if isinstance(dist, InstalledDistribution):self._cache.add(dist)else:self._cache_egg.add(dist)if gen_dist:self._cache.generated = Trueif gen_egg:self._cache_egg.generated = True@classmethoddef distinfo_dirname(cls, name, version):"""The *name* and *version* parameters are converted into theirfilename-escaped form, i.e. any ``'-'`` characters are replacedwith ``'_'`` other than the one in ``'dist-info'`` and the oneseparating the name from the version number.:parameter name: is converted to a standard distribution name by replacingany runs of non- alphanumeric characters with a single``'-'``.:type name: string:parameter version: is converted to a standard version string. Spacesbecome dots, and all other non-alphanumeric characters(except dots) become dashes, with runs of multipledashes condensed to a single dash.:type version: string:returns: directory name:rtype: string"""name = name.replace('-', '_')return '-'.join([name, version]) + DISTINFO_EXTdef get_distributions(self):"""Provides an iterator that looks for distributions and returns:class:`InstalledDistribution` or:class:`EggInfoDistribution` instances for each one of them.:rtype: iterator of :class:`InstalledDistribution` and:class:`EggInfoDistribution` instances"""if not self._cache_enabled:for dist in self._yield_distributions():yield distelse:self._generate_cache()for dist in self._cache.path.values():yield distif self._include_egg:for dist in self._cache_egg.path.values():yield distdef get_distribution(self, name):"""Looks for a named distribution on the path.This function only returns the first result found, as no more than onevalue is expected. 
If nothing is found, ``None`` is returned.:rtype: :class:`InstalledDistribution`, :class:`EggInfoDistribution`or ``None``"""result = Nonename = name.lower()if not self._cache_enabled:for dist in self._yield_distributions():if dist.key == name:result = distbreakelse:self._generate_cache()if name in self._cache.name:result = self._cache.name[name][0]elif self._include_egg and name in self._cache_egg.name:result = self._cache_egg.name[name][0]return resultdef provides_distribution(self, name, version=None):"""Iterates over all distributions to find which distributions provide *name*.If a *version* is provided, it will be used to filter the results.This function only returns the first result found, since no more thanone values are expected. If the directory is not found, returns ``None``.:parameter version: a version specifier that indicates the versionrequired, conforming to the format in ``PEP-345``:type name: string:type version: string"""matcher = Noneif not version is None:try:matcher = self._scheme.matcher('%s (%s)' % (name, version))except ValueError:raise DistlibException('invalid name or version: %r, %r' %(name, version))for dist in self.get_distributions():provided = dist.providesfor p in provided:p_name, p_ver = parse_name_and_version(p)if matcher is None:if p_name == name:yield distbreakelse:if p_name == name and matcher.match(p_ver):yield distbreakdef get_file_path(self, name, relative_path):"""Return the path to a resource file."""dist = self.get_distribution(name)if dist is None:raise LookupError('no distribution named %r found' % name)return dist.get_resource_path(relative_path)def get_exported_entries(self, category, name=None):"""Return all of the exported entries in a particular category.:param category: The category to search for entries.:param name: If specified, only entries with that name are returned."""for dist in self.get_distributions():r = dist.exportsif category in r:d = r[category]if name is not None:if name in d:yield d[name]else:for v in 
d.values():yield vclass Distribution(object):"""A base class for distributions, whether installed or from indexes.Either way, it must have some metadata, so that's all that's neededfor construction."""build_time_dependency = False"""Set to True if it's known to be only a build-time dependency (i.e.not needed after installation)."""requested = False"""A boolean that indicates whether the ``REQUESTED`` metadata file ispresent (in other words, whether the package was installed by userrequest or it was installed as a dependency)."""def __init__(self, metadata):"""Initialise an instance.:param metadata: The instance of :class:`Metadata` describing thisdistribution."""self.metadata = metadataself.name = metadata.nameself.key = self.name.lower() # for case-insensitive comparisonsself.version = metadata.versionself.locator = Noneself.digest = Noneself.extras = None # additional features requestedself.context = None # environment marker overridesself.download_urls = set()self.digests = {}@propertydef source_url(self):"""The source archive download URL for this distribution."""return self.metadata.source_urldownload_url = source_url # Backward compatibility@propertydef name_and_version(self):"""A utility property which displays the name and version in parentheses."""return '%s (%s)' % (self.name, self.version)@propertydef provides(self):"""A set of distribution names and versions provided by this distribution.:return: A set of "name (version)" strings."""plist = self.metadata.providess = '%s (%s)' % (self.name, self.version)if s not in plist:plist.append(s)return plistdef _get_requirements(self, req_attr):md = self.metadatalogger.debug('Getting requirements from metadata %r', md.todict())reqts = getattr(md, req_attr)return set(md.get_requirements(reqts, extras=self.extras,env=self.context))@propertydef run_requires(self):return self._get_requirements('run_requires')@propertydef meta_requires(self):return self._get_requirements('meta_requires')@propertydef 
build_requires(self):return self._get_requirements('build_requires')@propertydef test_requires(self):return self._get_requirements('test_requires')@propertydef dev_requires(self):return self._get_requirements('dev_requires')def matches_requirement(self, req):"""Say if this instance matches (fulfills) a requirement.:param req: The requirement to match.:rtype req: str:return: True if it matches, else False."""# Requirement may contain extras - parse to lose those# from what's passed to the matcherr = parse_requirement(req)scheme = get_scheme(self.metadata.scheme)try:matcher = scheme.matcher(r.requirement)except UnsupportedVersionError:# XXX compat-mode if cannot read the versionlogger.warning('could not read version %r - using name only',req)name = req.split()[0]matcher = scheme.matcher(name)name = matcher.key # case-insensitiveresult = Falsefor p in self.provides:p_name, p_ver = parse_name_and_version(p)if p_name != name:continuetry:result = matcher.match(p_ver)breakexcept UnsupportedVersionError:passreturn resultdef __repr__(self):"""Return a textual representation of this instance,"""if self.source_url:suffix = ' [%s]' % self.source_urlelse:suffix = ''return '<Distribution %s (%s)%s>' % (self.name, self.version, suffix)def __eq__(self, other):"""See if this distribution is the same as another.:param other: The distribution to compare with. To be equal to oneanother. 
distributions must have the same type, name,version and source_url.:return: True if it is the same, else False."""if type(other) is not type(self):result = Falseelse:result = (self.name == other.name andself.version == other.version andself.source_url == other.source_url)return resultdef __hash__(self):"""Compute hash in a way which matches the equality test."""return hash(self.name) + hash(self.version) + hash(self.source_url)class BaseInstalledDistribution(Distribution):"""This is the base class for installed distributions (whether PEP 376 orlegacy)."""hasher = Nonedef __init__(self, metadata, path, env=None):"""Initialise an instance.:param metadata: An instance of :class:`Metadata` which describes thedistribution. This will normally have been initialisedfrom a metadata file in the ``path``.:param path: The path of the ``.dist-info`` or ``.egg-info``directory for the distribution.:param env: This is normally the :class:`DistributionPath`instance where this distribution was found."""super(BaseInstalledDistribution, self).__init__(metadata)self.path = pathself.dist_path = envdef get_hash(self, data, hasher=None):"""Get the hash of some data, using a particular hash algorithm, ifspecified.:param data: The data to be hashed.:type data: bytes:param hasher: The name of a hash implementation, supported by hashlib,or ``None``. Examples of valid values are ``'sha1'``,``'sha224'``, ``'sha384'``, '``sha256'``, ``'md5'`` and``'sha512'``. If no hasher is specified, the ``hasher``attribute of the :class:`InstalledDistribution` instanceis used. If the hasher is determined to be ``None``, MD5is used as the hashing algorithm.:returns: The hash of the data. 
If a hasher was explicitly specified,the returned hash will be prefixed with the specified hasherfollowed by '='.:rtype: str"""if hasher is None:hasher = self.hasherif hasher is None:hasher = hashlib.md5prefix = ''else:hasher = getattr(hashlib, hasher)prefix = '%s=' % self.hasherdigest = hasher(data).digest()digest = base64.urlsafe_b64encode(digest).rstrip(b'=').decode('ascii')return '%s%s' % (prefix, digest)class InstalledDistribution(BaseInstalledDistribution):"""Created with the *path* of the ``.dist-info`` directory provided to theconstructor. It reads the metadata contained in ``pydist.json`` when it isinstantiated., or uses a passed in Metadata instance (useful for whendry-run mode is being used)."""hasher = 'sha256'def __init__(self, path, metadata=None, env=None):self.finder = finder = resources.finder_for_path(path)if finder is None:import pdb; pdb.set_trace ()if env and env._cache_enabled and path in env._cache.path:metadata = env._cache.path[path].metadataelif metadata is None:r = finder.find(METADATA_FILENAME)# Temporary - for Wheel 0.23 supportif r is None:r = finder.find(WHEEL_METADATA_FILENAME)# Temporary - for legacy supportif r is None:r = finder.find('METADATA')if r is None:raise ValueError('no %s found in %s' % (METADATA_FILENAME,path))with contextlib.closing(r.as_stream()) as stream:metadata = Metadata(fileobj=stream, scheme='legacy')super(InstalledDistribution, self).__init__(metadata, path, env)if env and env._cache_enabled:env._cache.add(self)try:r = finder.find('REQUESTED')except AttributeError:import pdb; pdb.set_trace ()self.requested = r is not Nonedef __repr__(self):return '<InstalledDistribution %r %s at %r>' % (self.name, self.version, self.path)def __str__(self):return "%s %s" % (self.name, self.version)def _get_records(self):"""Get the list of installed files for the distribution:return: A list of tuples of path, hash and size. Note that hash andsize might be ``None`` for some entries. 
The path is exactlyas stored in the file (which is as in PEP 376)."""results = []r = self.get_distinfo_resource('RECORD')with contextlib.closing(r.as_stream()) as stream:with CSVReader(stream=stream) as record_reader:# Base location is parent dir of .dist-info dir#base_location = os.path.dirname(self.path)#base_location = os.path.abspath(base_location)for row in record_reader:missing = [None for i in range(len(row), 3)]path, checksum, size = row + missing#if not os.path.isabs(path):# path = path.replace('/', os.sep)# path = os.path.join(base_location, path)results.append((path, checksum, size))return results@cached_propertydef exports(self):"""Return the information exported by this distribution.:return: A dictionary of exports, mapping an export category to a dictof :class:`ExportEntry` instances describing the individualexport entries, and keyed by name."""result = {}r = self.get_distinfo_resource(EXPORTS_FILENAME)if r:result = self.read_exports()return resultdef read_exports(self):"""Read exports data from a file in .ini format.:return: A dictionary of exports, mapping an export category to a listof :class:`ExportEntry` instances describing the individualexport entries."""result = {}r = self.get_distinfo_resource(EXPORTS_FILENAME)if r:with contextlib.closing(r.as_stream()) as stream:result = read_exports(stream)return resultdef write_exports(self, exports):"""Write a dictionary of exports to a file in .ini format.:param exports: A dictionary of exports, mapping an export category toa list of :class:`ExportEntry` instances describing theindividual export entries."""rf = self.get_distinfo_file(EXPORTS_FILENAME)with open(rf, 'w') as f:write_exports(exports, f)def get_resource_path(self, relative_path):"""NOTE: This API may change in the future.Return the absolute path to a resource file with the given relativepath.:param relative_path: The path, relative to .dist-info, of the resourceof interest.:return: The absolute path where the resource is to be found."""r = 
self.get_distinfo_resource('RESOURCES')with contextlib.closing(r.as_stream()) as stream:with CSVReader(stream=stream) as resources_reader:for relative, destination in resources_reader:if relative == relative_path:return destinationraise KeyError('no resource file with relative path %r ''is installed' % relative_path)def list_installed_files(self):"""Iterates over the ``RECORD`` entries and returns a tuple``(path, hash, size)`` for each line.:returns: iterator of (path, hash, size)"""for result in self._get_records():yield resultdef write_installed_files(self, paths, prefix, dry_run=False):"""Writes the ``RECORD`` file, using the ``paths`` iterable passed in. Anyexisting ``RECORD`` file is silently overwritten.prefix is used to determine when to write absolute paths."""prefix = os.path.join(prefix, '')base = os.path.dirname(self.path)base_under_prefix = base.startswith(prefix)base = os.path.join(base, '')record_path = self.get_distinfo_file('RECORD')logger.info('creating %s', record_path)if dry_run:return Nonewith CSVWriter(record_path) as writer:for path in paths:if os.path.isdir(path) or path.endswith(('.pyc', '.pyo')):# do not put size and hash, as in PEP-376hash_value = size = ''else:size = '%d' % os.path.getsize(path)with open(path, 'rb') as fp:hash_value = self.get_hash(fp.read())if path.startswith(base) or (base_under_prefix andpath.startswith(prefix)):path = os.path.relpath(path, base)writer.writerow((path, hash_value, size))# add the RECORD file itselfif record_path.startswith(base):record_path = os.path.relpath(record_path, base)writer.writerow((record_path, '', ''))return record_pathdef check_installed_files(self):"""Checks that the hashes and sizes of the files in ``RECORD`` arematched by the files themselves. Returns a (possibly empty) list ofmismatches. 
Each entry in the mismatch list will be a tuple consistingof the path, 'exists', 'size' or 'hash' according to what didn't match(existence is checked first, then size, then hash), the expectedvalue and the actual value."""mismatches = []base = os.path.dirname(self.path)record_path = self.get_distinfo_file('RECORD')for path, hash_value, size in self.list_installed_files():if not os.path.isabs(path):path = os.path.join(base, path)if path == record_path:continueif not os.path.exists(path):mismatches.append((path, 'exists', True, False))elif os.path.isfile(path):actual_size = str(os.path.getsize(path))if size and actual_size != size:mismatches.append((path, 'size', size, actual_size))elif hash_value:if '=' in hash_value:hasher = hash_value.split('=', 1)[0]else:hasher = Nonewith open(path, 'rb') as f:actual_hash = self.get_hash(f.read(), hasher)if actual_hash != hash_value:mismatches.append((path, 'hash', hash_value, actual_hash))return mismatches@cached_propertydef shared_locations(self):"""A dictionary of shared locations whose keys are in the set 'prefix','purelib', 'platlib', 'scripts', 'headers', 'data' and 'namespace'.The corresponding value is the absolute path of that category forthis distribution, and takes into account any paths selected by theuser at installation time (e.g. via command-line arguments). 
In thecase of the 'namespace' key, this would be a list of absolute pathsfor the roots of namespace packages in this distribution.The first time this property is accessed, the relevant information isread from the SHARED file in the .dist-info directory."""result = {}shared_path = os.path.join(self.path, 'SHARED')if os.path.isfile(shared_path):with codecs.open(shared_path, 'r', encoding='utf-8') as f:lines = f.read().splitlines()for line in lines:key, value = line.split('=', 1)if key == 'namespace':result.setdefault(key, []).append(value)else:result[key] = valuereturn resultdef write_shared_locations(self, paths, dry_run=False):"""Write shared location information to the SHARED file in .dist-info.:param paths: A dictionary as described in the documentation for:meth:`shared_locations`.:param dry_run: If True, the action is logged but no file is actuallywritten.:return: The path of the file written to."""shared_path = os.path.join(self.path, 'SHARED')logger.info('creating %s', shared_path)if dry_run:return Nonelines = []for key in ('prefix', 'lib', 'headers', 'scripts', 'data'):path = paths[key]if os.path.isdir(paths[key]):lines.append('%s=%s' % (key, path))for ns in paths.get('namespace', ()):lines.append('namespace=%s' % ns)with codecs.open(shared_path, 'w', encoding='utf-8') as f:f.write('\n'.join(lines))return shared_pathdef get_distinfo_resource(self, path):if path not in DIST_FILES:raise DistlibException('invalid path for a dist-info file: ''%r at %r' % (path, self.path))finder = resources.finder_for_path(self.path)if finder is None:raise DistlibException('Unable to get a finder for %s' % self.path)return finder.find(path)def get_distinfo_file(self, path):"""Returns a path located under the ``.dist-info`` directory. 
Returns astring representing the path.:parameter path: a ``'/'``-separated path relative to the``.dist-info`` directory or an absolute path;If *path* is an absolute path and doesn't startwith the ``.dist-info`` directory path,a :class:`DistlibException` is raised:type path: str:rtype: str"""# Check if it is an absolute path # XXX use relpath, add testsif path.find(os.sep) >= 0:# it's an absolute path?distinfo_dirname, path = path.split(os.sep)[-2:]if distinfo_dirname != self.path.split(os.sep)[-1]:raise DistlibException('dist-info file %r does not belong to the %r %s ''distribution' % (path, self.name, self.version))# The file must be relativeif path not in DIST_FILES:raise DistlibException('invalid path for a dist-info file: ''%r at %r' % (path, self.path))return os.path.join(self.path, path)def list_distinfo_files(self):"""Iterates over the ``RECORD`` entries and returns paths for each line ifthe path is pointing to a file located in the ``.dist-info`` directoryor one of its subdirectories.:returns: iterator of paths"""base = os.path.dirname(self.path)for path, checksum, size in self._get_records():# XXX add separator or use real relpath algoif not os.path.isabs(path):path = os.path.join(base, path)if path.startswith(self.path):yield pathdef __eq__(self, other):return (isinstance(other, InstalledDistribution) andself.path == other.path)# See http://docs.python.org/reference/datamodel#object.__hash____hash__ = object.__hash__class EggInfoDistribution(BaseInstalledDistribution):"""Created with the *path* of the ``.egg-info`` directory or file providedto the constructor. 
It reads the metadata contained in the file itself, orif the given path happens to be a directory, the metadata is read from thefile ``PKG-INFO`` under that directory."""requested = True # as we have no way of knowing, assume it wasshared_locations = {}def __init__(self, path, env=None):def set_name_and_version(s, n, v):s.name = ns.key = n.lower() # for case-insensitive comparisonss.version = vself.path = pathself.dist_path = envif env and env._cache_enabled and path in env._cache_egg.path:metadata = env._cache_egg.path[path].metadataset_name_and_version(self, metadata.name, metadata.version)else:metadata = self._get_metadata(path)# Need to be set before cachingset_name_and_version(self, metadata.name, metadata.version)if env and env._cache_enabled:env._cache_egg.add(self)super(EggInfoDistribution, self).__init__(metadata, path, env)def _get_metadata(self, path):requires = Nonedef parse_requires_data(data):"""Create a list of dependencies from a requires.txt file.*data*: the contents of a setuptools-produced requires.txt file."""reqs = []lines = data.splitlines()for line in lines:line = line.strip()if line.startswith('['):logger.warning('Unexpected line: quitting requirement scan: %r',line)breakr = parse_requirement(line)if not r:logger.warning('Not recognised as a requirement: %r', line)continueif r.extras:logger.warning('extra requirements in requires.txt are ''not supported')if not r.constraints:reqs.append(r.name)else:cons = ', '.join('%s%s' % c for c in r.constraints)reqs.append('%s (%s)' % (r.name, cons))return reqsdef parse_requires_path(req_path):"""Create a list of dependencies from a requires.txt file.*req_path*: the path to a setuptools-produced requires.txt file."""reqs = []try:with codecs.open(req_path, 'r', 'utf-8') as fp:reqs = parse_requires_data(fp.read())except IOError:passreturn reqsif path.endswith('.egg'):if os.path.isdir(path):meta_path = os.path.join(path, 'EGG-INFO', 'PKG-INFO')metadata = Metadata(path=meta_path, scheme='legacy')req_path = 
os.path.join(path, 'EGG-INFO', 'requires.txt')requires = parse_requires_path(req_path)else:# FIXME handle the case where zipfile is not availablezipf = zipimport.zipimporter(path)fileobj = StringIO(zipf.get_data('EGG-INFO/PKG-INFO').decode('utf8'))metadata = Metadata(fileobj=fileobj, scheme='legacy')try:data = zipf.get_data('EGG-INFO/requires.txt')requires = parse_requires_data(data.decode('utf-8'))except IOError:requires = Noneelif path.endswith('.egg-info'):if os.path.isdir(path):req_path = os.path.join(path, 'requires.txt')requires = parse_requires_path(req_path)path = os.path.join(path, 'PKG-INFO')metadata = Metadata(path=path, scheme='legacy')else:raise DistlibException('path must end with .egg-info or .egg, ''got %r' % path)if requires:metadata.add_requirements(requires)return metadatadef __repr__(self):return '<EggInfoDistribution %r %s at %r>' % (self.name, self.version, self.path)def __str__(self):return "%s %s" % (self.name, self.version)def check_installed_files(self):"""Checks that the hashes and sizes of the files in ``RECORD`` arematched by the files themselves. Returns a (possibly empty) list ofmismatches. 
Each entry in the mismatch list will be a tuple consistingof the path, 'exists', 'size' or 'hash' according to what didn't match(existence is checked first, then size, then hash), the expectedvalue and the actual value."""mismatches = []record_path = os.path.join(self.path, 'installed-files.txt')if os.path.exists(record_path):for path, _, _ in self.list_installed_files():if path == record_path:continueif not os.path.exists(path):mismatches.append((path, 'exists', True, False))return mismatchesdef list_installed_files(self):"""Iterates over the ``installed-files.txt`` entries and returns a tuple``(path, hash, size)`` for each line.:returns: a list of (path, hash, size)"""def _md5(path):f = open(path, 'rb')try:content = f.read()finally:f.close()return hashlib.md5(content).hexdigest()def _size(path):return os.stat(path).st_sizerecord_path = os.path.join(self.path, 'installed-files.txt')result = []if os.path.exists(record_path):with codecs.open(record_path, 'r', encoding='utf-8') as f:for line in f:line = line.strip()p = os.path.normpath(os.path.join(self.path, line))# "./" is present as a marker between installed files# and installation metadata filesif not os.path.exists(p):logger.warning('Non-existent file: %s', p)if p.endswith(('.pyc', '.pyo')):continue#otherwise fall through and failif not os.path.isdir(p):result.append((p, _md5(p), _size(p)))result.append((record_path, None, None))return resultdef list_distinfo_files(self, absolute=False):"""Iterates over the ``installed-files.txt`` entries and returns paths foreach line if the path is pointing to a file located in the``.egg-info`` directory or one of its subdirectories.:parameter absolute: If *absolute* is ``True``, each returned path istransformed into a local absolute path. 
Otherwise theraw value from ``installed-files.txt`` is returned.:type absolute: boolean:returns: iterator of paths"""record_path = os.path.join(self.path, 'installed-files.txt')skip = Truewith codecs.open(record_path, 'r', encoding='utf-8') as f:for line in f:line = line.strip()if line == './':skip = Falsecontinueif not skip:p = os.path.normpath(os.path.join(self.path, line))if p.startswith(self.path):if absolute:yield pelse:yield linedef __eq__(self, other):return (isinstance(other, EggInfoDistribution) andself.path == other.path)# See http://docs.python.org/reference/datamodel#object.__hash____hash__ = object.__hash__new_dist_class = InstalledDistributionold_dist_class = EggInfoDistributionclass DependencyGraph(object):"""Represents a dependency graph between distributions.The dependency relationships are stored in an ``adjacency_list`` that mapsdistributions to a list of ``(other, label)`` tuples where ``other``is a distribution and the edge is labeled with ``label`` (i.e. the versionspecifier, if such was provided). Also, for more efficient traversal, forevery distribution ``x``, a list of predecessors is kept in``reverse_list[x]``. An edge from distribution ``a`` todistribution ``b`` means that ``a`` depends on ``b``. 
If any missingdependencies are found, they are stored in ``missing``, which is adictionary that maps distributions to a list of requirements that were notprovided by any other distributions."""def __init__(self):self.adjacency_list = {}self.reverse_list = {}self.missing = {}def add_distribution(self, distribution):"""Add the *distribution* to the graph.:type distribution: :class:`distutils2.database.InstalledDistribution`or :class:`distutils2.database.EggInfoDistribution`"""self.adjacency_list[distribution] = []self.reverse_list[distribution] = []#self.missing[distribution] = []def add_edge(self, x, y, label=None):"""Add an edge from distribution *x* to distribution *y* with the given*label*.:type x: :class:`distutils2.database.InstalledDistribution` or:class:`distutils2.database.EggInfoDistribution`:type y: :class:`distutils2.database.InstalledDistribution` or:class:`distutils2.database.EggInfoDistribution`:type label: ``str`` or ``None``"""self.adjacency_list[x].append((y, label))# multiple edges are allowed, so be carefulif x not in self.reverse_list[y]:self.reverse_list[y].append(x)def add_missing(self, distribution, requirement):"""Add a missing *requirement* for the given *distribution*.:type distribution: :class:`distutils2.database.InstalledDistribution`or :class:`distutils2.database.EggInfoDistribution`:type requirement: ``str``"""logger.debug('%s missing %r', distribution, requirement)self.missing.setdefault(distribution, []).append(requirement)def _repr_dist(self, dist):return '%s %s' % (dist.name, dist.version)def repr_node(self, dist, level=1):"""Prints only a subgraph"""output = [self._repr_dist(dist)]for other, label in self.adjacency_list[dist]:dist = self._repr_dist(other)if label is not None:dist = '%s [%s]' % (dist, label)output.append(' ' * level + str(dist))suboutput = self.repr_node(other, level + 1)subs = suboutput.split('\n')output.extend(subs[1:])return '\n'.join(output)def to_dot(self, f, skip_disconnected=True):"""Writes a DOT output for 
the graph to the provided file *f*.If *skip_disconnected* is set to ``True``, then all distributionsthat are not dependent on any other distribution are skipped.:type f: has to support ``file``-like operations:type skip_disconnected: ``bool``"""disconnected = []f.write("digraph dependencies {\n")for dist, adjs in self.adjacency_list.items():if len(adjs) == 0 and not skip_disconnected:disconnected.append(dist)for other, label in adjs:if not label is None:f.write('"%s" -> "%s" [label="%s"]\n' %(dist.name, other.name, label))else:f.write('"%s" -> "%s"\n' % (dist.name, other.name))if not skip_disconnected and len(disconnected) > 0:f.write('subgraph disconnected {\n')f.write('label = "Disconnected"\n')f.write('bgcolor = red\n')for dist in disconnected:f.write('"%s"' % dist.name)f.write('\n')f.write('}\n')f.write('}\n')def topological_sort(self):"""Perform a topological sort of the graph.:return: A tuple, the first element of which is a topologically sortedlist of distributions, and the second element of which is alist of distributions that cannot be sorted because they havecircular dependencies and so form a cycle."""result = []# Make a shallow copy of the adjacency listalist = {}for k, v in self.adjacency_list.items():alist[k] = v[:]while True:# See what we can remove in this runto_remove = []for k, v in list(alist.items())[:]:if not v:to_remove.append(k)del alist[k]if not to_remove:# What's left in alist (if anything) is a cycle.break# Remove from the adjacency list of othersfor k, v in alist.items():alist[k] = [(d, r) for d, r in v if d not in to_remove]logger.debug('Moving to result: %s',['%s (%s)' % (d.name, d.version) for d in to_remove])result.extend(to_remove)return result, list(alist.keys())def __repr__(self):"""Representation of the graph"""output = []for dist, adjs in self.adjacency_list.items():output.append(self.repr_node(dist))return '\n'.join(output)def make_graph(dists, scheme='default'):"""Makes a dependency graph from the given distributions.:parameter 
dists: a list of distributions:type dists: list of :class:`distutils2.database.InstalledDistribution` and:class:`distutils2.database.EggInfoDistribution` instances:rtype: a :class:`DependencyGraph` instance"""scheme = get_scheme(scheme)graph = DependencyGraph()provided = {} # maps names to lists of (version, dist) tuples# first, build the graph and find out what's providedfor dist in dists:graph.add_distribution(dist)for p in dist.provides:name, version = parse_name_and_version(p)logger.debug('Add to provided: %s, %s, %s', name, version, dist)provided.setdefault(name, []).append((version, dist))# now make the edgesfor dist in dists:requires = (dist.run_requires | dist.meta_requires |dist.build_requires | dist.dev_requires)for req in requires:try:matcher = scheme.matcher(req)except UnsupportedVersionError:# XXX compat-mode if cannot read the versionlogger.warning('could not read version %r - using name only',req)name = req.split()[0]matcher = scheme.matcher(name)name = matcher.key # case-insensitivematched = Falseif name in provided:for version, provider in provided[name]:try:match = matcher.match(version)except UnsupportedVersionError:match = Falseif match:graph.add_edge(dist, provider, req)matched = Truebreakif not matched:graph.add_missing(dist, req)return graphdef get_dependent_dists(dists, dist):"""Recursively generate a list of distributions from *dists* that aredependent on *dist*.:param dists: a list of distributions:param dist: a distribution, member of *dists* for which we are interested"""if dist not in dists:raise DistlibException('given distribution %r is not a member ''of the list' % dist.name)graph = make_graph(dists)dep = [dist] # dependent distributionstodo = graph.reverse_list[dist] # list of nodes we should inspectwhile todo:d = todo.pop()dep.append(d)for succ in graph.reverse_list[d]:if succ not in dep:todo.append(succ)dep.pop(0) # remove dist from dep, was there to prevent infinite loopsreturn depdef get_required_dists(dists, 
dist):"""Recursively generate a list of distributions from *dists* that arerequired by *dist*.:param dists: a list of distributions:param dist: a distribution, member of *dists* for which we are interested"""if dist not in dists:raise DistlibException('given distribution %r is not a member ''of the list' % dist.name)graph = make_graph(dists)req = [] # required distributionstodo = graph.adjacency_list[dist] # list of nodes we should inspectwhile todo:d = todo.pop()[0]req.append(d)for pred in graph.adjacency_list[d]:if pred not in req:todo.append(pred)return reqdef make_dist(name, version, **kwargs):"""A convenience method for making a dist given just a name and version."""summary = kwargs.pop('summary', 'Placeholder for summary')md = Metadata(**kwargs)md.name = namemd.version = versionmd.summary = summary or 'Placeholder for summary'return Distribution(md)
# -*- coding: utf-8 -*-
#
# Copyright (C) 2013-2016 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
from __future__ import absolute_import

import os
import re
import sys

try:
    import ssl
except ImportError:  # pragma: no cover
    ssl = None

if sys.version_info[0] < 3:  # pragma: no cover
    from StringIO import StringIO
    string_types = basestring,
    text_type = unicode
    from types import FileType as file_type
    import __builtin__ as builtins
    import ConfigParser as configparser
    from ._backport import shutil
    from urlparse import urlparse, urlunparse, urljoin, urlsplit, urlunsplit
    from urllib import (urlretrieve, quote as _quote, unquote, url2pathname,
                        pathname2url, ContentTooShortError, splittype)

    def quote(s):
        if isinstance(s, unicode):
            s = s.encode('utf-8')
        return _quote(s)

    import urllib2
    from urllib2 import (Request, urlopen, URLError, HTTPError,
                         HTTPBasicAuthHandler, HTTPPasswordMgr,
                         HTTPHandler, HTTPRedirectHandler,
                         build_opener)
    if ssl:
        from urllib2 import HTTPSHandler
    import httplib
    import xmlrpclib
    import Queue as queue
    from HTMLParser import HTMLParser
    import htmlentitydefs
    raw_input = raw_input
    from itertools import ifilter as filter
    from itertools import ifilterfalse as filterfalse

    _userprog = None

    def splituser(host):
        """splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'."""
        global _userprog
        if _userprog is None:
            import re
            _userprog = re.compile('^(.*)@(.*)$')

        match = _userprog.match(host)
        if match:
            return match.group(1, 2)
        return None, host

else:  # pragma: no cover
    from io import StringIO
    string_types = str,
    text_type = str
    from io import TextIOWrapper as file_type
    import builtins
    import configparser
    import shutil
    from urllib.parse import (urlparse, urlunparse, urljoin, splituser, quote,
                              unquote, urlsplit, urlunsplit, splittype)
    from urllib.request import (urlopen, urlretrieve, Request, url2pathname,
                                pathname2url,
                                HTTPBasicAuthHandler, HTTPPasswordMgr,
                                HTTPHandler, HTTPRedirectHandler,
                                build_opener)
    if ssl:
        from urllib.request import HTTPSHandler
    from urllib.error import HTTPError, URLError, ContentTooShortError
    import http.client as httplib
    import urllib.request as urllib2
    import xmlrpc.client as xmlrpclib
    import queue
    from html.parser import HTMLParser
    import html.entities as htmlentitydefs
    raw_input = input
    from itertools import filterfalse
    filter = filter

try:
    from ssl import match_hostname, CertificateError
except ImportError:  # pragma: no cover
    class CertificateError(ValueError):
        pass

    def _dnsname_match(dn, hostname, max_wildcards=1):
        """Matching according to RFC 6125, section 6.4.3

        http://tools.ietf.org/html/rfc6125#section-6.4.3
        """
        pats = []
        if not dn:
            return False

        parts = dn.split('.')
        leftmost, remainder = parts[0], parts[1:]

        wildcards = leftmost.count('*')
        if wildcards > max_wildcards:
            # Issue #17980: avoid denials of service by refusing more
            # than one wildcard per fragment.  A survey of established
            # policy among SSL implementations showed it to be a
            # reasonable choice.
            raise CertificateError(
                "too many wildcards in certificate DNS name: " + repr(dn))

        # speed up common case w/o wildcards
        if not wildcards:
            return dn.lower() == hostname.lower()

        # RFC 6125, section 6.4.3, subitem 1.
        # The client SHOULD NOT attempt to match a presented identifier in
        # which the wildcard character comprises a label other than the
        # left-most label.
        if leftmost == '*':
            # When '*' is a fragment by itself, it matches a non-empty
            # dotless fragment.
            pats.append('[^.]+')
        elif leftmost.startswith('xn--') or hostname.startswith('xn--'):
            # RFC 6125, section 6.4.3, subitem 3.
            # The client SHOULD NOT attempt to match a presented identifier
            # where the wildcard character is embedded within an A-label or
            # U-label of an internationalized domain name.
            pats.append(re.escape(leftmost))
        else:
            # Otherwise, '*' matches any dotless string, e.g. www*
            pats.append(re.escape(leftmost).replace(r'\*', '[^.]*'))

        # add the remaining fragments, ignore any wildcards
        for frag in remainder:
            pats.append(re.escape(frag))

        pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
        return pat.match(hostname)

    def match_hostname(cert, hostname):
        """Verify that *cert* (in decoded format as returned by
        SSLSocket.getpeercert()) matches the *hostname*.  RFC 2818 and
        RFC 6125 rules are followed, but IP addresses are not accepted
        for *hostname*.

        CertificateError is raised on failure. On success, the function
        returns nothing.
        """
        if not cert:
            raise ValueError("empty or no certificate, match_hostname needs a "
                             "SSL socket or SSL context with either "
                             "CERT_OPTIONAL or CERT_REQUIRED")
        dnsnames = []
        san = cert.get('subjectAltName', ())
        for key, value in san:
            if key == 'DNS':
                if _dnsname_match(value, hostname):
                    return
                dnsnames.append(value)
        if not dnsnames:
            # The subject is only checked when there is no dNSName entry
            # in subjectAltName
            for sub in cert.get('subject', ()):
                for key, value in sub:
                    # XXX according to RFC 2818, the most specific Common Name
                    # must be used.
                    if key == 'commonName':
                        if _dnsname_match(value, hostname):
                            return
                        dnsnames.append(value)
        if len(dnsnames) > 1:
            raise CertificateError("hostname %r "
                                   "doesn't match either of %s"
                                   % (hostname,
                                      ', '.join(map(repr, dnsnames))))
        elif len(dnsnames) == 1:
            raise CertificateError("hostname %r "
                                   "doesn't match %r"
                                   % (hostname, dnsnames[0]))
        else:
            raise CertificateError("no appropriate commonName or "
                                   "subjectAltName fields were found")


try:
    from types import SimpleNamespace as Container
except ImportError:  # pragma: no cover
    class Container(object):
        """
        A generic container for when multiple values need to be returned
        """
        def __init__(self, **kwargs):
            self.__dict__.update(kwargs)


try:
    from shutil import which
except ImportError:  # pragma: no cover
    # Implementation from Python 3.3
    def which(cmd, mode=os.F_OK | os.X_OK, path=None):
        """Given a command, mode, and a PATH string, return the path which
        conforms to the given mode on the PATH, or None if there is no such
        file.

        `mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result
        of os.environ.get("PATH"), or can be overridden with a custom search
        path.

        """
        # Check that a given file can be accessed with the correct mode.
        # Additionally check that `file` is not a directory, as on Windows
        # directories pass the os.access check.
        def _access_check(fn, mode):
            return (os.path.exists(fn) and os.access(fn, mode)
                    and not os.path.isdir(fn))

        # If we're given a path with a directory part, look it up directly
        # rather than referring to PATH directories. This includes checking
        # relative to the current directory, e.g. ./script
        if os.path.dirname(cmd):
            if _access_check(cmd, mode):
                return cmd
            return None

        if path is None:
            path = os.environ.get("PATH", os.defpath)
        if not path:
            return None
        path = path.split(os.pathsep)

        if sys.platform == "win32":
            # The current directory takes precedence on Windows.
            if not os.curdir in path:
                path.insert(0, os.curdir)

            # PATHEXT is necessary to check on Windows.
            pathext = os.environ.get("PATHEXT", "").split(os.pathsep)
            # See if the given file matches any of the expected path
            # extensions. This will allow us to short circuit when given
            # "python.exe". If it does match, only test that one, otherwise
            # we have to try others.
            if any(cmd.lower().endswith(ext.lower()) for ext in pathext):
                files = [cmd]
            else:
                files = [cmd + ext for ext in pathext]
        else:
            # On other platforms you don't have things like PATHEXT to tell
            # you what file suffixes are executable, so just pass on cmd
            # as-is.
            files = [cmd]

        seen = set()
        for dir in path:
            normdir = os.path.normcase(dir)
            if not normdir in seen:
                seen.add(normdir)
                for thefile in files:
                    name = os.path.join(dir, thefile)
                    if _access_check(name, mode):
                        return name
        return None


# ZipFile is a context manager in 2.7, but not in 2.6
from zipfile import ZipFile as BaseZipFile

if hasattr(BaseZipFile, '__enter__'):  # pragma: no cover
    ZipFile = BaseZipFile
else:  # pragma: no cover
    from zipfile import ZipExtFile as BaseZipExtFile

    class ZipExtFile(BaseZipExtFile):
        def __init__(self, base):
            self.__dict__.update(base.__dict__)

        def __enter__(self):
            return self

        def __exit__(self, *exc_info):
            self.close()
            # return None, so if an exception occurred, it will propagate

    class ZipFile(BaseZipFile):
        def __enter__(self):
            return self

        def __exit__(self, *exc_info):
            self.close()
            # return None, so if an exception occurred, it will propagate

        def open(self, *args, **kwargs):
            base = BaseZipFile.open(self, *args, **kwargs)
            return ZipExtFile(base)


try:
    from platform import python_implementation
except ImportError:  # pragma: no cover
    def python_implementation():
        """Return a string identifying the Python implementation."""
        if 'PyPy' in sys.version:
            return 'PyPy'
        if os.name == 'java':
            return 'Jython'
        if sys.version.startswith('IronPython'):
            return 'IronPython'
        return 'CPython'

try:
    import sysconfig
except ImportError:  # pragma: no cover
    from ._backport import sysconfig

try:
    callable = callable
except NameError:  # pragma: no cover
    from collections import Callable

    def callable(obj):
        return isinstance(obj, Callable)


try:
    fsencode = os.fsencode
    fsdecode = os.fsdecode
except AttributeError:  # pragma: no cover
    _fsencoding = sys.getfilesystemencoding()
    if _fsencoding == 'mbcs':
        _fserrors = 'strict'
    else:
        _fserrors = 'surrogateescape'

    def fsencode(filename):
        if isinstance(filename, bytes):
            return filename
        elif isinstance(filename, text_type):
            return filename.encode(_fsencoding, _fserrors)
        else:
            raise TypeError("expect bytes or str, not %s" %
                            type(filename).__name__)

    def fsdecode(filename):
        if isinstance(filename, text_type):
            return filename
        elif isinstance(filename, bytes):
            return filename.decode(_fsencoding, _fserrors)
        else:
            raise TypeError("expect bytes or str, not %s" %
                            type(filename).__name__)


try:
    from tokenize import detect_encoding
except ImportError:  # pragma: no cover
    from codecs import BOM_UTF8, lookup
    import re

    cookie_re = re.compile(r"coding[:=]\s*([-\w.]+)")

    def _get_normal_name(orig_enc):
        """Imitates get_normal_name in tokenizer.c."""
        # Only care about the first 12 characters.
        enc = orig_enc[:12].lower().replace("_", "-")
        if enc == "utf-8" or enc.startswith("utf-8-"):
            return "utf-8"
        if enc in ("latin-1", "iso-8859-1", "iso-latin-1") or \
           enc.startswith(("latin-1-", "iso-8859-1-", "iso-latin-1-")):
            return "iso-8859-1"
        return orig_enc

    def detect_encoding(readline):
        """
        The detect_encoding() function is used to detect the encoding that
        should be used to decode a Python source file.  It requires one
        argument, readline, in the same way as the tokenize() generator.

        It will call readline a maximum of twice, and return the encoding used
        (as a string) and a list of any lines (left as bytes) it has read in.

        It detects the encoding from the presence of a utf-8 bom or an
        encoding cookie as specified in pep-0263.  If both a bom and a cookie
        are present, but disagree, a SyntaxError will be raised.  If the
        encoding cookie is an invalid charset, raise a SyntaxError.  Note that
        if a utf-8 bom is found, 'utf-8-sig' is returned.

        If no encoding is specified, then the default of 'utf-8' will be
        returned.
        """
        try:
            filename = readline.__self__.name
        except AttributeError:
            filename = None
        bom_found = False
        encoding = None
        default = 'utf-8'

        def read_or_stop():
            try:
                return readline()
            except StopIteration:
                return b''

        def find_cookie(line):
            try:
                # Decode as UTF-8. Either the line is an encoding declaration,
                # in which case it should be pure ASCII, or it must be UTF-8
                # per default encoding.
                line_string = line.decode('utf-8')
            except UnicodeDecodeError:
                msg = "invalid or missing encoding declaration"
                if filename is not None:
                    msg = '{} for {!r}'.format(msg, filename)
                raise SyntaxError(msg)

            matches = cookie_re.findall(line_string)
            if not matches:
                return None
            encoding = _get_normal_name(matches[0])
            try:
                codec = lookup(encoding)
            except LookupError:
                # This behaviour mimics the Python interpreter
                if filename is None:
                    msg = "unknown encoding: " + encoding
                else:
                    msg = "unknown encoding for {!r}: {}".format(filename,
                                                                 encoding)
                raise SyntaxError(msg)

            if bom_found:
                if codec.name != 'utf-8':
                    # This behaviour mimics the Python interpreter
                    if filename is None:
                        msg = 'encoding problem: utf-8'
                    else:
                        msg = 'encoding problem for {!r}: utf-8'.format(
                            filename)
                    raise SyntaxError(msg)
                encoding += '-sig'
            return encoding

        first = read_or_stop()
        if first.startswith(BOM_UTF8):
            bom_found = True
            first = first[3:]
            default = 'utf-8-sig'
        if not first:
            return default, []

        encoding = find_cookie(first)
        if encoding:
            return encoding, [first]

        second = read_or_stop()
        if not second:
            return default, [first]

        encoding = find_cookie(second)
        if encoding:
            return encoding, [first, second]

        return default, [first, second]


# For converting & <-> &amp; etc.
try:
    from html import escape
except ImportError:
    from cgi import escape

if sys.version_info[:2] < (3, 4):
    unescape = HTMLParser().unescape
else:
    from html import unescape


try:
    from collections import ChainMap
except ImportError:  # pragma: no cover
    from collections import MutableMapping

    try:
        from reprlib import recursive_repr as _recursive_repr
    except ImportError:
        def _recursive_repr(fillvalue='...'):
            '''
            Decorator to make a repr function return fillvalue for a recursive
            call
            '''

            def decorating_function(user_function):
                repr_running = set()

                def wrapper(self):
                    key = id(self), get_ident()
                    if key in repr_running:
                        return fillvalue
                    repr_running.add(key)
                    try:
                        result = user_function(self)
                    finally:
                        repr_running.discard(key)
                    return result

                # Can't use functools.wraps() here because of bootstrap issues
                wrapper.__module__ = getattr(user_function, '__module__')
                wrapper.__doc__ = getattr(user_function, '__doc__')
                wrapper.__name__ = getattr(user_function, '__name__')
                wrapper.__annotations__ = getattr(user_function,
                                                  '__annotations__', {})
                return wrapper

            return decorating_function

    class ChainMap(MutableMapping):
        '''
        A ChainMap groups multiple dicts (or other mappings) together
        to create a single, updateable view.

        The underlying mappings are stored in a list.  That list is public and
        can accessed or updated using the *maps* attribute.  There is no other
        state.

        Lookups search the underlying mappings successively until a key is
        found.  In contrast, writes, updates, and deletions only operate on
        the first mapping.
        '''

        def __init__(self, *maps):
            '''Initialize a ChainMap by setting *maps* to the given mappings.
            If no mappings are provided, a single empty dictionary is used.

            '''
            self.maps = list(maps) or [{}]  # always at least one map

        def __missing__(self, key):
            raise KeyError(key)

        def __getitem__(self, key):
            for mapping in self.maps:
                try:
                    # can't use 'key in mapping' with defaultdict
                    return mapping[key]
                except KeyError:
                    pass
            # support subclasses that define __missing__
            return self.__missing__(key)

        def get(self, key, default=None):
            return self[key] if key in self else default

        def __len__(self):
            # reuses stored hash values if possible
            return len(set().union(*self.maps))

        def __iter__(self):
            return iter(set().union(*self.maps))

        def __contains__(self, key):
            return any(key in m for m in self.maps)

        def __bool__(self):
            return any(self.maps)

        @_recursive_repr()
        def __repr__(self):
            return '{0.__class__.__name__}({1})'.format(
                self, ', '.join(map(repr, self.maps)))

        @classmethod
        def fromkeys(cls, iterable, *args):
            'Create a ChainMap with a single dict created from the iterable.'
            return cls(dict.fromkeys(iterable, *args))

        def copy(self):
            'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]'
            return self.__class__(self.maps[0].copy(), *self.maps[1:])

        __copy__ = copy

        def new_child(self):  # like Django's Context.push()
            'New ChainMap with a new dict followed by all previous maps.'
            return self.__class__({}, *self.maps)

        @property
        def parents(self):  # like Django's Context.pop()
            'New ChainMap from maps[1:].'
            return self.__class__(*self.maps[1:])

        def __setitem__(self, key, value):
            self.maps[0][key] = value

        def __delitem__(self, key):
            try:
                del self.maps[0][key]
            except KeyError:
                raise KeyError(
                    'Key not found in the first mapping: {!r}'.format(key))

        def popitem(self):
            'Remove and return an item pair from maps[0]. Raise KeyError is maps[0] is empty.'
            try:
                return self.maps[0].popitem()
            except KeyError:
                raise KeyError('No keys found in the first mapping.')

        def pop(self, key, *args):
            'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].'
            try:
                return self.maps[0].pop(key, *args)
            except KeyError:
                raise KeyError(
                    'Key not found in the first mapping: {!r}'.format(key))

        def clear(self):
            'Clear maps[0], leaving maps[1:] intact.'
            self.maps[0].clear()

try:
    from imp import cache_from_source
except ImportError:  # pragma: no cover
    def cache_from_source(path, debug_override=None):
        assert path.endswith('.py')
        if debug_override is None:
            debug_override = __debug__
        if debug_override:
            suffix = 'c'
        else:
            suffix = 'o'
        return path + suffix

try:
    from collections import OrderedDict
except ImportError:  # pragma: no cover
    ## {{{ http://code.activestate.com/recipes/576693/ (r9)
    # Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7
    # and pypy. Passes Python2.7's test suite and incorporates all the latest
    # updates.
    try:
        from thread import get_ident as _get_ident
    except ImportError:
        from dummy_thread import get_ident as _get_ident

    try:
        from _abcoll import KeysView, ValuesView, ItemsView
    except ImportError:
        pass

    class OrderedDict(dict):
        'Dictionary that remembers insertion order'
        # An inherited dict maps keys to values.
        # The inherited dict provides __getitem__, __len__, __contains__,
        # and get.
        # The remaining methods are order-aware.
        # Big-O running times for all methods are the same as for regular
        # dictionaries.

        # The internal self.__map dictionary maps keys to links in a doubly
        # linked list.
        # The circular doubly linked list starts and ends with a sentinel
        # element.
        # The sentinel element never gets deleted (this simplifies the
        # algorithm).
        # Each link is stored as a list of length three: [PREV, NEXT, KEY].

        def __init__(self, *args, **kwds):
            '''Initialize an ordered dictionary.  Signature is the same as for
            regular dictionaries, but keyword arguments are not recommended
            because their insertion order is arbitrary.

            '''
            if len(args) > 1:
                raise TypeError('expected at most 1 arguments, got %d' %
                                len(args))
            try:
                self.__root
            except AttributeError:
                self.__root = root = []  # sentinel node
                root[:] = [root, root, None]
                self.__map = {}
            self.__update(*args, **kwds)

        def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
            'od.__setitem__(i, y) <==> od[i]=y'
            # Setting a new item creates a new link which goes at the end of
            # the linked list, and the inherited dictionary is updated with
            # the new key/value pair.
            if key not in self:
                root = self.__root
                last = root[0]
                last[1] = root[0] = self.__map[key] = [last, root, key]
            dict_setitem(self, key, value)

        def __delitem__(self, key, dict_delitem=dict.__delitem__):
            'od.__delitem__(y) <==> del od[y]'
            # Deleting an existing item uses self.__map to find the link which
            # is then removed by updating the links in the predecessor and
            # successor nodes.
            dict_delitem(self, key)
            link_prev, link_next, key = self.__map.pop(key)
            link_prev[1] = link_next
            link_next[0] = link_prev

        def __iter__(self):
            'od.__iter__() <==> iter(od)'
            root = self.__root
            curr = root[1]
            while curr is not root:
                yield curr[2]
                curr = curr[1]

        def __reversed__(self):
            'od.__reversed__() <==> reversed(od)'
            root = self.__root
            curr = root[0]
            while curr is not root:
                yield curr[2]
                curr = curr[0]

        def clear(self):
            'od.clear() -> None.  Remove all items from od.'
            try:
                for node in self.__map.itervalues():
                    del node[:]
                root = self.__root
                root[:] = [root, root, None]
                self.__map.clear()
            except AttributeError:
                pass
            dict.clear(self)

        def popitem(self, last=True):
            '''od.popitem() -> (k, v), return and remove a (key, value) pair.
            Pairs are returned in LIFO order if last is true or FIFO order if
            false.

            '''
            if not self:
                raise KeyError('dictionary is empty')
            root = self.__root
            if last:
                link = root[0]
                link_prev = link[0]
                link_prev[1] = root
                root[0] = link_prev
            else:
                link = root[1]
                link_next = link[1]
                root[1] = link_next
                link_next[0] = root
            key = link[2]
            del self.__map[key]
            value = dict.pop(self, key)
            return key, value

        # -- the following methods do not depend on the internal structure --

        def keys(self):
            'od.keys() -> list of keys in od'
            return list(self)

        def values(self):
            'od.values() -> list of values in od'
            return [self[key] for key in self]

        def items(self):
            'od.items() -> list of (key, value) pairs in od'
            return [(key, self[key]) for key in self]

        def iterkeys(self):
            'od.iterkeys() -> an iterator over the keys in od'
            return iter(self)

        def itervalues(self):
            'od.itervalues -> an iterator over the values in od'
            for k in self:
                yield self[k]

        def iteritems(self):
            'od.iteritems -> an iterator over the (key, value) items in od'
            for k in self:
                yield (k, self[k])

        def update(*args, **kwds):
            '''od.update(E, **F) -> None.  Update od from dict/iterable E and F.

            NOTE(review): the recovered text was truncated inside this
            docstring; the remainder of the method is restored from the
            ActiveState recipe 576693 (r9) this backport is copied from --
            confirm against the original file.
            '''
            if len(args) > 2:
                raise TypeError('update() takes at most 2 positional '
                                'arguments (%d given)' % (len(args),))
            elif not args:
                raise TypeError('update() takes at least 1 argument (0 given)')
            self = args[0]
            other = args[1] if len(args) >= 2 else ()

            if isinstance(other, dict):
                for key in other:
                    self[key] = other[key]
            elif hasattr(other, 'keys'):
                for key in other.keys():
                    self[key] = other[key]
            else:
                for key, value in other:
                    self[key] = value
            for key, value in kwds.items():
                self[key] = value

        # let subclasses override update() without breaking __init__()
        __update = update
Update od from dict/iterable E and F.If E is a dict instance, does: for k in E: od[k] = E[k]If E has a .keys() method, does: for k in E.keys(): od[k] = E[k]Or if E is an iterable of items, does: for k, v in E: od[k] = vIn either case, this is followed by: for k, v in F.items(): od[k] = v'''if len(args) > 2:raise TypeError('update() takes at most 2 positional ''arguments (%d given)' % (len(args),))elif not args:raise TypeError('update() takes at least 1 argument (0 given)')self = args[0]# Make progressively weaker assumptions about "other"other = ()if len(args) == 2:other = args[1]if isinstance(other, dict):for key in other:self[key] = other[key]elif hasattr(other, 'keys'):for key in other.keys():self[key] = other[key]else:for key, value in other:self[key] = valuefor key, value in kwds.items():self[key] = value__update = update # let subclasses override update without breaking __init____marker = object()def pop(self, key, default=__marker):'''od.pop(k[,d]) -> v, remove specified key and return the corresponding value.If key is not found, d is returned if given, otherwise KeyError is raised.'''if key in self:result = self[key]del self[key]return resultif default is self.__marker:raise KeyError(key)return defaultdef setdefault(self, key, default=None):'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'if key in self:return self[key]self[key] = defaultreturn defaultdef __repr__(self, _repr_running=None):'od.__repr__() <==> repr(od)'if not _repr_running: _repr_running = {}call_key = id(self), _get_ident()if call_key in _repr_running:return '...'_repr_running[call_key] = 1try:if not self:return '%s()' % (self.__class__.__name__,)return '%s(%r)' % (self.__class__.__name__, self.items())finally:del _repr_running[call_key]def __reduce__(self):'Return state information for pickling'items = [[k, self[k]] for k in self]inst_dict = vars(self).copy()for k in vars(OrderedDict()):inst_dict.pop(k, None)if inst_dict:return (self.__class__, (items,), 
inst_dict)return self.__class__, (items,)def copy(self):'od.copy() -> a shallow copy of od'return self.__class__(self)@classmethoddef fromkeys(cls, iterable, value=None):'''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from Sand values equal to v (which defaults to None).'''d = cls()for key in iterable:d[key] = valuereturn ddef __eq__(self, other):'''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitivewhile comparison to a regular mapping is order-insensitive.'''if isinstance(other, OrderedDict):return len(self)==len(other) and self.items() == other.items()return dict.__eq__(self, other)def __ne__(self, other):return not self == other# -- the following methods are only used in Python 2.7 --def viewkeys(self):"od.viewkeys() -> a set-like object providing a view on od's keys"return KeysView(self)def viewvalues(self):"od.viewvalues() -> an object providing a view on od's values"return ValuesView(self)def viewitems(self):"od.viewitems() -> a set-like object providing a view on od's items"return ItemsView(self)try:from logging.config import BaseConfigurator, valid_identexcept ImportError: # pragma: no coverIDENTIFIER = re.compile('^[a-z_][a-z0-9_]*$', re.I)def valid_ident(s):m = IDENTIFIER.match(s)if not m:raise ValueError('Not a valid Python identifier: %r' % s)return True# The ConvertingXXX classes are wrappers around standard Python containers,# and they serve to convert any suitable values in the container. 
    # The
    # conversion converts base dicts, lists and tuples to their wrapped
    # equivalents, whereas strings which match a conversion format are converted
    # appropriately.
    #
    # Each wrapper should have a configurator attribute holding the actual
    # configurator to use for conversion.

    class ConvertingDict(dict):
        """A converting dictionary wrapper."""

        def __getitem__(self, key):
            value = dict.__getitem__(self, key)
            result = self.configurator.convert(value)
            #If the converted value is different, save for next time
            if value is not result:
                self[key] = result
                if type(result) in (ConvertingDict, ConvertingList,
                                    ConvertingTuple):
                    # Remember where the wrapped container lives so nested
                    # conversions can refer back to their parent.
                    result.parent = self
                    result.key = key
            return result

        def get(self, key, default=None):
            value = dict.get(self, key, default)
            result = self.configurator.convert(value)
            #If the converted value is different, save for next time
            if value is not result:
                self[key] = result
                if type(result) in (ConvertingDict, ConvertingList,
                                    ConvertingTuple):
                    result.parent = self
                    result.key = key
            return result

        def pop(self, key, default=None):
            value = dict.pop(self, key, default)
            result = self.configurator.convert(value)
            if value is not result:
                # The item is being removed, so there is nothing to save back;
                # only the parent/key links are recorded.
                if type(result) in (ConvertingDict, ConvertingList,
                                    ConvertingTuple):
                    result.parent = self
                    result.key = key
            return result

    class ConvertingList(list):
        """A converting list wrapper."""

        def __getitem__(self, key):
            value = list.__getitem__(self, key)
            result = self.configurator.convert(value)
            #If the converted value is different, save for next time
            if value is not result:
                self[key] = result
                if type(result) in (ConvertingDict, ConvertingList,
                                    ConvertingTuple):
                    result.parent = self
                    result.key = key
            return result

        def pop(self, idx=-1):
            value = list.pop(self, idx)
            result = self.configurator.convert(value)
            if value is not result:
                if type(result) in (ConvertingDict, ConvertingList,
                                    ConvertingTuple):
                    result.parent = self
            return result

    class ConvertingTuple(tuple):
        """A converting tuple wrapper."""

        def __getitem__(self, key):
            value = tuple.__getitem__(self, key)
            result = self.configurator.convert(value)
            # Tuples are immutable, so the converted value cannot be saved
            # back; it is re-converted on every access.
            if value is not result:
                if type(result) in (ConvertingDict, ConvertingList,
                                    ConvertingTuple):
                    result.parent = self
                    result.key = key
            return result

    class BaseConfigurator(object):
        """
        The configurator base class which defines some useful defaults.
        """
        # "prefix://suffix" strings trigger a converter looked up by prefix.
        CONVERT_PATTERN = re.compile(r'^(?P<prefix>[a-z]+)://(?P<suffix>.*)$')

        # Tokens of a cfg:// path: leading word, ".attr" and "[index]" parts.
        WORD_PATTERN = re.compile(r'^\s*(\w+)\s*')
        DOT_PATTERN = re.compile(r'^\.\s*(\w+)\s*')
        INDEX_PATTERN = re.compile(r'^\[\s*(\w+)\s*\]\s*')
        DIGIT_PATTERN = re.compile(r'^\d+$')

        value_converters = {
            'ext' : 'ext_convert',
            'cfg' : 'cfg_convert',
        }

        # We might want to use a different one, e.g. importlib
        importer = staticmethod(__import__)

        def __init__(self, config):
            self.config = ConvertingDict(config)
            self.config.configurator = self

        def resolve(self, s):
            """
            Resolve strings to objects using standard import and attribute
            syntax.
            """
            name = s.split('.')
            used = name.pop(0)
            try:
                found = self.importer(used)
                for frag in name:
                    used += '.' + frag
                    try:
                        found = getattr(found, frag)
                    except AttributeError:
                        # Not an attribute yet: maybe it's a submodule that
                        # has not been imported; import and retry.
                        self.importer(used)
                        found = getattr(found, frag)
                return found
            except ImportError:
                e, tb = sys.exc_info()[1:]
                v = ValueError('Cannot resolve %r: %s' % (s, e))
                # Preserve the original exception and traceback for py3-style
                # chaining even when running under py2.
                v.__cause__, v.__traceback__ = e, tb
                raise v

        def ext_convert(self, value):
            """Default converter for the ext:// protocol."""
            return self.resolve(value)

        def cfg_convert(self, value):
            """Default converter for the cfg:// protocol."""
            rest = value
            m = self.WORD_PATTERN.match(rest)
            if m is None:
                raise ValueError("Unable to convert %r" % value)
            else:
                rest = rest[m.end():]
                d = self.config[m.groups()[0]]
                #print d, rest
                while rest:
                    m = self.DOT_PATTERN.match(rest)
                    if m:
                        d = d[m.groups()[0]]
                    else:
                        m = self.INDEX_PATTERN.match(rest)
                        if m:
                            idx = m.groups()[0]
                            if not self.DIGIT_PATTERN.match(idx):
                                d = d[idx]
                            else:
                                try:
                                    n = int(idx) # try as number first (most likely)
                                    d = d[n]
                                except TypeError:
                                    # Container is not integer-indexable;
                                    # fall back to the string key.
                                    d = d[idx]
                    if m:
                        rest = rest[m.end():]
                    else:
                        raise ValueError('Unable to convert '
                                         '%r at %r' % (value, rest))
            #rest should be empty
            return d

        def convert(self, value):
            """
            Convert values to an appropriate type. dicts, lists and tuples are
            replaced by their converting alternatives. Strings are checked to
            see if they have a conversion format and are converted if they do.
            """
            if not isinstance(value, ConvertingDict) and isinstance(value, dict):
                value = ConvertingDict(value)
                value.configurator = self
            elif not isinstance(value, ConvertingList) and isinstance(value, list):
                value = ConvertingList(value)
                value.configurator = self
            elif not isinstance(value, ConvertingTuple) and\
                     isinstance(value, tuple):
                value = ConvertingTuple(value)
                value.configurator = self
            elif isinstance(value, string_types):
                # NOTE(review): string_types is defined elsewhere in this
                # module (py2 basestring / py3 str) — verify it is in scope.
                m = self.CONVERT_PATTERN.match(value)
                if m:
                    d = m.groupdict()
                    prefix = d['prefix']
                    converter = self.value_converters.get(prefix, None)
                    if converter:
                        suffix = d['suffix']
                        converter = getattr(self, converter)
                        value = converter(suffix)
            return value

        def configure_custom(self, config):
            """Configure an object with a user-supplied factory."""
            c = config.pop('()')
            if not callable(c):
                c = self.resolve(c)
            props = config.pop('.', None)
            # Check for valid identifiers
            kwargs = dict([(k, config[k]) for k in config if valid_ident(k)])
            result = c(**kwargs)
            if props:
                for name, value in props.items():
                    setattr(result, name, value)
            return result

        def as_tuple(self, value):
            """Utility function which converts lists to tuples."""
            if isinstance(value, list):
                value = tuple(value)
            return value
#-------------------------------------------------------------------# tarfile.py#-------------------------------------------------------------------# Copyright (C) 2002 Lars Gustaebel <lars@gustaebel.de># All rights reserved.## Permission is hereby granted, free of charge, to any person# obtaining a copy of this software and associated documentation# files (the "Software"), to deal in the Software without# restriction, including without limitation the rights to use,# copy, modify, merge, publish, distribute, sublicense, and/or sell# copies of the Software, and to permit persons to whom the# Software is furnished to do so, subject to the following# conditions:## The above copyright notice and this permission notice shall be# included in all copies or substantial portions of the Software.## THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND# NONINFRINGEMENT. 
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
from __future__ import print_function

"""Read from and write to tar format archives.
"""

__version__ = "$Revision$"

version     = "0.9.0"
__author__  = "Lars Gust\u00e4bel (lars@gustaebel.de)"
__date__    = "$Date: 2011-02-25 17:42:01 +0200 (Fri, 25 Feb 2011) $"
__cvsid__   = "$Id: tarfile.py 88586 2011-02-25 15:42:01Z marc-andre.lemburg $"
__credits__ = "Gustavo Niemeyer, Niels Gust\u00e4bel, Richard Townsend."

#---------
# Imports
#---------
import sys
import os
import stat
import errno
import time
import struct
import copy
import re

try:
    import grp, pwd
except ImportError:
    # Not available on Windows: ownership lookups degrade gracefully.
    grp = pwd = None

# os.symlink on Windows prior to 6.0 raises NotImplementedError
symlink_exception = (AttributeError, NotImplementedError)
try:
    # WindowsError (1314) will be raised if the caller does not hold the
    # SeCreateSymbolicLinkPrivilege privilege
    symlink_exception += (WindowsError,)
except NameError:
    # WindowsError only exists on Windows.
    pass

# from tarfile import *
__all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError"]

if sys.version_info[0] < 3:
    import __builtin__ as builtins
else:
    import builtins

_open = builtins.open # Since 'open' is TarFile.open

#---------------------------------------------------------
# tar constants
#---------------------------------------------------------
NUL = b"\0"                     # the null character
BLOCKSIZE = 512                 # length of processing blocks
RECORDSIZE = BLOCKSIZE * 20     # length of records
GNU_MAGIC = b"ustar  \0"        # magic gnu tar string
POSIX_MAGIC = b"ustar\x0000"    # magic posix tar string

LENGTH_NAME = 100               # maximum length of a filename
LENGTH_LINK = 100               # maximum length of a linkname
LENGTH_PREFIX = 155             # maximum length of the prefix field

REGTYPE = b"0"                  # regular file
AREGTYPE = b"\0"                # regular file
LNKTYPE = b"1"                  # link (inside tarfile)
SYMTYPE = b"2"                  # symbolic link
CHRTYPE = b"3"                  # character special device
BLKTYPE = b"4"                  # block special device
DIRTYPE = b"5"                  # directory
FIFOTYPE = b"6"                 # fifo special device
CONTTYPE = b"7"                 # contiguous file

GNUTYPE_LONGNAME = b"L"         # GNU tar longname
GNUTYPE_LONGLINK = b"K"         # GNU tar longlink
GNUTYPE_SPARSE = b"S"           # GNU tar sparse file

XHDTYPE = b"x"                  # POSIX.1-2001 extended header
XGLTYPE = b"g"                  # POSIX.1-2001 global header
SOLARIS_XHDTYPE = b"X"          # Solaris extended header

USTAR_FORMAT = 0                # POSIX.1-1988 (ustar) format
GNU_FORMAT = 1                  # GNU tar format
PAX_FORMAT = 2                  # POSIX.1-2001 (pax) format
DEFAULT_FORMAT = GNU_FORMAT

#---------------------------------------------------------
# tarfile constants
#---------------------------------------------------------
# File types that tarfile supports:
SUPPORTED_TYPES = (REGTYPE, AREGTYPE, LNKTYPE,
                   SYMTYPE, DIRTYPE, FIFOTYPE,
                   CONTTYPE, CHRTYPE, BLKTYPE,
                   GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
                   GNUTYPE_SPARSE)

# File types that will be treated as a regular file.
REGULAR_TYPES = (REGTYPE, AREGTYPE,
                 CONTTYPE, GNUTYPE_SPARSE)

# File types that are part of the GNU tar format.
GNU_TYPES = (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
             GNUTYPE_SPARSE)

# Fields from a pax header that override a TarInfo attribute.
PAX_FIELDS = ("path", "linkpath", "size", "mtime",
              "uid", "gid", "uname", "gname")

# Fields from a pax header that are affected by hdrcharset.
PAX_NAME_FIELDS = set(("path", "linkpath", "uname", "gname"))

# Fields in a pax header that are numbers, all other fields
# are treated as strings.
PAX_NUMBER_FIELDS = {
    "atime": float,
    "ctime": float,
    "mtime": float,
    "uid": int,
    "gid": int,
    "size": int
}

#---------------------------------------------------------
# Bits used in the mode field, values in octal.
#---------------------------------------------------------
S_IFLNK = 0o120000        # symbolic link
S_IFREG = 0o100000        # regular file
S_IFBLK = 0o060000        # block device
S_IFDIR = 0o040000        # directory
S_IFCHR = 0o020000        # character device
S_IFIFO = 0o010000        # fifo

TSUID   = 0o4000          # set UID on execution
TSGID   = 0o2000          # set GID on execution
TSVTX   = 0o1000          # reserved

TUREAD  = 0o400           # read by owner
TUWRITE = 0o200           # write by owner
TUEXEC  = 0o100           # execute/search by owner
TGREAD  = 0o040           # read by group
TGWRITE = 0o020           # write by group
TGEXEC  = 0o010           # execute/search by group
TOREAD  = 0o004           # read by other
TOWRITE = 0o002           # write by other
TOEXEC  = 0o001           # execute/search by other

#---------------------------------------------------------
# initialization
#---------------------------------------------------------
if os.name in ("nt", "ce"):
    ENCODING = "utf-8"
else:
    ENCODING = sys.getfilesystemencoding()

#---------------------------------------------------------
# Some useful functions
#---------------------------------------------------------

def stn(s, length, encoding, errors):
    """Convert a string to a null-terminated bytes object.

    The result is truncated or NUL-padded to exactly *length* bytes.
    """
    s = s.encode(encoding, errors)
    return s[:length] + (length - len(s)) * NUL

def nts(s, encoding, errors):
    """Convert a null-terminated bytes object to a string.

    Only the bytes up to (but not including) the first NUL are decoded.
    """
    p = s.find(b"\0")
    if p != -1:
        s = s[:p]
    return s.decode(encoding, errors)

def nti(s):
    """Convert a number field to a python number.

    Raises InvalidHeaderError if the octal field cannot be parsed.
    """
    # There are two possible encodings for a number field, see
    # itn() below: NUL-terminated octal digits, or a leading 0o200 byte
    # followed by a big-endian binary number (GNU base-256 extension).
    #
    # BUGFIX: indexing bytes yields an int on Python 3 but a 1-char str
    # on Python 2.  The old code compared s[0] != chr(0o200) (always True
    # on py3, so base-256 fields were never decoded) and called
    # ord(s[i + 1]) (TypeError on py3).  Normalize each byte to an int
    # first so both interpreters take the correct branch.
    first = s[0] if isinstance(s[0], int) else ord(s[0])
    if first != 0o200:
        try:
            n = int(nts(s, "ascii", "strict") or "0", 8)
        except ValueError:
            raise InvalidHeaderError("invalid header")
    else:
        n = 0
        for i in range(len(s) - 1):
            byte = s[i + 1]
            if not isinstance(byte, int):
                byte = ord(byte)
            n <<= 8
            n += byte
    return n

def itn(n, digits=8, format=DEFAULT_FORMAT):
    """Convert a python number to a number field.
    """
    # POSIX 1003.1-1988 requires numbers to be encoded as a string of
    # octal digits followed by a null-byte, this allows values up to
    # (8**(digits-1))-1. GNU tar allows storing numbers greater than
    # that if necessary. A leading 0o200 byte indicates this particular
    # encoding, the following digits-1 bytes are a big-endian
    # representation. This allows values up to (256**(digits-1))-1.
    if 0 <= n < 8 ** (digits - 1):
        s = ("%0*o" % (digits - 1, n)).encode("ascii") + NUL
    else:
        if format != GNU_FORMAT or n >= 256 ** (digits - 1):
            raise ValueError("overflow in number field")

        if n < 0:
            # XXX We mimic GNU tar's behaviour with negative numbers,
            # this could raise OverflowError.
            n = struct.unpack("L", struct.pack("l", n))[0]

        s = bytearray()
        for i in range(digits - 1):
            s.insert(0, n & 0o377)
            n >>= 8
        s.insert(0, 0o200)
    return s
    # This allows values up to (256**(digits-1))-1.
    if 0 <= n < 8 ** (digits - 1):
        # Plain octal encoding fits: digits-1 octal chars + NUL terminator.
        s = ("%0*o" % (digits - 1, n)).encode("ascii") + NUL
    else:
        if format != GNU_FORMAT or n >= 256 ** (digits - 1):
            raise ValueError("overflow in number field")

        if n < 0:
            # XXX We mimic GNU tar's behaviour with negative numbers,
            # this could raise OverflowError.
            n = struct.unpack("L", struct.pack("l", n))[0]

        # GNU base-256 encoding: 0o200 marker byte + big-endian value.
        s = bytearray()
        for i in range(digits - 1):
            s.insert(0, n & 0o377)
            n >>= 8
        s.insert(0, 0o200)
    return s

def calc_chksums(buf):
    """Calculate the checksum for a member's header by summing up all
       characters except for the chksum field which is treated as if
       it was filled with spaces. According to the GNU tar sources,
       some tars (Sun and NeXT) calculate chksum with signed char,
       which will be different if there are chars in the buffer with
       the high bit set. So we calculate two checksums, unsigned and
       signed.
    """
    # 256 == 8 spaces (0x20) for the chksum field itself (bytes 148..155).
    unsigned_chksum = 256 + sum(struct.unpack("148B", buf[:148]) + struct.unpack("356B", buf[156:512]))
    signed_chksum = 256 + sum(struct.unpack("148b", buf[:148]) + struct.unpack("356b", buf[156:512]))
    return unsigned_chksum, signed_chksum

def copyfileobj(src, dst, length=None):
    """Copy length bytes from fileobj src to fileobj dst.
       If length is None, copy the entire content.

       Raises IOError if src is exhausted before length bytes are copied.
    """
    if length == 0:
        return
    if length is None:
        while True:
            buf = src.read(16*1024)
            if not buf:
                break
            dst.write(buf)
        return

    BUFSIZE = 16 * 1024
    blocks, remainder = divmod(length, BUFSIZE)
    for b in range(blocks):
        buf = src.read(BUFSIZE)
        if len(buf) < BUFSIZE:
            raise IOError("end of file reached")
        dst.write(buf)

    if remainder != 0:
        buf = src.read(remainder)
        if len(buf) < remainder:
            raise IOError("end of file reached")
        dst.write(buf)
    return

# Lookup table for filemode(): one tuple per output position; the first
# matching bit pattern in each tuple supplies the character.
filemode_table = (
    ((S_IFLNK,      "l"),
     (S_IFREG,      "-"),
     (S_IFBLK,      "b"),
     (S_IFDIR,      "d"),
     (S_IFCHR,      "c"),
     (S_IFIFO,      "p")),

    ((TUREAD,       "r"),),
    ((TUWRITE,      "w"),),
    ((TUEXEC|TSUID, "s"),
     (TSUID,        "S"),
     (TUEXEC,       "x")),

    ((TGREAD,       "r"),),
    ((TGWRITE,      "w"),),
    ((TGEXEC|TSGID, "s"),
     (TSGID,        "S"),
     (TGEXEC,       "x")),

    ((TOREAD,       "r"),),
    ((TOWRITE,      "w"),),
    ((TOEXEC|TSVTX, "t"),
     (TSVTX,        "T"),
     (TOEXEC,       "x"))
)

def filemode(mode):
    """Convert a file's mode to a string of the form
       -rwxrwxrwx.
       Used by TarFile.list()
    """
    perm = []
    for table in filemode_table:
        for bit, char in table:
            if mode & bit == bit:
                perm.append(char)
                break
        else:
            perm.append("-")
    return "".join(perm)

class TarError(Exception):
    """Base exception."""
    pass
class ExtractError(TarError):
    """General exception for extract errors."""
    pass
class ReadError(TarError):
    """Exception for unreadable tar archives."""
    pass
class CompressionError(TarError):
    """Exception for unavailable compression methods."""
    pass
class StreamError(TarError):
    """Exception for unsupported operations on stream-like TarFiles."""
    pass
class HeaderError(TarError):
    """Base exception for header errors."""
    pass
class EmptyHeaderError(HeaderError):
    """Exception for empty headers."""
    pass
class TruncatedHeaderError(HeaderError):
    """Exception for truncated headers."""
    pass
class EOFHeaderError(HeaderError):
    """Exception for end of file headers."""
    pass
class InvalidHeaderError(HeaderError):
    """Exception for invalid headers."""
    pass
class SubsequentHeaderError(HeaderError):
    """Exception for missing and invalid extended headers."""
    pass

#---------------------------
# internal stream interface
#---------------------------
class _LowLevelFile(object):
    """Low-level file object. Supports reading and writing.
       It is used instead of a regular file object for streaming
       access.
    """

    def __init__(self, name, mode):
        mode = {
            "r": os.O_RDONLY,
            "w": os.O_WRONLY | os.O_CREAT | os.O_TRUNC,
        }[mode]
        if hasattr(os, "O_BINARY"):
            # Required on Windows to avoid newline translation.
            mode |= os.O_BINARY
        self.fd = os.open(name, mode, 0o666)

    def close(self):
        os.close(self.fd)

    def read(self, size):
        return os.read(self.fd, size)

    def write(self, s):
        os.write(self.fd, s)

class _Stream(object):
    """Class that serves as an adapter between TarFile and
       a stream-like object.  The stream-like object only
       needs to have a read() or write() method and is accessed
       blockwise.
Use of gzip or bzip2 compression is possible.A stream-like object could be for example: sys.stdin,sys.stdout, a socket, a tape device etc._Stream is intended to be used only internally."""def __init__(self, name, mode, comptype, fileobj, bufsize):"""Construct a _Stream object."""self._extfileobj = Trueif fileobj is None:fileobj = _LowLevelFile(name, mode)self._extfileobj = Falseif comptype == '*':# Enable transparent compression detection for the# stream interfacefileobj = _StreamProxy(fileobj)comptype = fileobj.getcomptype()self.name = name or ""self.mode = modeself.comptype = comptypeself.fileobj = fileobjself.bufsize = bufsizeself.buf = b""self.pos = 0self.closed = Falsetry:if comptype == "gz":try:import zlibexcept ImportError:raise CompressionError("zlib module is not available")self.zlib = zlibself.crc = zlib.crc32(b"")if mode == "r":self._init_read_gz()else:self._init_write_gz()if comptype == "bz2":try:import bz2except ImportError:raise CompressionError("bz2 module is not available")if mode == "r":self.dbuf = b""self.cmp = bz2.BZ2Decompressor()else:self.cmp = bz2.BZ2Compressor()except:if not self._extfileobj:self.fileobj.close()self.closed = Trueraisedef __del__(self):if hasattr(self, "closed") and not self.closed:self.close()def _init_write_gz(self):"""Initialize for writing with gzip compression."""self.cmp = self.zlib.compressobj(9, self.zlib.DEFLATED,-self.zlib.MAX_WBITS,self.zlib.DEF_MEM_LEVEL,0)timestamp = struct.pack("<L", int(time.time()))self.__write(b"\037\213\010\010" + timestamp + b"\002\377")if self.name.endswith(".gz"):self.name = self.name[:-3]# RFC1952 says we must use ISO-8859-1 for the FNAME field.self.__write(self.name.encode("iso-8859-1", "replace") + NUL)def write(self, s):"""Write string s to the stream."""if self.comptype == "gz":self.crc = self.zlib.crc32(s, self.crc)self.pos += len(s)if self.comptype != "tar":s = self.cmp.compress(s)self.__write(s)def __write(self, s):"""Write string s to the stream if a whole new blockis ready to be 
written."""self.buf += swhile len(self.buf) > self.bufsize:self.fileobj.write(self.buf[:self.bufsize])self.buf = self.buf[self.bufsize:]def close(self):"""Close the _Stream object. No operation should bedone on it afterwards."""if self.closed:returnif self.mode == "w" and self.comptype != "tar":self.buf += self.cmp.flush()if self.mode == "w" and self.buf:self.fileobj.write(self.buf)self.buf = b""if self.comptype == "gz":# The native zlib crc is an unsigned 32-bit integer, but# the Python wrapper implicitly casts that to a signed C# long. So, on a 32-bit box self.crc may "look negative",# while the same crc on a 64-bit box may "look positive".# To avoid irksome warnings from the `struct` module, force# it to look positive on all boxes.self.fileobj.write(struct.pack("<L", self.crc & 0xffffffff))self.fileobj.write(struct.pack("<L", self.pos & 0xffffFFFF))if not self._extfileobj:self.fileobj.close()self.closed = Truedef _init_read_gz(self):"""Initialize for reading a gzip compressed fileobj."""self.cmp = self.zlib.decompressobj(-self.zlib.MAX_WBITS)self.dbuf = b""# taken from gzip.GzipFile with some alterationsif self.__read(2) != b"\037\213":raise ReadError("not a gzip file")if self.__read(1) != b"\010":raise CompressionError("unsupported compression method")flag = ord(self.__read(1))self.__read(6)if flag & 4:xlen = ord(self.__read(1)) + 256 * ord(self.__read(1))self.read(xlen)if flag & 8:while True:s = self.__read(1)if not s or s == NUL:breakif flag & 16:while True:s = self.__read(1)if not s or s == NUL:breakif flag & 2:self.__read(2)def tell(self):"""Return the stream's file pointer position."""return self.posdef seek(self, pos=0):"""Set the stream's file pointer to pos. 
Negative seekingis forbidden."""if pos - self.pos >= 0:blocks, remainder = divmod(pos - self.pos, self.bufsize)for i in range(blocks):self.read(self.bufsize)self.read(remainder)else:raise StreamError("seeking backwards is not allowed")return self.posdef read(self, size=None):"""Return the next size number of bytes from the stream.If size is not defined, return all bytes of the streamup to EOF."""if size is None:t = []while True:buf = self._read(self.bufsize)if not buf:breakt.append(buf)buf = "".join(t)else:buf = self._read(size)self.pos += len(buf)return bufdef _read(self, size):"""Return size bytes from the stream."""if self.comptype == "tar":return self.__read(size)c = len(self.dbuf)while c < size:buf = self.__read(self.bufsize)if not buf:breaktry:buf = self.cmp.decompress(buf)except IOError:raise ReadError("invalid compressed data")self.dbuf += bufc += len(buf)buf = self.dbuf[:size]self.dbuf = self.dbuf[size:]return bufdef __read(self, size):"""Return size bytes from stream. If internal buffer is empty,read another block from the stream."""c = len(self.buf)while c < size:buf = self.fileobj.read(self.bufsize)if not buf:breakself.buf += bufc += len(buf)buf = self.buf[:size]self.buf = self.buf[size:]return buf# class _Streamclass _StreamProxy(object):"""Small proxy class that enables transparent compressiondetection for the Stream interface (mode 'r|*')."""def __init__(self, fileobj):self.fileobj = fileobjself.buf = self.fileobj.read(BLOCKSIZE)def read(self, size):self.read = self.fileobj.readreturn self.bufdef getcomptype(self):if self.buf.startswith(b"\037\213\010"):return "gz"if self.buf.startswith(b"BZh91"):return "bz2"return "tar"def close(self):self.fileobj.close()# class StreamProxyclass _BZ2Proxy(object):"""Small proxy class that enables external file objectsupport for "r:bz2" and "w:bz2" modes. 
       This is actually
       a workaround for a limitation in bz2 module's BZ2File
       class which (unlike gzip.GzipFile) has no support for
       a file object argument.
    """

    blocksize = 16 * 1024

    def __init__(self, fileobj, mode):
        self.fileobj = fileobj
        self.mode = mode
        self.name = getattr(self.fileobj, "name", None)
        self.init()

    def init(self):
        # (Re)create the (de)compressor; also used by seek() to rewind.
        import bz2
        self.pos = 0
        if self.mode == "r":
            self.bz2obj = bz2.BZ2Decompressor()
            self.fileobj.seek(0)
            self.buf = b""
        else:
            self.bz2obj = bz2.BZ2Compressor()

    def read(self, size):
        # Decompress raw chunks until size bytes are buffered or EOF.
        x = len(self.buf)
        while x < size:
            raw = self.fileobj.read(self.blocksize)
            if not raw:
                break
            data = self.bz2obj.decompress(raw)
            self.buf += data
            x += len(data)

        buf = self.buf[:size]
        self.buf = self.buf[size:]
        self.pos += len(buf)
        return buf

    def seek(self, pos):
        # Backward seeks restart decompression from the beginning;
        # forward seeks just read and discard.
        if pos < self.pos:
            self.init()
        self.read(pos - self.pos)

    def tell(self):
        return self.pos

    def write(self, data):
        self.pos += len(data)
        raw = self.bz2obj.compress(data)
        self.fileobj.write(raw)

    def close(self):
        if self.mode == "w":
            raw = self.bz2obj.flush()
            self.fileobj.write(raw)
# class _BZ2Proxy

#------------------------
# Extraction file object
#------------------------
class _FileInFile(object):
    """A thin wrapper around an existing file object that
       provides a part of its data as an individual file
       object.
    """

    def __init__(self, fileobj, offset, size, blockinfo=None):
        self.fileobj = fileobj
        self.offset = offset
        self.size = size
        self.position = 0

        if blockinfo is None:
            blockinfo = [(0, size)]

        # Construct a map with data and zero blocks.
        # Each entry is (has_data, logical_start, logical_stop, real_offset);
        # gaps between data blocks read back as NUL bytes (sparse members).
        self.map_index = 0
        self.map = []
        lastpos = 0
        realpos = self.offset
        for offset, size in blockinfo:
            if offset > lastpos:
                self.map.append((False, lastpos, offset, None))
            self.map.append((True, offset, offset + size, realpos))
            realpos += size
            lastpos = offset + size
        if lastpos < self.size:
            self.map.append((False, lastpos, self.size, None))

    def seekable(self):
        if not hasattr(self.fileobj, "seekable"):
            # XXX gzip.GzipFile and bz2.BZ2File
            return True
        return self.fileobj.seekable()

    def tell(self):
        """Return the current file position.
        """
        return self.position

    def seek(self, position):
        """Seek to a position in the file.
        """
        self.position = position

    def read(self, size=None):
        """Read data from the file.
        """
        if size is None:
            size = self.size - self.position
        else:
            size = min(size, self.size - self.position)

        buf = b""
        while size > 0:
            # Find the map entry containing the current position; the
            # search wraps because entries are in ascending order.
            while True:
                data, start, stop, offset = self.map[self.map_index]
                if start <= self.position < stop:
                    break
                else:
                    self.map_index += 1
                    if self.map_index == len(self.map):
                        self.map_index = 0
            length = min(size, stop - self.position)
            if data:
                self.fileobj.seek(offset + (self.position - start))
                buf += self.fileobj.read(length)
            else:
                # Hole in a sparse file: synthesize NUL bytes.
                buf += NUL * length
            size -= length
            self.position += length
        return buf
#class _FileInFile

class ExFileObject(object):
    """File-like object for reading an archive member.
       Is returned by TarFile.extractfile().
    """
    blocksize = 1024

    def __init__(self, tarfile, tarinfo):
        self.fileobj = _FileInFile(tarfile.fileobj,
                                   tarinfo.offset_data,
                                   tarinfo.size,
                                   tarinfo.sparse)
        self.name = tarinfo.name
        self.mode = "r"
        self.closed = False
        self.size = tarinfo.size

        self.position = 0
        self.buffer = b""

    def readable(self):
        return True

    def writable(self):
        return False

    def seekable(self):
        return self.fileobj.seekable()

    def read(self, size=None):
        """Read at most size bytes from the file. If size is not
           present or None, read all data until EOF is reached.
        """
        if self.closed:
            raise ValueError("I/O operation on closed file")

        buf = b""
        if self.buffer:
            # Serve buffered data (left over from readline) first.
            if size is None:
                buf = self.buffer
                self.buffer = b""
            else:
                buf = self.buffer[:size]
                self.buffer = self.buffer[size:]

        if size is None:
            buf += self.fileobj.read()
        else:
            buf += self.fileobj.read(size - len(buf))

        self.position += len(buf)
        return buf

    # XXX TextIOWrapper uses the read1() method.
    read1 = read

    def readline(self, size=-1):
        """Read one entire line from the file. If size is present
           and non-negative, return a string with at most that
           size, which may be an incomplete line.
        """
        if self.closed:
            raise ValueError("I/O operation on closed file")

        pos = self.buffer.find(b"\n") + 1
        if pos == 0:
            # no newline found.
            while True:
                buf = self.fileobj.read(self.blocksize)
                self.buffer += buf
                if not buf or b"\n" in buf:
                    pos = self.buffer.find(b"\n") + 1
                    if pos == 0:
                        # no newline found.
                        pos = len(self.buffer)
                    break

        if size != -1:
            pos = min(size, pos)

        buf = self.buffer[:pos]
        self.buffer = self.buffer[pos:]
        self.position += len(buf)
        return buf

    def readlines(self):
        """Return a list with all remaining lines.
        """
        result = []
        while True:
            line = self.readline()
            if not line: break
            result.append(line)
        return result

    def tell(self):
        """Return the current file position.
        """
        if self.closed:
            raise ValueError("I/O operation on closed file")

        return self.position

    def seek(self, pos, whence=os.SEEK_SET):
        """Seek to a position in the file.
        """
        if self.closed:
            raise ValueError("I/O operation on closed file")

        if whence == os.SEEK_SET:
            self.position = min(max(pos, 0), self.size)
        elif whence == os.SEEK_CUR:
            if pos < 0:
                self.position = max(self.position + pos, 0)
            else:
                self.position = min(self.position + pos, self.size)
        elif whence == os.SEEK_END:
            self.position = max(min(self.size + pos, self.size), 0)
        else:
            raise ValueError("Invalid argument")

        # Discard the readline buffer: it no longer matches the position.
        self.buffer = b""
        self.fileobj.seek(self.position)

    def close(self):
        """Close the file object.
        """
        self.closed = True

    def __iter__(self):
        """Get an iterator over the file's lines.
        """
        while True:
            line = self.readline()
            if not line:
                break
            yield line
#class ExFileObject

#------------------
# Exported Classes
#------------------
class TarInfo(object):
    """Informational class which holds the details about an
       archive member given by a tar header block.
       TarInfo objects are returned by TarFile.getmember(),
       TarFile.getmembers() and TarFile.gettarinfo() and are
       usually created internally.
    """

    __slots__ = ("name", "mode", "uid", "gid", "size", "mtime",
                 "chksum", "type", "linkname", "uname", "gname",
                 "devmajor",
class TarInfo(object):
    """Informational class which holds the details about an
    archive member given by a tar header block.
    TarInfo objects are returned by TarFile.getmember(),
    TarFile.getmembers() and TarFile.gettarinfo() and are
    usually created internally.
    """

    __slots__ = ("name", "mode", "uid", "gid", "size", "mtime",
                 "chksum", "type", "linkname", "uname", "gname",
                 "devmajor", "devminor", "offset", "offset_data",
                 "pax_headers", "sparse", "tarfile", "_sparse_structs",
                 "_link_target")

    def __init__(self, name=""):
        """Construct a TarInfo object. name is the optional name
        of the member.
        """
        self.name = name            # member name
        self.mode = 0o644           # file permissions
        self.uid = 0                # user id
        self.gid = 0                # group id
        self.size = 0               # file size
        self.mtime = 0              # modification time
        self.chksum = 0             # header checksum
        self.type = REGTYPE         # member type
        self.linkname = ""          # link name
        self.uname = ""             # user name
        self.gname = ""             # group name
        self.devmajor = 0           # device major number
        self.devminor = 0           # device minor number

        self.offset = 0             # the tar header starts here
        self.offset_data = 0        # the file's data starts here

        self.sparse = None          # sparse member information
        self.pax_headers = {}       # pax header information

    # In pax headers the "name" and "linkname" field are called
    # "path" and "linkpath".
    def _getpath(self):
        return self.name

    def _setpath(self, name):
        self.name = name

    path = property(_getpath, _setpath)

    def _getlinkpath(self):
        return self.linkname

    def _setlinkpath(self, linkname):
        self.linkname = linkname

    linkpath = property(_getlinkpath, _setlinkpath)

    def __repr__(self):
        return "<%s %r at %#x>" % (self.__class__.__name__, self.name,
                                   id(self))

    def get_info(self):
        """Return the TarInfo's attributes as a dictionary."""
        info = {
            "name":     self.name,
            "mode":     self.mode & 0o7777,
            "uid":      self.uid,
            "gid":      self.gid,
            "size":     self.size,
            "mtime":    self.mtime,
            "chksum":   self.chksum,
            "type":     self.type,
            "linkname": self.linkname,
            "uname":    self.uname,
            "gname":    self.gname,
            "devmajor": self.devmajor,
            "devminor": self.devminor
        }

        # Directory names always carry a trailing slash in the archive.
        if info["type"] == DIRTYPE and not info["name"].endswith("/"):
            info["name"] += "/"

        return info

    def tobuf(self, format=DEFAULT_FORMAT, encoding=ENCODING,
              errors="surrogateescape"):
        """Return a tar header as a string of 512 byte blocks."""
        info = self.get_info()

        if format == USTAR_FORMAT:
            return self.create_ustar_header(info, encoding, errors)
        elif format == GNU_FORMAT:
            return self.create_gnu_header(info, encoding, errors)
        elif format == PAX_FORMAT:
            return self.create_pax_header(info, encoding)
        else:
            raise ValueError("invalid format")

    def create_ustar_header(self, info, encoding, errors):
        """Return the object as a ustar header block."""
        info["magic"] = POSIX_MAGIC

        if len(info["linkname"]) > LENGTH_LINK:
            raise ValueError("linkname is too long")

        if len(info["name"]) > LENGTH_NAME:
            info["prefix"], info["name"] = self._posix_split_name(info["name"])

        return self._create_header(info, USTAR_FORMAT, encoding, errors)

    def create_gnu_header(self, info, encoding, errors):
        """Return the object as a GNU header block sequence."""
        info["magic"] = GNU_MAGIC

        buf = b""
        # Overlong names/linknames get their own GNU long-header blocks
        # prepended to the regular header.
        if len(info["linkname"]) > LENGTH_LINK:
            buf += self._create_gnu_long_header(info["linkname"],
                                                GNUTYPE_LONGLINK,
                                                encoding, errors)

        if len(info["name"]) > LENGTH_NAME:
            buf += self._create_gnu_long_header(info["name"],
                                                GNUTYPE_LONGNAME,
                                                encoding, errors)

        return buf + self._create_header(info, GNU_FORMAT, encoding, errors)

    def create_pax_header(self, info, encoding):
        """Return the object as a ustar header block. If it cannot be
        represented this way, prepend a pax extended header sequence
        with supplement information.
        """
        info["magic"] = POSIX_MAGIC
        pax_headers = self.pax_headers.copy()

        # Test string fields for values that exceed the field length or
        # cannot be represented in ASCII encoding.
        for name, hname, length in (
                ("name", "path", LENGTH_NAME),
                ("linkname", "linkpath", LENGTH_LINK),
                ("uname", "uname", 32),
                ("gname", "gname", 32)):

            if hname in pax_headers:
                # The pax header has priority.
                continue

            # Try to encode the string as ASCII.
            try:
                info[name].encode("ascii", "strict")
            except UnicodeEncodeError:
                pax_headers[hname] = info[name]
                continue

            if len(info[name]) > length:
                pax_headers[hname] = info[name]

        # Test number fields for values that exceed the field limit or
        # values that like to be stored as float.
        for name, digits in (("uid", 8), ("gid", 8),
                             ("size", 12), ("mtime", 12)):
            if name in pax_headers:
                # The pax header has priority. Avoid overflow.
                info[name] = 0
                continue

            val = info[name]
            if not 0 <= val < 8 ** (digits - 1) or isinstance(val, float):
                pax_headers[name] = str(val)
                info[name] = 0

        # Create a pax extended header if necessary.
        if pax_headers:
            buf = self._create_pax_generic_header(pax_headers, XHDTYPE,
                                                  encoding)
        else:
            buf = b""

        return buf + self._create_header(info, USTAR_FORMAT, "ascii",
                                         "replace")

    @classmethod
    def create_pax_global_header(cls, pax_headers):
        """Return the object as a pax global header block sequence."""
        return cls._create_pax_generic_header(pax_headers, XGLTYPE, "utf8")

    def _posix_split_name(self, name):
        """Split a name longer than 100 chars into a prefix
        and a name part.
        """
        # Cut at the last slash that still fits into the prefix field.
        prefix = name[:LENGTH_PREFIX + 1]
        while prefix and prefix[-1] != "/":
            prefix = prefix[:-1]

        name = name[len(prefix):]
        prefix = prefix[:-1]

        if not prefix or len(name) > LENGTH_NAME:
            raise ValueError("name is too long")
        return prefix, name

    @staticmethod
    def _create_header(info, format, encoding, errors):
        """Return a header block. info is a dictionary with file
        information, format must be one of the *_FORMAT constants.
        """
        parts = [
            stn(info.get("name", ""), 100, encoding, errors),
            itn(info.get("mode", 0) & 0o7777, 8, format),
            itn(info.get("uid", 0), 8, format),
            itn(info.get("gid", 0), 8, format),
            itn(info.get("size", 0), 12, format),
            itn(info.get("mtime", 0), 12, format),
            b"        ",  # checksum field
            info.get("type", REGTYPE),
            stn(info.get("linkname", ""), 100, encoding, errors),
            info.get("magic", POSIX_MAGIC),
            stn(info.get("uname", ""), 32, encoding, errors),
            stn(info.get("gname", ""), 32, encoding, errors),
            itn(info.get("devmajor", 0), 8, format),
            itn(info.get("devminor", 0), 8, format),
            stn(info.get("prefix", ""), 155, encoding, errors)
        ]

        buf = struct.pack("%ds" % BLOCKSIZE, b"".join(parts))
        # Compute the checksum over the block and patch it into the
        # checksum field (offset 148, 8 bytes -> -364 .. -357 from the end).
        chksum = calc_chksums(buf[-BLOCKSIZE:])[0]
        buf = buf[:-364] + ("%06o\0" % chksum).encode("ascii") + buf[-357:]
        return buf

    @staticmethod
    def _create_payload(payload):
        """Return the string payload filled with zero bytes
        up to the next 512 byte border.
        """
        blocks, remainder = divmod(len(payload), BLOCKSIZE)
        if remainder > 0:
            payload += (BLOCKSIZE - remainder) * NUL
        return payload

    @classmethod
    def _create_gnu_long_header(cls, name, type, encoding, errors):
        """Return a GNUTYPE_LONGNAME or GNUTYPE_LONGLINK sequence
        for name.
        """
        name = name.encode(encoding, errors) + NUL

        info = {}
        info["name"] = "././@LongLink"
        info["type"] = type
        info["size"] = len(name)
        info["magic"] = GNU_MAGIC

        # create extended header + name blocks.
        return cls._create_header(info, USTAR_FORMAT, encoding, errors) + \
                cls._create_payload(name)

    @classmethod
    def _create_pax_generic_header(cls, pax_headers, type, encoding):
        """Return a POSIX.1-2008 extended or global header sequence
        that contains a list of keyword, value pairs. The values
        must be strings.
        """
        # Check if one of the fields contains surrogate characters and
        # thereby forces hdrcharset=BINARY, see _proc_pax() for more
        # information.
        binary = False
        for keyword, value in pax_headers.items():
            try:
                value.encode("utf8", "strict")
            except UnicodeEncodeError:
                binary = True
                break

        records = b""
        if binary:
            # Put the hdrcharset field at the beginning of the header.
            records += b"21 hdrcharset=BINARY\n"

        for keyword, value in pax_headers.items():
            keyword = keyword.encode("utf8")
            if binary:
                # Try to restore the original byte representation of
                # `value'. Needless to say, that the encoding must
                # match the string.
                value = value.encode(encoding, "surrogateescape")
            else:
                value = value.encode("utf8")

            # A record is "%d %s=%s\n" where the leading number is the
            # total record length including itself; iterate until the
            # length value is consistent with the record it describes.
            l = len(keyword) + len(value) + 3   # ' ' + '=' + '\n'
            n = p = 0
            while True:
                n = l + len(str(p))
                if n == p:
                    break
                p = n
            records += bytes(str(p), "ascii") + b" " + keyword + b"=" + \
                value + b"\n"

        # We use a hardcoded "././@PaxHeader" name like star does
        # instead of the one that POSIX recommends.
        info = {}
        info["name"] = "././@PaxHeader"
        info["type"] = type
        info["size"] = len(records)
        info["magic"] = POSIX_MAGIC

        # Create pax header + record blocks.
        return cls._create_header(info, USTAR_FORMAT, "ascii", "replace") + \
                cls._create_payload(records)

    @classmethod
    def frombuf(cls, buf, encoding, errors):
        """Construct a TarInfo object from a 512 byte bytes object."""
        if len(buf) == 0:
            raise EmptyHeaderError("empty header")
        if len(buf) != BLOCKSIZE:
            raise TruncatedHeaderError("truncated header")
        if buf.count(NUL) == BLOCKSIZE:
            raise EOFHeaderError("end of file header")

        chksum = nti(buf[148:156])
        if chksum not in calc_chksums(buf):
            raise InvalidHeaderError("bad checksum")

        obj = cls()
        obj.name = nts(buf[0:100], encoding, errors)
        obj.mode = nti(buf[100:108])
        obj.uid = nti(buf[108:116])
        obj.gid = nti(buf[116:124])
        obj.size = nti(buf[124:136])
        obj.mtime = nti(buf[136:148])
        obj.chksum = chksum
        obj.type = buf[156:157]
        obj.linkname = nts(buf[157:257], encoding, errors)
        obj.uname = nts(buf[265:297], encoding, errors)
        obj.gname = nts(buf[297:329], encoding, errors)
        obj.devmajor = nti(buf[329:337])
        obj.devminor = nti(buf[337:345])
        prefix = nts(buf[345:500], encoding, errors)

        # Old V7 tar format represents a directory as a regular
        # file with a trailing slash.
        if obj.type == AREGTYPE and obj.name.endswith("/"):
            obj.type = DIRTYPE

        # The old GNU sparse format occupies some of the unused
        # space in the buffer for up to 4 sparse structures.
        # Save the them for later processing in _proc_sparse().
        if obj.type == GNUTYPE_SPARSE:
            pos = 386
            structs = []
            for i in range(4):
                try:
                    offset = nti(buf[pos:pos + 12])
                    numbytes = nti(buf[pos + 12:pos + 24])
                except ValueError:
                    break
                structs.append((offset, numbytes))
                pos += 24
            isextended = bool(buf[482])
            origsize = nti(buf[483:495])
            obj._sparse_structs = (structs, isextended, origsize)

        # Remove redundant slashes from directories.
        if obj.isdir():
            obj.name = obj.name.rstrip("/")

        # Reconstruct a ustar longname.
        if prefix and obj.type not in GNU_TYPES:
            obj.name = prefix + "/" + obj.name
        return obj

    @classmethod
    def fromtarfile(cls, tarfile):
        """Return the next TarInfo object from TarFile object
        tarfile.
        """
        buf = tarfile.fileobj.read(BLOCKSIZE)
        obj = cls.frombuf(buf, tarfile.encoding, tarfile.errors)
        obj.offset = tarfile.fileobj.tell() - BLOCKSIZE
        return obj._proc_member(tarfile)

    #--------------------------------------------------------------------------
    # The following are methods that are called depending on the type of a
    # member. The entry point is _proc_member() which can be overridden in a
    # subclass to add custom _proc_*() methods. A _proc_*() method MUST
    # implement the following
    # operations:
    # 1. Set self.offset_data to the position where the data blocks begin,
    #    if there is data that follows.
    # 2. Set tarfile.offset to the position where the next member's header will
    #    begin.
    # 3. Return self or another valid TarInfo object.
    def _proc_member(self, tarfile):
        """Choose the right processing method depending on
        the type and call it.
        """
        if self.type in (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK):
            return self._proc_gnulong(tarfile)
        elif self.type == GNUTYPE_SPARSE:
            return self._proc_sparse(tarfile)
        elif self.type in (XHDTYPE, XGLTYPE, SOLARIS_XHDTYPE):
            return self._proc_pax(tarfile)
        else:
            return self._proc_builtin(tarfile)

    def _proc_builtin(self, tarfile):
        """Process a builtin type or an unknown type which
        will be treated as a regular file.
        """
        self.offset_data = tarfile.fileobj.tell()
        offset = self.offset_data
        if self.isreg() or self.type not in SUPPORTED_TYPES:
            # Skip the following data blocks.
            offset += self._block(self.size)
        tarfile.offset = offset

        # Patch the TarInfo object with saved global
        # header information.
        self._apply_pax_info(tarfile.pax_headers, tarfile.encoding,
                             tarfile.errors)

        return self

    def _proc_gnulong(self, tarfile):
        """Process the blocks that hold a GNU longname
        or longlink member.
        """
        buf = tarfile.fileobj.read(self._block(self.size))

        # Fetch the next header and process it.
        try:
            next = self.fromtarfile(tarfile)
        except HeaderError:
            raise SubsequentHeaderError("missing or bad subsequent header")

        # Patch the TarInfo object from the next header with
        # the longname information.
        next.offset = self.offset
        if self.type == GNUTYPE_LONGNAME:
            next.name = nts(buf, tarfile.encoding, tarfile.errors)
        elif self.type == GNUTYPE_LONGLINK:
            next.linkname = nts(buf, tarfile.encoding, tarfile.errors)

        return next

    def _proc_sparse(self, tarfile):
        """Process a GNU sparse header plus extra headers."""
        # We already collected some sparse structures in frombuf().
        structs, isextended, origsize = self._sparse_structs
        del self._sparse_structs

        # Collect sparse structures from extended header blocks.
        while isextended:
            buf = tarfile.fileobj.read(BLOCKSIZE)
            pos = 0
            for i in range(21):
                try:
                    offset = nti(buf[pos:pos + 12])
                    numbytes = nti(buf[pos + 12:pos + 24])
                except ValueError:
                    break
                if offset and numbytes:
                    structs.append((offset, numbytes))
                pos += 24
            isextended = bool(buf[504])
        self.sparse = structs

        self.offset_data = tarfile.fileobj.tell()
        tarfile.offset = self.offset_data + self._block(self.size)
        self.size = origsize
        return self

    def _proc_pax(self, tarfile):
        """Process an extended or global header as described in
        POSIX.1-2008.
        """
        # Read the header information.
        buf = tarfile.fileobj.read(self._block(self.size))

        # A pax header stores supplemental information for either
        # the following file (extended) or all following files
        # (global).
        if self.type == XGLTYPE:
            pax_headers = tarfile.pax_headers
        else:
            pax_headers = tarfile.pax_headers.copy()

        # Check if the pax header contains a hdrcharset field. This tells us
        # the encoding of the path, linkpath, uname and gname fields. Normally,
        # these fields are UTF-8 encoded but since POSIX.1-2008 tar
        # implementations are allowed to store them as raw binary strings if
        # the translation to UTF-8 fails.
        match = re.search(br"\d+ hdrcharset=([^\n]+)\n", buf)
        if match is not None:
            pax_headers["hdrcharset"] = match.group(1).decode("utf8")

        # For the time being, we don't care about anything other than "BINARY".
        # The only other value that is currently allowed by the standard is
        # "ISO-IR 10646 2000 UTF-8" in other words UTF-8.
        hdrcharset = pax_headers.get("hdrcharset")
        if hdrcharset == "BINARY":
            encoding = tarfile.encoding
        else:
            encoding = "utf8"

        # Parse pax header information. A record looks like that:
        # "%d %s=%s\n" % (length, keyword, value). length is the size
        # of the complete record including the length field itself and
        # the newline. keyword and value are both UTF-8 encoded strings.
        regex = re.compile(br"(\d+) ([^=]+)=")
        pos = 0
        while True:
            match = regex.match(buf, pos)
            if not match:
                break

            length, keyword = match.groups()
            length = int(length)
            value = buf[match.end(2) + 1:match.start(1) + length - 1]

            # Normally, we could just use "utf8" as the encoding and "strict"
            # as the error handler, but we better not take the risk. For
            # example, GNU tar <= 1.23 is known to store filenames it cannot
            # translate to UTF-8 as raw strings (unfortunately without a
            # hdrcharset=BINARY header).
            # We first try the strict standard encoding, and if that fails we
            # fall back on the user's encoding and error handler.
            keyword = self._decode_pax_field(keyword, "utf8", "utf8",
                                             tarfile.errors)
            if keyword in PAX_NAME_FIELDS:
                value = self._decode_pax_field(value, encoding,
                                               tarfile.encoding,
                                               tarfile.errors)
            else:
                value = self._decode_pax_field(value, "utf8", "utf8",
                                               tarfile.errors)

            pax_headers[keyword] = value
            pos += length

        # Fetch the next header.
        try:
            next = self.fromtarfile(tarfile)
        except HeaderError:
            raise SubsequentHeaderError("missing or bad subsequent header")

        # Process GNU sparse information.
        if "GNU.sparse.map" in pax_headers:
            # GNU extended sparse format version 0.1.
            self._proc_gnusparse_01(next, pax_headers)

        elif "GNU.sparse.size" in pax_headers:
            # GNU extended sparse format version 0.0.
            self._proc_gnusparse_00(next, pax_headers, buf)

        elif pax_headers.get("GNU.sparse.major") == "1" and \
                pax_headers.get("GNU.sparse.minor") == "0":
            # GNU extended sparse format version 1.0.
            self._proc_gnusparse_10(next, pax_headers, tarfile)

        if self.type in (XHDTYPE, SOLARIS_XHDTYPE):
            # Patch the TarInfo object with the extended header info.
            next._apply_pax_info(pax_headers, tarfile.encoding,
                                 tarfile.errors)
            next.offset = self.offset

            if "size" in pax_headers:
                # If the extended header replaces the size field,
                # we need to recalculate the offset where the next
                # header starts.
                offset = next.offset_data
                if next.isreg() or next.type not in SUPPORTED_TYPES:
                    offset += next._block(next.size)
                tarfile.offset = offset

        return next

    def _proc_gnusparse_00(self, next, pax_headers, buf):
        """Process a GNU tar extended sparse header, version 0.0."""
        offsets = []
        for match in re.finditer(br"\d+ GNU.sparse.offset=(\d+)\n", buf):
            offsets.append(int(match.group(1)))
        numbytes = []
        for match in re.finditer(br"\d+ GNU.sparse.numbytes=(\d+)\n", buf):
            numbytes.append(int(match.group(1)))
        next.sparse = list(zip(offsets, numbytes))

    def _proc_gnusparse_01(self, next, pax_headers):
        """Process a GNU tar extended sparse header, version 0.1."""
        sparse = [int(x) for x in pax_headers["GNU.sparse.map"].split(",")]
        next.sparse = list(zip(sparse[::2], sparse[1::2]))

    def _proc_gnusparse_10(self, next, pax_headers, tarfile):
        """Process a GNU tar extended sparse header, version 1.0."""
        fields = None
        sparse = []
        buf = tarfile.fileobj.read(BLOCKSIZE)
        fields, buf = buf.split(b"\n", 1)
        fields = int(fields)
        while len(sparse) < fields * 2:
            if b"\n" not in buf:
                buf += tarfile.fileobj.read(BLOCKSIZE)
            number, buf = buf.split(b"\n", 1)
            sparse.append(int(number))
        next.offset_data = tarfile.fileobj.tell()
        next.sparse = list(zip(sparse[::2], sparse[1::2]))

    def _apply_pax_info(self, pax_headers, encoding, errors):
        """Replace fields with supplemental information from a previous
        pax extended or global header.
        """
        for keyword, value in pax_headers.items():
            if keyword == "GNU.sparse.name":
                setattr(self, "path", value)
            elif keyword == "GNU.sparse.size":
                setattr(self, "size", int(value))
            elif keyword == "GNU.sparse.realsize":
                setattr(self, "size", int(value))
            elif keyword in PAX_FIELDS:
                if keyword in PAX_NUMBER_FIELDS:
                    try:
                        value = PAX_NUMBER_FIELDS[keyword](value)
                    except ValueError:
                        value = 0
                if keyword == "path":
                    value = value.rstrip("/")
                setattr(self, keyword, value)

        self.pax_headers = pax_headers.copy()

    def _decode_pax_field(self, value, encoding, fallback_encoding,
                          fallback_errors):
        """Decode a single field from a pax record."""
        try:
            return value.decode(encoding, "strict")
        except UnicodeDecodeError:
            return value.decode(fallback_encoding, fallback_errors)

    def _block(self, count):
        """Round up a byte count by BLOCKSIZE and return it,
        e.g. _block(834) => 1024.
        """
        blocks, remainder = divmod(count, BLOCKSIZE)
        if remainder:
            blocks += 1
        return blocks * BLOCKSIZE

    def isreg(self):
        return self.type in REGULAR_TYPES

    def isfile(self):
        return self.isreg()

    def isdir(self):
        return self.type == DIRTYPE

    def issym(self):
        return self.type == SYMTYPE

    def islnk(self):
        return self.type == LNKTYPE

    def ischr(self):
        return self.type == CHRTYPE

    def isblk(self):
        return self.type == BLKTYPE

    def isfifo(self):
        return self.type == FIFOTYPE

    def issparse(self):
        return self.sparse is not None

    def isdev(self):
        return self.type in (CHRTYPE, BLKTYPE, FIFOTYPE)
# class TarInfo
`mode'defaults to 'r'.If `fileobj' is given, it is used for reading or writing data. If itcan be determined, `mode' is overridden by `fileobj's mode.`fileobj' is not closed, when TarFile is closed."""if len(mode) > 1 or mode not in "raw":raise ValueError("mode must be 'r', 'a' or 'w'")self.mode = modeself._mode = {"r": "rb", "a": "r+b", "w": "wb"}[mode]if not fileobj:if self.mode == "a" and not os.path.exists(name):# Create nonexistent files in append mode.self.mode = "w"self._mode = "wb"fileobj = bltn_open(name, self._mode)self._extfileobj = Falseelse:if name is None and hasattr(fileobj, "name"):name = fileobj.nameif hasattr(fileobj, "mode"):self._mode = fileobj.modeself._extfileobj = Trueself.name = os.path.abspath(name) if name else Noneself.fileobj = fileobj# Init attributes.if format is not None:self.format = formatif tarinfo is not None:self.tarinfo = tarinfoif dereference is not None:self.dereference = dereferenceif ignore_zeros is not None:self.ignore_zeros = ignore_zerosif encoding is not None:self.encoding = encodingself.errors = errorsif pax_headers is not None and self.format == PAX_FORMAT:self.pax_headers = pax_headerselse:self.pax_headers = {}if debug is not None:self.debug = debugif errorlevel is not None:self.errorlevel = errorlevel# Init datastructures.self.closed = Falseself.members = [] # list of members as TarInfo objectsself._loaded = False # flag if all members have been readself.offset = self.fileobj.tell()# current position in the archive fileself.inodes = {} # dictionary caching the inodes of# archive members already addedtry:if self.mode == "r":self.firstmember = Noneself.firstmember = self.next()if self.mode == "a":# Move to the end of the archive,# before the first empty block.while True:self.fileobj.seek(self.offset)try:tarinfo = self.tarinfo.fromtarfile(self)self.members.append(tarinfo)except EOFHeaderError:self.fileobj.seek(self.offset)breakexcept HeaderError as e:raise ReadError(str(e))if self.mode in "aw":self._loaded = Trueif 
self.pax_headers:buf = self.tarinfo.create_pax_global_header(self.pax_headers.copy())self.fileobj.write(buf)self.offset += len(buf)except:if not self._extfileobj:self.fileobj.close()self.closed = Trueraise#--------------------------------------------------------------------------# Below are the classmethods which act as alternate constructors to the# TarFile class. The open() method is the only one that is needed for# public use; it is the "super"-constructor and is able to select an# adequate "sub"-constructor for a particular compression using the mapping# from OPEN_METH.## This concept allows one to subclass TarFile without losing the comfort of# the super-constructor. A sub-constructor is registered and made available# by adding it to the mapping in OPEN_METH.@classmethoddef open(cls, name=None, mode="r", fileobj=None, bufsize=RECORDSIZE, **kwargs):"""Open a tar archive for reading, writing or appending. Returnan appropriate TarFile class.mode:'r' or 'r:*' open for reading with transparent compression'r:' open for reading exclusively uncompressed'r:gz' open for reading with gzip compression'r:bz2' open for reading with bzip2 compression'a' or 'a:' open for appending, creating the file if necessary'w' or 'w:' open for writing without compression'w:gz' open for writing with gzip compression'w:bz2' open for writing with bzip2 compression'r|*' open a stream of tar blocks with transparent compression'r|' open an uncompressed stream of tar blocks for reading'r|gz' open a gzip compressed stream of tar blocks'r|bz2' open a bzip2 compressed stream of tar blocks'w|' open an uncompressed stream for writing'w|gz' open a gzip compressed stream for writing'w|bz2' open a bzip2 compressed stream for writing"""if not name and not fileobj:raise ValueError("nothing to open")if mode in ("r", "r:*"):# Find out which *open() is appropriate for opening the file.for comptype in cls.OPEN_METH:func = getattr(cls, cls.OPEN_METH[comptype])if fileobj is not None:saved_pos = 
fileobj.tell()try:return func(name, "r", fileobj, **kwargs)except (ReadError, CompressionError) as e:if fileobj is not None:fileobj.seek(saved_pos)continueraise ReadError("file could not be opened successfully")elif ":" in mode:filemode, comptype = mode.split(":", 1)filemode = filemode or "r"comptype = comptype or "tar"# Select the *open() function according to# given compression.if comptype in cls.OPEN_METH:func = getattr(cls, cls.OPEN_METH[comptype])else:raise CompressionError("unknown compression type %r" % comptype)return func(name, filemode, fileobj, **kwargs)elif "|" in mode:filemode, comptype = mode.split("|", 1)filemode = filemode or "r"comptype = comptype or "tar"if filemode not in "rw":raise ValueError("mode must be 'r' or 'w'")stream = _Stream(name, filemode, comptype, fileobj, bufsize)try:t = cls(name, filemode, stream, **kwargs)except:stream.close()raiset._extfileobj = Falsereturn telif mode in "aw":return cls.taropen(name, mode, fileobj, **kwargs)raise ValueError("undiscernible mode")@classmethoddef taropen(cls, name, mode="r", fileobj=None, **kwargs):"""Open uncompressed tar archive name for reading or writing."""if len(mode) > 1 or mode not in "raw":raise ValueError("mode must be 'r', 'a' or 'w'")return cls(name, mode, fileobj, **kwargs)@classmethoddef gzopen(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):"""Open gzip compressed tar archive name for reading or writing.Appending is not allowed."""if len(mode) > 1 or mode not in "rw":raise ValueError("mode must be 'r' or 'w'")try:import gzipgzip.GzipFileexcept (ImportError, AttributeError):raise CompressionError("gzip module is not available")extfileobj = fileobj is not Nonetry:fileobj = gzip.GzipFile(name, mode + "b", compresslevel, fileobj)t = cls.taropen(name, mode, fileobj, **kwargs)except IOError:if not extfileobj and fileobj is not None:fileobj.close()if fileobj is None:raiseraise ReadError("not a gzip file")except:if not extfileobj and fileobj is not 
None:fileobj.close()raiset._extfileobj = extfileobjreturn t@classmethoddef bz2open(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):"""Open bzip2 compressed tar archive name for reading or writing.Appending is not allowed."""if len(mode) > 1 or mode not in "rw":raise ValueError("mode must be 'r' or 'w'.")try:import bz2except ImportError:raise CompressionError("bz2 module is not available")if fileobj is not None:fileobj = _BZ2Proxy(fileobj, mode)else:fileobj = bz2.BZ2File(name, mode, compresslevel=compresslevel)try:t = cls.taropen(name, mode, fileobj, **kwargs)except (IOError, EOFError):fileobj.close()raise ReadError("not a bzip2 file")t._extfileobj = Falsereturn t# All *open() methods are registered here.OPEN_METH = {"tar": "taropen", # uncompressed tar"gz": "gzopen", # gzip compressed tar"bz2": "bz2open" # bzip2 compressed tar}#--------------------------------------------------------------------------# The public methods which TarFile provides:def close(self):"""Close the TarFile. In write-mode, two finishing zero blocks areappended to the archive."""if self.closed:returnif self.mode in "aw":self.fileobj.write(NUL * (BLOCKSIZE * 2))self.offset += (BLOCKSIZE * 2)# fill up the end with zero-blocks# (like option -b20 for tar does)blocks, remainder = divmod(self.offset, RECORDSIZE)if remainder > 0:self.fileobj.write(NUL * (RECORDSIZE - remainder))if not self._extfileobj:self.fileobj.close()self.closed = Truedef getmember(self, name):"""Return a TarInfo object for member `name'. If `name' can not befound in the archive, KeyError is raised. If a member occurs morethan once in the archive, its last occurrence is assumed to be themost up-to-date version."""tarinfo = self._getmember(name)if tarinfo is None:raise KeyError("filename %r not found" % name)return tarinfodef getmembers(self):"""Return the members of the archive as a list of TarInfo objects. 
Thelist has the same order as the members in the archive."""self._check()if not self._loaded: # if we want to obtain a list ofself._load() # all members, we first have to# scan the whole archive.return self.membersdef getnames(self):"""Return the members of the archive as a list of their names. It hasthe same order as the list returned by getmembers()."""return [tarinfo.name for tarinfo in self.getmembers()]def gettarinfo(self, name=None, arcname=None, fileobj=None):"""Create a TarInfo object for either the file `name' or the fileobject `fileobj' (using os.fstat on its file descriptor). You canmodify some of the TarInfo's attributes before you add it usingaddfile(). If given, `arcname' specifies an alternative name for thefile in the archive."""self._check("aw")# When fileobj is given, replace name by# fileobj's real name.if fileobj is not None:name = fileobj.name# Building the name of the member in the archive.# Backward slashes are converted to forward slashes,# Absolute paths are turned to relative paths.if arcname is None:arcname = namedrv, arcname = os.path.splitdrive(arcname)arcname = arcname.replace(os.sep, "/")arcname = arcname.lstrip("/")# Now, fill the TarInfo object with# information specific for the file.tarinfo = self.tarinfo()tarinfo.tarfile = self# Use os.stat or os.lstat, depending on platform# and if symlinks shall be resolved.if fileobj is None:if hasattr(os, "lstat") and not self.dereference:statres = os.lstat(name)else:statres = os.stat(name)else:statres = os.fstat(fileobj.fileno())linkname = ""stmd = statres.st_modeif stat.S_ISREG(stmd):inode = (statres.st_ino, statres.st_dev)if not self.dereference and statres.st_nlink > 1 and \inode in self.inodes and arcname != self.inodes[inode]:# Is it a hardlink to an already# archived file?type = LNKTYPElinkname = self.inodes[inode]else:# The inode is added only if its valid.# For win32 it is always 0.type = REGTYPEif inode[0]:self.inodes[inode] = arcnameelif stat.S_ISDIR(stmd):type = DIRTYPEelif 
stat.S_ISFIFO(stmd):type = FIFOTYPEelif stat.S_ISLNK(stmd):type = SYMTYPElinkname = os.readlink(name)elif stat.S_ISCHR(stmd):type = CHRTYPEelif stat.S_ISBLK(stmd):type = BLKTYPEelse:return None# Fill the TarInfo object with all# information we can get.tarinfo.name = arcnametarinfo.mode = stmdtarinfo.uid = statres.st_uidtarinfo.gid = statres.st_gidif type == REGTYPE:tarinfo.size = statres.st_sizeelse:tarinfo.size = 0tarinfo.mtime = statres.st_mtimetarinfo.type = typetarinfo.linkname = linknameif pwd:try:tarinfo.uname = pwd.getpwuid(tarinfo.uid)[0]except KeyError:passif grp:try:tarinfo.gname = grp.getgrgid(tarinfo.gid)[0]except KeyError:passif type in (CHRTYPE, BLKTYPE):if hasattr(os, "major") and hasattr(os, "minor"):tarinfo.devmajor = os.major(statres.st_rdev)tarinfo.devminor = os.minor(statres.st_rdev)return tarinfodef list(self, verbose=True):"""Print a table of contents to sys.stdout. If `verbose' is False, onlythe names of the members are printed. If it is True, an `ls -l'-likeoutput is produced."""self._check()for tarinfo in self:if verbose:print(filemode(tarinfo.mode), end=' ')print("%s/%s" % (tarinfo.uname or tarinfo.uid,tarinfo.gname or tarinfo.gid), end=' ')if tarinfo.ischr() or tarinfo.isblk():print("%10s" % ("%d,%d" \% (tarinfo.devmajor, tarinfo.devminor)), end=' ')else:print("%10d" % tarinfo.size, end=' ')print("%d-%02d-%02d %02d:%02d:%02d" \% time.localtime(tarinfo.mtime)[:6], end=' ')print(tarinfo.name + ("/" if tarinfo.isdir() else ""), end=' ')if verbose:if tarinfo.issym():print("->", tarinfo.linkname, end=' ')if tarinfo.islnk():print("link to", tarinfo.linkname, end=' ')print()def add(self, name, arcname=None, recursive=True, exclude=None, filter=None):"""Add the file `name' to the archive. `name' may be any type of file(directory, fifo, symbolic link, etc.). If given, `arcname'specifies an alternative name for the file in the archive.Directories are added recursively by default. This can be avoided bysetting `recursive' to False. 
`exclude' is a function that shouldreturn True for each filename to be excluded. `filter' is a functionthat expects a TarInfo object argument and returns the changedTarInfo object, if it returns None the TarInfo object will beexcluded from the archive."""self._check("aw")if arcname is None:arcname = name# Exclude pathnames.if exclude is not None:import warningswarnings.warn("use the filter argument instead",DeprecationWarning, 2)if exclude(name):self._dbg(2, "tarfile: Excluded %r" % name)return# Skip if somebody tries to archive the archive...if self.name is not None and os.path.abspath(name) == self.name:self._dbg(2, "tarfile: Skipped %r" % name)returnself._dbg(1, name)# Create a TarInfo object from the file.tarinfo = self.gettarinfo(name, arcname)if tarinfo is None:self._dbg(1, "tarfile: Unsupported type %r" % name)return# Change or exclude the TarInfo object.if filter is not None:tarinfo = filter(tarinfo)if tarinfo is None:self._dbg(2, "tarfile: Excluded %r" % name)return# Append the tar header and data to the archive.if tarinfo.isreg():f = bltn_open(name, "rb")self.addfile(tarinfo, f)f.close()elif tarinfo.isdir():self.addfile(tarinfo)if recursive:for f in os.listdir(name):self.add(os.path.join(name, f), os.path.join(arcname, f),recursive, exclude, filter=filter)else:self.addfile(tarinfo)def addfile(self, tarinfo, fileobj=None):"""Add the TarInfo object `tarinfo' to the archive. 
If `fileobj' isgiven, tarinfo.size bytes are read from it and added to the archive.You can create TarInfo objects using gettarinfo().On Windows platforms, `fileobj' should always be opened with mode'rb' to avoid irritation about the file size."""self._check("aw")tarinfo = copy.copy(tarinfo)buf = tarinfo.tobuf(self.format, self.encoding, self.errors)self.fileobj.write(buf)self.offset += len(buf)# If there's data to follow, append it.if fileobj is not None:copyfileobj(fileobj, self.fileobj, tarinfo.size)blocks, remainder = divmod(tarinfo.size, BLOCKSIZE)if remainder > 0:self.fileobj.write(NUL * (BLOCKSIZE - remainder))blocks += 1self.offset += blocks * BLOCKSIZEself.members.append(tarinfo)def extractall(self, path=".", members=None):"""Extract all members from the archive to the current workingdirectory and set owner, modification time and permissions ondirectories afterwards. `path' specifies a different directoryto extract to. `members' is optional and must be a subset of thelist returned by getmembers()."""directories = []if members is None:members = selffor tarinfo in members:if tarinfo.isdir():# Extract directories with a safe mode.directories.append(tarinfo)tarinfo = copy.copy(tarinfo)tarinfo.mode = 0o700# Do not set_attrs directories, as we will do that further downself.extract(tarinfo, path, set_attrs=not tarinfo.isdir())# Reverse sort directories.directories.sort(key=lambda a: a.name)directories.reverse()# Set correct owner, mtime and filemode on directories.for tarinfo in directories:dirpath = os.path.join(path, tarinfo.name)try:self.chown(tarinfo, dirpath)self.utime(tarinfo, dirpath)self.chmod(tarinfo, dirpath)except ExtractError as e:if self.errorlevel > 1:raiseelse:self._dbg(1, "tarfile: %s" % e)def extract(self, member, path="", set_attrs=True):"""Extract a member from the archive to the current working directory,using its full name. Its file information is extracted as accuratelyas possible. `member' may be a filename or a TarInfo object. 
You canspecify a different directory using `path'. File attributes (owner,mtime, mode) are set unless `set_attrs' is False."""self._check("r")if isinstance(member, str):tarinfo = self.getmember(member)else:tarinfo = member# Prepare the link target for makelink().if tarinfo.islnk():tarinfo._link_target = os.path.join(path, tarinfo.linkname)try:self._extract_member(tarinfo, os.path.join(path, tarinfo.name),set_attrs=set_attrs)except EnvironmentError as e:if self.errorlevel > 0:raiseelse:if e.filename is None:self._dbg(1, "tarfile: %s" % e.strerror)else:self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename))except ExtractError as e:if self.errorlevel > 1:raiseelse:self._dbg(1, "tarfile: %s" % e)def extractfile(self, member):"""Extract a member from the archive as a file object. `member' may bea filename or a TarInfo object. If `member' is a regular file, afile-like object is returned. If `member' is a link, a file-likeobject is constructed from the link's target. If `member' is none ofthe above, None is returned.The file-like object is read-only and provides the followingmethods: read(), readline(), readlines(), seek() and tell()"""self._check("r")if isinstance(member, str):tarinfo = self.getmember(member)else:tarinfo = memberif tarinfo.isreg():return self.fileobject(self, tarinfo)elif tarinfo.type not in SUPPORTED_TYPES:# If a member's type is unknown, it is treated as a# regular file.return self.fileobject(self, tarinfo)elif tarinfo.islnk() or tarinfo.issym():if isinstance(self.fileobj, _Stream):# A small but ugly workaround for the case that someone tries# to extract a (sym)link as a file-object from a non-seekable# stream of tar blocks.raise StreamError("cannot extract (sym)link as file object")else:# A (sym)link's file object is its target's file object.return self.extractfile(self._find_link_target(tarinfo))else:# If there's no data associated with the member (directory, chrdev,# blkdev, etc.), return None instead of a file object.return Nonedef 
_extract_member(self, tarinfo, targetpath, set_attrs=True):"""Extract the TarInfo object tarinfo to a physicalfile called targetpath."""# Fetch the TarInfo object for the given name# and build the destination pathname, replacing# forward slashes to platform specific separators.targetpath = targetpath.rstrip("/")targetpath = targetpath.replace("/", os.sep)# Create all upper directories.upperdirs = os.path.dirname(targetpath)if upperdirs and not os.path.exists(upperdirs):# Create directories that are not part of the archive with# default permissions.os.makedirs(upperdirs)if tarinfo.islnk() or tarinfo.issym():self._dbg(1, "%s -> %s" % (tarinfo.name, tarinfo.linkname))else:self._dbg(1, tarinfo.name)if tarinfo.isreg():self.makefile(tarinfo, targetpath)elif tarinfo.isdir():self.makedir(tarinfo, targetpath)elif tarinfo.isfifo():self.makefifo(tarinfo, targetpath)elif tarinfo.ischr() or tarinfo.isblk():self.makedev(tarinfo, targetpath)elif tarinfo.islnk() or tarinfo.issym():self.makelink(tarinfo, targetpath)elif tarinfo.type not in SUPPORTED_TYPES:self.makeunknown(tarinfo, targetpath)else:self.makefile(tarinfo, targetpath)if set_attrs:self.chown(tarinfo, targetpath)if not tarinfo.issym():self.chmod(tarinfo, targetpath)self.utime(tarinfo, targetpath)#--------------------------------------------------------------------------# Below are the different file methods. They are called via# _extract_member() when extract() is called. 
They can be replaced in a# subclass to implement other functionality.def makedir(self, tarinfo, targetpath):"""Make a directory called targetpath."""try:# Use a safe mode for the directory, the real mode is set# later in _extract_member().os.mkdir(targetpath, 0o700)except EnvironmentError as e:if e.errno != errno.EEXIST:raisedef makefile(self, tarinfo, targetpath):"""Make a file called targetpath."""source = self.fileobjsource.seek(tarinfo.offset_data)target = bltn_open(targetpath, "wb")if tarinfo.sparse is not None:for offset, size in tarinfo.sparse:target.seek(offset)copyfileobj(source, target, size)else:copyfileobj(source, target, tarinfo.size)target.seek(tarinfo.size)target.truncate()target.close()def makeunknown(self, tarinfo, targetpath):"""Make a file from a TarInfo object with an unknown typeat targetpath."""self.makefile(tarinfo, targetpath)self._dbg(1, "tarfile: Unknown file type %r, " \"extracted as regular file." % tarinfo.type)def makefifo(self, tarinfo, targetpath):"""Make a fifo called targetpath."""if hasattr(os, "mkfifo"):os.mkfifo(targetpath)else:raise ExtractError("fifo not supported by system")def makedev(self, tarinfo, targetpath):"""Make a character or block device called targetpath."""if not hasattr(os, "mknod") or not hasattr(os, "makedev"):raise ExtractError("special devices not supported by system")mode = tarinfo.modeif tarinfo.isblk():mode |= stat.S_IFBLKelse:mode |= stat.S_IFCHRos.mknod(targetpath, mode,os.makedev(tarinfo.devmajor, tarinfo.devminor))def makelink(self, tarinfo, targetpath):"""Make a (symbolic) link called targetpath. 
If it cannot be created(platform limitation), we try to make a copy of the referenced fileinstead of a link."""try:# For systems that support symbolic and hard links.if tarinfo.issym():os.symlink(tarinfo.linkname, targetpath)else:# See extract().if os.path.exists(tarinfo._link_target):os.link(tarinfo._link_target, targetpath)else:self._extract_member(self._find_link_target(tarinfo),targetpath)except symlink_exception:if tarinfo.issym():linkpath = os.path.join(os.path.dirname(tarinfo.name),tarinfo.linkname)else:linkpath = tarinfo.linknameelse:try:self._extract_member(self._find_link_target(tarinfo),targetpath)except KeyError:raise ExtractError("unable to resolve link inside archive")def chown(self, tarinfo, targetpath):"""Set owner of targetpath according to tarinfo."""if pwd and hasattr(os, "geteuid") and os.geteuid() == 0:# We have to be root to do so.try:g = grp.getgrnam(tarinfo.gname)[2]except KeyError:g = tarinfo.gidtry:u = pwd.getpwnam(tarinfo.uname)[2]except KeyError:u = tarinfo.uidtry:if tarinfo.issym() and hasattr(os, "lchown"):os.lchown(targetpath, u, g)else:if sys.platform != "os2emx":os.chown(targetpath, u, g)except EnvironmentError as e:raise ExtractError("could not change owner")def chmod(self, tarinfo, targetpath):"""Set file permissions of targetpath according to tarinfo."""if hasattr(os, 'chmod'):try:os.chmod(targetpath, tarinfo.mode)except EnvironmentError as e:raise ExtractError("could not change mode")def utime(self, tarinfo, targetpath):"""Set modification time of targetpath according to tarinfo."""if not hasattr(os, 'utime'):returntry:os.utime(targetpath, (tarinfo.mtime, tarinfo.mtime))except EnvironmentError as e:raise ExtractError("could not change modification time")#--------------------------------------------------------------------------def next(self):"""Return the next member of the archive as a TarInfo object, whenTarFile is opened for reading. 
Return None if there is no moreavailable."""self._check("ra")if self.firstmember is not None:m = self.firstmemberself.firstmember = Nonereturn m# Read the next block.self.fileobj.seek(self.offset)tarinfo = Nonewhile True:try:tarinfo = self.tarinfo.fromtarfile(self)except EOFHeaderError as e:if self.ignore_zeros:self._dbg(2, "0x%X: %s" % (self.offset, e))self.offset += BLOCKSIZEcontinueexcept InvalidHeaderError as e:if self.ignore_zeros:self._dbg(2, "0x%X: %s" % (self.offset, e))self.offset += BLOCKSIZEcontinueelif self.offset == 0:raise ReadError(str(e))except EmptyHeaderError:if self.offset == 0:raise ReadError("empty file")except TruncatedHeaderError as e:if self.offset == 0:raise ReadError(str(e))except SubsequentHeaderError as e:raise ReadError(str(e))breakif tarinfo is not None:self.members.append(tarinfo)else:self._loaded = Truereturn tarinfo#--------------------------------------------------------------------------# Little helper methods:def _getmember(self, name, tarinfo=None, normalize=False):"""Find an archive member by name from bottom to top.If tarinfo is given, it is used as the starting point."""# Ensure that all members have been loaded.members = self.getmembers()# Limit the member search list up to tarinfo.if tarinfo is not None:members = members[:members.index(tarinfo)]if normalize:name = os.path.normpath(name)for member in reversed(members):if normalize:member_name = os.path.normpath(member.name)else:member_name = member.nameif name == member_name:return memberdef _load(self):"""Read through the entire archive file and look for readablemembers."""while True:tarinfo = self.next()if tarinfo is None:breakself._loaded = Truedef _check(self, mode=None):"""Check if TarFile is still open, and if the operation's modecorresponds to TarFile's mode."""if self.closed:raise IOError("%s is closed" % self.__class__.__name__)if mode is not None and self.mode not in mode:raise IOError("bad operation for mode %r" % self.mode)def _find_link_target(self, 
tarinfo):"""Find the target member of a symlink or hardlink member in thearchive."""if tarinfo.issym():# Always search the entire archive.linkname = os.path.dirname(tarinfo.name) + "/" + tarinfo.linknamelimit = Noneelse:# Search the archive before the link, because a hard link is# just a reference to an already archived file.linkname = tarinfo.linknamelimit = tarinfomember = self._getmember(linkname, tarinfo=limit, normalize=True)if member is None:raise KeyError("linkname %r not found" % linkname)return memberdef __iter__(self):"""Provide an iterator object."""if self._loaded:return iter(self.members)else:return TarIter(self)def _dbg(self, level, msg):"""Write debugging output to sys.stderr."""if level <= self.debug:print(msg, file=sys.stderr)def __enter__(self):self._check()return selfdef __exit__(self, type, value, traceback):if type is None:self.close()else:# An exception occurred. We must not call close() because# it would try to write end-of-archive blocks and padding.if not self._extfileobj:self.fileobj.close()self.closed = True# class TarFileclass TarIter(object):"""Iterator Class.for tarinfo in TarFile(...):suite..."""def __init__(self, tarfile):"""Construct a TarIter object."""self.tarfile = tarfileself.index = 0def __iter__(self):"""Return iterator object."""return selfdef __next__(self):"""Return the next item using TarFile's next() method.When all members have been read, set TarFile as _loaded."""# Fix for SF #1100429: Under rare circumstances it can# happen that getmembers() is called during iteration,# which will cause TarIter to stop prematurely.if not self.tarfile._loaded:tarinfo = self.tarfile.next()if not tarinfo:self.tarfile._loaded = Trueraise StopIterationelse:try:tarinfo = self.tarfile.members[self.index]except IndexError:raise StopIterationself.index += 1return tarinfonext = __next__ # for Python 2.x#--------------------# exported functions#--------------------def is_tarfile(name):"""Return True if name points to a tar archive that weare able 
to handle, else return False."""try:t = open(name)t.close()return Trueexcept TarError:return Falsebltn_open = openopen = TarFile.open
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012 The Python Software Foundation.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""Access to Python's configuration information."""

import codecs
import os
import re
import sys
from os.path import pardir, realpath
try:
    import configparser
except ImportError:
    import ConfigParser as configparser


__all__ = [
    'get_config_h_filename',
    'get_config_var',
    'get_config_vars',
    'get_makefile_filename',
    'get_path',
    'get_path_names',
    'get_paths',
    'get_platform',
    'get_python_version',
    'get_scheme_names',
    'parse_config_h',
]


def _safe_realpath(path):
    """realpath() that falls back to the unresolved path on OSError."""
    try:
        return realpath(path)
    except OSError:
        return path

if sys.executable:
    _PROJECT_BASE = os.path.dirname(_safe_realpath(sys.executable))
else:
    # sys.executable can be empty if argv[0] has been changed and Python is
    # unable to retrieve the real program name
    _PROJECT_BASE = _safe_realpath(os.getcwd())

if os.name == "nt" and "pcbuild" in _PROJECT_BASE[-8:].lower():
    _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir))
# PC/VS7.1
if os.name == "nt" and "\\pc\\v" in _PROJECT_BASE[-10:].lower():
    _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir))
# PC/AMD64
if os.name == "nt" and "\\pcbuild\\amd64" in _PROJECT_BASE[-14:].lower():
    _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir))


def is_python_build():
    """Return True when running from an (uninstalled) CPython source tree."""
    for fn in ("Setup.dist", "Setup.local"):
        if os.path.isfile(os.path.join(_PROJECT_BASE, "Modules", fn)):
            return True
    return False

_PYTHON_BUILD = is_python_build()

_cfg_read = False

def _ensure_cfg_read():
    """Lazily load sysconfig.cfg into _SCHEMES (once per process)."""
    global _cfg_read
    if not _cfg_read:
        from ..resources import finder
        backport_package = __name__.rsplit('.', 1)[0]
        _finder = finder(backport_package)
        _cfgfile = _finder.find('sysconfig.cfg')
        assert _cfgfile, 'sysconfig.cfg exists'
        with _cfgfile.as_stream() as s:
            _SCHEMES.readfp(s)
        if _PYTHON_BUILD:
            # In a source build, headers live in the source tree, not the
            # install prefix.
            for scheme in ('posix_prefix', 'posix_home'):
                _SCHEMES.set(scheme, 'include', '{srcdir}/Include')
                _SCHEMES.set(scheme, 'platinclude', '{projectbase}/.')

        _cfg_read = True


_SCHEMES = configparser.RawConfigParser()
_VAR_REPL = re.compile(r'\{([^{]*?)\}')

def _expand_globals(config):
    """Copy [globals] options into every other section, then expand
    {var} references that are local to each section."""
    _ensure_cfg_read()
    if config.has_section('globals'):
        globals = config.items('globals')
    else:
        globals = tuple()

    sections = config.sections()
    for section in sections:
        if section == 'globals':
            continue
        for option, value in globals:
            if config.has_option(section, option):
                continue
            config.set(section, option, value)
    config.remove_section('globals')

    # now expanding local variables defined in the cfg file
    #
    for section in config.sections():
        variables = dict(config.items(section))

        def _replacer(matchobj):
            name = matchobj.group(1)
            if name in variables:
                return variables[name]
            return matchobj.group(0)

        for option, value in config.items(section):
            config.set(section, option, _VAR_REPL.sub(_replacer, value))

#_expand_globals(_SCHEMES)

# FIXME don't rely on sys.version here, its format is an implementation detail
# of CPython, use sys.version_info or sys.hexversion
# NOTE(review): sys.version[:3] yields "3.1" on Python 3.10+ — known
# limitation of this vintage backport.
_PY_VERSION = sys.version.split()[0]
_PY_VERSION_SHORT = sys.version[:3]
_PY_VERSION_SHORT_NO_DOT = _PY_VERSION[0] + _PY_VERSION[2]
_PREFIX = os.path.normpath(sys.prefix)
_EXEC_PREFIX = os.path.normpath(sys.exec_prefix)
_CONFIG_VARS = None
_USER_BASE = None


def _subst_vars(path, local_vars):
    """In the string `path`, replace tokens like {some.thing} with the
    corresponding value from the map `local_vars`.

    If there is no corresponding value, leave the token unchanged.
    """
    def _replacer(matchobj):
        name = matchobj.group(1)
        if name in local_vars:
            return local_vars[name]
        elif name in os.environ:
            return os.environ[name]
        return matchobj.group(0)
    return _VAR_REPL.sub(_replacer, path)


def _extend_dict(target_dict, other_dict):
    """Merge other_dict into target_dict without overwriting existing keys."""
    target_keys = target_dict.keys()
    for key, value in other_dict.items():
        if key in target_keys:
            continue
        target_dict[key] = value


def _expand_vars(scheme, vars):
    """Return the scheme's paths with all {var} references substituted."""
    res = {}
    if vars is None:
        vars = {}
    _extend_dict(vars, get_config_vars())

    for key, value in _SCHEMES.items(scheme):
        if os.name in ('posix', 'nt'):
            value = os.path.expanduser(value)
        res[key] = os.path.normpath(_subst_vars(value, vars))
    return res


def format_value(value, vars):
    """Expand {name} tokens in `value` from the `vars` mapping."""
    def _replacer(matchobj):
        name = matchobj.group(1)
        if name in vars:
            return vars[name]
        return matchobj.group(0)
    return _VAR_REPL.sub(_replacer, value)


def _get_default_scheme():
    if os.name == 'posix':
        # the default scheme for posix is posix_prefix
        return 'posix_prefix'
    return os.name


def _getuserbase():
    """Compute the per-user base directory (PEP 370 style)."""
    env_base = os.environ.get("PYTHONUSERBASE", None)

    def joinuser(*args):
        return os.path.expanduser(os.path.join(*args))

    # what about 'os2emx', 'riscos' ?
    if os.name == "nt":
        base = os.environ.get("APPDATA") or "~"
        if env_base:
            return env_base
        else:
            return joinuser(base, "Python")

    if sys.platform == "darwin":
        framework = get_config_var("PYTHONFRAMEWORK")
        if framework:
            if env_base:
                return env_base
            else:
                return joinuser("~", "Library", framework, "%d.%d" %
                                sys.version_info[:2])

    if env_base:
        return env_base
    else:
        return joinuser("~", ".local")


def _parse_makefile(filename, vars=None):
    """Parse a Makefile-style file.

    A dictionary containing name/value pairs is returned. If an
    optional dictionary is passed in as the second argument, it is
    used instead of a new dictionary.
    """
    # Regexes needed for parsing Makefile (and similar syntaxes,
    # like old-style Setup files).
    _variable_rx = re.compile("([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)")
    _findvar1_rx = re.compile(r"\$\(([A-Za-z][A-Za-z0-9_]*)\)")
    _findvar2_rx = re.compile(r"\${([A-Za-z][A-Za-z0-9_]*)}")

    if vars is None:
        vars = {}
    done = {}
    notdone = {}

    with codecs.open(filename, encoding='utf-8',
                     errors="surrogateescape") as f:
        lines = f.readlines()

    for line in lines:
        if line.startswith('#') or line.strip() == '':
            continue
        m = _variable_rx.match(line)
        if m:
            n, v = m.group(1, 2)
            v = v.strip()
            # `$$' is a literal `$' in make
            tmpv = v.replace('$$', '')

            if "$" in tmpv:
                notdone[n] = v
            else:
                try:
                    v = int(v)
                except ValueError:
                    # insert literal `$'
                    done[n] = v.replace('$$', '$')
                else:
                    done[n] = v

    # do variable interpolation here
    variables = list(notdone.keys())

    # Variables with a 'PY_' prefix in the makefile. These need to
    # be made available without that prefix through sysconfig.
    # Special care is needed to ensure that variable expansion works, even
    # if the expansion uses the name without a prefix.
    renamed_variables = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS')

    while len(variables) > 0:
        for name in tuple(variables):
            value = notdone[name]
            m = _findvar1_rx.search(value) or _findvar2_rx.search(value)
            if m is not None:
                n = m.group(1)
                found = True
                if n in done:
                    item = str(done[n])
                elif n in notdone:
                    # get it on a subsequent round
                    found = False
                elif n in os.environ:
                    # do it like make: fall back to environment
                    item = os.environ[n]
                elif n in renamed_variables:
                    if (name.startswith('PY_') and
                        name[3:] in renamed_variables):
                        item = ""
                    elif 'PY_' + n in notdone:
                        found = False
                    else:
                        item = str(done['PY_' + n])
                else:
                    done[n] = item = ""

                if found:
                    after = value[m.end():]
                    value = value[:m.start()] + item + after
                    if "$" in after:
                        notdone[name] = value
                    else:
                        try:
                            value = int(value)
                        except ValueError:
                            done[name] = value.strip()
                        else:
                            done[name] = value
                        variables.remove(name)

                        if (name.startswith('PY_') and
                            name[3:] in renamed_variables):
                            name = name[3:]
                            if name not in done:
                                done[name] = value
            else:
                # bogus variable reference (e.g. "prefix=$/opt/python");
                # just drop it since we can't deal
                done[name] = value
                variables.remove(name)

    # strip spurious spaces
    for k, v in done.items():
        if isinstance(v, str):
            done[k] = v.strip()

    # save the results in the global dictionary
    vars.update(done)
    return vars


def get_makefile_filename():
    """Return the path of the Makefile."""
    if _PYTHON_BUILD:
        return os.path.join(_PROJECT_BASE, "Makefile")
    if hasattr(sys, 'abiflags'):
        config_dir_name = 'config-%s%s' % (_PY_VERSION_SHORT, sys.abiflags)
    else:
        config_dir_name = 'config'
    return os.path.join(get_path('stdlib'), config_dir_name, 'Makefile')


def _init_posix(vars):
    """Initialize the module as appropriate for POSIX systems."""
    # load the installed Makefile:
    makefile = get_makefile_filename()
    try:
        _parse_makefile(makefile, vars)
    except IOError as e:
        msg = "invalid Python installation: unable to open %s" % makefile
        if hasattr(e, "strerror"):
            msg = msg + " (%s)" % e.strerror
        raise IOError(msg)
    # load the installed pyconfig.h:
    config_h = get_config_h_filename()
    try:
        with open(config_h) as f:
            parse_config_h(f, vars)
    except IOError as e:
        msg = "invalid Python installation: unable to open %s" % config_h
        if hasattr(e, "strerror"):
            msg = msg + " (%s)" % e.strerror
        raise IOError(msg)
    # On AIX, there are wrong paths to the linker scripts in the Makefile
    # -- these paths are relative to the Python source, but when installed
    # the scripts are in another directory.
    if _PYTHON_BUILD:
        vars['LDSHARED'] = vars['BLDSHARED']


def _init_non_posix(vars):
    """Initialize the module as appropriate for NT"""
    # set basic install directories
    vars['LIBDEST'] = get_path('stdlib')
    vars['BINLIBDEST'] = get_path('platstdlib')
    vars['INCLUDEPY'] = get_path('include')
    vars['SO'] = '.pyd'
    vars['EXE'] = '.exe'
    vars['VERSION'] = _PY_VERSION_SHORT_NO_DOT
    vars['BINDIR'] = os.path.dirname(_safe_realpath(sys.executable))

#
# public APIs
#


def parse_config_h(fp, vars=None):
    """Parse a config.h-style file.

    A dictionary containing name/value pairs is returned. If an
    optional dictionary is passed in as the second argument, it is
    used instead of a new dictionary.
    """
    if vars is None:
        vars = {}
    define_rx = re.compile("#define ([A-Z][A-Za-z0-9_]+) (.*)\n")
    undef_rx = re.compile("/[*] #undef ([A-Z][A-Za-z0-9_]+) [*]/\n")

    while True:
        line = fp.readline()
        if not line:
            break
        m = define_rx.match(line)
        if m:
            n, v = m.group(1, 2)
            try:
                v = int(v)
            except ValueError:
                pass
            vars[n] = v
        else:
            m = undef_rx.match(line)
            if m:
                vars[m.group(1)] = 0
    return vars


def get_config_h_filename():
    """Return the path of pyconfig.h."""
    if _PYTHON_BUILD:
        if os.name == "nt":
            inc_dir = os.path.join(_PROJECT_BASE, "PC")
        else:
            inc_dir = _PROJECT_BASE
    else:
        inc_dir = get_path('platinclude')
    return os.path.join(inc_dir, 'pyconfig.h')


def get_scheme_names():
    """Return a tuple containing the schemes names."""
    return tuple(sorted(_SCHEMES.sections()))


def get_path_names():
    """Return a tuple containing the paths names."""
    # xxx see if we want a static list
    return _SCHEMES.options('posix_prefix')


def get_paths(scheme=_get_default_scheme(), vars=None, expand=True):
    """Return a mapping containing an install scheme.

    ``scheme`` is the install scheme name. If not provided, it will
    return the default scheme for the current platform.
    """
    # NOTE(review): the default for `scheme` is evaluated once at import
    # time — part of the public interface, kept as-is.
    _ensure_cfg_read()
    if expand:
        return _expand_vars(scheme, vars)
    else:
        return dict(_SCHEMES.items(scheme))


def get_path(name, scheme=_get_default_scheme(), vars=None, expand=True):
    """Return a path corresponding to the scheme.

    ``scheme`` is the install scheme name.
    """
    return get_paths(scheme, vars, expand)[name]


def get_config_vars(*args):
    """With no arguments, return a dictionary of all configuration
    variables relevant for the current platform.

    On Unix, this means every variable defined in Python's installed Makefile;
    On Windows and Mac OS it's a much smaller set.

    With arguments, return a list of values that result from looking up
    each argument in the configuration variable dictionary.
    """
    global _CONFIG_VARS
    if _CONFIG_VARS is None:
        _CONFIG_VARS = {}
        # Normalized versions of prefix and exec_prefix are handy to have;
        # in fact, these are the standard versions used most places in the
        # distutils2 module.
        _CONFIG_VARS['prefix'] = _PREFIX
        _CONFIG_VARS['exec_prefix'] = _EXEC_PREFIX
        _CONFIG_VARS['py_version'] = _PY_VERSION
        _CONFIG_VARS['py_version_short'] = _PY_VERSION_SHORT
        _CONFIG_VARS['py_version_nodot'] = _PY_VERSION[0] + _PY_VERSION[2]
        _CONFIG_VARS['base'] = _PREFIX
        _CONFIG_VARS['platbase'] = _EXEC_PREFIX
        _CONFIG_VARS['projectbase'] = _PROJECT_BASE
        try:
            _CONFIG_VARS['abiflags'] = sys.abiflags
        except AttributeError:
            # sys.abiflags may not be defined on all platforms.
            _CONFIG_VARS['abiflags'] = ''

        if os.name in ('nt', 'os2'):
            _init_non_posix(_CONFIG_VARS)
        if os.name == 'posix':
            _init_posix(_CONFIG_VARS)
        # Setting 'userbase' is done below the call to the
        # init function to enable using 'get_config_var' in
        # the init-function.
        if sys.version >= '2.6':
            _CONFIG_VARS['userbase'] = _getuserbase()

        if 'srcdir' not in _CONFIG_VARS:
            _CONFIG_VARS['srcdir'] = _PROJECT_BASE
        else:
            _CONFIG_VARS['srcdir'] = _safe_realpath(_CONFIG_VARS['srcdir'])

        # Convert srcdir into an absolute path if it appears necessary.
        # Normally it is relative to the build directory. However, during
        # testing, for example, we might be running a non-installed python
        # from a different directory.
        if _PYTHON_BUILD and os.name == "posix":
            base = _PROJECT_BASE
            try:
                cwd = os.getcwd()
            except OSError:
                cwd = None
            if (not os.path.isabs(_CONFIG_VARS['srcdir']) and
                base != cwd):
                # srcdir is relative and we are not in the same directory
                # as the executable. Assume executable is in the build
                # directory and make srcdir absolute.
                srcdir = os.path.join(base, _CONFIG_VARS['srcdir'])
                _CONFIG_VARS['srcdir'] = os.path.normpath(srcdir)

        if sys.platform == 'darwin':
            kernel_version = os.uname()[2]  # Kernel version (8.4.3)
            major_version = int(kernel_version.split('.')[0])

            if major_version < 8:
                # On macOS before 10.4, check if -arch and -isysroot
                # are in CFLAGS or LDFLAGS and remove them if they are.
                # This is needed when building extensions on a 10.3 system
                # using a universal build of python.
                for key in ('LDFLAGS', 'BASECFLAGS',
                            # a number of derived variables. These need to be
                            # patched up as well.
                            'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'):
                    flags = _CONFIG_VARS[key]
                    flags = re.sub('-arch\s+\w+\s', ' ', flags)
                    flags = re.sub('-isysroot [^ \t]*', ' ', flags)
                    _CONFIG_VARS[key] = flags
            else:
                # Allow the user to override the architecture flags using
                # an environment variable.
                # NOTE: This name was introduced by Apple in OSX 10.5 and
                # is used by several scripting languages distributed with
                # that OS release.
                if 'ARCHFLAGS' in os.environ:
                    arch = os.environ['ARCHFLAGS']
                    for key in ('LDFLAGS', 'BASECFLAGS',
                                # a number of derived variables. These need to
                                # be patched up as well.
                                'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'):
                        flags = _CONFIG_VARS[key]
                        flags = re.sub('-arch\s+\w+\s', ' ', flags)
                        flags = flags + ' ' + arch
                        _CONFIG_VARS[key] = flags

                # If we're on OSX 10.5 or later and the user tries to
                # compiles an extension using an SDK that is not present
                # on the current machine it is better to not use an SDK
                # than to fail.
                #
                # The major usecase for this is users using a Python.org
                # binary installer on OSX 10.6: that installer uses
                # the 10.4u SDK, but that SDK is not installed by default
                # when you install Xcode.
                #
                CFLAGS = _CONFIG_VARS.get('CFLAGS', '')
                m = re.search('-isysroot\s+(\S+)', CFLAGS)
                if m is not None:
                    sdk = m.group(1)
                    if not os.path.exists(sdk):
                        for key in ('LDFLAGS', 'BASECFLAGS',
                                    # a number of derived variables. These need
                                    # to be patched up as well.
                                    'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'):
                            flags = _CONFIG_VARS[key]
                            flags = re.sub('-isysroot\s+\S+(\s|$)', ' ', flags)
                            _CONFIG_VARS[key] = flags

    if args:
        vals = []
        for name in args:
            vals.append(_CONFIG_VARS.get(name))
        return vals
    else:
        return _CONFIG_VARS


def get_config_var(name):
    """Return the value of a single variable using the dictionary returned by
    'get_config_vars()'.

    Equivalent to get_config_vars().get(name)
    """
    return get_config_vars().get(name)


def get_platform():
    """Return a string that identifies the current platform.

    This is used mainly to distinguish platform-specific build directories and
    platform-specific built distributions. Typically includes the OS name
    and version and the architecture (as supplied by 'os.uname()'),
    although the exact information included depends on the OS; eg. for IRIX
    the architecture isn't particularly important (IRIX only runs on SGI
    hardware), but for Linux the kernel version isn't particularly
    important.

    Examples of returned values:
       linux-i586
       linux-alpha (?)
       solaris-2.6-sun4u
       irix-5.3
       irix64-6.2

    Windows will return one of:
       win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc)
       win-ia64 (64bit Windows on Itanium)
       win32 (all others - specifically, sys.platform is returned)

    For other non-POSIX platforms, currently just returns 'sys.platform'.
    """
    if os.name == 'nt':
        # sniff sys.version for architecture.
        prefix = " bit ("
        i = sys.version.find(prefix)
        if i == -1:
            return sys.platform
        j = sys.version.find(")", i)
        look = sys.version[i+len(prefix):j].lower()
        if look == 'amd64':
            return 'win-amd64'
        if look == 'itanium':
            return 'win-ia64'
        return sys.platform

    if os.name != "posix" or not hasattr(os, 'uname'):
        # XXX what about the architecture? NT is Intel or Alpha,
        # Mac OS is M68k or PPC, etc.
        return sys.platform

    # Try to distinguish various flavours of Unix
    osname, host, release, version, machine = os.uname()

    # Convert the OS name to lowercase, remove '/' characters
    # (to accommodate BSD/OS), and translate spaces (for "Power Macintosh")
    osname = osname.lower().replace('/', '')
    machine = machine.replace(' ', '_')
    machine = machine.replace('/', '-')

    if osname[:5] == "linux":
        # At least on Linux/Intel, 'machine' is the processor --
        # i386, etc.
        # XXX what about Alpha, SPARC, etc?
        return "%s-%s" % (osname, machine)
    elif osname[:5] == "sunos":
        if release[0] >= "5":           # SunOS 5 == Solaris 2
            osname = "solaris"
            release = "%d.%s" % (int(release[0]) - 3, release[2:])
        # fall through to standard osname-release-machine representation
    elif osname[:4] == "irix":          # could be "irix64"!
        return "%s-%s" % (osname, release)
    elif osname[:3] == "aix":
        return "%s-%s.%s" % (osname, version, release)
    elif osname[:6] == "cygwin":
        osname = "cygwin"
        rel_re = re.compile(r'[\d.]+')
        m = rel_re.match(release)
        if m:
            release = m.group()
    elif osname[:6] == "darwin":
        #
        # For our purposes, we'll assume that the system version from
        # distutils' perspective is what MACOSX_DEPLOYMENT_TARGET is set
        # to. This makes the compatibility story a bit more sane because the
        # machine is going to compile and link as if it were
        # MACOSX_DEPLOYMENT_TARGET.
        cfgvars = get_config_vars()
        macver = cfgvars.get('MACOSX_DEPLOYMENT_TARGET')

        # NOTE(review): "if True:" reproduced from the source — looks like a
        # remnant of an old conditional; kept to preserve structure.
        if True:
            # Always calculate the release of the running machine,
            # needed to determine if we can build fat binaries or not.

            macrelease = macver
            # Get the system version. Reading this plist is a documented
            # way to get the system version (see the documentation for
            # the Gestalt Manager)
            try:
                f = open('/System/Library/CoreServices/SystemVersion.plist')
            except IOError:
                # We're on a plain darwin box, fall back to the default
                # behaviour.
                pass
            else:
                try:
                    m = re.search(r'<key>ProductUserVisibleVersion</key>\s*'
                                  r'<string>(.*?)</string>', f.read())
                finally:
                    f.close()
                if m is not None:
                    macrelease = '.'.join(m.group(1).split('.')[:2])
                # else: fall back to the default behaviour

        if not macver:
            macver = macrelease

        if macver:
            release = macver
            osname = "macosx"

            if ((macrelease + '.') >= '10.4.' and
                '-arch' in get_config_vars().get('CFLAGS', '').strip()):
                # The universal build will build fat binaries, but not on
                # systems before 10.4
                #
                # Try to detect 4-way universal builds, those have machine-type
                # 'universal' instead of 'fat'.

                machine = 'fat'
                cflags = get_config_vars().get('CFLAGS')

                archs = re.findall('-arch\s+(\S+)', cflags)
                archs = tuple(sorted(set(archs)))

                if len(archs) == 1:
                    machine = archs[0]
                elif archs == ('i386', 'ppc'):
                    machine = 'fat'
                elif archs == ('i386', 'x86_64'):
                    machine = 'intel'
                elif archs == ('i386', 'ppc', 'x86_64'):
                    machine = 'fat3'
                elif archs == ('ppc64', 'x86_64'):
                    machine = 'fat64'
                elif archs == ('i386', 'ppc', 'ppc64', 'x86_64'):
                    machine = 'universal'
                else:
                    raise ValueError(
                        "Don't know machine value for archs=%r" % (archs,))

            elif machine == 'i386':
                # On OSX the machine type returned by uname is always the
                # 32-bit variant, even if the executable architecture is
                # the 64-bit variant
                if sys.maxsize >= 2**32:
                    machine = 'x86_64'

            elif machine in ('PowerPC', 'Power_Macintosh'):
                # Pick a sane name for the PPC architecture.
                # See 'i386' case
                if sys.maxsize >= 2**32:
                    machine = 'ppc64'
                else:
                    machine = 'ppc'

    return "%s-%s-%s" % (osname, release, machine)


def get_python_version():
    return _PY_VERSION_SHORT


def _print_dict(title, data):
    for index, (key, value) in enumerate(sorted(data.items())):
        if index == 0:
            print('%s: ' % (title))
        print('\t%s = "%s"' % (key, value))


def _main():
    """Display all information sysconfig detains."""
    print('Platform: "%s"' % get_platform())
    print('Python version: "%s"' % get_python_version())
    print('Current installation scheme: "%s"' % _get_default_scheme())
    print()
    _print_dict('Paths', get_paths())
    print()
    _print_dict('Variables', get_config_vars())


if __name__ == '__main__':
    _main()
[posix_prefix]# Configuration directories. Some of these come straight out of the# configure script. They are for implementing the other variables, not to# be used directly in [resource_locations].confdir = /etcdatadir = /usr/sharelibdir = /usr/libstatedir = /var# User resource directorylocal = ~/.local/{distribution.name}stdlib = {base}/lib/python{py_version_short}platstdlib = {platbase}/lib/python{py_version_short}purelib = {base}/lib/python{py_version_short}/site-packagesplatlib = {platbase}/lib/python{py_version_short}/site-packagesinclude = {base}/include/python{py_version_short}{abiflags}platinclude = {platbase}/include/python{py_version_short}{abiflags}data = {base}[posix_home]stdlib = {base}/lib/pythonplatstdlib = {base}/lib/pythonpurelib = {base}/lib/pythonplatlib = {base}/lib/pythoninclude = {base}/include/pythonplatinclude = {base}/include/pythonscripts = {base}/bindata = {base}[nt]stdlib = {base}/Libplatstdlib = {base}/Libpurelib = {base}/Lib/site-packagesplatlib = {base}/Lib/site-packagesinclude = {base}/Includeplatinclude = {base}/Includescripts = {base}/Scriptsdata = {base}[os2]stdlib = {base}/Libplatstdlib = {base}/Libpurelib = {base}/Lib/site-packagesplatlib = {base}/Lib/site-packagesinclude = {base}/Includeplatinclude = {base}/Includescripts = {base}/Scriptsdata = {base}[os2_home]stdlib = {userbase}/lib/python{py_version_short}platstdlib = {userbase}/lib/python{py_version_short}purelib = {userbase}/lib/python{py_version_short}/site-packagesplatlib = {userbase}/lib/python{py_version_short}/site-packagesinclude = {userbase}/include/python{py_version_short}scripts = {userbase}/bindata = {userbase}[nt_user]stdlib = {userbase}/Python{py_version_nodot}platstdlib = {userbase}/Python{py_version_nodot}purelib = {userbase}/Python{py_version_nodot}/site-packagesplatlib = {userbase}/Python{py_version_nodot}/site-packagesinclude = {userbase}/Python{py_version_nodot}/Includescripts = {userbase}/Scriptsdata = {userbase}[posix_user]stdlib = 
{userbase}/lib/python{py_version_short}platstdlib = {userbase}/lib/python{py_version_short}purelib = {userbase}/lib/python{py_version_short}/site-packagesplatlib = {userbase}/lib/python{py_version_short}/site-packagesinclude = {userbase}/include/python{py_version_short}scripts = {userbase}/bindata = {userbase}[osx_framework_user]stdlib = {userbase}/lib/pythonplatstdlib = {userbase}/lib/pythonpurelib = {userbase}/lib/python/site-packagesplatlib = {userbase}/lib/python/site-packagesinclude = {userbase}/includescripts = {userbase}/bindata = {userbase}
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012 The Python Software Foundation.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""Utility functions for copying and archiving files and directory trees.

XXX The functions here don't copy the resource fork or other metadata on Mac.
"""

import os
import sys
import stat
from os.path import abspath
import fnmatch
import collections
import errno

from . import tarfile

try:
    import bz2
    _BZ2_SUPPORTED = True
except ImportError:
    _BZ2_SUPPORTED = False

try:
    from pwd import getpwnam
except ImportError:
    getpwnam = None

try:
    from grp import getgrnam
except ImportError:
    getgrnam = None

__all__ = ["copyfileobj", "copyfile", "copymode", "copystat", "copy", "copy2",
           "copytree", "move", "rmtree", "Error", "SpecialFileError",
           "ExecError", "make_archive", "get_archive_formats",
           "register_archive_format", "unregister_archive_format",
           "get_unpack_formats", "register_unpack_format",
           "unregister_unpack_format", "unpack_archive", "ignore_patterns"]


class Error(EnvironmentError):
    pass


class SpecialFileError(EnvironmentError):
    """Raised when trying to do a kind of operation (e.g. copying) which is
    not supported on a special file (e.g. a named pipe)"""


class ExecError(EnvironmentError):
    """Raised when a command could not be executed"""


class ReadError(EnvironmentError):
    """Raised when an archive cannot be read"""


class RegistryError(Exception):
    """Raised when a registry operation with the archiving
    and unpacking registries fails"""


try:
    WindowsError
except NameError:
    WindowsError = None


def copyfileobj(fsrc, fdst, length=16*1024):
    """copy data from file-like object fsrc to file-like object fdst"""
    while 1:
        buf = fsrc.read(length)
        if not buf:
            break
        fdst.write(buf)


def _samefile(src, dst):
    """Return True if src and dst reference the same file."""
    # Macintosh, Unix.
    if hasattr(os.path, 'samefile'):
        try:
            return os.path.samefile(src, dst)
        except OSError:
            return False

    # All other platforms: check for same pathname.
    return (os.path.normcase(os.path.abspath(src)) ==
            os.path.normcase(os.path.abspath(dst)))


def copyfile(src, dst):
    """Copy data from src to dst"""
    if _samefile(src, dst):
        raise Error("`%s` and `%s` are the same file" % (src, dst))

    for fn in [src, dst]:
        try:
            st = os.stat(fn)
        except OSError:
            # File most likely does not exist
            pass
        else:
            # XXX What about other special files? (sockets, devices...)
            if stat.S_ISFIFO(st.st_mode):
                raise SpecialFileError("`%s` is a named pipe" % fn)

    with open(src, 'rb') as fsrc:
        with open(dst, 'wb') as fdst:
            copyfileobj(fsrc, fdst)


def copymode(src, dst):
    """Copy mode bits from src to dst"""
    if hasattr(os, 'chmod'):
        st = os.stat(src)
        mode = stat.S_IMODE(st.st_mode)
        os.chmod(dst, mode)


def copystat(src, dst):
    """Copy all stat info (mode bits, atime, mtime, flags) from src to dst"""
    st = os.stat(src)
    mode = stat.S_IMODE(st.st_mode)
    if hasattr(os, 'utime'):
        os.utime(dst, (st.st_atime, st.st_mtime))
    if hasattr(os, 'chmod'):
        os.chmod(dst, mode)
    if hasattr(os, 'chflags') and hasattr(st, 'st_flags'):
        try:
            os.chflags(dst, st.st_flags)
        except OSError as why:
            # EOPNOTSUPP is expected on filesystems without flag support.
            if (not hasattr(errno, 'EOPNOTSUPP') or
                    why.errno != errno.EOPNOTSUPP):
                raise


def copy(src, dst):
    """Copy data and mode bits ("cp src dst").

    The destination may be a directory.
    """
    if os.path.isdir(dst):
        dst = os.path.join(dst, os.path.basename(src))
    copyfile(src, dst)
    copymode(src, dst)


def copy2(src, dst):
    """Copy data and all stat info ("cp -p src dst").

    The destination may be a directory.
    """
    if os.path.isdir(dst):
        dst = os.path.join(dst, os.path.basename(src))
    copyfile(src, dst)
    copystat(src, dst)


def ignore_patterns(*patterns):
    """Function that can be used as copytree() ignore parameter.

    Patterns is a sequence of glob-style patterns
    that are used to exclude files"""
    def _ignore_patterns(path, names):
        ignored_names = []
        for pattern in patterns:
            ignored_names.extend(fnmatch.filter(names, pattern))
        return set(ignored_names)
    return _ignore_patterns


def copytree(src, dst, symlinks=False, ignore=None, copy_function=copy2,
             ignore_dangling_symlinks=False):
    """Recursively copy a directory tree.

    The destination directory must not already exist.
    If exception(s) occur, an Error is raised with a list of reasons.

    If the optional symlinks flag is true, symbolic links in the
    source tree result in symbolic links in the destination tree; if
    it is false, the contents of the files pointed to by symbolic
    links are copied. If the file pointed by the symlink doesn't
    exist, an exception will be added in the list of errors raised in
    an Error exception at the end of the copy process.

    You can set the optional ignore_dangling_symlinks flag to true if you
    want to silence this exception. Notice that this has no effect on
    platforms that don't support os.symlink.

    The optional ignore argument is a callable. If given, it
    is called with the `src` parameter, which is the directory
    being visited by copytree(), and `names` which is the list of
    `src` contents, as returned by os.listdir():

        callable(src, names) -> ignored_names

    Since copytree() is called recursively, the callable will be
    called once for each directory that is copied. It returns a
    list of names relative to the `src` directory that should
    not be copied.

    The optional copy_function argument is a callable that will be used
    to copy each file. It will be called with the source path and the
    destination path as arguments. By default, copy2() is used, but any
    function that supports the same signature (like copy()) can be used.
    """
    names = os.listdir(src)
    if ignore is not None:
        ignored_names = ignore(src, names)
    else:
        ignored_names = set()

    os.makedirs(dst)
    errors = []
    for name in names:
        if name in ignored_names:
            continue
        srcname = os.path.join(src, name)
        dstname = os.path.join(dst, name)
        try:
            if os.path.islink(srcname):
                linkto = os.readlink(srcname)
                if symlinks:
                    os.symlink(linkto, dstname)
                else:
                    # ignore dangling symlink if the flag is on
                    if not os.path.exists(linkto) and ignore_dangling_symlinks:
                        continue
                    # otherwise let the copy occur. copy2 will raise an error
                    copy_function(srcname, dstname)
            elif os.path.isdir(srcname):
                copytree(srcname, dstname, symlinks, ignore, copy_function)
            else:
                # Will raise a SpecialFileError for unsupported file types
                copy_function(srcname, dstname)
        # catch the Error from the recursive copytree so that we can
        # continue with other files
        except Error as err:
            errors.extend(err.args[0])
        except EnvironmentError as why:
            errors.append((srcname, dstname, str(why)))
    try:
        copystat(src, dst)
    except OSError as why:
        if WindowsError is not None and isinstance(why, WindowsError):
            # Copying file access times may fail on Windows
            pass
        else:
            # Bug fix: was errors.extend(...), which flattened the
            # (src, dst, reason) tuple into three separate list entries.
            errors.append((src, dst, str(why)))
    if errors:
        raise Error(errors)


def rmtree(path, ignore_errors=False, onerror=None):
    """Recursively delete a directory tree.

    If ignore_errors is set, errors are ignored; otherwise, if onerror
    is set, it is called to handle the error with arguments (func,
    path, exc_info) where func is os.listdir, os.remove, or os.rmdir;
    path is the argument to that function that caused it to fail; and
    exc_info is a tuple returned by sys.exc_info(). If ignore_errors
    is false and onerror is None, an exception is raised.
    """
    if ignore_errors:
        def onerror(*args):
            pass
    elif onerror is None:
        def onerror(*args):
            raise
    try:
        if os.path.islink(path):
            # symlinks to directories are forbidden, see bug #1669
            raise OSError("Cannot call rmtree on a symbolic link")
    except OSError:
        onerror(os.path.islink, path, sys.exc_info())
        # can't continue even if onerror hook returns
        return
    names = []
    try:
        names = os.listdir(path)
    except os.error:
        onerror(os.listdir, path, sys.exc_info())
    for name in names:
        fullname = os.path.join(path, name)
        try:
            mode = os.lstat(fullname).st_mode
        except os.error:
            mode = 0
        if stat.S_ISDIR(mode):
            rmtree(fullname, ignore_errors, onerror)
        else:
            try:
                os.remove(fullname)
            except os.error:
                onerror(os.remove, fullname, sys.exc_info())
    try:
        os.rmdir(path)
    except os.error:
        onerror(os.rmdir, path, sys.exc_info())


def _basename(path):
    # A basename() variant which first strips the trailing slash, if present.
    # Thus we always get the last component of the path, even for directories.
    return os.path.basename(path.rstrip(os.path.sep))


def move(src, dst):
    """Recursively move a file or directory to another location. This is
    similar to the Unix "mv" command.

    If the destination is a directory or a symlink to a directory, the source
    is moved inside the directory. The destination path must not already
    exist.

    If the destination already exists but is not a directory, it may be
    overwritten depending on os.rename() semantics.

    If the destination is on our current filesystem, then rename() is used.
    Otherwise, src is copied to the destination and then removed.
    A lot more could be done here...  A look at a mv.c shows a lot of
    the issues this implementation glosses over.
    """
    real_dst = dst
    if os.path.isdir(dst):
        if _samefile(src, dst):
            # We might be on a case insensitive filesystem,
            # perform the rename anyway.
            os.rename(src, dst)
            return

        real_dst = os.path.join(dst, _basename(src))
        if os.path.exists(real_dst):
            raise Error("Destination path '%s' already exists" % real_dst)
    try:
        os.rename(src, real_dst)
    except OSError:
        if os.path.isdir(src):
            if _destinsrc(src, dst):
                raise Error("Cannot move a directory '%s' into itself '%s'." % (src, dst))
            copytree(src, real_dst, symlinks=True)
            rmtree(src)
        else:
            copy2(src, real_dst)
            os.unlink(src)


def _destinsrc(src, dst):
    # True if dst lies inside src (both normalized to trailing-slash form).
    src = abspath(src)
    dst = abspath(dst)
    if not src.endswith(os.path.sep):
        src += os.path.sep
    if not dst.endswith(os.path.sep):
        dst += os.path.sep
    return dst.startswith(src)


def _get_gid(name):
    """Returns a gid, given a group name."""
    if getgrnam is None or name is None:
        return None
    try:
        result = getgrnam(name)
    except KeyError:
        result = None
    if result is not None:
        return result[2]
    return None


def _get_uid(name):
    """Returns an uid, given a user name."""
    if getpwnam is None or name is None:
        return None
    try:
        result = getpwnam(name)
    except KeyError:
        result = None
    if result is not None:
        return result[2]
    return None


def _make_tarball(base_name, base_dir, compress="gzip", verbose=0, dry_run=0,
                  owner=None, group=None, logger=None):
    """Create a (possibly compressed) tar file from all the files under
    'base_dir'.

    'compress' must be "gzip" (the default), "bzip2", or None.

    'owner' and 'group' can be used to define an owner and a group for the
    archive that is being built. If not provided, the current owner and group
    will be used.

    The output tar file will be named 'base_name' + ".tar", possibly plus
    the appropriate compression extension (".gz", or ".bz2").

    Returns the output filename.
    """
    tar_compression = {'gzip': 'gz', None: ''}
    compress_ext = {'gzip': '.gz'}

    if _BZ2_SUPPORTED:
        tar_compression['bzip2'] = 'bz2'
        compress_ext['bzip2'] = '.bz2'

    # flags for compression program, each element of list will be an argument
    if compress is not None and compress not in compress_ext:
        raise ValueError("bad value for 'compress', or compression format not "
                         "supported : {0}".format(compress))

    archive_name = base_name + '.tar' + compress_ext.get(compress, '')
    archive_dir = os.path.dirname(archive_name)

    if not os.path.exists(archive_dir):
        if logger is not None:
            logger.info("creating %s", archive_dir)
        if not dry_run:
            os.makedirs(archive_dir)

    # creating the tarball
    if logger is not None:
        logger.info('Creating tar archive')

    uid = _get_uid(owner)
    gid = _get_gid(group)

    def _set_uid_gid(tarinfo):
        # tarfile filter: stamp requested ownership onto each member.
        if gid is not None:
            tarinfo.gid = gid
            tarinfo.gname = group
        if uid is not None:
            tarinfo.uid = uid
            tarinfo.uname = owner
        return tarinfo

    if not dry_run:
        tar = tarfile.open(archive_name, 'w|%s' % tar_compression[compress])
        try:
            tar.add(base_dir, filter=_set_uid_gid)
        finally:
            tar.close()

    return archive_name


def _call_external_zip(base_dir, zip_filename, verbose=False, dry_run=False):
    # XXX see if we want to keep an external call here
    if verbose:
        zipoptions = "-r"
    else:
        zipoptions = "-rq"
    from distutils.errors import DistutilsExecError
    from distutils.spawn import spawn
    try:
        spawn(["zip", zipoptions, zip_filename, base_dir], dry_run=dry_run)
    except DistutilsExecError:
        # XXX really should distinguish between "couldn't find
        # external 'zip' command" and "zip failed".
        # Bug fix: '% zip_filename' was applied to the ExecError instance
        # (a TypeError at raise time) instead of to the message string.
        raise ExecError("unable to create zip file '%s': "
                        "could neither import the 'zipfile' module nor "
                        "find a standalone zip utility" % zip_filename)


def _make_zipfile(base_name, base_dir, verbose=0, dry_run=0, logger=None):
    """Create a zip file from all the files under 'base_dir'.

    The output zip file will be named 'base_name' + ".zip". Uses either the
    "zipfile" Python module (if available) or the InfoZIP "zip" utility
    (if installed and found on the default search path). If neither tool is
    available, raises ExecError. Returns the name of the output zip
    file.
    """
    zip_filename = base_name + ".zip"
    archive_dir = os.path.dirname(base_name)

    if not os.path.exists(archive_dir):
        if logger is not None:
            logger.info("creating %s", archive_dir)
        if not dry_run:
            os.makedirs(archive_dir)

    # If zipfile module is not available, try spawning an external 'zip'
    # command.
    try:
        import zipfile
    except ImportError:
        zipfile = None

    if zipfile is None:
        _call_external_zip(base_dir, zip_filename, verbose, dry_run)
    else:
        if logger is not None:
            logger.info("creating '%s' and adding '%s' to it",
                        zip_filename, base_dir)

        if not dry_run:
            # 'zf' instead of 'zip' to avoid shadowing the builtin.
            zf = zipfile.ZipFile(zip_filename, "w",
                                 compression=zipfile.ZIP_DEFLATED)

            for dirpath, dirnames, filenames in os.walk(base_dir):
                for name in filenames:
                    path = os.path.normpath(os.path.join(dirpath, name))
                    if os.path.isfile(path):
                        zf.write(path, path)
                        if logger is not None:
                            logger.info("adding '%s'", path)
            zf.close()

    return zip_filename

# 'bztar' is only registered when bz2 is actually importable, mirroring
# _UNPACK_FORMATS below; previously it was present unconditionally and
# selecting it without bz2 failed deep inside _make_tarball.
_ARCHIVE_FORMATS = {
    'gztar': (_make_tarball, [('compress', 'gzip')], "gzip'ed tar-file"),
    'tar':   (_make_tarball, [('compress', None)], "uncompressed tar file"),
    'zip':   (_make_zipfile, [], "ZIP file"),
    }

if _BZ2_SUPPORTED:
    _ARCHIVE_FORMATS['bztar'] = (_make_tarball, [('compress', 'bzip2')],
                                 "bzip2'ed tar-file")


def get_archive_formats():
    """Returns a list of supported formats for archiving and unarchiving.

    Each element of the returned sequence is a tuple (name, description)
    """
    formats = [(name, registry[2]) for name, registry in
               _ARCHIVE_FORMATS.items()]
    formats.sort()
    return formats


def register_archive_format(name, function, extra_args=None, description=''):
    """Registers an archive format.

    name is the name of the format. function is the callable that will be
    used to create archives. If provided, extra_args is a sequence of
    (name, value) tuples that will be passed as arguments to the callable.
    description can be provided to describe the format, and will be returned
    by the get_archive_formats() function.
    """
    if extra_args is None:
        extra_args = []
    # NOTE(review): collections.Callable moved to collections.abc in 3.3 and
    # was removed from collections in 3.10; kept for this backport's targets.
    if not isinstance(function, collections.Callable):
        raise TypeError('The %s object is not callable' % function)
    if not isinstance(extra_args, (tuple, list)):
        raise TypeError('extra_args needs to be a sequence')
    for element in extra_args:
        if not isinstance(element, (tuple, list)) or len(element) != 2:
            raise TypeError('extra_args elements are : (arg_name, value)')

    _ARCHIVE_FORMATS[name] = (function, extra_args, description)


def unregister_archive_format(name):
    del _ARCHIVE_FORMATS[name]


def make_archive(base_name, format, root_dir=None, base_dir=None, verbose=0,
                 dry_run=0, owner=None, group=None, logger=None):
    """Create an archive file (eg. zip or tar).

    'base_name' is the name of the file to create, minus any format-specific
    extension; 'format' is the archive format: one of "zip", "tar", "bztar"
    or "gztar".

    'root_dir' is a directory that will be the root directory of the
    archive; ie. we typically chdir into 'root_dir' before creating the
    archive. 'base_dir' is the directory where we start archiving from;
    ie. 'base_dir' will be the common prefix of all files and
    directories in the archive. 'root_dir' and 'base_dir' both default
    to the current directory. Returns the name of the archive file.

    'owner' and 'group' are used when creating a tar archive. By default,
    uses the current owner and group.
    """
    save_cwd = os.getcwd()
    if root_dir is not None:
        if logger is not None:
            logger.debug("changing into '%s'", root_dir)
        base_name = os.path.abspath(base_name)
        if not dry_run:
            os.chdir(root_dir)

    if base_dir is None:
        base_dir = os.curdir

    kwargs = {'dry_run': dry_run, 'logger': logger}

    try:
        format_info = _ARCHIVE_FORMATS[format]
    except KeyError:
        raise ValueError("unknown archive format '%s'" % format)

    func = format_info[0]
    for arg, val in format_info[1]:
        kwargs[arg] = val

    if format != 'zip':
        kwargs['owner'] = owner
        kwargs['group'] = group

    try:
        filename = func(base_name, base_dir, **kwargs)
    finally:
        if root_dir is not None:
            if logger is not None:
                logger.debug("changing back to '%s'", save_cwd)
            os.chdir(save_cwd)

    return filename


def get_unpack_formats():
    """Returns a list of supported formats for unpacking.

    Each element of the returned sequence is a tuple
    (name, extensions, description)
    """
    formats = [(name, info[0], info[3]) for name, info in
               _UNPACK_FORMATS.items()]
    formats.sort()
    return formats


def _check_unpack_options(extensions, function, extra_args):
    """Checks what gets registered as an unpacker."""
    # first make sure no other unpacker is registered for this extension
    existing_extensions = {}
    for name, info in _UNPACK_FORMATS.items():
        for ext in info[0]:
            existing_extensions[ext] = name

    for extension in extensions:
        if extension in existing_extensions:
            msg = '%s is already registered for "%s"'
            raise RegistryError(msg % (extension,
                                       existing_extensions[extension]))

    if not isinstance(function, collections.Callable):
        raise TypeError('The registered function must be a callable')


def register_unpack_format(name, extensions, function, extra_args=None,
                           description=''):
    """Registers an unpack format.

    `name` is the name of the format. `extensions` is a list of extensions
    corresponding to the format.

    `function` is the callable that will be
    used to unpack archives. The callable will receive archives to unpack.
    If it's unable to handle an archive, it needs to raise a ReadError
    exception.

    If provided, `extra_args` is a sequence of
    (name, value) tuples that will be passed as arguments to the callable.
    description can be provided to describe the format, and will be returned
    by the get_unpack_formats() function.
    """
    if extra_args is None:
        extra_args = []
    _check_unpack_options(extensions, function, extra_args)
    _UNPACK_FORMATS[name] = extensions, function, extra_args, description


def unregister_unpack_format(name):
    """Removes the pack format from the registry."""
    del _UNPACK_FORMATS[name]


def _ensure_directory(path):
    """Ensure that the parent directory of `path` exists"""
    dirname = os.path.dirname(path)
    if not os.path.isdir(dirname):
        os.makedirs(dirname)


def _unpack_zipfile(filename, extract_dir):
    """Unpack zip `filename` to `extract_dir`"""
    try:
        import zipfile
    except ImportError:
        raise ReadError('zlib not supported, cannot unpack this archive.')

    if not zipfile.is_zipfile(filename):
        raise ReadError("%s is not a zip file" % filename)

    # 'zf' instead of 'zip' to avoid shadowing the builtin.
    zf = zipfile.ZipFile(filename)
    try:
        for info in zf.infolist():
            name = info.filename

            # don't extract absolute paths or ones with .. in them
            if name.startswith('/') or '..' in name:
                continue

            target = os.path.join(extract_dir, *name.split('/'))
            if not target:
                continue

            _ensure_directory(target)
            if not name.endswith('/'):
                # file
                data = zf.read(info.filename)
                f = open(target, 'wb')
                try:
                    f.write(data)
                finally:
                    f.close()
                    del data
    finally:
        zf.close()


def _unpack_tarfile(filename, extract_dir):
    """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir`"""
    try:
        tarobj = tarfile.open(filename)
    except tarfile.TarError:
        raise ReadError(
            "%s is not a compressed or uncompressed tar file" % filename)
    try:
        tarobj.extractall(extract_dir)
    finally:
        tarobj.close()

_UNPACK_FORMATS = {
    'gztar': (['.tar.gz', '.tgz'], _unpack_tarfile, [], "gzip'ed tar-file"),
    'tar':   (['.tar'], _unpack_tarfile, [], "uncompressed tar file"),
    'zip':   (['.zip'], _unpack_zipfile, [], "ZIP file")
    }

if _BZ2_SUPPORTED:
    _UNPACK_FORMATS['bztar'] = (['.bz2'], _unpack_tarfile, [],
                                "bzip2'ed tar-file")


def _find_unpack_format(filename):
    # Return the registered format name whose extension matches, else None.
    for name, info in _UNPACK_FORMATS.items():
        for extension in info[0]:
            if filename.endswith(extension):
                return name
    return None


def unpack_archive(filename, extract_dir=None, format=None):
    """Unpack an archive.

    `filename` is the name of the archive.

    `extract_dir` is the name of the target directory, where the archive
    is unpacked. If not provided, the current working directory is used.

    `format` is the archive format: one of "zip", "tar", or "gztar". Or any
    other registered format. If not provided, unpack_archive will use the
    filename extension and see if an unpacker was registered for that
    extension.

    In case none is found, a ValueError is raised.
    """
    if extract_dir is None:
        extract_dir = os.getcwd()

    if format is not None:
        try:
            format_info = _UNPACK_FORMATS[format]
        except KeyError:
            raise ValueError("Unknown unpack format '{0}'".format(format))

        func = format_info[1]
        func(filename, extract_dir, **dict(format_info[2]))
    else:
        # we need to look at the registered unpackers supported extensions
        format = _find_unpack_format(filename)
        if format is None:
            raise ReadError("Unknown archive format '{0}'".format(filename))

        func = _UNPACK_FORMATS[format][1]
        kwargs = dict(_UNPACK_FORMATS[format][2])
        func(filename, extract_dir, **kwargs)
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012 The Python Software Foundation.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""Backports for individual classes and functions."""

import os
import sys

__all__ = ['cache_from_source', 'callable', 'fsencode']


try:
    from imp import cache_from_source
except ImportError:
    def cache_from_source(py_file, debug=__debug__):
        """Map a .py path to its cached bytecode path (pre-PEP 3147 style)."""
        suffix = 'c' if debug else 'o'
        return py_file + suffix


try:
    # Re-export the builtin when it exists (it was absent in Python 3.0/3.1).
    callable = callable
except NameError:
    from collections import Callable

    def callable(obj):
        """Return True if obj appears callable."""
        return isinstance(obj, Callable)


try:
    fsencode = os.fsencode
except AttributeError:
    def fsencode(filename):
        """Encode a filename to bytes using the filesystem encoding."""
        if isinstance(filename, bytes):
            return filename
        if isinstance(filename, str):
            return filename.encode(sys.getfilesystemencoding())
        raise TypeError("expect bytes or str, not %s" %
                        type(filename).__name__)
"""Modules copied from Python 3 standard libraries, for internal use only.Individual classes and functions are found in d2._backport.misc. Intendedusage is to always import things missing from 3.1 from that module: thebuilt-in/stdlib objects will be used if found."""
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012-2016 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
import logging

__version__ = '0.2.4'


class DistlibException(Exception):
    """Base class for exceptions raised by distlib."""
    pass


try:
    from logging import NullHandler
except ImportError:  # pragma: no cover
    # Backport for Pythons whose logging module predates NullHandler.
    class NullHandler(logging.Handler):
        def handle(self, record):
            pass

        def emit(self, record):
            pass

        def createLock(self):
            self.lock = None

# Package-level logger; the NullHandler keeps the library silent unless
# the application configures logging itself.
logger = logging.getLogger(__name__)
logger.addHandler(NullHandler())
# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
from . import win32


# from wincon.h
class WinColor(object):
    BLACK   = 0
    BLUE    = 1
    GREEN   = 2
    CYAN    = 3
    RED     = 4
    MAGENTA = 5
    YELLOW  = 6
    GREY    = 7


# from wincon.h
class WinStyle(object):
    NORMAL              = 0x00  # dim text, dim background
    BRIGHT              = 0x08  # bright text, dim background
    BRIGHT_BACKGROUND   = 0x80  # dim text, bright background


class WinTerm(object):
    """Mutable view of the Windows console text attributes.

    Tracks foreground, background and style bits separately and pushes the
    combined attribute word to the console via the win32 wrappers.
    """

    def __init__(self):
        self._default = win32.GetConsoleScreenBufferInfo(win32.STDOUT).wAttributes
        self.set_attrs(self._default)
        self._default_fore = self._fore
        self._default_back = self._back
        self._default_style = self._style
        # In order to emulate LIGHT_EX in windows, we borrow the BRIGHT style.
        # So that LIGHT_EX colors and BRIGHT style do not clobber each other,
        # we track them separately, since LIGHT_EX is overwritten by Fore/Back
        # and BRIGHT is overwritten by Style codes.
        self._light = 0

    def get_attrs(self):
        """Combine fore/back/style into a single console attribute word."""
        return self._fore + self._back * 16 + (self._style | self._light)

    def set_attrs(self, value):
        """Split a console attribute word into fore/back/style components."""
        self._fore = value & 7
        self._back = (value >> 4) & 7
        self._style = value & (WinStyle.BRIGHT | WinStyle.BRIGHT_BACKGROUND)

    def reset_all(self, on_stderr=None):
        self.set_attrs(self._default)
        self.set_console(attrs=self._default)

    def fore(self, fore=None, light=False, on_stderr=False):
        if fore is None:
            fore = self._default_fore
        self._fore = fore
        # Emulate LIGHT_EX with BRIGHT Style
        if light:
            self._light |= WinStyle.BRIGHT
        else:
            self._light &= ~WinStyle.BRIGHT
        self.set_console(on_stderr=on_stderr)

    def back(self, back=None, light=False, on_stderr=False):
        if back is None:
            back = self._default_back
        self._back = back
        # Emulate LIGHT_EX with BRIGHT_BACKGROUND Style
        if light:
            self._light |= WinStyle.BRIGHT_BACKGROUND
        else:
            self._light &= ~WinStyle.BRIGHT_BACKGROUND
        self.set_console(on_stderr=on_stderr)

    def style(self, style=None, on_stderr=False):
        if style is None:
            style = self._default_style
        self._style = style
        self.set_console(on_stderr=on_stderr)

    def set_console(self, attrs=None, on_stderr=False):
        if attrs is None:
            attrs = self.get_attrs()
        handle = win32.STDOUT
        if on_stderr:
            handle = win32.STDERR
        win32.SetConsoleTextAttribute(handle, attrs)

    def get_position(self, handle):
        position = win32.GetConsoleScreenBufferInfo(handle).dwCursorPosition
        # Because Windows coordinates are 0-based,
        # and win32.SetConsoleCursorPosition expects 1-based.
        position.X += 1
        position.Y += 1
        return position

    def set_cursor_position(self, position=None, on_stderr=False):
        if position is None:
            # I'm not currently tracking the position, so there is no default.
            # position = self.get_position()
            return
        handle = win32.STDOUT
        if on_stderr:
            handle = win32.STDERR
        win32.SetConsoleCursorPosition(handle, position)

    def cursor_adjust(self, x, y, on_stderr=False):
        handle = win32.STDOUT
        if on_stderr:
            handle = win32.STDERR
        position = self.get_position(handle)
        adjusted_position = (position.Y + y, position.X + x)
        win32.SetConsoleCursorPosition(handle, adjusted_position, adjust=False)

    def erase_screen(self, mode=0, on_stderr=False):
        # 0 should clear from the cursor to the end of the screen.
        # 1 should clear from the cursor to the beginning of the screen.
        # 2 should clear the entire screen, and move cursor to (1,1)
        handle = win32.STDOUT
        if on_stderr:
            handle = win32.STDERR
        csbi = win32.GetConsoleScreenBufferInfo(handle)
        # get the number of character cells in the current buffer
        cells_in_screen = csbi.dwSize.X * csbi.dwSize.Y
        # get number of character cells before current cursor position
        cells_before_cursor = csbi.dwSize.X * csbi.dwCursorPosition.Y + csbi.dwCursorPosition.X
        if mode == 0:
            from_coord = csbi.dwCursorPosition
            cells_to_erase = cells_in_screen - cells_before_cursor
        elif mode == 1:
            from_coord = win32.COORD(0, 0)
            cells_to_erase = cells_before_cursor
        elif mode == 2:
            from_coord = win32.COORD(0, 0)
            cells_to_erase = cells_in_screen
        else:
            # Bug fix: unknown modes previously fell through with
            # from_coord/cells_to_erase unbound, raising NameError.
            return
        # fill the entire screen with blanks
        win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord)
        # now set the buffer's attributes accordingly
        win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord)
        if mode == 2:
            # put the cursor where needed
            win32.SetConsoleCursorPosition(handle, (1, 1))

    def erase_line(self, mode=0, on_stderr=False):
        # 0 should clear from the cursor to the end of the line.
        # 1 should clear from the cursor to the beginning of the line.
        # 2 should clear the entire line.
        handle = win32.STDOUT
        if on_stderr:
            handle = win32.STDERR
        csbi = win32.GetConsoleScreenBufferInfo(handle)
        if mode == 0:
            from_coord = csbi.dwCursorPosition
            cells_to_erase = csbi.dwSize.X - csbi.dwCursorPosition.X
        elif mode == 1:
            from_coord = win32.COORD(0, csbi.dwCursorPosition.Y)
            cells_to_erase = csbi.dwCursorPosition.X
        elif mode == 2:
            from_coord = win32.COORD(0, csbi.dwCursorPosition.Y)
            cells_to_erase = csbi.dwSize.X
        else:
            # Bug fix: guard unknown modes (see erase_screen).
            return
        # fill the entire screen with blanks
        win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord)
        # now set the buffer's attributes accordingly
        win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord)

    def set_title(self, title):
        win32.SetConsoleTitle(title)
# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.

# Console handle ids, from winbase.h.
STDOUT = -11
STDERR = -12

try:
    import ctypes
    from ctypes import LibraryLoader
    windll = LibraryLoader(ctypes.WinDLL)
    from ctypes import wintypes
except (AttributeError, ImportError):
    # Not on Windows (or ctypes unavailable): expose inert stand-ins so the
    # rest of the package can import this module unconditionally.
    windll = None

    def SetConsoleTextAttribute(*_):
        return None

    def winapi_test(*_):
        return None
else:
    from ctypes import byref, Structure, c_char, POINTER

    COORD = wintypes._COORD

    class CONSOLE_SCREEN_BUFFER_INFO(Structure):
        """struct in wincon.h."""
        _fields_ = [
            ("dwSize", COORD),
            ("dwCursorPosition", COORD),
            ("wAttributes", wintypes.WORD),
            ("srWindow", wintypes.SMALL_RECT),
            ("dwMaximumWindowSize", COORD),
        ]

        def __str__(self):
            return '(%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d)' % (
                self.dwSize.Y, self.dwSize.X,
                self.dwCursorPosition.Y, self.dwCursorPosition.X,
                self.wAttributes,
                self.srWindow.Top, self.srWindow.Left,
                self.srWindow.Bottom, self.srWindow.Right,
                self.dwMaximumWindowSize.Y, self.dwMaximumWindowSize.X)

    # Bind the kernel32 entry points and declare their C prototypes.
    _GetStdHandle = windll.kernel32.GetStdHandle
    _GetStdHandle.argtypes = [wintypes.DWORD]
    _GetStdHandle.restype = wintypes.HANDLE

    _GetConsoleScreenBufferInfo = windll.kernel32.GetConsoleScreenBufferInfo
    _GetConsoleScreenBufferInfo.argtypes = [
        wintypes.HANDLE, POINTER(CONSOLE_SCREEN_BUFFER_INFO)]
    _GetConsoleScreenBufferInfo.restype = wintypes.BOOL

    _SetConsoleTextAttribute = windll.kernel32.SetConsoleTextAttribute
    _SetConsoleTextAttribute.argtypes = [wintypes.HANDLE, wintypes.WORD]
    _SetConsoleTextAttribute.restype = wintypes.BOOL

    _SetConsoleCursorPosition = windll.kernel32.SetConsoleCursorPosition
    _SetConsoleCursorPosition.argtypes = [wintypes.HANDLE, COORD]
    _SetConsoleCursorPosition.restype = wintypes.BOOL

    _FillConsoleOutputCharacterA = windll.kernel32.FillConsoleOutputCharacterA
    _FillConsoleOutputCharacterA.argtypes = [
        wintypes.HANDLE, c_char, wintypes.DWORD, COORD,
        POINTER(wintypes.DWORD)]
    _FillConsoleOutputCharacterA.restype = wintypes.BOOL

    _FillConsoleOutputAttribute = windll.kernel32.FillConsoleOutputAttribute
    _FillConsoleOutputAttribute.argtypes = [
        wintypes.HANDLE, wintypes.WORD, wintypes.DWORD, COORD,
        POINTER(wintypes.DWORD)]
    _FillConsoleOutputAttribute.restype = wintypes.BOOL

    # NOTE(review): despite the 'W' in the local name this binds the ANSI
    # SetConsoleTitleA entry point with an LPCSTR argument — preserved as-is;
    # switching to the wide variant would change accepted argument types.
    _SetConsoleTitleW = windll.kernel32.SetConsoleTitleA
    _SetConsoleTitleW.argtypes = [wintypes.LPCSTR]
    _SetConsoleTitleW.restype = wintypes.BOOL

    handles = {
        STDOUT: _GetStdHandle(STDOUT),
        STDERR: _GetStdHandle(STDERR),
    }

    def winapi_test():
        """Return True if console-screen-buffer queries work on stdout."""
        info = CONSOLE_SCREEN_BUFFER_INFO()
        ok = _GetConsoleScreenBufferInfo(handles[STDOUT], byref(info))
        return bool(ok)

    def GetConsoleScreenBufferInfo(stream_id=STDOUT):
        info = CONSOLE_SCREEN_BUFFER_INFO()
        _GetConsoleScreenBufferInfo(handles[stream_id], byref(info))
        return info

    def SetConsoleTextAttribute(stream_id, attrs):
        return _SetConsoleTextAttribute(handles[stream_id], attrs)

    def SetConsoleCursorPosition(stream_id, position, adjust=True):
        position = COORD(*position)
        # If the position is out of range, do nothing.
        if position.Y <= 0 or position.X <= 0:
            return
        # Adjust for Windows' SetConsoleCursorPosition:
        #    1. being 0-based, while ANSI is 1-based.
        #    2. expecting (x,y), while ANSI uses (y,x).
        adjusted_position = COORD(position.Y - 1, position.X - 1)
        if adjust:
            # Adjust for viewport's scroll position
            sr = GetConsoleScreenBufferInfo(STDOUT).srWindow
            adjusted_position.Y += sr.Top
            adjusted_position.X += sr.Left
        # Resume normal processing
        return _SetConsoleCursorPosition(handles[stream_id],
                                         adjusted_position)

    def FillConsoleOutputCharacter(stream_id, char, length, start):
        # Note that this is hard-coded for ANSI (vs wide) bytes.
        written = wintypes.DWORD(0)
        _FillConsoleOutputCharacterA(handles[stream_id],
                                     c_char(char.encode()),
                                     wintypes.DWORD(length), start,
                                     byref(written))
        return written.value

    def FillConsoleOutputAttribute(stream_id, attr, length, start):
        ''' FillConsoleOutputAttribute( hConsole, csbi.wAttributes, dwConSize, coordScreen, &cCharsWritten )'''
        # Note that this is hard-coded for ANSI (vs wide) bytes.
        written = wintypes.DWORD(0)
        return _FillConsoleOutputAttribute(handles[stream_id],
                                           wintypes.WORD(attr),
                                           wintypes.DWORD(length), start,
                                           byref(written))

    def SetConsoleTitle(title):
        return _SetConsoleTitleW(title)
# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
import atexit
import contextlib
import sys

from .ansitowin32 import AnsiToWin32


# Module-level state: the original std streams and their wrapped
# replacements, so init()/deinit()/reinit() can swap back and forth.
orig_stdout = None
orig_stderr = None

wrapped_stdout = None
wrapped_stderr = None

atexit_done = False


def reset_all():
    """Emit a reset-all-colors sequence to the original stdout."""
    if AnsiToWin32 is not None:    # Issue #74: objects might become None at exit
        AnsiToWin32(orig_stdout).reset_all()


def init(autoreset=False, convert=None, strip=None, wrap=True):
    """Install ANSI-aware wrappers around sys.stdout and sys.stderr.

    Raises ValueError if wrap=False is combined with any other truthy flag,
    since the flags only take effect through the wrapper.
    """
    if not wrap and any([autoreset, convert, strip]):
        raise ValueError('wrap=False conflicts with any other arg=True')

    global wrapped_stdout, wrapped_stderr
    global orig_stdout, orig_stderr

    orig_stdout = sys.stdout
    orig_stderr = sys.stderr

    # A stream may be None (e.g. pythonw.exe); leave it alone in that case.
    if sys.stdout is None:
        wrapped_stdout = None
    else:
        wrapped_stdout = wrap_stream(orig_stdout, convert, strip, autoreset, wrap)
        sys.stdout = wrapped_stdout

    if sys.stderr is None:
        wrapped_stderr = None
    else:
        wrapped_stderr = wrap_stream(orig_stderr, convert, strip, autoreset, wrap)
        sys.stderr = wrapped_stderr

    # Register the color reset exactly once per process.
    global atexit_done
    if not atexit_done:
        atexit.register(reset_all)
        atexit_done = True


def deinit():
    """Restore the original std streams saved by init()."""
    if orig_stdout is not None:
        sys.stdout = orig_stdout
    if orig_stderr is not None:
        sys.stderr = orig_stderr


@contextlib.contextmanager
def colorama_text(*args, **kwargs):
    """Context manager: init() on entry, deinit() on exit."""
    init(*args, **kwargs)
    try:
        yield
    finally:
        deinit()


def reinit():
    """Re-install the wrapped streams created by a previous init()."""
    if wrapped_stdout is not None:
        sys.stdout = wrapped_stdout
    if wrapped_stderr is not None:
        sys.stderr = wrapped_stderr


def wrap_stream(stream, convert, strip, autoreset, wrap):
    """Return an AnsiToWin32 proxy for `stream`, or the stream unchanged
    when wrapping is disabled or unnecessary."""
    if wrap:
        wrapper = AnsiToWin32(stream,
            convert=convert, strip=strip, autoreset=autoreset)
        if wrapper.should_wrap():
            stream = wrapper.stream
    return stream
# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
# Converts ANSI escape sequences in written text into win32 console calls.
import re
import sys
import os

from .ansi import AnsiFore, AnsiBack, AnsiStyle, Style
from .winterm import WinTerm, WinColor, WinStyle
from .win32 import windll, winapi_test

# WinTerm instance is only created when the win32 bindings loaded
# (i.e. on Windows); elsewhere it stays None and conversion is disabled.
winterm = None
if windll is not None:
    winterm = WinTerm()


def is_stream_closed(stream):
    # Streams without a 'closed' attribute are treated as closed.
    return not hasattr(stream, 'closed') or stream.closed


def is_a_tty(stream):
    # False for objects lacking isatty() entirely (e.g. StringIO).
    return hasattr(stream, 'isatty') and stream.isatty()


class StreamWrapper(object):
    '''
    Wraps a stream (such as stdout), acting as a transparent proxy for all
    attribute access apart from method 'write()', which is delegated to our
    Converter instance.
    '''
    def __init__(self, wrapped, converter):
        # double-underscore everything to prevent clashes with names of
        # attributes on the wrapped stream object.
        self.__wrapped = wrapped
        self.__convertor = converter

    def __getattr__(self, name):
        # Everything except write() falls through to the wrapped stream.
        return getattr(self.__wrapped, name)

    def write(self, text):
        self.__convertor.write(text)


class AnsiToWin32(object):
    '''
    Implements a 'write()' method which, on Windows, will strip ANSI character
    sequences from the text, and if outputting to a tty, will convert them into
    win32 function calls.
    '''
    ANSI_CSI_RE = re.compile('\001?\033\[((?:\d|;)*)([a-zA-Z])\002?')   # Control Sequence Introducer
    ANSI_OSC_RE = re.compile('\001?\033\]((?:.|;)*?)(\x07)\002?')       # Operating System Command

    def __init__(self, wrapped, convert=None, strip=None, autoreset=False):
        # The wrapped stream (normally sys.stdout or sys.stderr)
        self.wrapped = wrapped

        # should we reset colors to defaults after every .write()
        self.autoreset = autoreset

        # create the proxy wrapping our output stream
        self.stream = StreamWrapper(wrapped, self)

        on_windows = os.name == 'nt'
        # We test if the WinAPI works, because even if we are on Windows
        # we may be using a terminal that doesn't support the WinAPI
        # (e.g. Cygwin Terminal). In this case it's up to the terminal
        # to support the ANSI codes.
        conversion_supported = on_windows and winapi_test()

        # should we strip ANSI sequences from our output?
        if strip is None:
            strip = conversion_supported or (not is_stream_closed(wrapped) and not is_a_tty(wrapped))
        self.strip = strip

        # should we convert ANSI sequences into win32 calls?
        if convert is None:
            convert = conversion_supported and not is_stream_closed(wrapped) and is_a_tty(wrapped)
        self.convert = convert

        # dict of ansi codes to win32 functions and parameters
        self.win32_calls = self.get_win32_calls()

        # are we wrapping stderr?
        self.on_stderr = self.wrapped is sys.stderr

    def should_wrap(self):
        '''
        True if this class is actually needed. If false, then the output
        stream will not be affected, nor will win32 calls be issued, so
        wrapping stdout is not actually required. This will generally be
        False on non-Windows platforms, unless optional functionality like
        autoreset has been requested using kwargs to init()
        '''
        return self.convert or self.strip or self.autoreset

    def get_win32_calls(self):
        # Maps each ANSI SGR code to (win32 function, *args); the optional
        # trailing True selects the bright/light color variant.
        if self.convert and winterm:
            return {
                AnsiStyle.RESET_ALL: (winterm.reset_all, ),
                AnsiStyle.BRIGHT: (winterm.style, WinStyle.BRIGHT),
                AnsiStyle.DIM: (winterm.style, WinStyle.NORMAL),
                AnsiStyle.NORMAL: (winterm.style, WinStyle.NORMAL),
                AnsiFore.BLACK: (winterm.fore, WinColor.BLACK),
                AnsiFore.RED: (winterm.fore, WinColor.RED),
                AnsiFore.GREEN: (winterm.fore, WinColor.GREEN),
                AnsiFore.YELLOW: (winterm.fore, WinColor.YELLOW),
                AnsiFore.BLUE: (winterm.fore, WinColor.BLUE),
                AnsiFore.MAGENTA: (winterm.fore, WinColor.MAGENTA),
                AnsiFore.CYAN: (winterm.fore, WinColor.CYAN),
                AnsiFore.WHITE: (winterm.fore, WinColor.GREY),
                AnsiFore.RESET: (winterm.fore, ),
                AnsiFore.LIGHTBLACK_EX: (winterm.fore, WinColor.BLACK, True),
                AnsiFore.LIGHTRED_EX: (winterm.fore, WinColor.RED, True),
                AnsiFore.LIGHTGREEN_EX: (winterm.fore, WinColor.GREEN, True),
                AnsiFore.LIGHTYELLOW_EX: (winterm.fore, WinColor.YELLOW, True),
                AnsiFore.LIGHTBLUE_EX: (winterm.fore, WinColor.BLUE, True),
                AnsiFore.LIGHTMAGENTA_EX: (winterm.fore, WinColor.MAGENTA, True),
                AnsiFore.LIGHTCYAN_EX: (winterm.fore, WinColor.CYAN, True),
                AnsiFore.LIGHTWHITE_EX: (winterm.fore, WinColor.GREY, True),
                AnsiBack.BLACK: (winterm.back, WinColor.BLACK),
                AnsiBack.RED: (winterm.back, WinColor.RED),
                AnsiBack.GREEN: (winterm.back, WinColor.GREEN),
                AnsiBack.YELLOW: (winterm.back, WinColor.YELLOW),
                AnsiBack.BLUE: (winterm.back, WinColor.BLUE),
                AnsiBack.MAGENTA: (winterm.back, WinColor.MAGENTA),
                AnsiBack.CYAN: (winterm.back, WinColor.CYAN),
                AnsiBack.WHITE: (winterm.back, WinColor.GREY),
                AnsiBack.RESET: (winterm.back, ),
                AnsiBack.LIGHTBLACK_EX: (winterm.back, WinColor.BLACK, True),
                AnsiBack.LIGHTRED_EX: (winterm.back, WinColor.RED, True),
                AnsiBack.LIGHTGREEN_EX: (winterm.back, WinColor.GREEN, True),
                AnsiBack.LIGHTYELLOW_EX: (winterm.back, WinColor.YELLOW, True),
                AnsiBack.LIGHTBLUE_EX: (winterm.back, WinColor.BLUE, True),
                AnsiBack.LIGHTMAGENTA_EX: (winterm.back, WinColor.MAGENTA, True),
                AnsiBack.LIGHTCYAN_EX: (winterm.back, WinColor.CYAN, True),
                AnsiBack.LIGHTWHITE_EX: (winterm.back, WinColor.GREY, True),
            }
        return dict()

    def write(self, text):
        # Either strip/convert, or pass straight through; then optionally
        # reset colors so stray sequences don't leak into later output.
        if self.strip or self.convert:
            self.write_and_convert(text)
        else:
            self.wrapped.write(text)
            self.wrapped.flush()
        if self.autoreset:
            self.reset_all()

    def reset_all(self):
        if self.convert:
            self.call_win32('m', (0,))
        elif not self.strip and not is_stream_closed(self.wrapped):
            self.wrapped.write(Style.RESET_ALL)

    def write_and_convert(self, text):
        '''
        Write the given text to our wrapped stream, stripping any ANSI
        sequences from the text, and optionally converting them into win32
        calls.
        '''
        cursor = 0
        text = self.convert_osc(text)
        for match in self.ANSI_CSI_RE.finditer(text):
            start, end = match.span()
            # Emit the plain text before the escape, then act on the escape.
            self.write_plain_text(text, cursor, start)
            self.convert_ansi(*match.groups())
            cursor = end
        self.write_plain_text(text, cursor, len(text))

    def write_plain_text(self, text, start, end):
        if start < end:
            self.wrapped.write(text[start:end])
            self.wrapped.flush()

    def convert_ansi(self, paramstring, command):
        # Only issue win32 calls when conversion is enabled; when merely
        # stripping, the sequence is simply dropped.
        if self.convert:
            params = self.extract_params(command, paramstring)
            self.call_win32(command, params)

    def extract_params(self, command, paramstring):
        # Parse the semicolon-separated numeric parameters, applying the
        # per-command ANSI defaults for missing values.
        if command in 'Hf':
            # Cursor position takes exactly two params, both defaulting to 1.
            params = tuple(int(p) if len(p) != 0 else 1 for p in paramstring.split(';'))
            while len(params) < 2:
                # defaults:
                params = params + (1,)
        else:
            params = tuple(int(p) for p in paramstring.split(';') if len(p) != 0)
            if len(params) == 0:
                # defaults:
                if command in 'JKm':
                    params = (0,)
                elif command in 'ABCD':
                    params = (1,)
        return params

    def call_win32(self, command, params):
        # Dispatch a parsed CSI sequence to the corresponding winterm call.
        if command == 'm':
            for param in params:
                if param in self.win32_calls:
                    func_args = self.win32_calls[param]
                    func = func_args[0]
                    args = func_args[1:]
                    kwargs = dict(on_stderr=self.on_stderr)
                    func(*args, **kwargs)
        elif command in 'J':
            winterm.erase_screen(params[0], on_stderr=self.on_stderr)
        elif command in 'K':
            winterm.erase_line(params[0], on_stderr=self.on_stderr)
        elif command in 'Hf':     # cursor position - absolute
            winterm.set_cursor_position(params, on_stderr=self.on_stderr)
        elif command in 'ABCD':   # cursor position - relative
            n = params[0]
            # A - up, B - down, C - forward, D - back
            x, y = {'A': (0, -n), 'B': (0, n), 'C': (n, 0), 'D': (-n, 0)}[command]
            winterm.cursor_adjust(x, y, on_stderr=self.on_stderr)

    def convert_osc(self, text):
        # Strip OSC sequences from the text, handling title changes inline.
        for match in self.ANSI_OSC_RE.finditer(text):
            start, end = match.span()
            text = text[:start] + text[end:]
            paramstring, command = match.groups()
            if command in '\x07':       # \x07 = BEL
                params = paramstring.split(";")
                # 0 - change title and icon (we will only change title)
                # 1 - change icon (we don't support this)
                # 2 - change title
                if params[0] in '02':
                    # NOTE(review): assumes a title param follows; a bare
                    # "OSC 0 BEL" would raise IndexError here -- confirm.
                    winterm.set_title(params[1])
        return text
# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
'''
This module generates ANSI character codes to printing colors to terminals.
See: http://en.wikipedia.org/wiki/ANSI_escape_code
'''

CSI = '\033['
OSC = '\033]'
BEL = '\007'


def code_to_chars(code):
    """Wrap a numeric SGR code in the CSI ... 'm' escape sequence."""
    return '%s%sm' % (CSI, code)


def set_title(title):
    """Return the OSC sequence that sets the terminal window title."""
    return '%s2;%s%s' % (OSC, title, BEL)


def clear_screen(mode=2):
    """Return the CSI 'J' (erase display) sequence for the given mode."""
    return '%s%sJ' % (CSI, mode)


def clear_line(mode=2):
    """Return the CSI 'K' (erase line) sequence for the given mode."""
    return '%s%sK' % (CSI, mode)


class AnsiCodes(object):
    def __init__(self):
        # Subclasses declare numeric class attributes. Instantiation shadows
        # each of them, on the instance, with the full ANSI escape sequence
        # for that code.
        for attr in dir(self):
            if attr.startswith('_'):
                continue
            setattr(self, attr, code_to_chars(getattr(self, attr)))


class AnsiCursor(object):
    def UP(self, n=1):
        return '%s%sA' % (CSI, n)

    def DOWN(self, n=1):
        return '%s%sB' % (CSI, n)

    def FORWARD(self, n=1):
        return '%s%sC' % (CSI, n)

    def BACK(self, n=1):
        return '%s%sD' % (CSI, n)

    def POS(self, x=1, y=1):
        # Note the ANSI 'H' command takes row (y) first, then column (x).
        return '%s%s;%sH' % (CSI, y, x)


class AnsiFore(AnsiCodes):
    BLACK = 30
    RED = 31
    GREEN = 32
    YELLOW = 33
    BLUE = 34
    MAGENTA = 35
    CYAN = 36
    WHITE = 37
    RESET = 39

    # These are fairly well supported, but not part of the standard.
    LIGHTBLACK_EX = 90
    LIGHTRED_EX = 91
    LIGHTGREEN_EX = 92
    LIGHTYELLOW_EX = 93
    LIGHTBLUE_EX = 94
    LIGHTMAGENTA_EX = 95
    LIGHTCYAN_EX = 96
    LIGHTWHITE_EX = 97


class AnsiBack(AnsiCodes):
    BLACK = 40
    RED = 41
    GREEN = 42
    YELLOW = 43
    BLUE = 44
    MAGENTA = 45
    CYAN = 46
    WHITE = 47
    RESET = 49

    # These are fairly well supported, but not part of the standard.
    LIGHTBLACK_EX = 100
    LIGHTRED_EX = 101
    LIGHTGREEN_EX = 102
    LIGHTYELLOW_EX = 103
    LIGHTBLUE_EX = 104
    LIGHTMAGENTA_EX = 105
    LIGHTCYAN_EX = 106
    LIGHTWHITE_EX = 107


class AnsiStyle(AnsiCodes):
    BRIGHT = 1
    DIM = 2
    NORMAL = 22
    RESET_ALL = 0


Fore = AnsiFore()
Back = AnsiBack()
Style = AnsiStyle()
Cursor = AnsiCursor()
# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.from .initialise import init, deinit, reinit, colorama_textfrom .ansi import Fore, Back, Style, Cursorfrom .ansitowin32 import AnsiToWin32__version__ = '0.3.7'
from .adapter import CacheControlAdapter
from .cache import DictCache


def CacheControl(sess,
                 cache=None,
                 cache_etags=True,
                 serializer=None,
                 heuristic=None):
    """Attach a caching adapter to a requests Session and return it.

    Falls back to an in-memory DictCache when no cache is supplied.
    """
    cache = cache or DictCache()
    adapter = CacheControlAdapter(
        cache,
        cache_etags=cache_etags,
        serializer=serializer,
        heuristic=heuristic,
    )
    # Route both plain and TLS traffic through the caching adapter.
    for scheme_prefix in ('http://', 'https://'):
        sess.mount(scheme_prefix, adapter)
    return sess
# Cache entry (de)serialization: wire format "cc=N,<payload>" where v2 is
# zlib-compressed JSON with base64-encoded binary fields.
import base64
import io
import json
import zlib

from pip._vendor.requests.structures import CaseInsensitiveDict

from .compat import HTTPResponse, pickle, text_type


def _b64_encode_bytes(b):
    # bytes -> ascii str, so the value is JSON-serializable.
    return base64.b64encode(b).decode("ascii")


def _b64_encode_str(s):
    return _b64_encode_bytes(s.encode("utf8"))


def _b64_encode(s):
    # Accept either text or bytes.
    if isinstance(s, text_type):
        return _b64_encode_str(s)
    return _b64_encode_bytes(s)


def _b64_decode_bytes(b):
    return base64.b64decode(b.encode("ascii"))


def _b64_decode_str(s):
    return _b64_decode_bytes(s).decode("utf8")


class Serializer(object):
    """Serializes/deserializes (request, response) pairs for the cache."""

    def dumps(self, request, response, body=None):
        """Return the v2 wire-format bytes for a urllib3 response."""
        response_headers = CaseInsensitiveDict(response.headers)

        if body is None:
            body = response.read(decode_content=False)

            # NOTE: 99% sure this is dead code. I'm only leaving it
            #       here b/c I don't have a test yet to prove
            #       it. Basically, before using
            #       `cachecontrol.filewrapper.CallbackFileWrapper`,
            #       this made an effort to reset the file handle. The
            #       `CallbackFileWrapper` short circuits this code by
            #       setting the body as the content is consumed, the
            #       result being a `body` argument is *always* passed
            #       into cache_response, and in turn,
            #       `Serializer.dump`.
            response._fp = io.BytesIO(body)

        # All binary/unicode fields are base64'd so the dict is pure JSON.
        data = {
            "response": {
                "body": _b64_encode_bytes(body),
                "headers": dict(
                    (_b64_encode(k), _b64_encode(v))
                    for k, v in response.headers.items()
                ),
                "status": response.status,
                "version": response.version,
                "reason": _b64_encode_str(response.reason),
                "strict": response.strict,
                "decode_content": response.decode_content,
            },
        }

        # Construct our vary headers
        data["vary"] = {}
        if "vary" in response_headers:
            varied_headers = response_headers['vary'].split(',')
            for header in varied_headers:
                header = header.strip()
                data["vary"][header] = request.headers.get(header, None)

        # Encode our Vary headers to ensure they can be serialized as JSON
        data["vary"] = dict(
            (_b64_encode(k), _b64_encode(v) if v is not None else v)
            for k, v in data["vary"].items()
        )

        return b",".join([
            b"cc=2",
            zlib.compress(
                json.dumps(
                    data, separators=(",", ":"), sort_keys=True,
                ).encode("utf8"),
            ),
        ])

    def loads(self, request, data):
        """Deserialize cache bytes, dispatching on the embedded version."""
        # Short circuit if we've been given an empty set of data
        if not data:
            return

        # Determine what version of the serializer the data was serialized
        # with
        try:
            ver, data = data.split(b",", 1)
        except ValueError:
            ver = b"cc=0"

        # Make sure that our "ver" is actually a version and isn't a false
        # positive from a , being in the data stream.
        if ver[:3] != b"cc=":
            data = ver + data
            ver = b"cc=0"

        # Get the version number out of the cc=N
        ver = ver.split(b"=", 1)[-1].decode("ascii")

        # Dispatch to the actual load method for the given version
        try:
            return getattr(self, "_loads_v{0}".format(ver))(request, data)
        except AttributeError:
            # This is a version we don't have a loads function for, so we'll
            # just treat it as a miss and return None
            return

    def prepare_response(self, request, cached):
        """Verify our vary headers match and construct a real urllib3
        HTTPResponse object.
        """
        # Special case the '*' Vary value as it means we cannot actually
        # determine if the cached response is suitable for this request.
        if "*" in cached.get("vary", {}):
            return

        # Ensure that the Vary headers for the cached response match our
        # request
        for header, value in cached.get("vary", {}).items():
            if request.headers.get(header, None) != value:
                return

        body_raw = cached["response"].pop("body")

        headers = CaseInsensitiveDict(data=cached['response']['headers'])
        # The cached body is already de-chunked; advertising chunked encoding
        # again would corrupt the reconstructed response.
        if headers.get('transfer-encoding', '') == 'chunked':
            headers.pop('transfer-encoding')

        cached['response']['headers'] = headers

        try:
            body = io.BytesIO(body_raw)
        except TypeError:
            # This can happen if cachecontrol serialized to v1 format (pickle)
            # using Python 2. A Python 2 str(byte string) will be unpickled as
            # a Python 3 str (unicode string), which will cause the above to
            # fail with:
            #
            #     TypeError: 'str' does not support the buffer interface
            body = io.BytesIO(body_raw.encode('utf8'))

        return HTTPResponse(
            body=body,
            preload_content=False,
            **cached["response"]
        )

    def _loads_v0(self, request, data):
        # The original legacy cache data. This doesn't contain enough
        # information to construct everything we need, so we'll treat this as
        # a miss.
        return

    def _loads_v1(self, request, data):
        # v1 entries are pickled dicts.
        try:
            cached = pickle.loads(data)
        except ValueError:
            return

        return self.prepare_response(request, cached)

    def _loads_v2(self, request, data):
        # v2 entries are zlib-compressed JSON with base64'd binary fields.
        try:
            cached = json.loads(zlib.decompress(data).decode("utf8"))
        except ValueError:
            return

        # We need to decode the items that we've base64 encoded
        cached["response"]["body"] = _b64_decode_bytes(
            cached["response"]["body"]
        )
        cached["response"]["headers"] = dict(
            (_b64_decode_str(k), _b64_decode_str(v))
            for k, v in cached["response"]["headers"].items()
        )
        cached["response"]["reason"] = _b64_decode_str(
            cached["response"]["reason"],
        )
        cached["vary"] = dict(
            (_b64_decode_str(k), _b64_decode_str(v) if v is not None else v)
            for k, v in cached["vary"].items()
        )

        return self.prepare_response(request, cached)
# Freshness heuristics that can rewrite response headers before caching.
import calendar
import time

from email.utils import formatdate, parsedate, parsedate_tz

from datetime import datetime, timedelta

TIME_FMT = "%a, %d %b %Y %H:%M:%S GMT"


def expire_after(delta, date=None):
    # Return `date` (default: now) shifted forward by `delta`.
    date = date or datetime.now()
    return date + delta


def datetime_to_header(dt):
    # Format a datetime as an RFC 1123 HTTP date string.
    return formatdate(calendar.timegm(dt.timetuple()))


class BaseHeuristic(object):
    """Base class for cache freshness heuristics."""

    def warning(self, response):
        """
        Return a valid 1xx warning header value describing the cache
        adjustments.

        The response is provided to allow warnings like 113
        http://tools.ietf.org/html/rfc7234#section-5.5.4 where we need
        to explicitly say response is over 24 hours old.
        """
        return '110 - "Response is Stale"'

    def update_headers(self, response):
        """Update the response headers with any new headers.

        NOTE: This SHOULD always include some Warning header to
              signify that the response was cached by the client, not
              by way of the provided headers.
        """
        return {}

    def apply(self, response):
        # Apply this heuristic to a response in place; only add a Warning
        # header when the heuristic actually changed something.
        updated_headers = self.update_headers(response)

        if updated_headers:
            response.headers.update(updated_headers)
            warning_header_value = self.warning(response)
            if warning_header_value is not None:
                response.headers.update({'Warning': warning_header_value})

        return response


class OneDayCache(BaseHeuristic):
    """
    Cache the response by providing an expires 1 day in the
    future.
    """
    def update_headers(self, response):
        headers = {}

        if 'expires' not in response.headers:
            # Expire 24h after the server's own Date header.
            date = parsedate(response.headers['date'])
            expires = expire_after(timedelta(days=1),
                                   date=datetime(*date[:6]))
            headers['expires'] = datetime_to_header(expires)
            headers['cache-control'] = 'public'
        return headers


class ExpiresAfter(BaseHeuristic):
    """
    Cache **all** requests for a defined time period.
    """

    def __init__(self, **kw):
        # kw are timedelta keyword args, e.g. ExpiresAfter(days=1).
        self.delta = timedelta(**kw)

    def update_headers(self, response):
        expires = expire_after(self.delta)
        return {
            'expires': datetime_to_header(expires),
            'cache-control': 'public',
        }

    def warning(self, response):
        tmpl = '110 - Automatically cached for %s. Response might be stale'
        return tmpl % self.delta


class LastModified(BaseHeuristic):
    """
    If there is no Expires header already, fall back on Last-Modified
    using the heuristic from
    http://tools.ietf.org/html/rfc7234#section-4.2.2
    to calculate a reasonable value.

    Firefox also does something like this per
    https://developer.mozilla.org/en-US/docs/Web/HTTP/Caching_FAQ
    http://lxr.mozilla.org/mozilla-release/source/netwerk/protocol/http/nsHttpResponseHead.cpp#397
    Unlike mozilla we limit this to 24-hr.
    """
    # Status codes RFC 7231 6.1 treats as cacheable by default.
    cacheable_by_default_statuses = set([
        200, 203, 204, 206, 300, 301, 404, 405, 410, 414, 501
    ])

    def update_headers(self, resp):
        headers = resp.headers

        # Only apply when no explicit freshness information exists and the
        # status is heuristically cacheable.
        if 'expires' in headers:
            return {}

        if 'cache-control' in headers and headers['cache-control'] != 'public':
            return {}

        if resp.status not in self.cacheable_by_default_statuses:
            return {}

        if 'date' not in headers or 'last-modified' not in headers:
            return {}

        date = calendar.timegm(parsedate_tz(headers['date']))
        last_modified = parsedate(headers['last-modified'])
        if date is None or last_modified is None:
            return {}

        # Freshness lifetime = 10% of the Date/Last-Modified age, capped
        # at 24 hours (per the module docstring above).
        now = time.time()
        current_age = max(0, now - date)
        delta = date - calendar.timegm(last_modified)
        freshness_lifetime = max(0, min(delta / 10, 24 * 3600))
        if freshness_lifetime <= current_age:
            return {}

        expires = date + freshness_lifetime
        return {'expires': time.strftime(TIME_FMT, time.gmtime(expires))}

    def warning(self, resp):
        # No warning: the heuristic never serves anything older than 24h.
        return None
from io import BytesIO


class CallbackFileWrapper(object):
    """
    Small wrapper around a fp object which will tee everything read into a
    buffer, and when that file is closed it will execute a callback with the
    contents of that buffer.

    All attributes are proxied to the underlying file object.

    This class uses members with a double underscore (__) leading prefix so as
    not to accidentally shadow an attribute.
    """
    def __init__(self, fp, callback):
        # __buf accumulates everything read from __fp; __callback receives
        # the full buffer contents once __fp is exhausted/closed.
        self.__buf = BytesIO()
        self.__fp = fp
        self.__callback = callback

    def __getattr__(self, name):
        # The vagaries of garbage collection means that self.__fp is
        # not always set. By using __getattribute__ and the private
        # name[0] allows looking up the attribute value and raising an
        # AttributeError when it doesn't exist. This stops things from
        # infinitely recursing calls to getattr in the case where
        # self.__fp hasn't been set.
        #
        # [0] https://docs.python.org/2/reference/expressions.html#atom-identifiers
        fp = self.__getattribute__('_CallbackFileWrapper__fp')
        return getattr(fp, name)

    def __is_fp_closed(self):
        # Heuristically detect whether the wrapped fp has been exhausted:
        # httplib sets .fp to None, file objects expose .closed.
        try:
            return self.__fp.fp is None
        except AttributeError:
            pass

        try:
            return self.__fp.closed
        except AttributeError:
            pass

        # We just don't cache it then.
        # TODO: Add some logging here...
        return False

    def _close(self):
        if self.__callback:
            self.__callback(self.__buf.getvalue())

        # We assign this to None here, because otherwise we can get into
        # really tricky problems where the CPython interpreter dead locks
        # because the callback is holding a reference to something which
        # has a __del__ method. Setting this to None breaks the cycle
        # and allows the garbage collector to do it's thing normally.
        self.__callback = None

    def read(self, amt=None):
        # Tee everything read into the buffer; fire the callback once done.
        data = self.__fp.read(amt)
        self.__buf.write(data)
        if self.__is_fp_closed():
            self._close()

        return data

    def _safe_read(self, amt):
        data = self.__fp._safe_read(amt)

        if amt == 2 and data == b'\r\n':
            # urllib executes this read to toss the CRLF at the end
            # of the chunk.
            return data

        self.__buf.write(data)
        if self.__is_fp_closed():
            self._close()

        return data
"""The httplib2 algorithms ported for use with requests."""import loggingimport reimport calendarimport timefrom email.utils import parsedate_tzfrom pip._vendor.requests.structures import CaseInsensitiveDictfrom .cache import DictCachefrom .serialize import Serializerlogger = logging.getLogger(__name__)URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?")def parse_uri(uri):"""Parses a URI using the regex given in Appendix B of RFC 3986.(scheme, authority, path, query, fragment) = parse_uri(uri)"""groups = URI.match(uri).groups()return (groups[1], groups[3], groups[4], groups[6], groups[8])class CacheController(object):"""An interface to see if request should cached or not."""def __init__(self, cache=None, cache_etags=True, serializer=None):self.cache = cache or DictCache()self.cache_etags = cache_etagsself.serializer = serializer or Serializer()@classmethoddef _urlnorm(cls, uri):"""Normalize the URL to create a safe key for the cache"""(scheme, authority, path, query, fragment) = parse_uri(uri)if not scheme or not authority:raise Exception("Only absolute URIs are allowed. uri = %s" % uri)scheme = scheme.lower()authority = authority.lower()if not path:path = "/"# Could do syntax based normalization of the URI before# computing the digest. 
See Section 6.2.2 of Std 66.request_uri = query and "?".join([path, query]) or pathdefrag_uri = scheme + "://" + authority + request_urireturn defrag_uri@classmethoddef cache_url(cls, uri):return cls._urlnorm(uri)def parse_cache_control(self, headers):"""Parse the cache control headers returning a dictionary with valuesfor the different directives."""retval = {}cc_header = 'cache-control'if 'Cache-Control' in headers:cc_header = 'Cache-Control'if cc_header in headers:parts = headers[cc_header].split(',')parts_with_args = [tuple([x.strip().lower() for x in part.split("=", 1)])for part in parts if -1 != part.find("=")]parts_wo_args = [(name.strip().lower(), 1)for name in parts if -1 == name.find("=")]retval = dict(parts_with_args + parts_wo_args)return retvaldef cached_request(self, request):"""Return a cached response if it exists in the cache, otherwisereturn False."""cache_url = self.cache_url(request.url)logger.debug('Looking up "%s" in the cache', cache_url)cc = self.parse_cache_control(request.headers)# Bail out if the request insists on fresh dataif 'no-cache' in cc:logger.debug('Request header has "no-cache", cache bypassed')return Falseif 'max-age' in cc and cc['max-age'] == 0:logger.debug('Request header has "max_age" as 0, cache bypassed')return False# Request allows serving from the cache, let's see if we find somethingcache_data = self.cache.get(cache_url)if cache_data is None:logger.debug('No cache entry available')return False# Check whether it can be deserializedresp = self.serializer.loads(request, cache_data)if not resp:logger.warning('Cache entry deserialization failed, entry ignored')return False# If we have a cached 301, return it immediately. 
We don't# need to test our response for other headers b/c it is# intrinsically "cacheable" as it is Permanent.# See:# https://tools.ietf.org/html/rfc7231#section-6.4.2## Client can try to refresh the value by repeating the request# with cache busting headers as usual (ie no-cache).if resp.status == 301:msg = ('Returning cached "301 Moved Permanently" response ''(ignoring date and etag information)')logger.debug(msg)return respheaders = CaseInsensitiveDict(resp.headers)if not headers or 'date' not in headers:if 'etag' not in headers:# Without date or etag, the cached response can never be used# and should be deleted.logger.debug('Purging cached response: no date or etag')self.cache.delete(cache_url)logger.debug('Ignoring cached response: no date')return Falsenow = time.time()date = calendar.timegm(parsedate_tz(headers['date']))current_age = max(0, now - date)logger.debug('Current age based on date: %i', current_age)# TODO: There is an assumption that the result will be a# urllib3 response object. This may not be best since we# could probably avoid instantiating or constructing the# response until we know we need it.resp_cc = self.parse_cache_control(headers)# determine freshnessfreshness_lifetime = 0# Check the max-age pragma in the cache control headerif 'max-age' in resp_cc and resp_cc['max-age'].isdigit():freshness_lifetime = int(resp_cc['max-age'])logger.debug('Freshness lifetime from max-age: %i',freshness_lifetime)# If there isn't a max-age, check for an expires headerelif 'expires' in headers:expires = parsedate_tz(headers['expires'])if expires is not None:expire_time = calendar.timegm(expires) - datefreshness_lifetime = max(0, expire_time)logger.debug("Freshness lifetime from expires: %i",freshness_lifetime)# Determine if we are setting freshness limit in the# request. 
Note, this overrides what was in the response.if 'max-age' in cc:try:freshness_lifetime = int(cc['max-age'])logger.debug('Freshness lifetime from request max-age: %i',freshness_lifetime)except ValueError:freshness_lifetime = 0if 'min-fresh' in cc:try:min_fresh = int(cc['min-fresh'])except ValueError:min_fresh = 0# adjust our current age by our min freshcurrent_age += min_freshlogger.debug('Adjusted current age from min-fresh: %i',current_age)# Return entry if it is fresh enoughif freshness_lifetime > current_age:logger.debug('The response is "fresh", returning cached response')logger.debug('%i > %i', freshness_lifetime, current_age)return resp# we're not fresh. If we don't have an Etag, clear it outif 'etag' not in headers:logger.debug('The cached response is "stale" with no etag, purging')self.cache.delete(cache_url)# return the original handlerreturn Falsedef conditional_headers(self, request):cache_url = self.cache_url(request.url)resp = self.serializer.loads(request, self.cache.get(cache_url))new_headers = {}if resp:headers = CaseInsensitiveDict(resp.headers)if 'etag' in headers:new_headers['If-None-Match'] = headers['ETag']if 'last-modified' in headers:new_headers['If-Modified-Since'] = headers['Last-Modified']return new_headersdef cache_response(self, request, response, body=None):"""Algorithm for caching requests.This assumes a requests Response object."""# From httplib2: Don't cache 206's since we aren't going to# handle byte range requestscacheable_status_codes = [200, 203, 300, 301]if response.status not in cacheable_status_codes:logger.debug('Status code %s not in %s',response.status,cacheable_status_codes)returnresponse_headers = CaseInsensitiveDict(response.headers)# If we've been given a body, our response has a Content-Length, that# Content-Length is valid then we can check to see if the body we've# been given matches the expected size, and if it doesn't we'll just# skip trying to cache it.if (body is not None and"content-length" in response_headers 
andresponse_headers["content-length"].isdigit() andint(response_headers["content-length"]) != len(body)):returncc_req = self.parse_cache_control(request.headers)cc = self.parse_cache_control(response_headers)cache_url = self.cache_url(request.url)logger.debug('Updating cache with response from "%s"', cache_url)# Delete it from the cache if we happen to have it stored thereno_store = Falseif cc.get('no-store'):no_store = Truelogger.debug('Response header has "no-store"')if cc_req.get('no-store'):no_store = Truelogger.debug('Request header has "no-store"')if no_store and self.cache.get(cache_url):logger.debug('Purging existing cache entry to honor "no-store"')self.cache.delete(cache_url)# If we've been given an etag, then keep the responseif self.cache_etags and 'etag' in response_headers:logger.debug('Caching due to etag')self.cache.set(cache_url,self.serializer.dumps(request, response, body=body),)# Add to the cache any 301s. We do this before looking that# the Date headers.elif response.status == 301:logger.debug('Caching permanant redirect')self.cache.set(cache_url,self.serializer.dumps(request, response))# Add to the cache if the response headers demand it. 
If there# is no date header then we can't do anything about expiring# the cache.elif 'date' in response_headers:# cache when there is a max-age > 0if cc and cc.get('max-age'):if cc['max-age'].isdigit() and int(cc['max-age']) > 0:logger.debug('Caching b/c date exists and max-age > 0')self.cache.set(cache_url,self.serializer.dumps(request, response, body=body),)# If the request can expire, it means we should cache it# in the meantime.elif 'expires' in response_headers:if response_headers['expires']:logger.debug('Caching b/c of expires header')self.cache.set(cache_url,self.serializer.dumps(request, response, body=body),)def update_cached_response(self, request, response):"""On a 304 we will get a new set of headers that we want toupdate our cached value with, assuming we have one.This should only ever be called when we've sent an ETag andgotten a 304 as the response."""cache_url = self.cache_url(request.url)cached_response = self.serializer.loads(request,self.cache.get(cache_url))if not cached_response:# we didn't have a cached responsereturn response# Lets update our headers with the headers from the new request:# http://tools.ietf.org/html/draft-ietf-httpbis-p4-conditional-26#section-4.1## The server isn't supposed to send headers that would make# the cached body invalid. But... just in case, we'll be sure# to strip out ones we know that might be problmatic due to# typical assumptions.excluded_headers = ["content-length",]cached_response.headers.update(dict((k, v) for k, v in response.headers.items()if k.lower() not in excluded_headers))# we want a 200 b/c we have content via the cachecached_response.status = 200# update our cacheself.cache.set(cache_url,self.serializer.dumps(request, cached_response),)return cached_response
try:from urllib.parse import urljoinexcept ImportError:from urlparse import urljointry:import cPickle as pickleexcept ImportError:import picklefrom pip._vendor.requests.packages.urllib3.response import HTTPResponsefrom pip._vendor.requests.packages.urllib3.util import is_fp_closed# Replicate some six behaviourtry:text_type = (unicode,)except NameError:text_type = (str,)
from __future__ import division

from datetime import datetime


def total_seconds(td):
    """Return the duration of timedelta *td* in seconds (Python 2.6 shim)."""
    # Modern interpreters expose this directly on the timedelta itself.
    if hasattr(td, 'total_seconds'):
        return td.total_seconds()

    # Python 2.6 fallback: fold days and seconds into microseconds, then
    # true-divide (guaranteed by the __future__ import above) back to seconds.
    whole_seconds = td.seconds + td.days * 24 * 3600
    return (td.microseconds + whole_seconds * 10**6) / 10**6


class RedisCache(object):
    """CacheControl backend storing entries in a Redis server."""

    def __init__(self, conn):
        # `conn` is an already-connected redis client instance.
        self.conn = conn

    def get(self, key):
        """Return the stored value for *key*, or None when missing."""
        return self.conn.get(key)

    def set(self, key, value, expires=None):
        """Store *value* under *key*; *expires* is an optional absolute
        datetime after which Redis should drop the entry."""
        if expires:
            # Convert the absolute expiry into a relative TTL for SETEX.
            ttl = expires - datetime.now()
            self.conn.setex(key, total_seconds(ttl), value)
        else:
            self.conn.set(key, value)

    def delete(self, key):
        """Remove *key* from Redis."""
        self.conn.delete(key)

    def clear(self):
        """Delete every key in the connected database. Use with caution!"""
        for stored_key in self.conn.keys():
            self.conn.delete(stored_key)

    def close(self):
        """Drop the underlying Redis connection."""
        self.conn.disconnect()
import hashlib
import os

from pip._vendor.lockfile import LockFile
from pip._vendor.lockfile.mkdirlockfile import MkdirLockFile

from ..cache import BaseCache
from ..controller import CacheController


def _secure_open_write(filename, fmode):
    """Open *filename* for binary writing as a freshly-created file with
    permission bits *fmode*, refusing to reuse an existing file or follow
    a symlink. Returns the writable file object; raises OSError if the
    exclusive create fails.
    """
    # We only want to write to this file, so open it in write only mode
    flags = os.O_WRONLY

    # os.O_CREAT | os.O_EXCL will fail if the file already exists, so we only
    #  will open *new* files.
    # We specify this because we want to ensure that the mode we pass is the
    # mode of the file.
    flags |= os.O_CREAT | os.O_EXCL

    # Do not follow symlinks to prevent someone from making a symlink that
    # we follow and insecurely open a cache file.
    if hasattr(os, "O_NOFOLLOW"):
        flags |= os.O_NOFOLLOW

    # On Windows we'll mark this file as binary
    if hasattr(os, "O_BINARY"):
        flags |= os.O_BINARY

    # Before we open our file, we want to delete any existing file that is
    # there
    try:
        os.remove(filename)
    except (IOError, OSError):
        # The file must not exist already, so we can just skip ahead to opening
        pass

    # Open our file, the use of os.O_CREAT | os.O_EXCL will ensure that if a
    # race condition happens between the os.remove and this line, that an
    # error will be raised. Because we utilize a lockfile this should only
    # happen if someone is attempting to attack us.
    fd = os.open(filename, flags, fmode)
    try:
        return os.fdopen(fd, "wb")
    except:
        # An error occurred wrapping our FD in a file object
        os.close(fd)
        raise


class FileCache(BaseCache):
    """CacheControl backend that stores each entry in its own file on disk."""

    def __init__(self, directory, forever=False, filemode=0o0600,
                 dirmode=0o0700, use_dir_lock=None, lock_class=None):
        # `use_dir_lock` and `lock_class` are two spellings of the same
        # choice; supplying both is ambiguous, so reject it.
        if use_dir_lock is not None and lock_class is not None:
            raise ValueError("Cannot use use_dir_lock and lock_class together")

        if use_dir_lock:
            lock_class = MkdirLockFile

        if lock_class is None:
            lock_class = LockFile

        self.directory = directory
        self.forever = forever      # when True, delete() leaves files in place
        self.filemode = filemode    # permission bits for entry files
        self.dirmode = dirmode      # permission bits for created directories
        self.lock_class = lock_class

    @staticmethod
    def encode(x):
        """Hash a cache key into the stable hex digest used as its filename."""
        return hashlib.sha224(x.encode()).hexdigest()

    def _fn(self, name):
        """Map a cache key to its on-disk path, fanned out across five
        single-character subdirectories of the digest."""
        # NOTE: This method should not change as some may depend on it.
        # See: https://github.com/ionrock/cachecontrol/issues/63
        hashed = self.encode(name)
        parts = list(hashed[:5]) + [hashed]
        return os.path.join(self.directory, *parts)

    def get(self, key):
        """Return the stored bytes for *key*, or None if no file exists."""
        name = self._fn(key)
        if not os.path.exists(name):
            return None

        with open(name, 'rb') as fh:
            return fh.read()

    def set(self, key, value):
        """Write *value* for *key*, creating parent directories as needed
        and holding a lockfile around the secure write."""
        name = self._fn(key)

        # Make sure the directory exists
        try:
            os.makedirs(os.path.dirname(name), self.dirmode)
        except (IOError, OSError):
            pass

        with self.lock_class(name) as lock:
            # Write our actual file
            with _secure_open_write(lock.path, self.filemode) as fh:
                fh.write(value)

    def delete(self, key):
        """Remove the entry for *key*, unless the cache is marked forever."""
        name = self._fn(key)
        if not self.forever:
            os.remove(name)


def url_to_file_path(url, filecache):
    """Return the file cache path based on the URL.

    This does not ensure the file exists!
    """
    key = CacheController.cache_url(url)
    return filecache._fn(key)
from textwrap import dedent

# FileCache depends on the third-party ``lockfile`` package; if it is not
# installed, print a notice rather than failing the whole package import.
try:
    from .file_cache import FileCache
except ImportError:
    notice = dedent('''
    NOTE: In order to use the FileCache you must have
    lockfile installed. You can install it via pip:
      pip install lockfile
    ''')
    print(notice)


# RedisCache is only usable when the ``redis`` client library is available;
# silently skip the export otherwise.
try:
    import redis
    from .redis_cache import RedisCache
except ImportError:
    pass
"""
The cache object API for implementing caches. The default is a threadsafe
in-memory dictionary.
"""
from threading import Lock


class BaseCache(object):
    """Abstract interface that cache backends must implement.

    Subclasses provide ``get``, ``set`` and ``delete``; ``close`` is
    optional and only needed by backends holding external resources.
    """

    def get(self, key):
        """Return the cached value for *key*, or None when absent."""
        # BUG FIX: this previously read ``raise NotImplemented()``, which
        # fails with TypeError ("'NotImplementedType' object is not
        # callable") because NotImplemented is a rich-comparison sentinel,
        # not an exception. NotImplementedError is the correct exception
        # for an abstract method.
        raise NotImplementedError()

    def set(self, key, value):
        """Store *value* under *key*."""
        raise NotImplementedError()

    def delete(self, key):
        """Remove *key* from the cache; a missing key should be a no-op."""
        raise NotImplementedError()

    def close(self):
        """Release any resources held by the cache (default: nothing)."""
        pass


class DictCache(BaseCache):
    """Thread-safe in-memory cache backed by a plain dict."""

    def __init__(self, init_dict=None):
        # The lock serializes mutations (set/delete); plain reads go
        # through dict.get and intentionally take no lock here.
        self.lock = Lock()
        self.data = init_dict or {}

    def get(self, key):
        return self.data.get(key, None)

    def set(self, key, value):
        with self.lock:
            self.data.update({key: value})

    def delete(self, key):
        with self.lock:
            if key in self.data:
                self.data.pop(key)
import types
import functools

from pip._vendor.requests.adapters import HTTPAdapter

from .controller import CacheController
from .cache import DictCache
from .filewrapper import CallbackFileWrapper


class CacheControlAdapter(HTTPAdapter):
    """A requests transport adapter that serves GETs from a cache and
    stores cacheable responses, per the controller's HTTP caching rules."""

    # Methods whose success invalidates any cached entry for the URL.
    invalidating_methods = set(['PUT', 'DELETE'])

    def __init__(self, cache=None,
                 cache_etags=True,
                 controller_class=None,
                 serializer=None,
                 heuristic=None,
                 *args, **kw):
        super(CacheControlAdapter, self).__init__(*args, **kw)
        self.cache = cache or DictCache()
        self.heuristic = heuristic

        # The controller implements the actual caching policy; it is
        # pluggable so callers can override the rules.
        controller_factory = controller_class or CacheController
        self.controller = controller_factory(
            self.cache,
            cache_etags=cache_etags,
            serializer=serializer,
        )

    def send(self, request, **kw):
        """Send a request. Use the request information to see if it
        exists in the cache and cache the response if we need to and can.
        """
        if request.method == 'GET':
            cached_response = self.controller.cached_request(request)
            if cached_response:
                # Serve straight from the cache without hitting the network.
                return self.build_response(request, cached_response,
                                           from_cache=True)

            # check for etags and add headers if appropriate
            request.headers.update(
                self.controller.conditional_headers(request)
            )

        resp = super(CacheControlAdapter, self).send(request, **kw)

        return resp

    def build_response(self, request, response, from_cache=False):
        """Build a response by making a request or using the cache.

        This will end up calling send and returning a potentially
        cached response.
        """
        if not from_cache and request.method == 'GET':
            # Check for any heuristics that might update headers
            # before trying to cache.
            if self.heuristic:
                response = self.heuristic.apply(response)

            # apply any expiration heuristics
            if response.status == 304:
                # We must have sent an ETag request. This could mean
                # that we've been expired already or that we simply
                # have an etag. In either case, we want to try and
                # update the cache if that is the case.
                cached_response = self.controller.update_cached_response(
                    request, response
                )

                if cached_response is not response:
                    from_cache = True

                # We are done with the server response, read a
                # possible response body (compliant servers will
                # not return one, but we cannot be 100% sure) and
                # release the connection back to the pool.
                response.read(decode_content=False)
                response.release_conn()

                response = cached_response

            # We always cache the 301 responses
            elif response.status == 301:
                self.controller.cache_response(request, response)
            else:
                # Wrap the response file with a wrapper that will cache the
                # response when the stream has been consumed.
                response._fp = CallbackFileWrapper(
                    response._fp,
                    functools.partial(
                        self.controller.cache_response,
                        request,
                        response,
                    )
                )
                if response.chunked:
                    # For chunked bodies the wrapper never sees EOF, so
                    # patch the chunk-length bookkeeping to close (and
                    # thereby cache) once the final chunk is reached.
                    super_update_chunk_length = response._update_chunk_length

                    def _update_chunk_length(self):
                        super_update_chunk_length()
                        if self.chunk_left == 0:
                            self._fp._close()
                    response._update_chunk_length = types.MethodType(
                        _update_chunk_length, response
                    )

        resp = super(CacheControlAdapter, self).build_response(
            request, response
        )

        # See if we should invalidate the cache.
        if request.method in self.invalidating_methods and resp.ok:
            cache_url = self.controller.cache_url(request.url)
            self.cache.delete(cache_url)

        # Give the request a from_cache attr to let people use it
        resp.from_cache = from_cache

        return resp

    def close(self):
        # Release cache resources before tearing down the adapter itself.
        self.cache.close()
        super(CacheControlAdapter, self).close()
import logging

from pip._vendor import requests

from pip._vendor.cachecontrol.adapter import CacheControlAdapter
from pip._vendor.cachecontrol.cache import DictCache
from pip._vendor.cachecontrol.controller import logger

from argparse import ArgumentParser


def setup_logging():
    """Route the cache controller's debug output to a stream handler."""
    logger.setLevel(logging.DEBUG)
    stream_handler = logging.StreamHandler()
    logger.addHandler(stream_handler)


def get_session():
    """Build a requests session whose HTTP(S) traffic goes through a
    caching adapter backed by an in-memory dict."""
    caching_adapter = CacheControlAdapter(
        DictCache(),
        cache_etags=True,
        serializer=None,
        heuristic=None,
    )

    sess = requests.Session()
    for prefix in ('http://', 'https://'):
        sess.mount(prefix, caching_adapter)

    # Expose the controller so the caller can poke the cache directly.
    sess.cache_controller = caching_adapter.controller
    return sess


def get_args():
    """Parse the single positional URL argument from the command line."""
    parser = ArgumentParser()
    parser.add_argument('url', help='The URL to try and cache')
    return parser.parse_args()


def main(args=None):
    """Fetch a URL, try to cache the response, and report the result."""
    args = get_args()
    sess = get_session()

    # Perform the request so we have a response worth caching.
    resp = sess.get(args.url)

    # Only enable verbose logging once the network round-trip is done.
    setup_logging()

    # Attempt to store the response...
    sess.cache_controller.cache_response(resp.request, resp.raw)

    # ...then check whether a cached copy can now be retrieved.
    if sess.cache_controller.cached_request(resp.request):
        print('Cached!')
    else:
        print('Not cached :(')


if __name__ == '__main__':
    main()
"""CacheControl import Interface.

Make it easy to import from cachecontrol without long namespaces.
"""

# Package metadata.
__author__ = 'Eric Larson'
__email__ = 'eric@ionrock.org'
__version__ = '0.11.7'

# Re-export the public entry points at package level.
from .wrapper import CacheControl
from .adapter import CacheControlAdapter
from .controller import CacheController
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2005-2010 ActiveState Software Inc.
# Copyright (c) 2013 Eddy Petrișor

"""Utilities for determining application-specific dirs.

See <http://github.com/ActiveState/appdirs> for details and usage.
"""
# Dev Notes:
# - MSDN on where to store app data files:
#   http://support.microsoft.com/default.aspx?scid=kb;en-us;310294#XSLTH3194121123120121120120
# - macOS: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html
# - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html

__version_info__ = (1, 4, 0)
__version__ = '.'.join(map(str, __version_info__))


import sys
import os

PY3 = sys.version_info[0] == 3

if PY3:
    # Python 3 has no `unicode`; the Windows helpers below use it.
    unicode = str

if sys.platform.startswith('java'):
    import platform
    os_name = platform.java_ver()[3][0]
    if os_name.startswith('Windows'):  # "Windows XP", "Windows 7", etc.
        system = 'win32'
    elif os_name.startswith('Mac'):  # "macOS", etc.
        system = 'darwin'
    else:  # "Linux", "SunOS", "FreeBSD", etc.
        # Setting this to "linux2" is not ideal, but only Windows or Mac
        # are actually checked for and the rest of the module expects
        # *sys.platform* style strings.
        system = 'linux2'
else:
    system = sys.platform


def user_data_dir(appname=None, appauthor=None, version=None, roaming=False):
    r"""Return full path to the user-specific data dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "roaming" (boolean, default False) can be set True to use the Windows
            roaming appdata directory. That means that for users on a Windows
            network setup for roaming profiles, this user data will be
            sync'd on login. See
            <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
            for a discussion of issues.

    Typical user data directories are:
        macOS:                  ~/Library/Application Support/<AppName>
        Unix:                   ~/.local/share/<AppName>    # or in $XDG_DATA_HOME, if defined
        Win XP (not roaming):   C:\Documents and Settings\<username>\Application Data\<AppAuthor>\<AppName>
        Win XP (roaming):       C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>
        Win 7  (not roaming):   C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>
        Win 7  (roaming):       C:\Users\<username>\AppData\Roaming\<AppAuthor>\<AppName>

    For Unix, we follow the XDG spec and support $XDG_DATA_HOME.
    That means, by default "~/.local/share/<AppName>".
    """
    if system == "win32":
        if appauthor is None:
            appauthor = appname
        const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA"
        path = os.path.normpath(_get_win_folder(const))
        if appname:
            if appauthor is not False:
                path = os.path.join(path, appauthor, appname)
            else:
                path = os.path.join(path, appname)
    elif system == 'darwin':
        path = os.path.expanduser('~/Library/Application Support/')
        if appname:
            path = os.path.join(path, appname)
    else:
        path = os.getenv('XDG_DATA_HOME', os.path.expanduser("~/.local/share"))
        if appname:
            path = os.path.join(path, appname)
    if appname and version:
        path = os.path.join(path, version)
    return path


def site_data_dir(appname=None, appauthor=None, version=None, multipath=False):
    """Return full path to the user-shared data dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "multipath" is an optional parameter only applicable to *nix
            which indicates that the entire list of data dirs should be
            returned. By default, the first item from XDG_DATA_DIRS is
            returned, or '/usr/local/share/<AppName>',
            if XDG_DATA_DIRS is not set

    Typical user data directories are:
        macOS:      /Library/Application Support/<AppName>
        Unix:       /usr/local/share/<AppName> or /usr/share/<AppName>
        Win XP:     C:\\Documents and Settings\\All Users\\Application Data\\<AppAuthor>\\<AppName>
        Vista:      (Fail! "C:\\ProgramData" is a hidden *system* directory on Vista.)
        Win 7:      C:\\ProgramData\\<AppAuthor>\\<AppName>   # Hidden, but writeable on Win 7.

    For Unix, this is using the $XDG_DATA_DIRS[0] default.

    WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
    """
    if system == "win32":
        if appauthor is None:
            appauthor = appname
        path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA"))
        if appname:
            if appauthor is not False:
                path = os.path.join(path, appauthor, appname)
            else:
                path = os.path.join(path, appname)
    elif system == 'darwin':
        path = os.path.expanduser('/Library/Application Support')
        if appname:
            path = os.path.join(path, appname)
    else:
        # XDG default for $XDG_DATA_DIRS
        # only first, if multipath is False
        path = os.getenv('XDG_DATA_DIRS',
                         os.pathsep.join(['/usr/local/share', '/usr/share']))
        pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)]
        if appname:
            if version:
                appname = os.path.join(appname, version)
            pathlist = [os.sep.join([x, appname]) for x in pathlist]

        if multipath:
            path = os.pathsep.join(pathlist)
        else:
            path = pathlist[0]
        # NOTE: the *nix branch returns early; the version suffix below only
        # applies to the win32/darwin paths.
        return path

    if appname and version:
        path = os.path.join(path, version)
    return path


def user_config_dir(appname=None, appauthor=None, version=None, roaming=False):
    r"""Return full path to the user-specific config dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "roaming" (boolean, default False) can be set True to use the Windows
            roaming appdata directory. That means that for users on a Windows
            network setup for roaming profiles, this user data will be
            sync'd on login. See
            <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
            for a discussion of issues.

    Typical user data directories are:
        macOS:                  same as user_data_dir
        Unix:                   ~/.config/<AppName>     # or in $XDG_CONFIG_HOME, if defined
        Win *:                  same as user_data_dir

    For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME.
    That means, by default "~/.config/<AppName>".
    """
    if system in ["win32", "darwin"]:
        path = user_data_dir(appname, appauthor, None, roaming)
    else:
        path = os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config"))
        if appname:
            path = os.path.join(path, appname)
    if appname and version:
        path = os.path.join(path, version)
    return path


def site_config_dir(appname=None, appauthor=None, version=None, multipath=False):
    """Return full path to the user-shared data dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "multipath" is an optional parameter only applicable to *nix
            which indicates that the entire list of config dirs should be
            returned. By default, the first item from XDG_CONFIG_DIRS is
            returned, or '/etc/xdg/<AppName>', if XDG_CONFIG_DIRS is not set

    Typical user data directories are:
        macOS:      same as site_data_dir
        Unix:       /etc/xdg/<AppName> or $XDG_CONFIG_DIRS[i]/<AppName> for each value in
                    $XDG_CONFIG_DIRS
        Win *:      same as site_data_dir
        Vista:      (Fail! "C:\\ProgramData" is a hidden *system* directory on Vista.)

    For Unix, this is using the $XDG_CONFIG_DIRS[0] default, if multipath=False

    WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
    """
    if system in ["win32", "darwin"]:
        path = site_data_dir(appname, appauthor)
        if appname and version:
            path = os.path.join(path, version)
    else:
        # XDG default for $XDG_CONFIG_DIRS
        # only first, if multipath is False
        path = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg')
        pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)]
        if appname:
            if version:
                appname = os.path.join(appname, version)
            pathlist = [os.sep.join([x, appname]) for x in pathlist]

        if multipath:
            path = os.pathsep.join(pathlist)
        else:
            path = pathlist[0]
    return path


def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True):
    r"""Return full path to the user-specific cache dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "opinion" (boolean) can be False to disable the appending of
            "Cache" to the base app data dir for Windows. See
            discussion below.

    Typical user cache directories are:
        macOS:      ~/Library/Caches/<AppName>
        Unix:       ~/.cache/<AppName> (XDG default)
        Win XP:     C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Cache
        Vista:      C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Cache

    On Windows the only suggestion in the MSDN docs is that local settings go in
    the `CSIDL_LOCAL_APPDATA` directory. This is identical to the non-roaming
    app data dir (the default returned by `user_data_dir` above). Apps typically
    put cache data somewhere *under* the given dir here. Some examples:
        ...\Mozilla\Firefox\Profiles\<ProfileName>\Cache
        ...\Acme\SuperApp\Cache\1.0
    OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value.
    This can be disabled with the `opinion=False` option.
    """
    if system == "win32":
        if appauthor is None:
            appauthor = appname
        path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA"))
        if appname:
            if appauthor is not False:
                path = os.path.join(path, appauthor, appname)
            else:
                path = os.path.join(path, appname)
            if opinion:
                path = os.path.join(path, "Cache")
    elif system == 'darwin':
        path = os.path.expanduser('~/Library/Caches')
        if appname:
            path = os.path.join(path, appname)
    else:
        path = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache'))
        if appname:
            path = os.path.join(path, appname)
    if appname and version:
        path = os.path.join(path, version)
    return path


def user_log_dir(appname=None, appauthor=None, version=None, opinion=True):
    r"""Return full path to the user-specific log dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "opinion" (boolean) can be False to disable the appending of
            "Logs" to the base app data dir for Windows, and "log" to the
            base cache dir for Unix. See discussion below.

    Typical user cache directories are:
        macOS:      ~/Library/Logs/<AppName>
        Unix:       ~/.cache/<AppName>/log  # or under $XDG_CACHE_HOME if defined
        Win XP:     C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Logs
        Vista:      C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Logs

    On Windows the only suggestion in the MSDN docs is that local settings
    go in the `CSIDL_LOCAL_APPDATA` directory. (Note: I'm interested in
    examples of what some windows apps use for a logs dir.)

    OPINION: This function appends "Logs" to the `CSIDL_LOCAL_APPDATA`
    value for Windows and appends "log" to the user cache dir for Unix.
    This can be disabled with the `opinion=False` option.
    """
    if system == "darwin":
        path = os.path.join(
            os.path.expanduser('~/Library/Logs'),
            appname)
    elif system == "win32":
        path = user_data_dir(appname, appauthor, version)
        # version is already baked into the base dir; don't append it twice.
        version = False
        if opinion:
            path = os.path.join(path, "Logs")
    else:
        path = user_cache_dir(appname, appauthor, version)
        # version is already baked into the base dir; don't append it twice.
        version = False
        if opinion:
            path = os.path.join(path, "log")
    if appname and version:
        path = os.path.join(path, version)
    return path


class AppDirs(object):
    """Convenience wrapper for getting application dirs."""

    def __init__(self, appname, appauthor=None, version=None, roaming=False,
                 multipath=False):
        self.appname = appname
        self.appauthor = appauthor
        self.version = version
        self.roaming = roaming
        self.multipath = multipath

    @property
    def user_data_dir(self):
        return user_data_dir(self.appname, self.appauthor,
                             version=self.version, roaming=self.roaming)

    @property
    def site_data_dir(self):
        return site_data_dir(self.appname, self.appauthor,
                             version=self.version, multipath=self.multipath)

    @property
    def user_config_dir(self):
        return user_config_dir(self.appname, self.appauthor,
                               version=self.version, roaming=self.roaming)

    @property
    def site_config_dir(self):
        return site_config_dir(self.appname, self.appauthor,
                               version=self.version, multipath=self.multipath)

    @property
    def user_cache_dir(self):
        return user_cache_dir(self.appname, self.appauthor,
                              version=self.version)

    @property
    def user_log_dir(self):
        return user_log_dir(self.appname, self.appauthor,
                            version=self.version)


#---- internal support stuff

def _get_win_folder_from_registry(csidl_name):
    """This is a fallback technique at best. I'm not sure if using the
    registry for this guarantees us the correct answer for all CSIDL_*
    names.
    """
    import _winreg

    shell_folder_name = {
        "CSIDL_APPDATA": "AppData",
        "CSIDL_COMMON_APPDATA": "Common AppData",
        "CSIDL_LOCAL_APPDATA": "Local AppData",
    }[csidl_name]

    key = _winreg.OpenKey(
        _winreg.HKEY_CURRENT_USER,
        r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders"
    )
    dir, type = _winreg.QueryValueEx(key, shell_folder_name)
    return dir


def _get_win_folder_with_pywin32(csidl_name):
    from win32com.shell import shellcon, shell
    dir = shell.SHGetFolderPath(0, getattr(shellcon, csidl_name), 0, 0)
    # Try to make this a unicode path because SHGetFolderPath does
    # not return unicode strings when there is unicode data in the
    # path.
    try:
        dir = unicode(dir)

        # Downgrade to short path name if have highbit chars. See
        # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
        has_high_char = False
        for c in dir:
            if ord(c) > 255:
                has_high_char = True
                break
        if has_high_char:
            try:
                import win32api
                dir = win32api.GetShortPathName(dir)
            except ImportError:
                pass
    except UnicodeError:
        pass
    return dir


def _get_win_folder_with_ctypes(csidl_name):
    import ctypes

    csidl_const = {
        "CSIDL_APPDATA": 26,
        "CSIDL_COMMON_APPDATA": 35,
        "CSIDL_LOCAL_APPDATA": 28,
    }[csidl_name]

    buf = ctypes.create_unicode_buffer(1024)
    ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)

    # Downgrade to short path name if have highbit chars. See
    # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
    has_high_char = False
    for c in buf:
        if ord(c) > 255:
            has_high_char = True
            break
    if has_high_char:
        buf2 = ctypes.create_unicode_buffer(1024)
        if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
            buf = buf2

    return buf.value


def _get_win_folder_with_jna(csidl_name):
    import array
    from com.sun import jna
    from com.sun.jna.platform import win32

    buf_size = win32.WinDef.MAX_PATH * 2
    buf = array.zeros('c', buf_size)
    shell = win32.Shell32.INSTANCE
    shell.SHGetFolderPath(None, getattr(win32.ShlObj, csidl_name), None, win32.ShlObj.SHGFP_TYPE_CURRENT, buf)
    dir = jna.Native.toString(buf.tostring()).rstrip("\0")

    # Downgrade to short path name if have highbit chars. See
    # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
    has_high_char = False
    for c in dir:
        if ord(c) > 255:
            has_high_char = True
            break
    if has_high_char:
        buf = array.zeros('c', buf_size)
        kernel = win32.Kernel32.INSTANCE
        # BUG FIX: this previously referenced the undefined name ``kernal``
        # (NameError at runtime); the variable defined above is ``kernel``.
        if kernel.GetShortPathName(dir, buf, buf_size):
            dir = jna.Native.toString(buf.tostring()).rstrip("\0")

    return dir


if system == "win32":
    try:
        import win32com.shell
        _get_win_folder = _get_win_folder_with_pywin32
    except ImportError:
        try:
            from ctypes import windll
            _get_win_folder = _get_win_folder_with_ctypes
        except ImportError:
            try:
                import com.sun.jna
                _get_win_folder = _get_win_folder_with_jna
            except ImportError:
                _get_win_folder = _get_win_folder_from_registry


#---- self test code

if __name__ == "__main__":
    appname = "MyApp"
    appauthor = "MyCompany"

    props = ("user_data_dir", "site_data_dir",
             "user_config_dir", "site_config_dir",
             "user_cache_dir", "user_log_dir")

    print("-- app dirs (with optional 'version')")
    dirs = AppDirs(appname, appauthor, version="1.0")
    for prop in props:
        print("%s: %s" % (prop, getattr(dirs, prop)))

    print("\n-- app dirs (without optional 'version')")
    dirs = AppDirs(appname, appauthor)
    for prop in props:
        print("%s: %s" % (prop, getattr(dirs, prop)))

    print("\n-- app dirs (without optional 'appauthor')")
    dirs = AppDirs(appname)
    for prop in props:
        print("%s: %s" % (prop, getattr(dirs, prop)))

    print("\n-- app dirs (with disabled 'appauthor')")
    dirs = AppDirs(appname, appauthor=False)
    for prop in props:
        print("%s: %s" % (prop, getattr(dirs, prop)))
"""
pip._vendor is for vendoring dependencies of pip to prevent needing pip to
depend on something external.

Files inside of pip._vendor should be considered immutable and should only be
updated to versions from upstream.
"""
from __future__ import absolute_import

import glob
import os.path
import sys

# Redistributors that debundle pip's dependencies flip this to True, which
# activates the wheel-loading and module-aliasing machinery at the bottom.
DEBUNDLED = False

# Directory scanned for dependency .whl files when operating debundled;
# those wheels are prepended to sys.path so imports resolve from them.
WHEEL_DIR = os.path.abspath(os.path.dirname(__file__))


def vendored(modulename):
    """Make ``pip._vendor.<modulename>`` importable, aliasing it to the
    system-wide module when the vendored copy is absent.

    Idea borrowed from https://github.com/kennethreitz/requests/pull/2567.
    """
    vendored_name = "{0}.{1}".format(__name__, modulename)

    # If the vendored copy imports cleanly there is nothing to do.
    try:
        __import__(vendored_name, globals(), locals(), level=0)
        return
    except ImportError:
        pass

    # Otherwise fall back to the real, top-level module.
    try:
        __import__(modulename, globals(), locals(), level=0)
    except ImportError:
        # Neither copy is importable. Staying silent here just means the
        # user gets an ordinary ImportError at the point of actual use,
        # which is a clearer message than anything we could raise now.
        return

    # Register the real module under the vendored name, both in
    # sys.modules and as an attribute of the parent package.
    real_module = sys.modules[modulename]
    sys.modules[vendored_name] = real_module
    parent_name, attr_name = vendored_name.rsplit(".", 1)
    setattr(sys.modules[parent_name], attr_name, real_module)


# In a debundled setup, put any dependency wheels on sys.path and alias
# every vendored library to its system-wide counterpart. With DEBUNDLED
# False (the default) this whole section is a no-op.
if DEBUNDLED:
    sys.path[:] = glob.glob(os.path.join(WHEEL_DIR, "*.whl")) + sys.path

    vendored("cachecontrol")
    vendored("colorama")
    vendored("distlib")
    vendored("distro")
    vendored("html5lib")
    vendored("lockfile")
    vendored("six")
    vendored("six.moves")
    vendored("six.moves.urllib")
    vendored("packaging")
    vendored("packaging.version")
    vendored("packaging.specifiers")
    vendored("pkg_resources")
    vendored("progress")
    vendored("retrying")
    vendored("requests")
    vendored("requests.packages")
    vendored("requests.packages.urllib3")
    vendored("requests.packages.urllib3._collections")
    vendored("requests.packages.urllib3.connection")
    vendored("requests.packages.urllib3.connectionpool")
    vendored("requests.packages.urllib3.contrib")
    vendored("requests.packages.urllib3.contrib.ntlmpool")
    vendored("requests.packages.urllib3.contrib.pyopenssl")
    vendored("requests.packages.urllib3.exceptions")
    vendored("requests.packages.urllib3.fields")
    vendored("requests.packages.urllib3.filepost")
    vendored("requests.packages.urllib3.packages")
    vendored("requests.packages.urllib3.packages.ordered_dict")
    vendored("requests.packages.urllib3.packages.six")
    vendored("requests.packages.urllib3.packages.ssl_match_hostname")
    vendored("requests.packages.urllib3.packages.ssl_match_hostname."
             "_implementation")
    vendored("requests.packages.urllib3.poolmanager")
    vendored("requests.packages.urllib3.request")
    vendored("requests.packages.urllib3.response")
    vendored("requests.packages.urllib3.util")
    vendored("requests.packages.urllib3.util.connection")
    vendored("requests.packages.urllib3.util.request")
    vendored("requests.packages.urllib3.util.response")
    vendored("requests.packages.urllib3.util.retry")
    vendored("requests.packages.urllib3.util.ssl_")
    vendored("requests.packages.urllib3.util.timeout")
    vendored("requests.packages.urllib3.util.url")
# pip/__main__.py -- entry point for ``python -m pip`` (and for running pip
# directly out of a wheel file).
from __future__ import absolute_import

import os
import sys

# If we are running from a wheel, add the wheel to sys.path
# This allows the usage python pip-*.whl/pip install pip-*.whl
if __package__ == '':
    # __file__ is pip-*.whl/pip/__main__.py
    # first dirname call strips of '/__main__.py', second strips off '/pip'
    # Resulting path is the name of the wheel itself
    # Add that to sys.path so we can import pip
    path = os.path.dirname(os.path.dirname(__file__))
    sys.path.insert(0, path)

import pip  # noqa

if __name__ == '__main__':
    # Delegate to pip's real CLI entry point and propagate its exit status.
    sys.exit(pip.main())
#!/usr/bin/env python
# pip/__init__.py -- pip's command-line front end: option parsing, shell
# completion, command dispatch, and the FrozenRequirement helper used by
# ``pip freeze``.
from __future__ import absolute_import

import locale
import logging
import os
import optparse
import warnings

import sys
import re

# 2016-06-17 barry@debian.org: urllib3 1.14 added optional support for socks,
# but if invoked (i.e. imported), it will issue a warning to stderr if socks
# isn't available. requests unconditionally imports urllib3's socks contrib
# module, triggering this warning. The warning breaks DEP-8 tests (because of
# the stderr output) and is just plain annoying in normal usage. I don't want
# to add socks as yet another dependency for pip, nor do I want to allow-stder
# in the DEP-8 tests, so just suppress the warning. pdb tells me this has to
# be done before the import of pip.vcs.
from pip._vendor.requests.packages.urllib3.exceptions import DependencyWarning
warnings.filterwarnings("ignore", category=DependencyWarning)  # noqa

from pip.exceptions import InstallationError, CommandError, PipError
from pip.utils import get_installed_distributions, get_prog
from pip.utils import deprecation, dist_is_editable
from pip.vcs import git, mercurial, subversion, bazaar  # noqa
from pip.baseparser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
from pip.commands import get_summaries, get_similar_commands
from pip.commands import commands_dict
from pip._vendor.requests.packages.urllib3.exceptions import (
    InsecureRequestWarning,
)

# assignment for flake8 to be happy
# This fixes a peculiarity when importing via __import__ - as we are
# initialising the pip module, "from pip import cmdoptions" is recursive
# and appears not to work properly in that situation.
import pip.cmdoptions
cmdoptions = pip.cmdoptions

# The version as used in the setup.py and the docs conf.py
__version__ = "9.0.1"

logger = logging.getLogger(__name__)

# Hide the InsecureRequestWarning from urllib3
warnings.filterwarnings("ignore", category=InsecureRequestWarning)


def autocomplete():
    """Command and option completion for the main option parser (and options)
    and its subcommands (and options).

    Enable by sourcing one of the completion shell scripts (bash, zsh or fish).

    Reads COMP_WORDS/COMP_CWORD from the environment, prints candidate
    completions to stdout, and always exits the process.
    """
    # Don't complete if user hasn't sourced bash_completion file.
    if 'PIP_AUTO_COMPLETE' not in os.environ:
        return
    cwords = os.environ['COMP_WORDS'].split()[1:]
    cword = int(os.environ['COMP_CWORD'])
    try:
        current = cwords[cword - 1]
    except IndexError:
        current = ''

    subcommands = [cmd for cmd, summary in get_summaries()]
    options = []
    # subcommand
    try:
        subcommand_name = [w for w in cwords if w in subcommands][0]
    except IndexError:
        subcommand_name = None

    parser = create_main_parser()
    # subcommand options
    if subcommand_name:
        # special case: 'help' subcommand has no options
        if subcommand_name == 'help':
            sys.exit(1)
        # special case: list locally installed dists for uninstall command
        if subcommand_name == 'uninstall' and not current.startswith('-'):
            installed = []
            lc = current.lower()
            for dist in get_installed_distributions(local_only=True):
                if dist.key.startswith(lc) and dist.key not in cwords[1:]:
                    installed.append(dist.key)
            # if there are no dists installed, fall back to option completion
            if installed:
                for dist in installed:
                    print(dist)
                sys.exit(1)

        subcommand = commands_dict[subcommand_name]()
        options += [(opt.get_opt_string(), opt.nargs)
                    for opt in subcommand.parser.option_list_all
                    if opt.help != optparse.SUPPRESS_HELP]

        # filter out previously specified options from available options
        prev_opts = [x.split('=')[0] for x in cwords[1:cword - 1]]
        options = [(x, v) for (x, v) in options if x not in prev_opts]
        # filter options by current input
        options = [(k, v) for k, v in options if k.startswith(current)]
        for option in options:
            opt_label = option[0]
            # append '=' to options which require args
            if option[1]:
                opt_label += '='
            print(opt_label)
    else:
        # show main parser options only when necessary
        if current.startswith('-') or current.startswith('--'):
            opts = [i.option_list for i in parser.option_groups]
            opts.append(parser.option_list)
            opts = (o for it in opts for o in it)

            subcommands += [i.get_opt_string() for i in opts
                            if i.help != optparse.SUPPRESS_HELP]

        print(' '.join([x for x in subcommands if x.startswith(current)]))
    sys.exit(1)


def create_main_parser():
    """Build the top-level ConfigOptionParser: global options plus a
    description listing all subcommands."""
    parser_kw = {
        'usage': '\n%prog <command> [options]',
        'add_help_option': False,
        'formatter': UpdatingDefaultsHelpFormatter(),
        'name': 'global',
        'prog': get_prog(),
    }

    parser = ConfigOptionParser(**parser_kw)
    parser.disable_interspersed_args()

    pip_pkg_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    parser.version = 'pip %s from %s (python %s)' % (
        __version__, pip_pkg_dir, sys.version[:3])

    # add the general options
    gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, parser)
    parser.add_option_group(gen_opts)

    parser.main = True  # so the help formatter knows

    # create command listing for description
    command_summaries = get_summaries()
    description = [''] + ['%-27s %s' % (i, j) for i, j in command_summaries]
    parser.description = '\n'.join(description)

    return parser


def parseopts(args):
    """Split *args* into a subcommand name and its argument list.

    Exits for --version / no-args / bare "help"; raises CommandError for an
    unknown subcommand (suggesting a similar one when possible).
    """
    parser = create_main_parser()

    # Note: parser calls disable_interspersed_args(), so the result of this
    # call is to split the initial args into the general options before the
    # subcommand and everything else.
    # For example:
    #  args: ['--timeout=5', 'install', '--user', 'INITools']
    #  general_options: ['--timeout==5']
    #  args_else: ['install', '--user', 'INITools']
    general_options, args_else = parser.parse_args(args)

    # --version
    if general_options.version:
        sys.stdout.write(parser.version)
        sys.stdout.write(os.linesep)
        sys.exit()

    # pip || pip help -> print_help()
    if not args_else or (args_else[0] == 'help' and len(args_else) == 1):
        parser.print_help()
        sys.exit()

    # the subcommand name
    cmd_name = args_else[0]

    if cmd_name not in commands_dict:
        guess = get_similar_commands(cmd_name)

        msg = ['unknown command "%s"' % cmd_name]
        if guess:
            msg.append('maybe you meant "%s"' % guess)

        raise CommandError(' - '.join(msg))

    # all the args without the subcommand
    cmd_args = args[:]
    cmd_args.remove(cmd_name)

    return cmd_name, cmd_args


def check_isolated(args):
    """Return True when --isolated appears anywhere in *args*."""
    isolated = False

    if "--isolated" in args:
        isolated = True

    return isolated


def main(args=None):
    """pip's console entry point: parse args, pick the command, run it.

    Returns the command's exit status (an int) for sys.exit().
    """
    if args is None:
        args = sys.argv[1:]

    # Configure our deprecation warnings to be sent through loggers
    deprecation.install_warning_logger()

    autocomplete()

    try:
        cmd_name, cmd_args = parseopts(args)
    except PipError as exc:
        sys.stderr.write("ERROR: %s" % exc)
        sys.stderr.write(os.linesep)
        sys.exit(1)

    # Needed for locale.getpreferredencoding(False) to work
    # in pip.utils.encoding.auto_decode
    try:
        locale.setlocale(locale.LC_ALL, '')
    except locale.Error as e:
        # setlocale can apparently crash if locale are uninitialized
        logger.debug("Ignoring error %s when setting locale", e)
    command = commands_dict[cmd_name](isolated=check_isolated(cmd_args))
    return command.main(cmd_args)


# ###########################################################
# # Writing freeze files


class FrozenRequirement(object):
    # One line of ``pip freeze`` output: requirement text, editable flag,
    # and any warning comments to emit above it.

    def __init__(self, name, req, editable, comments=()):
        self.name = name
        self.req = req
        self.editable = editable
        self.comments = comments

    # Patterns picking an svn revision ("-r1234") or a date stamp
    # ("-20YYMMDD") suffix out of a version string.
    _rev_re = re.compile(r'-r(\d+)$')
    _date_re = re.compile(r'-(20\d\d\d\d\d\d)$')

    @classmethod
    def from_dist(cls, dist, dependency_links):
        """Build a FrozenRequirement from an installed distribution,
        resolving editable VCS checkouts to their repository URL."""
        location = os.path.normcase(os.path.abspath(dist.location))
        comments = []
        from pip.vcs import vcs, get_src_requirement
        if dist_is_editable(dist) and vcs.get_backend_name(location):
            editable = True
            try:
                req = get_src_requirement(dist, location)
            except InstallationError as exc:
                logger.warning(
                    "Error when trying to get requirement for VCS system %s, "
                    "falling back to uneditable format", exc
                )
                req = None
            if req is None:
                logger.warning(
                    'Could not determine repository location of %s', location
                )
                comments.append(
                    '## !! Could not determine repository location'
                )
                req = dist.as_requirement()
                editable = False
        else:
            editable = False
            req = dist.as_requirement()
            specs = req.specs
            assert len(specs) == 1 and specs[0][0] in ["==", "==="], \
                'Expected 1 spec with == or ===; specs = %r; dist = %r' % \
                (specs, dist)
            version = specs[0][1]
            ver_match = cls._rev_re.search(version)
            date_match = cls._date_re.search(version)
            # A revision/date suffix suggests an svn checkout installed
            # non-editably; try to recover the svn URL from dependency_links.
            if ver_match or date_match:
                svn_backend = vcs.get_backend('svn')
                if svn_backend:
                    svn_location = svn_backend().get_location(
                        dist,
                        dependency_links,
                    )
                if not svn_location:
                    logger.warning(
                        'Warning: cannot find svn location for %s', req)
                    comments.append(
                        '## FIXME: could not find svn URL in dependency_links '
                        'for this package:'
                    )
                else:
                    comments.append(
                        '# Installing as editable to satisfy requirement %s:' %
                        req
                    )
                    if ver_match:
                        rev = ver_match.group(1)
                    else:
                        rev = '{%s}' % date_match.group(1)
                    editable = True
                    req = '%s@%s#egg=%s' % (
                        svn_location,
                        rev,
                        cls.egg_name(dist)
                    )
        return cls(dist.project_name, req, editable, comments)

    @staticmethod
    def egg_name(dist):
        # Strip a trailing "-pyX.Y" tag from the egg name, if present.
        name = dist.egg_name()
        match = re.search(r'-py\d\.\d$', name)
        if match:
            name = name[:match.start()]
        return name

    def __str__(self):
        req = self.req
        if self.editable:
            req = '-e %s' % req
        return '\n'.join(list(self.comments) + [str(req)]) + '\n'


if __name__ == '__main__':
    sys.exit(main())
"""Run the EasyInstall command"""if __name__ == '__main__':from setuptools.command.easy_install import mainmain()
/Users/ccummings/eventbrite_github/triage_projects/csv_validator/pysrc
/Users/ccummings/eventbrite_github/triage_projects/csv_validator/pysrc.
click
{"classifiers": ["License :: OSI Approved :: BSD License", "Programming Language :: Python", "Programming Language :: Python :: 3"], "extensions": {"python.details": {"contacts": [{"email": "armin.ronacher@active-4.com", "name": "Armin Ronacher", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "http://github.com/mitsuhiko/click"}}}, "generator": "bdist_wheel (0.30.0.a0)", "metadata_version": "2.0", "name": "click", "summary": "A simple wrapper around optparse for powerful command line utilities.", "version": "6.7"}
Wheel-Version: 1.0Generator: bdist_wheel (0.30.0.a0)Root-Is-Purelib: trueTag: py2-none-anyTag: py3-none-any
click/__init__.py,sha256=k8R00cFKWI8dhDVKQeLBlAdNh1CxerMEDRiGnr32gdw,2858click/_bashcomplete.py,sha256=82rMiibtEurdwBq60NHXVCBuGXJHDpblFO9o2YxJDF0,2423click/_compat.py,sha256=j59MpzxYGE-fTGj0A5sg8UI8GhHod1XMojiCA0jvbL0,21011click/_termui_impl.py,sha256=Ol1JJhvBRw3l8j1WIU0tOWjQtxxmwGE44lFDbzDqzoA,16395click/_textwrap.py,sha256=gwS4m7bdQiJnzaDG8osFcRb-5vn4t4l2qSCy-5csCEc,1198click/_unicodefun.py,sha256=A3UOzJw6lEZyol2SBg3fNXgweTutaOzkJ61OB7vik3Y,4204click/_winconsole.py,sha256=MzG46DEYPoRyx4SO7EIhFuFZHESgooAfJLIukbB6p5c,7790click/core.py,sha256=M0nJ6Kkye7XZXYG7HCbkJWSfy14WHV6bQmGLACrOhKw,70254click/decorators.py,sha256=y7CX2needh8iRWafj-QS_hGQFsN24eyXAhx5Y2ATwas,10941click/exceptions.py,sha256=rOa0pP3PbSy0_AAPOW9irBEM8AJ3BySN-4z2VUwFVo4,6788click/formatting.py,sha256=eh-cypTUAhpI3HD-K4ZpR3vCiURIO62xXvKkR3tNUTM,8889click/globals.py,sha256=PAgnKvGxq4YuEIldw3lgYOGBLYwsyxnm1IByBX3BFXo,1515click/parser.py,sha256=i01xgYuIA6AwQWEXjshwHSwnTR3gUep4FxJIfyW4ta4,15510click/termui.py,sha256=Bp99MSWQtyoWe1_7HggDmA77n--3KLxu7NsZMFMaCUo,21008click/testing.py,sha256=kJ9mjtJgwNAlkgKcFf9-ISxufmaPDbbuOHVC9WIvKdY,11002click/types.py,sha256=ZGb2lmFs5Vwd9loTRIMbGcqhPVOql8mGoBhWBRT6V4E,18864click/utils.py,sha256=1jalPlkUU28JReTEQeeSFtbJd-SirYWBNfjtELBKzT4,14916click-6.7.dist-info/DESCRIPTION.rst,sha256=OCTuuN6LcWulhHS3d5rfjdsQtW22n7HENFRh6jC6ego,10click-6.7.dist-info/METADATA,sha256=l6lAyogIUXiHKUK_rWguef-EMcvO5C6bXzFCNCcblbQ,424click-6.7.dist-info/RECORD,,click-6.7.dist-info/WHEEL,sha256=5wvfB7GvgZAbKBSE9uX9Zbi6LCL-_KgezgHblXhCRnM,113click-6.7.dist-info/metadata.json,sha256=qg0uO6amNHkIkOxnmWX7Xa_DNQMQ62Q6drivuP9Gh1c,571click-6.7.dist-info/top_level.txt,sha256=J1ZQogalYS4pphY_lPECoNMfw0HzTSrZglC4Yfwo4xA,6click-6.7.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4click/exceptions.pyc,,click/_unicodefun.pyc,,click/_termui_impl.pyc,,click/_compat.pyc,,click/_textwrap.pyc,,click/_winconsole.pyc,,click/decorators.pyc,,click/parser.pyc,,click/testing.pyc,,click/utils.pyc,,c
lick/_bashcomplete.pyc,,click/types.pyc,,click/termui.pyc,,click/core.pyc,,click/formatting.pyc,,click/__init__.pyc,,click/globals.pyc,,
Metadata-Version: 2.0Name: clickVersion: 6.7Summary: A simple wrapper around optparse for powerful command line utilities.Home-page: http://github.com/mitsuhiko/clickAuthor: Armin RonacherAuthor-email: armin.ronacher@active-4.comLicense: UNKNOWNPlatform: UNKNOWNClassifier: License :: OSI Approved :: BSD LicenseClassifier: Programming Language :: PythonClassifier: Programming Language :: Python :: 3UNKNOWN
pip
UNKNOWN
# click/utils.py -- assorted helpers: lazy file wrappers, the echo()
# output function, stream accessors, and platform path/config utilities.
import os
import sys

from .globals import resolve_color_default

from ._compat import text_type, open_stream, get_filesystem_encoding, \
    get_streerror, string_types, PY2, binary_streams, text_streams, \
    filename_to_ui, auto_wrap_for_ansi, strip_ansi, should_strip_ansi, \
    _default_text_stdout, _default_text_stderr, is_bytes, WIN

if not PY2:
    from ._compat import _find_binary_writer
elif WIN:
    from ._winconsole import _get_windows_argv, \
        _hash_py_argv, _initial_argv_hash

# Types echo() passes through unchanged; everything else is coerced to text.
echo_native_types = string_types + (bytes, bytearray)


def _posixify(name):
    # "Foo Bar" -> "foo-bar": hyphen-join and lowercase for POSIX dir names.
    return '-'.join(name.split()).lower()


def safecall(func):
    """Wraps a function so that it swallows exceptions."""
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception:
            pass
    return wrapper


def make_str(value):
    """Converts a value into a valid string."""
    if isinstance(value, bytes):
        try:
            return value.decode(get_filesystem_encoding())
        except UnicodeError:
            return value.decode('utf-8', 'replace')
    return text_type(value)


def make_default_short_help(help, max_length=45):
    # Build an abbreviated one-line help: stop at the first sentence or at
    # max_length, appending '...' when truncated.
    words = help.split()
    total_length = 0
    result = []
    done = False

    for word in words:
        if word[-1:] == '.':
            done = True
        # 1 extra char for the joining space once result is non-empty.
        new_length = result and 1 + len(word) or len(word)
        if total_length + new_length > max_length:
            result.append('...')
            done = True
        else:
            if result:
                result.append(' ')
            result.append(word)
        if done:
            break
        total_length += new_length

    return ''.join(result)


class LazyFile(object):
    """A lazy file works like a regular file but it does not fully open
    the file but it does perform some basic checks early to see if the
    filename parameter does make sense.  This is useful for safely opening
    files for writing.
    """

    def __init__(self, filename, mode='r', encoding=None, errors='strict',
                 atomic=False):
        self.name = filename
        self.mode = mode
        self.encoding = encoding
        self.errors = errors
        self.atomic = atomic

        if filename == '-':
            # '-' means a std stream: open eagerly, nothing lazy to gain.
            self._f, self.should_close = open_stream(filename, mode,
                                                     encoding, errors)
        else:
            if 'r' in mode:
                # Open and close the file in case we're opening it for
                # reading so that we can catch at least some errors in
                # some cases early.
                open(filename, mode).close()
            self._f = None
            self.should_close = True

    def __getattr__(self, name):
        # Any attribute access forces the real file open.
        return getattr(self.open(), name)

    def __repr__(self):
        if self._f is not None:
            return repr(self._f)
        return '<unopened file %r %s>' % (self.name, self.mode)

    def open(self):
        """Opens the file if it's not yet open.  This call might fail with
        a :exc:`FileError`.  Not handling this error will produce an error
        that Click shows.
        """
        if self._f is not None:
            return self._f
        try:
            rv, self.should_close = open_stream(self.name, self.mode,
                                                self.encoding,
                                                self.errors,
                                                atomic=self.atomic)
        except (IOError, OSError) as e:
            from .exceptions import FileError
            raise FileError(self.name, hint=get_streerror(e))
        self._f = rv
        return rv

    def close(self):
        """Closes the underlying file, no matter what."""
        if self._f is not None:
            self._f.close()

    def close_intelligently(self):
        """This function only closes the file if it was opened by the lazy
        file wrapper.  For instance this will never close stdin.
        """
        if self.should_close:
            self.close()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, tb):
        self.close_intelligently()

    def __iter__(self):
        self.open()
        return iter(self._f)


class KeepOpenFile(object):
    # Transparent wrapper whose context manager deliberately does NOT close
    # the underlying file (used for stdin/stdout).

    def __init__(self, file):
        self._file = file

    def __getattr__(self, name):
        return getattr(self._file, name)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, tb):
        pass

    def __repr__(self):
        return repr(self._file)

    def __iter__(self):
        return iter(self._file)


def echo(message=None, file=None, nl=True, err=False, color=None):
    """Prints a message plus a newline to the given file or stdout.  On
    first sight, this looks like the print function, but it has improved
    support for handling Unicode and binary data that does not fail no
    matter how badly configured the system is.

    Primarily it means that you can print binary data as well as Unicode
    data on both 2.x and 3.x to the given file in the most appropriate way
    possible.  This is a very carefree function as in that it will try its
    best to not fail.  As of Click 6.0 this includes support for unicode
    output on the Windows console.

    In addition to that, if `colorama`_ is installed, the echo function will
    also support clever handling of ANSI codes.  Essentially it will then
    do the following:

    -   add transparent handling of ANSI color codes on Windows.
    -   hide ANSI codes automatically if the destination file is not a
        terminal.

    .. _colorama: http://pypi.python.org/pypi/colorama

    .. versionchanged:: 6.0
       As of Click 6.0 the echo function will properly support unicode
       output on the windows console.  Not that click does not modify
       the interpreter in any way which means that `sys.stdout` or the
       print statement or function will still not provide unicode support.

    .. versionchanged:: 2.0
       Starting with version 2.0 of Click, the echo function will work
       with colorama if it's installed.

    .. versionadded:: 3.0
       The `err` parameter was added.

    .. versionchanged:: 4.0
       Added the `color` flag.

    :param message: the message to print
    :param file: the file to write to (defaults to ``stdout``)
    :param err: if set to true the file defaults to ``stderr`` instead of
                ``stdout``.  This is faster and easier than calling
                :func:`get_text_stderr` yourself.
    :param nl: if set to `True` (the default) a newline is printed afterwards.
    :param color: controls if the terminal supports ANSI colors or not.  The
                  default is autodetection.
    """
    if file is None:
        if err:
            file = _default_text_stderr()
        else:
            file = _default_text_stdout()

    # Convert non bytes/text into the native string type.
    if message is not None and not isinstance(message, echo_native_types):
        message = text_type(message)

    if nl:
        message = message or u''
        if isinstance(message, text_type):
            message += u'\n'
        else:
            message += b'\n'

    # If there is a message, and we're in Python 3, and the value looks
    # like bytes, we manually need to find the binary stream and write the
    # message in there.  This is done separately so that most stream
    # types will work as you would expect.  Eg: you can write to StringIO
    # for other cases.
    if message and not PY2 and is_bytes(message):
        binary_file = _find_binary_writer(file)
        if binary_file is not None:
            file.flush()
            binary_file.write(message)
            binary_file.flush()
            return

    # ANSI-style support.  If there is no message or we are dealing with
    # bytes nothing is happening.  If we are connected to a file we want
    # to strip colors.  If we are on windows we either wrap the stream
    # to strip the color or we use the colorama support to translate the
    # ansi codes to API calls.
    if message and not is_bytes(message):
        color = resolve_color_default(color)
        if should_strip_ansi(file, color):
            message = strip_ansi(message)
        elif WIN:
            if auto_wrap_for_ansi is not None:
                file = auto_wrap_for_ansi(file)
            elif not color:
                message = strip_ansi(message)

    if message:
        file.write(message)
    file.flush()


def get_binary_stream(name):
    """Returns a system stream for byte processing.  This essentially
    returns the stream from the sys module with the given name but it
    solves some compatibility issues between different Python versions.
    Primarily this function is necessary for getting binary streams on
    Python 3.

    :param name: the name of the stream to open.  Valid names are ``'stdin'``,
                 ``'stdout'`` and ``'stderr'``
    """
    opener = binary_streams.get(name)
    if opener is None:
        raise TypeError('Unknown standard stream %r' % name)
    return opener()


def get_text_stream(name, encoding=None, errors='strict'):
    """Returns a system stream for text processing.  This usually returns
    a wrapped stream around a binary stream returned from
    :func:`get_binary_stream` but it also can take shortcuts on Python 3
    for already correctly configured streams.

    :param name: the name of the stream to open.  Valid names are ``'stdin'``,
                 ``'stdout'`` and ``'stderr'``
    :param encoding: overrides the detected default encoding.
    :param errors: overrides the default error mode.
    """
    opener = text_streams.get(name)
    if opener is None:
        raise TypeError('Unknown standard stream %r' % name)
    return opener(encoding, errors)


def open_file(filename, mode='r', encoding=None, errors='strict',
              lazy=False, atomic=False):
    """This is similar to how the :class:`File` works but for manual
    usage.  Files are opened non lazy by default.  This can open regular
    files as well as stdin/stdout if ``'-'`` is passed.

    If stdin/stdout is returned the stream is wrapped so that the context
    manager will not close the stream accidentally.  This makes it possible
    to always use the function like this without having to worry to
    accidentally close a standard stream::

        with open_file(filename) as f:
            ...

    .. versionadded:: 3.0

    :param filename: the name of the file to open (or ``'-'`` for
                     stdin/stdout).
    :param mode: the mode in which to open the file.
    :param encoding: the encoding to use.
    :param errors: the error handling for this file.
    :param lazy: can be flipped to true to open the file lazily.
    :param atomic: in atomic mode writes go into a temporary file and it's
                   moved on close.
    """
    if lazy:
        return LazyFile(filename, mode, encoding, errors, atomic=atomic)
    f, should_close = open_stream(filename, mode, encoding, errors,
                                  atomic=atomic)
    if not should_close:
        f = KeepOpenFile(f)
    return f


def get_os_args():
    """This returns the argument part of sys.argv in the most appropriate
    form for processing.  What this means is that this return value is in
    a format that works for Click to process but does not necessarily
    correspond well to what's actually standard for the interpreter.

    On most environments the return value is ``sys.argv[1:]`` unchanged.
    However if you are on Windows and running Python 2 the return value
    will actually be a list of unicode strings instead because the
    default behavior on that platform otherwise will not be able to
    carry all possible values that sys.argv can have.

    .. versionadded:: 6.0
    """
    # We can only extract the unicode argv if sys.argv has not been
    # changed since the startup of the application.
    if PY2 and WIN and _initial_argv_hash == _hash_py_argv():
        return _get_windows_argv()
    return sys.argv[1:]


def format_filename(filename, shorten=False):
    """Formats a filename for user display.  The main purpose of this
    function is to ensure that the filename can be displayed at all.  This
    will decode the filename to unicode if necessary in a way that it will
    not fail.  Optionally, it can shorten the filename to not include the
    full path to the filename.

    :param filename: formats a filename for UI display.  This will also
                     convert the filename into unicode without failing.
    :param shorten: this optionally shortens the filename to strip of the
                    path that leads up to it.
    """
    if shorten:
        filename = os.path.basename(filename)
    return filename_to_ui(filename)


def get_app_dir(app_name, roaming=True, force_posix=False):
    r"""Returns the config folder for the application.  The default behavior
    is to return whatever is most appropriate for the operating system.

    To give you an idea, for an app called ``"Foo Bar"``, something like
    the following folders could be returned:

    Mac OS X:
      ``~/Library/Application Support/Foo Bar``
    Mac OS X (POSIX):
      ``~/.foo-bar``
    Unix:
      ``~/.config/foo-bar``
    Unix (POSIX):
      ``~/.foo-bar``
    Win XP (roaming):
      ``C:\Documents and Settings\<user>\Local Settings\Application Data\Foo Bar``
    Win XP (not roaming):
      ``C:\Documents and Settings\<user>\Application Data\Foo Bar``
    Win 7 (roaming):
      ``C:\Users\<user>\AppData\Roaming\Foo Bar``
    Win 7 (not roaming):
      ``C:\Users\<user>\AppData\Local\Foo Bar``

    .. versionadded:: 2.0

    :param app_name: the application name.  This should be properly
                     capitalized and can contain whitespace.
    :param roaming: controls if the folder should be roaming or not on
                    Windows.  Has no affect otherwise.
    :param force_posix: if this is set to `True` then on any POSIX system the
                        folder will be stored in the home folder with a
                        leading dot instead of the XDG config home or darwin's
                        application support folder.
    """
    if WIN:
        key = roaming and 'APPDATA' or 'LOCALAPPDATA'
        folder = os.environ.get(key)
        if folder is None:
            folder = os.path.expanduser('~')
        return os.path.join(folder, app_name)
    if force_posix:
        return os.path.join(os.path.expanduser('~/.' + _posixify(app_name)))
    if sys.platform == 'darwin':
        return os.path.join(os.path.expanduser(
            '~/Library/Application Support'), app_name)
    return os.path.join(
        os.environ.get('XDG_CONFIG_HOME', os.path.expanduser('~/.config')),
        _posixify(app_name))
# click/types.py -- the ParamType hierarchy: conversion/validation of
# command-line parameter values.  NOTE: this chunk is truncated in the
# source; convert_type() is cut off mid-statement at the end.
import os
import stat

from ._compat import open_stream, text_type, filename_to_ui, \
    get_filesystem_encoding, get_streerror, _get_argv_encoding, PY2
from .exceptions import BadParameter
from .utils import safecall, LazyFile


class ParamType(object):
    """Helper for converting values through types.  The following is
    necessary for a valid type:

    *   it needs a name
    *   it needs to pass through None unchanged
    *   it needs to convert from a string
    *   it needs to convert its result type through unchanged
        (eg: needs to be idempotent)
    *   it needs to be able to deal with param and context being `None`.
        This can be the case when the object is used with prompt
        inputs.
    """
    is_composite = False

    #: the descriptive name of this type
    name = None

    #: if a list of this type is expected and the value is pulled from a
    #: string environment variable, this is what splits it up.  `None`
    #: means any whitespace.  For all parameters the general rule is that
    #: whitespace splits them up.  The exception are paths and files which
    #: are split by ``os.path.pathsep`` by default (":" on Unix and ";" on
    #: Windows).
    envvar_list_splitter = None

    def __call__(self, value, param=None, ctx=None):
        # None (the missing value) always passes through unconverted.
        if value is not None:
            return self.convert(value, param, ctx)

    def get_metavar(self, param):
        """Returns the metavar default for this param if it provides one."""

    def get_missing_message(self, param):
        """Optionally might return extra information about a missing
        parameter.

        .. versionadded:: 2.0
        """

    def convert(self, value, param, ctx):
        """Converts the value.  This is not invoked for values that are
        `None` (the missing value).
        """
        return value

    def split_envvar_value(self, rv):
        """Given a value from an environment variable this splits it up
        into small chunks depending on the defined envvar list splitter.

        If the splitter is set to `None`, which means that whitespace splits,
        then leading and trailing whitespace is ignored.  Otherwise, leading
        and trailing splitters usually lead to empty items being included.
        """
        return (rv or '').split(self.envvar_list_splitter)

    def fail(self, message, param=None, ctx=None):
        """Helper method to fail with an invalid value message."""
        raise BadParameter(message, ctx=ctx, param=param)


class CompositeParamType(ParamType):
    # Base for types spanning several consecutive argument values (see Tuple).
    is_composite = True

    @property
    def arity(self):
        raise NotImplementedError()


class FuncParamType(ParamType):
    # Adapts an arbitrary callable into a ParamType; a ValueError from the
    # callable becomes a BadParameter.

    def __init__(self, func):
        self.name = func.__name__
        self.func = func

    def convert(self, value, param, ctx):
        try:
            return self.func(value)
        except ValueError:
            try:
                value = text_type(value)
            except UnicodeError:
                value = str(value).decode('utf-8', 'replace')
            self.fail(value, param, ctx)


class UnprocessedParamType(ParamType):
    name = 'text'

    def convert(self, value, param, ctx):
        # Pass-through: value is handed over exactly as received.
        return value

    def __repr__(self):
        return 'UNPROCESSED'


class StringParamType(ParamType):
    name = 'text'

    def convert(self, value, param, ctx):
        # Decode bytes trying argv encoding, then filesystem encoding, and
        # finally utf-8 with replacement so conversion never raises.
        if isinstance(value, bytes):
            enc = _get_argv_encoding()
            try:
                value = value.decode(enc)
            except UnicodeError:
                fs_enc = get_filesystem_encoding()
                if fs_enc != enc:
                    try:
                        value = value.decode(fs_enc)
                    except UnicodeError:
                        value = value.decode('utf-8', 'replace')
            return value
        return value

    def __repr__(self):
        return 'STRING'


class Choice(ParamType):
    """The choice type allows a value to be checked against a fixed set of
    supported values.  All of these values have to be strings.

    See :ref:`choice-opts` for an example.
    """
    name = 'choice'

    def __init__(self, choices):
        self.choices = choices

    def get_metavar(self, param):
        return '[%s]' % '|'.join(self.choices)

    def get_missing_message(self, param):
        return 'Choose from %s.' % ', '.join(self.choices)

    def convert(self, value, param, ctx):
        # Exact match
        if value in self.choices:
            return value

        # Match through normalization
        if ctx is not None and \
           ctx.token_normalize_func is not None:
            value = ctx.token_normalize_func(value)
            for choice in self.choices:
                if ctx.token_normalize_func(choice) == value:
                    return choice

        self.fail('invalid choice: %s. (choose from %s)' %
                  (value, ', '.join(self.choices)), param, ctx)

    def __repr__(self):
        return 'Choice(%r)' % list(self.choices)


class IntParamType(ParamType):
    name = 'integer'

    def convert(self, value, param, ctx):
        try:
            return int(value)
        except (ValueError, UnicodeError):
            self.fail('%s is not a valid integer' % value, param, ctx)

    def __repr__(self):
        return 'INT'


class IntRange(IntParamType):
    """A parameter that works similar to :data:`click.INT` but restricts
    the value to fit into a range.  The default behavior is to fail if the
    value falls outside the range, but it can also be silently clamped
    between the two edges.

    See :ref:`ranges` for an example.
    """
    name = 'integer range'

    def __init__(self, min=None, max=None, clamp=False):
        self.min = min
        self.max = max
        self.clamp = clamp

    def convert(self, value, param, ctx):
        rv = IntParamType.convert(self, value, param, ctx)
        if self.clamp:
            # Silently pull out-of-range values back to the nearest edge.
            if self.min is not None and rv < self.min:
                return self.min
            if self.max is not None and rv > self.max:
                return self.max
        if self.min is not None and rv < self.min or \
           self.max is not None and rv > self.max:
            if self.min is None:
                self.fail('%s is bigger than the maximum valid value '
                          '%s.' % (rv, self.max), param, ctx)
            elif self.max is None:
                self.fail('%s is smaller than the minimum valid value '
                          '%s.' % (rv, self.min), param, ctx)
            else:
                self.fail('%s is not in the valid range of %s to %s.'
                          % (rv, self.min, self.max), param, ctx)
        return rv

    def __repr__(self):
        return 'IntRange(%r, %r)' % (self.min, self.max)


class BoolParamType(ParamType):
    name = 'boolean'

    def convert(self, value, param, ctx):
        if isinstance(value, bool):
            return bool(value)
        value = value.lower()
        if value in ('true', '1', 'yes', 'y'):
            return True
        elif value in ('false', '0', 'no', 'n'):
            return False
        self.fail('%s is not a valid boolean' % value, param, ctx)

    def __repr__(self):
        return 'BOOL'


class FloatParamType(ParamType):
    name = 'float'

    def convert(self, value, param, ctx):
        try:
            return float(value)
        except (UnicodeError, ValueError):
            self.fail('%s is not a valid floating point value' %
                      value, param, ctx)

    def __repr__(self):
        return 'FLOAT'


class UUIDParameterType(ParamType):
    name = 'uuid'

    def convert(self, value, param, ctx):
        import uuid
        try:
            if PY2 and isinstance(value, text_type):
                value = value.encode('ascii')
            return uuid.UUID(value)
        except (UnicodeError, ValueError):
            self.fail('%s is not a valid UUID value' % value, param, ctx)

    def __repr__(self):
        return 'UUID'


class File(ParamType):
    """Declares a parameter to be a file for reading or writing.  The file
    is automatically closed once the context tears down (after the command
    finished working).

    Files can be opened for reading or writing.  The special value ``-``
    indicates stdin or stdout depending on the mode.

    By default, the file is opened for reading text data, but it can also be
    opened in binary mode or for writing.  The encoding parameter can be used
    to force a specific encoding.

    The `lazy` flag controls if the file should be opened immediately or
    upon first IO.  The default is to be non lazy for standard input and
    output streams as well as files opened for reading, lazy otherwise.

    Starting with Click 2.0, files can also be opened atomically in which
    case all writes go into a separate file in the same folder and upon
    completion the file will be moved over to the original location.  This
    is useful if a file regularly read by other users is modified.

    See :ref:`file-args` for more information.
    """
    name = 'filename'
    envvar_list_splitter = os.path.pathsep

    def __init__(self, mode='r', encoding=None, errors='strict', lazy=None,
                 atomic=False):
        self.mode = mode
        self.encoding = encoding
        self.errors = errors
        self.lazy = lazy
        self.atomic = atomic

    def resolve_lazy_flag(self, value):
        # Explicit lazy wins; otherwise: std streams are eager, write modes
        # are lazy (so bad paths fail late), read modes are eager.
        if self.lazy is not None:
            return self.lazy
        if value == '-':
            return False
        elif 'w' in self.mode:
            return True
        return False

    def convert(self, value, param, ctx):
        try:
            # Already an open file-like object: hand it back untouched.
            if hasattr(value, 'read') or hasattr(value, 'write'):
                return value

            lazy = self.resolve_lazy_flag(value)

            if lazy:
                f = LazyFile(value, self.mode, self.encoding, self.errors,
                             atomic=self.atomic)
                if ctx is not None:
                    ctx.call_on_close(f.close_intelligently)
                return f

            f, should_close = open_stream(value, self.mode,
                                          self.encoding, self.errors,
                                          atomic=self.atomic)
            # If a context is provided, we automatically close the file
            # at the end of the context execution (or flush out).  If a
            # context does not exist, it's the caller's responsibility to
            # properly close the file.  This for instance happens when the
            # type is used with prompts.
            if ctx is not None:
                if should_close:
                    ctx.call_on_close(safecall(f.close))
                else:
                    ctx.call_on_close(safecall(f.flush))
            return f
        except (IOError, OSError) as e:
            self.fail('Could not open file: %s: %s' % (
                filename_to_ui(value),
                get_streerror(e),
            ), param, ctx)


class Path(ParamType):
    """The path type is similar to the :class:`File` type but it performs
    different checks.  First of all, instead of returning an open file
    handle it returns just the filename.  Secondly, it can perform various
    basic checks about what the file or directory should be.

    .. versionchanged:: 6.0
       `allow_dash` was added.

    :param exists: if set to true, the file or directory needs to exist for
                   this value to be valid.  If this is not required and a
                   file does indeed not exist, then all further checks are
                   silently skipped.
    :param file_okay: controls if a file is a possible value.
    :param dir_okay: controls if a directory is a possible value.
    :param writable: if true, a writable check is performed.
    :param readable: if true, a readable check is performed.
    :param resolve_path: if this is true, then the path is fully resolved
                         before the value is passed onwards.  This means
                         that it's absolute and symlinks are resolved.
    :param allow_dash: If this is set to `True`, a single dash to indicate
                       standard streams is permitted.
    :param type: optionally a string type that should be used to
                 represent the path.  The default is `None` which
                 means the return value will be either bytes or
                 unicode depending on what makes most sense given the
                 input data Click deals with.
    """
    envvar_list_splitter = os.path.pathsep

    def __init__(self, exists=False, file_okay=True, dir_okay=True,
                 writable=False, readable=True, resolve_path=False,
                 allow_dash=False, path_type=None):
        self.exists = exists
        self.file_okay = file_okay
        self.dir_okay = dir_okay
        self.writable = writable
        self.readable = readable
        self.resolve_path = resolve_path
        self.allow_dash = allow_dash
        self.type = path_type

        # NOTE(review): this looks like it should be if/elif/else -- when
        # file_okay is true and dir_okay is false, the second if's `else`
        # branch overwrites name/path_type back to 'path'/'Path'.  Upstream
        # click fixed this later; left as-is here to preserve observable
        # behavior.
        if self.file_okay and not self.dir_okay:
            self.name = 'file'
            self.path_type = 'File'
        if self.dir_okay and not self.file_okay:
            self.name = 'directory'
            self.path_type = 'Directory'
        else:
            self.name = 'path'
            self.path_type = 'Path'

    def coerce_path_result(self, rv):
        # Coerce to the requested str/bytes type using the filesystem
        # encoding when a path_type was given.
        if self.type is not None and not isinstance(rv, self.type):
            if self.type is text_type:
                rv = rv.decode(get_filesystem_encoding())
            else:
                rv = rv.encode(get_filesystem_encoding())
        return rv

    def convert(self, value, param, ctx):
        rv = value

        is_dash = self.file_okay and self.allow_dash and rv in (b'-', '-')

        if not is_dash:
            if self.resolve_path:
                rv = os.path.realpath(rv)

            try:
                st = os.stat(rv)
            except OSError:
                if not self.exists:
                    return self.coerce_path_result(rv)
                self.fail('%s "%s" does not exist.' % (
                    self.path_type,
                    filename_to_ui(value)
                ), param, ctx)

            if not self.file_okay and stat.S_ISREG(st.st_mode):
                self.fail('%s "%s" is a file.' % (
                    self.path_type,
                    filename_to_ui(value)
                ), param, ctx)
            if not self.dir_okay and stat.S_ISDIR(st.st_mode):
                self.fail('%s "%s" is a directory.' % (
                    self.path_type,
                    filename_to_ui(value)
                ), param, ctx)
            if self.writable and not os.access(value, os.W_OK):
                self.fail('%s "%s" is not writable.' % (
                    self.path_type,
                    filename_to_ui(value)
                ), param, ctx)
            if self.readable and not os.access(value, os.R_OK):
                self.fail('%s "%s" is not readable.' % (
                    self.path_type,
                    filename_to_ui(value)
                ), param, ctx)

        return self.coerce_path_result(rv)


class Tuple(CompositeParamType):
    """The default behavior of Click is to apply a type on a value directly.
    This works well in most cases, except for when `nargs` is set to a fixed
    count and different types should be used for different items.  In this
    case the :class:`Tuple` type can be used.  This type can only be used
    if `nargs` is set to a fixed number.

    For more information see :ref:`tuple-type`.

    This can be selected by using a Python tuple literal as a type.

    :param types: a list of types that should be used for the tuple items.
    """

    def __init__(self, types):
        self.types = [convert_type(ty) for ty in types]

    @property
    def name(self):
        return "<" + " ".join(ty.name for ty in self.types) + ">"

    @property
    def arity(self):
        return len(self.types)

    def convert(self, value, param, ctx):
        if len(value) != len(self.types):
            raise TypeError('It would appear that nargs is set to conflict '
                            'with the composite type arity.')
        return tuple(ty(x, param, ctx) for ty, x in zip(self.types, value))


def convert_type(ty, default=None):
    """Converts a callable or python ty into the most appropriate param
    ty.
    """
    guessed_type = False
    if ty is None and default is not None:
        if isinstance(default, tuple):
            ty = tuple(map(type, default))
        else:
            ty = type(default)
        guessed_type = True

    if isinstance(ty, tuple):
        return Tuple(ty)
    if isinstance(ty, ParamType):
        return ty
    # (source chunk is truncated here mid-statement)
    if ty is text_type or ty is str or ty is
None:return STRINGif ty is int:return INT# Booleans are only okay if not guessed. This is done because for# flags the default value is actually a bit of a lie in that it# indicates which of the flags is the one we want. See get_default()# for more information.if ty is bool and not guessed_type:return BOOLif ty is float:return FLOATif guessed_type:return STRING# Catch a common mistakeif __debug__:try:if issubclass(ty, ParamType):raise AssertionError('Attempted to use an uninstantiated ''parameter type (%s).' % ty)except TypeError:passreturn FuncParamType(ty)#: A dummy parameter type that just does nothing. From a user's#: perspective this appears to just be the same as `STRING` but internally#: no string conversion takes place. This is necessary to achieve the#: same bytes/unicode behavior on Python 2/3 in situations where you want#: to not convert argument types. This is usually useful when working#: with file paths as they can appear in bytes and unicode.#:#: For path related uses the :class:`Path` type is a better choice but#: there are situations where an unprocessed type is useful which is why#: it is is provided.#:#: .. versionadded:: 4.0UNPROCESSED = UnprocessedParamType()#: A unicode string parameter type which is the implicit default. This#: can also be selected by using ``str`` as type.STRING = StringParamType()#: An integer parameter. This can also be selected by using ``int`` as#: type.INT = IntParamType()#: A floating point value parameter. This can also be selected by using#: ``float`` as type.FLOAT = FloatParamType()#: A boolean parameter. This is the default for boolean flags. This can#: also be selected by using ``bool`` as a type.BOOL = BoolParamType()#: A UUID parameter.UUID = UUIDParameterType()
import os
import sys
import shutil
import tempfile
import contextlib

from ._compat import iteritems, PY2


# If someone wants to vendor click, we want to ensure the
# correct package is discovered.  Ideally we could use a
# relative import here but unfortunately Python does not
# support that.
clickpkg = sys.modules[__name__.rsplit('.', 1)[0]]


if PY2:
    from cStringIO import StringIO
else:
    import io
    from ._compat import _find_binary_reader


class EchoingStdin(object):
    # Wraps a stdin stream so that everything read from it is also
    # written to ``output`` (used to echo piped input in test runs).

    def __init__(self, input, output):
        self._input = input
        self._output = output

    def __getattr__(self, x):
        # Delegate everything not overridden here to the real stream.
        return getattr(self._input, x)

    def _echo(self, rv):
        self._output.write(rv)
        return rv

    def read(self, n=-1):
        return self._echo(self._input.read(n))

    def readline(self, n=-1):
        return self._echo(self._input.readline(n))

    def readlines(self):
        return [self._echo(x) for x in self._input.readlines()]

    def __iter__(self):
        return iter(self._echo(x) for x in self._input)

    def __repr__(self):
        return repr(self._input)


def make_input_stream(input, charset):
    # Normalize `input` (None, text, bytes, or a stream) into a binary
    # input stream suitable for use as sys.stdin.
    # Is already an input stream.
    if hasattr(input, 'read'):
        if PY2:
            return input
        rv = _find_binary_reader(input)
        if rv is not None:
            return rv
        raise TypeError('Could not find binary reader for input stream.')

    if input is None:
        input = b''
    elif not isinstance(input, bytes):
        input = input.encode(charset)
    if PY2:
        return StringIO(input)
    return io.BytesIO(input)


class Result(object):
    """Holds the captured result of an invoked CLI script."""

    def __init__(self, runner, output_bytes, exit_code, exception,
                 exc_info=None):
        #: The runner that created the result
        self.runner = runner
        #: The output as bytes.
        self.output_bytes = output_bytes
        #: The exit code as integer.
        self.exit_code = exit_code
        #: The exception that happened if one did.
        self.exception = exception
        #: The traceback
        self.exc_info = exc_info

    @property
    def output(self):
        """The output as unicode string."""
        return self.output_bytes.decode(self.runner.charset, 'replace') \
            .replace('\r\n', '\n')

    def __repr__(self):
        return '<Result %s>' % (
            self.exception and repr(self.exception) or 'okay',
        )


class CliRunner(object):
    """The CLI runner provides functionality to invoke a Click command line
    script for unittesting purposes in a isolated environment.  This only
    works in single-threaded systems without any concurrency as it changes the
    global interpreter state.

    :param charset: the character set for the input and output data.  This is
                    UTF-8 by default and should not be changed currently as
                    the reporting to Click only works in Python 2 properly.
    :param env: a dictionary with environment variables for overriding.
    :param echo_stdin: if this is set to `True`, then reading from stdin writes
                       to stdout.  This is useful for showing examples in
                       some circumstances.  Note that regular prompts
                       will automatically echo the input.
    """

    def __init__(self, charset=None, env=None, echo_stdin=False):
        if charset is None:
            charset = 'utf-8'
        self.charset = charset
        self.env = env or {}
        self.echo_stdin = echo_stdin

    def get_default_prog_name(self, cli):
        """Given a command object it will return the default program name
        for it.  The default is the `name` attribute or ``"root"`` if not
        set.
        """
        return cli.name or 'root'

    def make_env(self, overrides=None):
        """Returns the environment overrides for invoking a script."""
        rv = dict(self.env)
        if overrides:
            rv.update(overrides)
        return rv

    @contextlib.contextmanager
    def isolation(self, input=None, env=None, color=False):
        """A context manager that sets up the isolation for invoking of a
        command line tool.  This sets up stdin with the given input data
        and `os.environ` with the overrides from the given dictionary.
        This also rebinds some internals in Click to be mocked (like the
        prompt functionality).

        This is automatically done in the :meth:`invoke` method.

        .. versionadded:: 4.0
           The ``color`` parameter was added.

        :param input: the input stream to put into sys.stdin.
        :param env: the environment overrides as dictionary.
        :param color: whether the output should contain color codes.  The
                      application can still override this explicitly.
        """
        input = make_input_stream(input, self.charset)

        # Save global state so the `finally` block can restore it; the
        # ordering of these assignments mirrors the restore order below.
        old_stdin = sys.stdin
        old_stdout = sys.stdout
        old_stderr = sys.stderr
        old_forced_width = clickpkg.formatting.FORCED_WIDTH
        clickpkg.formatting.FORCED_WIDTH = 80

        env = self.make_env(env)

        if PY2:
            sys.stdout = sys.stderr = bytes_output = StringIO()
            if self.echo_stdin:
                input = EchoingStdin(input, bytes_output)
        else:
            bytes_output = io.BytesIO()
            if self.echo_stdin:
                input = EchoingStdin(input, bytes_output)
            input = io.TextIOWrapper(input, encoding=self.charset)
            sys.stdout = sys.stderr = io.TextIOWrapper(
                bytes_output, encoding=self.charset)

        sys.stdin = input

        def visible_input(prompt=None):
            # Replacement for the interactive prompt: read from the fake
            # stdin and echo what was "typed" into captured stdout.
            sys.stdout.write(prompt or '')
            val = input.readline().rstrip('\r\n')
            sys.stdout.write(val + '\n')
            sys.stdout.flush()
            return val

        def hidden_input(prompt=None):
            # Like visible_input but never echoes the value (getpass-style).
            sys.stdout.write((prompt or '') + '\n')
            sys.stdout.flush()
            return input.readline().rstrip('\r\n')

        def _getchar(echo):
            char = sys.stdin.read(1)
            if echo:
                sys.stdout.write(char)
                sys.stdout.flush()
            return char

        default_color = color

        def should_strip_ansi(stream=None, color=None):
            if color is None:
                return not default_color
            return not color

        # Monkey-patch Click's prompt/char/ANSI hooks for the duration of
        # the isolation; the originals are restored in `finally`.
        old_visible_prompt_func = clickpkg.termui.visible_prompt_func
        old_hidden_prompt_func = clickpkg.termui.hidden_prompt_func
        old__getchar_func = clickpkg.termui._getchar
        old_should_strip_ansi = clickpkg.utils.should_strip_ansi
        clickpkg.termui.visible_prompt_func = visible_input
        clickpkg.termui.hidden_prompt_func = hidden_input
        clickpkg.termui._getchar = _getchar
        clickpkg.utils.should_strip_ansi = should_strip_ansi

        old_env = {}
        try:
            # Apply environment overrides, remembering previous values
            # (`None` marks a variable that did not exist before).
            for key, value in iteritems(env):
                old_env[key] = os.environ.get(key)
                if value is None:
                    try:
                        del os.environ[key]
                    except Exception:
                        pass
                else:
                    os.environ[key] = value
            yield bytes_output
        finally:
            # Restore environment and all patched global state.
            for key, value in iteritems(old_env):
                if value is None:
                    try:
                        del os.environ[key]
                    except Exception:
                        pass
                else:
                    os.environ[key] = value
            sys.stdout = old_stdout
            sys.stderr = old_stderr
            sys.stdin = old_stdin
            clickpkg.termui.visible_prompt_func = old_visible_prompt_func
            clickpkg.termui.hidden_prompt_func = old_hidden_prompt_func
            clickpkg.termui._getchar = old__getchar_func
            clickpkg.utils.should_strip_ansi = old_should_strip_ansi
            clickpkg.formatting.FORCED_WIDTH = old_forced_width

    def invoke(self, cli, args=None, input=None, env=None,
               catch_exceptions=True, color=False, **extra):
        """Invokes a command in an isolated environment.  The arguments are
        forwarded directly to the command line script, the `extra` keyword
        arguments are passed to the :meth:`~clickpkg.Command.main` function of
        the command.

        This returns a :class:`Result` object.

        .. versionadded:: 3.0
           The ``catch_exceptions`` parameter was added.

        .. versionchanged:: 3.0
           The result object now has an `exc_info` attribute with the
           traceback if available.

        .. versionadded:: 4.0
           The ``color`` parameter was added.

        :param cli: the command to invoke
        :param args: the arguments to invoke
        :param input: the input data for `sys.stdin`.
        :param env: the environment overrides.
        :param catch_exceptions: Whether to catch any other exceptions than
                                 ``SystemExit``.
        :param extra: the keyword arguments to pass to :meth:`main`.
        :param color: whether the output should contain color codes.  The
                      application can still override this explicitly.
        """
        exc_info = None
        with self.isolation(input=input, env=env, color=color) as out:
            exception = None
            exit_code = 0

            try:
                cli.main(args=args or (),
                         prog_name=self.get_default_prog_name(cli), **extra)
            except SystemExit as e:
                # `click.main` exits via SystemExit; a zero code is a
                # normal, successful termination.
                if e.code != 0:
                    exception = e
                    exc_info = sys.exc_info()

                exit_code = e.code
                if not isinstance(exit_code, int):
                    # sys.exit("message") style: print the message and
                    # report failure.
                    sys.stdout.write(str(exit_code))
                    sys.stdout.write('\n')
                    exit_code = 1
            except Exception as e:
                if not catch_exceptions:
                    raise
                exception = e
                exit_code = -1
                exc_info = sys.exc_info()
            finally:
                sys.stdout.flush()
                output = out.getvalue()

        return Result(runner=self,
                      output_bytes=output,
                      exit_code=exit_code,
                      exception=exception,
                      exc_info=exc_info)

    @contextlib.contextmanager
    def isolated_filesystem(self):
        """A context manager that creates a temporary folder and changes
        the current working directory to it for isolated filesystem tests.
        """
        cwd = os.getcwd()
        t = tempfile.mkdtemp()
        os.chdir(t)
        try:
            yield t
        finally:
            os.chdir(cwd)
            # Best-effort cleanup: on some platforms (e.g. Windows with
            # open handles) removal can fail; ignore that.
            try:
                shutil.rmtree(t)
            except (OSError, IOError):
                pass
import os
import sys
import struct

from ._compat import raw_input, text_type, string_types, \
     isatty, strip_ansi, get_winterm_size, DEFAULT_COLUMNS, WIN
from .utils import echo
from .exceptions import Abort, UsageError
from .types import convert_type
from .globals import resolve_color_default


# The prompt functions to use.  The doc tools currently override these
# functions to customize how they work.
visible_prompt_func = raw_input

_ansi_colors = ('black', 'red', 'green', 'yellow', 'blue', 'magenta',
                'cyan', 'white', 'reset')
_ansi_reset_all = '\033[0m'


def hidden_prompt_func(prompt):
    # Default no-echo prompt; uses getpass so the typed value is hidden.
    import getpass
    return getpass.getpass(prompt)


def _build_prompt(text, suffix, show_default=False, default=None):
    # Compose the final prompt string, optionally appending the default
    # value in brackets ("text [default]: ").
    prompt = text
    if default is not None and show_default:
        prompt = '%s [%s]' % (prompt, default)
    return prompt + suffix


def prompt(text, default=None, hide_input=False,
           confirmation_prompt=False, type=None,
           value_proc=None, prompt_suffix=': ',
           show_default=True, err=False):
    """Prompts a user for input.  This is a convenience function that can
    be used to prompt a user for input later.

    If the user aborts the input by sending a interrupt signal, this
    function will catch it and raise a :exc:`Abort` exception.

    .. versionadded:: 6.0
       Added unicode support for cmd.exe on Windows.

    .. versionadded:: 4.0
       Added the `err` parameter.

    :param text: the text to show for the prompt.
    :param default: the default value to use if no input happens.  If this
                    is not given it will prompt until it's aborted.
    :param hide_input: if this is set to true then the input value will
                       be hidden.
    :param confirmation_prompt: asks for confirmation for the value.
    :param type: the type to use to check the value against.
    :param value_proc: if this parameter is provided it's a function that
                       is invoked instead of the type conversion to
                       convert a value.
    :param prompt_suffix: a suffix that should be added to the prompt.
    :param show_default: shows or hides the default value in the prompt.
    :param err: if set to true the file defaults to ``stderr`` instead of
                ``stdout``, the same as with echo.
    """
    result = None

    def prompt_func(text):
        f = hide_input and hidden_prompt_func or visible_prompt_func
        try:
            # Write the prompt separately so that we get nice
            # coloring through colorama on Windows
            echo(text, nl=False, err=err)
            return f('')
        except (KeyboardInterrupt, EOFError):
            # getpass doesn't print a newline if the user aborts input with ^C.
            # Allegedly this behavior is inherited from getpass(3).
            # A doc bug has been filed at https://bugs.python.org/issue24711
            if hide_input:
                echo(None, err=err)
            raise Abort()

    if value_proc is None:
        value_proc = convert_type(type, default)

    prompt = _build_prompt(text, prompt_suffix, show_default, default)

    while 1:
        # Inner loop: re-ask until the user enters something or a
        # default exists.
        while 1:
            value = prompt_func(prompt)
            if value:
                break
            # If a default is set and used, then the confirmation
            # prompt is always skipped because that's the only thing
            # that really makes sense.
            elif default is not None:
                return default
        try:
            result = value_proc(value)
        except UsageError as e:
            echo('Error: %s' % e.message, err=err)
            continue
        if not confirmation_prompt:
            return result
        # Ask again and only accept when both entries match.
        while 1:
            value2 = prompt_func('Repeat for confirmation: ')
            if value2:
                break
        if value == value2:
            return result
        echo('Error: the two entered values do not match', err=err)


def confirm(text, default=False, abort=False, prompt_suffix=': ',
            show_default=True, err=False):
    """Prompts for confirmation (yes/no question).

    If the user aborts the input by sending a interrupt signal this
    function will catch it and raise a :exc:`Abort` exception.

    .. versionadded:: 4.0
       Added the `err` parameter.

    :param text: the question to ask.
    :param default: the default for the prompt.
    :param abort: if this is set to `True` a negative answer aborts the
                  exception by raising :exc:`Abort`.
    :param prompt_suffix: a suffix that should be added to the prompt.
    :param show_default: shows or hides the default value in the prompt.
    :param err: if set to true the file defaults to ``stderr`` instead of
                ``stdout``, the same as with echo.
    """
    prompt = _build_prompt(text, prompt_suffix, show_default,
                           default and 'Y/n' or 'y/N')
    while 1:
        try:
            # Write the prompt separately so that we get nice
            # coloring through colorama on Windows
            echo(prompt, nl=False, err=err)
            value = visible_prompt_func('').lower().strip()
        except (KeyboardInterrupt, EOFError):
            raise Abort()
        if value in ('y', 'yes'):
            rv = True
        elif value in ('n', 'no'):
            rv = False
        elif value == '':
            rv = default
        else:
            echo('Error: invalid input', err=err)
            continue
        break
    if abort and not rv:
        raise Abort()
    return rv


def get_terminal_size():
    """Returns the current size of the terminal as tuple in the form
    ``(width, height)`` in columns and rows.
    """
    # If shutil has get_terminal_size() (Python 3.3 and later) use that
    if sys.version_info >= (3, 3):
        import shutil
        shutil_get_terminal_size = getattr(shutil, 'get_terminal_size', None)
        if shutil_get_terminal_size:
            sz = shutil_get_terminal_size()
            return sz.columns, sz.lines

    if get_winterm_size is not None:
        return get_winterm_size()

    def ioctl_gwinsz(fd):
        # Query the kernel for the window size of the given fd; returns
        # (rows, cols) or None on failure.
        try:
            import fcntl
            import termios
            cr = struct.unpack(
                'hh', fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234'))
        except Exception:
            return
        return cr

    cr = ioctl_gwinsz(0) or ioctl_gwinsz(1) or ioctl_gwinsz(2)
    if not cr:
        # stdin/stdout/stderr were all redirected; try the controlling
        # terminal directly.
        try:
            fd = os.open(os.ctermid(), os.O_RDONLY)
            try:
                cr = ioctl_gwinsz(fd)
            finally:
                os.close(fd)
        except Exception:
            pass
    if not cr or not cr[0] or not cr[1]:
        # Fall back to the LINES/COLUMNS environment variables.
        cr = (os.environ.get('LINES', 25),
              os.environ.get('COLUMNS', DEFAULT_COLUMNS))
    # cr is (rows, cols); the public contract is (width, height).
    return int(cr[1]), int(cr[0])


def echo_via_pager(text, color=None):
    """This function takes a text and shows it via an environment specific
    pager on stdout.

    .. versionchanged:: 3.0
       Added the `color` flag.

    :param text: the text to page.
    :param color: controls if the pager supports ANSI colors or not.  The
                  default is autodetection.
    """
    color = resolve_color_default(color)
    if not isinstance(text, string_types):
        text = text_type(text)
    from ._termui_impl import pager
    return pager(text + '\n', color)


def progressbar(iterable=None, length=None, label=None, show_eta=True,
                show_percent=None, show_pos=False,
                item_show_func=None, fill_char='#', empty_char='-',
                bar_template='%(label)s  [%(bar)s]  %(info)s',
                info_sep='  ', width=36, file=None, color=None):
    """This function creates an iterable context manager that can be used
    to iterate over something while showing a progress bar.  It will
    either iterate over the `iterable` or `length` items (that are counted
    up).  While iteration happens, this function will print a rendered
    progress bar to the given `file` (defaults to stdout) and will attempt
    to calculate remaining time and more.  By default, this progress bar
    will not be rendered if the file is not a terminal.

    The context manager creates the progress bar.  When the context
    manager is entered the progress bar is already displayed.  With every
    iteration over the progress bar, the iterable passed to the bar is
    advanced and the bar is updated.  When the context manager exits,
    a newline is printed and the progress bar is finalized on screen.

    No printing must happen or the progress bar will be unintentionally
    destroyed.

    Example usage::

        with progressbar(items) as bar:
            for item in bar:
                do_something_with(item)

    Alternatively, if no iterable is specified, one can manually update the
    progress bar through the `update()` method instead of directly
    iterating over the progress bar.  The update method accepts the number
    of steps to increment the bar with::

        with progressbar(length=chunks.total_bytes) as bar:
            for chunk in chunks:
                process_chunk(chunk)
                bar.update(chunks.bytes)

    .. versionadded:: 2.0

    .. versionadded:: 4.0
       Added the `color` parameter.  Added a `update` method to the
       progressbar object.

    :param iterable: an iterable to iterate over.  If not provided the length
                     is required.
    :param length: the number of items to iterate over.  By default the
                   progressbar will attempt to ask the iterator about its
                   length, which might or might not work.  If an iterable is
                   also provided this parameter can be used to override the
                   length.  If an iterable is not provided the progress bar
                   will iterate over a range of that length.
    :param label: the label to show next to the progress bar.
    :param show_eta: enables or disables the estimated time display.  This is
                     automatically disabled if the length cannot be
                     determined.
    :param show_percent: enables or disables the percentage display.  The
                         default is `True` if the iterable has a length or
                         `False` if not.
    :param show_pos: enables or disables the absolute position display.  The
                     default is `False`.
    :param item_show_func: a function called with the current item which
                           can return a string to show the current item
                           next to the progress bar.  Note that the current
                           item can be `None`!
    :param fill_char: the character to use to show the filled part of the
                      progress bar.
    :param empty_char: the character to use to show the non-filled part of
                       the progress bar.
    :param bar_template: the format string to use as template for the bar.
                         The parameters in it are ``label`` for the label,
                         ``bar`` for the progress bar and ``info`` for the
                         info section.
    :param info_sep: the separator between multiple info items (eta etc.)
    :param width: the width of the progress bar in characters, 0 means full
                  terminal width
    :param file: the file to write to.  If this is not a terminal then
                 only the label is printed.
    :param color: controls if the terminal supports ANSI colors or not.  The
                  default is autodetection.  This is only needed if ANSI
                  codes are included anywhere in the progress bar output
                  which is not the case by default.
    """
    from ._termui_impl import ProgressBar
    color = resolve_color_default(color)
    return ProgressBar(iterable=iterable, length=length, show_eta=show_eta,
                       show_percent=show_percent, show_pos=show_pos,
                       item_show_func=item_show_func, fill_char=fill_char,
                       empty_char=empty_char, bar_template=bar_template,
                       info_sep=info_sep, file=file, label=label,
                       width=width, color=color)


def clear():
    """Clears the terminal screen.  This will have the effect of clearing
    the whole visible space of the terminal and moving the cursor to the
    top left.  This does not do anything if not connected to a terminal.

    .. versionadded:: 2.0
    """
    if not isatty(sys.stdout):
        return
    # If we're on Windows and we don't have colorama available, then we
    # clear the screen by shelling out.  Otherwise we can use an escape
    # sequence.
    if WIN:
        os.system('cls')
    else:
        sys.stdout.write('\033[2J\033[1;1H')


def style(text, fg=None, bg=None, bold=None, dim=None, underline=None,
          blink=None, reverse=None, reset=True):
    """Styles a text with ANSI styles and returns the new string.  By
    default the styling is self contained which means that at the end
    of the string a reset code is issued.  This can be prevented by
    passing ``reset=False``.

    Examples::

        click.echo(click.style('Hello World!', fg='green'))
        click.echo(click.style('ATTENTION!', blink=True))
        click.echo(click.style('Some things', reverse=True, fg='cyan'))

    Supported color names:

    * ``black`` (might be a gray)
    * ``red``
    * ``green``
    * ``yellow`` (might be an orange)
    * ``blue``
    * ``magenta``
    * ``cyan``
    * ``white`` (might be light gray)
    * ``reset`` (reset the color code only)

    .. versionadded:: 2.0

    :param text: the string to style with ansi codes.
    :param fg: if provided this will become the foreground color.
    :param bg: if provided this will become the background color.
    :param bold: if provided this will enable or disable bold mode.
    :param dim: if provided this will enable or disable dim mode.  This is
                badly supported.
    :param underline: if provided this will enable or disable underline.
    :param blink: if provided this will enable or disable blinking.
    :param reverse: if provided this will enable or disable inverse
                    rendering (foreground becomes background and the
                    other way round).
    :param reset: by default a reset-all code is added at the end of the
                  string which means that styles do not carry over.  This
                  can be disabled to compose styles.
    """
    bits = []
    if fg:
        try:
            # 30-37: standard ANSI foreground color codes.
            bits.append('\033[%dm' % (_ansi_colors.index(fg) + 30))
        except ValueError:
            raise TypeError('Unknown color %r' % fg)
    if bg:
        try:
            # 40-47: standard ANSI background color codes.
            bits.append('\033[%dm' % (_ansi_colors.index(bg) + 40))
        except ValueError:
            raise TypeError('Unknown color %r' % bg)
    if bold is not None:
        bits.append('\033[%dm' % (1 if bold else 22))
    if dim is not None:
        bits.append('\033[%dm' % (2 if dim else 22))
    if underline is not None:
        bits.append('\033[%dm' % (4 if underline else 24))
    if blink is not None:
        bits.append('\033[%dm' % (5 if blink else 25))
    if reverse is not None:
        bits.append('\033[%dm' % (7 if reverse else 27))
    bits.append(text)
    if reset:
        bits.append(_ansi_reset_all)
    return ''.join(bits)


def unstyle(text):
    """Removes ANSI styling information from a string.  Usually it's not
    necessary to use this function as Click's echo function will
    automatically remove styling if necessary.

    .. versionadded:: 2.0

    :param text: the text to remove style information from.
    """
    return strip_ansi(text)


def secho(text, file=None, nl=True, err=False, color=None, **styles):
    """This function combines :func:`echo` and :func:`style` into one
    call.  As such the following two calls are the same::

        click.secho('Hello World!', fg='green')
        click.echo(click.style('Hello World!', fg='green'))

    All keyword arguments are forwarded to the underlying functions
    depending on which one they go with.

    .. versionadded:: 2.0
    """
    return echo(style(text, **styles), file=file, nl=nl, err=err, color=color)


def edit(text=None, editor=None, env=None, require_save=True,
         extension='.txt', filename=None):
    r"""Edits the given text in the defined editor.  If an editor is given
    (should be the full path to the executable but the regular operating
    system search path is used for finding the executable) it overrides
    the detected editor.  Optionally, some environment variables can be
    used.  If the editor is closed without changes, `None` is returned.  In
    case a file is edited directly the return value is always `None` and
    `require_save` and `extension` are ignored.

    If the editor cannot be opened a :exc:`UsageError` is raised.

    Note for Windows: to simplify cross-platform usage, the newlines are
    automatically converted from POSIX to Windows and vice versa.  As such,
    the message here will have ``\n`` as newline markers.

    :param text: the text to edit.
    :param editor: optionally the editor to use.  Defaults to automatic
                   detection.
    :param env: environment variables to forward to the editor.
    :param require_save: if this is true, then not saving in the editor
                         will make the return value become `None`.
    :param extension: the extension to tell the editor about.  This defaults
                      to `.txt` but changing this might change syntax
                      highlighting.
    :param filename: if provided it will edit this file instead of the
                     provided text contents.  It will not use a temporary
                     file as an indirection in that case.
    """
    from ._termui_impl import Editor
    editor = Editor(editor=editor, env=env, require_save=require_save,
                    extension=extension)
    if filename is None:
        return editor.edit(text)
    editor.edit_file(filename)


def launch(url, wait=False, locate=False):
    """This function launches the given URL (or filename) in the default
    viewer application for this file type.  If this is an executable, it
    might launch the executable in a new session.  The return value is
    the exit code of the launched application.  Usually, ``0`` indicates
    success.

    Examples::

        click.launch('http://click.pocoo.org/')
        click.launch('/my/downloaded/file', locate=True)

    .. versionadded:: 2.0

    :param url: URL or filename of the thing to launch.
    :param wait: waits for the program to stop.
    :param locate: if this is set to `True` then instead of launching the
                   application associated with the URL it will attempt to
                   launch a file manager with the file located.  This
                   might have weird effects if the URL does not point to
                   the filesystem.
    """
    from ._termui_impl import open_url
    return open_url(url, wait=wait, locate=locate)


# If this is provided, getchar() calls into this instead.  This is used
# for unittesting purposes.
_getchar = None


def getchar(echo=False):
    """Fetches a single character from the terminal and returns it.  This
    will always return a unicode character and under certain rare
    circumstances this might return more than one character.  The
    situations which more than one character is returned is when for
    whatever reason multiple characters end up in the terminal buffer or
    standard input was not actually a terminal.

    Note that this will always read from the terminal, even if something
    is piped into the standard input.

    .. versionadded:: 2.0

    :param echo: if set to `True`, the character read will also show up on
                 the terminal.  The default is to not show it.
    """
    f = _getchar
    if f is None:
        from ._termui_impl import getchar as f
    return f(echo)


def pause(info='Press any key to continue ...', err=False):
    """This command stops execution and waits for the user to press any
    key to continue.  This is similar to the Windows batch "pause"
    command.  If the program is not run through a terminal, this command
    will instead do nothing.

    .. versionadded:: 2.0

    .. versionadded:: 4.0
       Added the `err` parameter.

    :param info: the info string to print before pausing.
    :param err: if set to message goes to ``stderr`` instead of
                ``stdout``, the same as with echo.
    """
    # Pausing only makes sense when attached to a real terminal.
    if not isatty(sys.stdin) or not isatty(sys.stdout):
        return
    try:
        if info:
            echo(info, nl=False, err=err)
        try:
            getchar()
        except (KeyboardInterrupt, EOFError):
            pass
    finally:
        if info:
            echo(err=err)
# -*- coding: utf-8 -*-
"""
    click.parser
    ~~~~~~~~~~~~

    This module started out as largely a copy paste from the stdlib's
    optparse module with the features removed that we do not need from
    optparse because we implement them in Click on a higher level (for
    instance type handling, help formatting and a lot more).

    The plan is to remove more and more from here over time.

    The reason this is a different module and not optparse from the stdlib
    is that there are differences in 2.x and 3.x about the error messages
    generated and optparse in the stdlib uses gettext for no good reason
    and might cause us issues.
"""
import re
from collections import deque
from .exceptions import UsageError, NoSuchOption, BadOptionUsage, \
     BadArgumentUsage


def _unpack_args(args, nargs_spec):
    """Given an iterable of arguments and an iterable of nargs specifications,
    it returns a tuple with all the unpacked arguments at the first index
    and all remaining arguments as the second.

    The nargs specification is the number of arguments that should be consumed
    or `-1` to indicate that this position should eat up all the remainders.

    Missing items are filled with `None`.
    """
    args = deque(args)
    nargs_spec = deque(nargs_spec)
    rv = []
    spos = None

    # Pops from the left before the wildcard position has been seen and
    # from the right afterwards, so arguments on both sides of a `nargs=-1`
    # parameter are assigned correctly.
    def _fetch(c):
        try:
            if spos is None:
                return c.popleft()
            else:
                return c.pop()
        except IndexError:
            return None

    while nargs_spec:
        nargs = _fetch(nargs_spec)
        if nargs == 1:
            rv.append(_fetch(args))
        elif nargs > 1:
            x = [_fetch(args) for _ in range(nargs)]
            # If we're reversed, we're pulling in the arguments in reverse,
            # so we need to turn them around.
            if spos is not None:
                x.reverse()
            rv.append(tuple(x))
        elif nargs < 0:
            if spos is not None:
                raise TypeError('Cannot have two nargs < 0')
            spos = len(rv)
            rv.append(None)

    # spos is the position of the wildcard (star).  If it's not `None`,
    # we fill it with the remainder.
    if spos is not None:
        rv[spos] = tuple(args)
        args = []
        rv[spos + 1:] = reversed(rv[spos + 1:])

    return tuple(rv), list(args)


def _error_opt_args(nargs, opt):
    # Raise a UsageError describing how many values `opt` still needs.
    if nargs == 1:
        raise BadOptionUsage('%s option requires an argument' % opt)
    raise BadOptionUsage('%s option requires %d arguments' % (opt, nargs))


def split_opt(opt):
    """Split an option string into a ``(prefix, name)`` pair, e.g.
    ``'--foo'`` -> ``('--', 'foo')`` and ``'-f'`` -> ``('-', 'f')``.
    A string without a prefix character yields an empty prefix.
    """
    first = opt[:1]
    if first.isalnum():
        return '', opt
    if opt[1:2] == first:
        return opt[:2], opt[2:]
    return first, opt[1:]


def normalize_opt(opt, ctx):
    # Apply the context's token normalization function (if any) to the
    # name part of the option while keeping the prefix untouched.
    if ctx is None or ctx.token_normalize_func is None:
        return opt
    prefix, opt = split_opt(opt)
    return prefix + ctx.token_normalize_func(opt)


def split_arg_string(string):
    """Given an argument string this attempts to split it into small parts."""
    rv = []
    for match in re.finditer(r"('([^'\\]*(?:\\.[^'\\]*)*)'"
                             r'|"([^"\\]*(?:\\.[^"\\]*)*)"'
                             r'|\S+)\s*', string, re.S):
        arg = match.group().strip()
        if arg[:1] == arg[-1:] and arg[:1] in '"\'':
            # Strip the surrounding quotes and resolve backslash escapes.
            arg = arg[1:-1].encode('ascii', 'backslashreplace') \
                .decode('unicode-escape')
        try:
            arg = type(string)(arg)
        except UnicodeError:
            pass
        rv.append(arg)
    return rv


class Option(object):
    """Internal representation of a single option for the parser.

    Splits the given opt strings into short and long variants and records
    the prefixes used, the destination key, the action and how many values
    the option consumes.
    """

    def __init__(self, opts, dest, action=None, nargs=1, const=None, obj=None):
        self._short_opts = []
        self._long_opts = []
        self.prefixes = set()

        for opt in opts:
            prefix, value = split_opt(opt)
            if not prefix:
                raise ValueError('Invalid start character for option (%s)'
                                 % opt)
            self.prefixes.add(prefix[0])
            if len(prefix) == 1 and len(value) == 1:
                self._short_opts.append(opt)
            else:
                self._long_opts.append(opt)
                self.prefixes.add(prefix)

        if action is None:
            action = 'store'

        self.dest = dest
        self.action = action
        self.nargs = nargs
        self.const = const
        self.obj = obj

    @property
    def takes_value(self):
        # Only the plain store/append actions consume a value from the
        # argument stream; const/count actions do not.
        return self.action in ('store', 'append')

    def process(self, value, state):
        # Record the parsed value on the state according to the action and
        # remember the processing order.
        if self.action == 'store':
            state.opts[self.dest] = value
        elif self.action == 'store_const':
            state.opts[self.dest] = self.const
        elif self.action == 'append':
            state.opts.setdefault(self.dest, []).append(value)
        elif self.action == 'append_const':
            state.opts.setdefault(self.dest, []).append(self.const)
        elif self.action == 'count':
            state.opts[self.dest] = state.opts.get(self.dest, 0) + 1
        else:
            raise ValueError('unknown action %r' % self.action)
        state.order.append(self.obj)


class Argument(object):
    """Internal representation of a positional argument for the parser."""

    def __init__(self, dest, nargs=1, obj=None):
        self.dest = dest
        self.nargs = nargs
        self.obj = obj

    def process(self, value, state):
        if self.nargs > 1:
            # `value` is a tuple padded with `None` for missing items; it is
            # either completely missing (all holes) or must be complete.
            holes = sum(1 for x in value if x is None)
            if holes == len(value):
                value = None
            elif holes != 0:
                raise BadArgumentUsage('argument %s takes %d values'
                                       % (self.dest, self.nargs))
        state.opts[self.dest] = value
        state.order.append(self.obj)


class ParsingState(object):
    """Mutable state threaded through one :meth:`OptionParser.parse_args`
    run: collected option values, leftover args, remaining args and the
    order in which parameters were seen.
    """

    def __init__(self, rargs):
        self.opts = {}
        self.largs = []
        self.rargs = rargs
        self.order = []


class OptionParser(object):
    """The option parser is an internal class that is ultimately used to
    parse options and arguments.  It's modelled after optparse and brings
    a similar but vastly simplified API.  It should generally not be used
    directly as the high level Click classes wrap it for you.

    It's not nearly as extensible as optparse or argparse as it does not
    implement features that are implemented on a higher level (such as
    types or defaults).

    :param ctx: optionally the :class:`~click.Context` where this parser
                should go with.
    """

    def __init__(self, ctx=None):
        #: The :class:`~click.Context` for this parser.  This might be
        #: `None` for some advanced use cases.
        self.ctx = ctx
        #: This controls how the parser deals with interspersed arguments.
        #: If this is set to `False`, the parser will stop on the first
        #: non-option.  Click uses this to implement nested subcommands
        #: safely.
        self.allow_interspersed_args = True
        #: This tells the parser how to deal with unknown options.  By
        #: default it will error out (which is sensible), but there is a
        #: second mode where it will ignore it and continue processing
        #: after shifting all the unknown options into the resulting args.
        self.ignore_unknown_options = False
        if ctx is not None:
            self.allow_interspersed_args = ctx.allow_interspersed_args
            self.ignore_unknown_options = ctx.ignore_unknown_options
        self._short_opt = {}
        self._long_opt = {}
        self._opt_prefixes = set(['-', '--'])
        self._args = []

    def add_option(self, opts, dest, action=None, nargs=1, const=None,
                   obj=None):
        """Adds a new option named `dest` to the parser.  The destination
        is not inferred (unlike with optparse) and needs to be explicitly
        provided.  Action can be any of ``store``, ``store_const``,
        ``append``, ``append_const`` or ``count``.

        The `obj` can be used to identify the option in the order list
        that is returned from the parser.
        """
        if obj is None:
            obj = dest
        opts = [normalize_opt(opt, self.ctx) for opt in opts]
        option = Option(opts, dest, action=action, nargs=nargs,
                        const=const, obj=obj)
        self._opt_prefixes.update(option.prefixes)
        for opt in option._short_opts:
            self._short_opt[opt] = option
        for opt in option._long_opts:
            self._long_opt[opt] = option

    def add_argument(self, dest, nargs=1, obj=None):
        """Adds a positional argument named `dest` to the parser.

        The `obj` can be used to identify the option in the order list
        that is returned from the parser.
        """
        if obj is None:
            obj = dest
        self._args.append(Argument(dest=dest, nargs=nargs, obj=obj))

    def parse_args(self, args):
        """Parses positional arguments and returns ``(values, args, order)``
        for the parsed options and arguments as well as the leftover
        arguments if there are any.  The order is a list of objects as they
        appear on the command line.  If arguments appear multiple times they
        will be memorized multiple times as well.
        """
        state = ParsingState(args)
        try:
            self._process_args_for_options(state)
            self._process_args_for_args(state)
        except UsageError:
            # In resilient parsing mode usage errors are swallowed and a
            # partial result is returned (used e.g. for completion).
            if self.ctx is None or not self.ctx.resilient_parsing:
                raise
        return state.opts, state.largs, state.order

    def _process_args_for_args(self, state):
        # Distribute everything that was not consumed by options over the
        # declared positional arguments.
        pargs, args = _unpack_args(state.largs + state.rargs,
                                   [x.nargs for x in self._args])

        for idx, arg in enumerate(self._args):
            arg.process(pargs[idx], state)

        state.largs = args
        state.rargs = []

    def _process_args_for_options(self, state):
        while state.rargs:
            arg = state.rargs.pop(0)
            arglen = len(arg)
            # Double dashes always handled explicitly regardless of what
            # prefixes are valid.
            if arg == '--':
                return
            elif arg[:1] in self._opt_prefixes and arglen > 1:
                self._process_opts(arg, state)
            elif self.allow_interspersed_args:
                state.largs.append(arg)
            else:
                state.rargs.insert(0, arg)
                return

        # Say this is the original argument list:
        # [arg0, arg1, ..., arg(i-1), arg(i), arg(i+1), ..., arg(N-1)]
        #                            ^
        # (we are about to process arg(i)).
        #
        # Then rargs is [arg(i), ..., arg(N-1)] and largs is a *subset* of
        # [arg0, ..., arg(i-1)] (any options and their arguments will have
        # been removed from largs).
        #
        # The while loop will usually consume 1 or more arguments per pass.
        # If it consumes 1 (eg. arg is an option that takes no arguments),
        # then after _process_arg() is done the situation is:
        #
        #   largs = subset of [arg0, ..., arg(i)]
        #   rargs = [arg(i+1), ..., arg(N-1)]
        #
        # If allow_interspersed_args is false, largs will always be
        # *empty* -- still a subset of [arg0, ..., arg(i-1)], but
        # not a very interesting subset!

    def _match_long_opt(self, opt, explicit_value, state):
        if opt not in self._long_opt:
            # Offer long options that share this prefix as suggestions.
            possibilities = [word for word in self._long_opt
                             if word.startswith(opt)]
            raise NoSuchOption(opt, possibilities=possibilities)

        option = self._long_opt[opt]
        if option.takes_value:
            # At this point it's safe to modify rargs by injecting the
            # explicit value, because no exception is raised in this
            # branch.  This means that the inserted value will be fully
            # consumed.
            if explicit_value is not None:
                state.rargs.insert(0, explicit_value)

            nargs = option.nargs
            if len(state.rargs) < nargs:
                _error_opt_args(nargs, opt)
            elif nargs == 1:
                value = state.rargs.pop(0)
            else:
                value = tuple(state.rargs[:nargs])
                del state.rargs[:nargs]

        elif explicit_value is not None:
            raise BadOptionUsage('%s option does not take a value' % opt)

        else:
            value = None

        option.process(value, state)

    def _match_short_opt(self, arg, state):
        stop = False
        i = 1
        prefix = arg[0]
        unknown_options = []
        for ch in arg[1:]:
            opt = normalize_opt(prefix + ch, self.ctx)
            option = self._short_opt.get(opt)
            i += 1

            if not option:
                if self.ignore_unknown_options:
                    unknown_options.append(ch)
                    continue
                raise NoSuchOption(opt)
            if option.takes_value:
                # Any characters left in arg?  Pretend they're the
                # next arg, and stop consuming characters of arg.
                if i < len(arg):
                    state.rargs.insert(0, arg[i:])
                    stop = True

                nargs = option.nargs
                if len(state.rargs) < nargs:
                    _error_opt_args(nargs, opt)
                elif nargs == 1:
                    value = state.rargs.pop(0)
                else:
                    value = tuple(state.rargs[:nargs])
                    del state.rargs[:nargs]

            else:
                value = None

            option.process(value, state)

            if stop:
                break

        # If we got any unknown options we re-combinate the string of the
        # remaining options and re-attach the prefix, then report that
        # to the state as new larg.  This way there is basic combinatorics
        # that can be achieved while still ignoring unknown arguments.
        if self.ignore_unknown_options and unknown_options:
            state.largs.append(prefix + ''.join(unknown_options))

    def _process_opts(self, arg, state):
        explicit_value = None
        # Long option handling happens in two parts.  The first part is
        # supporting explicitly attached values.  In any case, we will try
        # to long match the option first.
        if '=' in arg:
            long_opt, explicit_value = arg.split('=', 1)
        else:
            long_opt = arg
        norm_long_opt = normalize_opt(long_opt, self.ctx)

        # At this point we will match the (assumed) long option through
        # the long option matching code.  Note that this allows options
        # like "-foo" to be matched as long options.
        try:
            self._match_long_opt(norm_long_opt, explicit_value, state)
        except NoSuchOption:
            # At this point the long option matching failed, and we need
            # to try with short options.  However there is a special rule
            # which says, that if we have a two character options prefix
            # (applies to "--foo" for instance), we do not dispatch to the
            # short option code and will instead raise the no option
            # error.
            if arg[:2] not in self._opt_prefixes:
                return self._match_short_opt(arg, state)
            if not self.ignore_unknown_options:
                raise
            state.largs.append(arg)
from threading import local


# Thread-local storage so each thread maintains its own stack of active
# click contexts.
_local = local()


def get_current_context(silent=False):
    """Returns the current click context.  This can be used as a way to
    access the current context object from anywhere.  This is a more implicit
    alternative to the :func:`pass_context` decorator.  This function is
    primarily useful for helpers such as :func:`echo` which might be
    interested in changing its behavior based on the current context.

    To push the current context, :meth:`Context.scope` can be used.

    .. versionadded:: 5.0

    :param silent: if set to `True` the return value is `None` if no context
                   is available.  The default behavior is to raise a
                   :exc:`RuntimeError`.
    """
    try:
        # getattr raises AttributeError when no stack was ever pushed on
        # this thread; [-1] raises IndexError when the stack is empty.
        return getattr(_local, 'stack')[-1]
    except (AttributeError, IndexError):
        if not silent:
            raise RuntimeError('There is no active click context.')


def push_context(ctx):
    """Pushes a new context to the current stack."""
    # setdefault on __dict__ lazily creates the per-thread stack.
    _local.__dict__.setdefault('stack', []).append(ctx)


def pop_context():
    """Removes the top level from the stack."""
    _local.stack.pop()


def resolve_color_default(color=None):
    """Internal helper to get the default value of the color flag.  If a
    value is passed it's returned unchanged, otherwise it's looked up from
    the current context.  Returns `None` when no context is active.
    """
    if color is not None:
        return color
    ctx = get_current_context(silent=True)
    if ctx is not None:
        return ctx.color
from contextlib import contextmanager
from .termui import get_terminal_size
from .parser import split_opt
from ._compat import term_len


# Can force a width.  This is used by the test system
FORCED_WIDTH = None


def measure_table(rows):
    # Returns the maximum display width of each column across all rows,
    # as a tuple ordered by column index.
    widths = {}
    for row in rows:
        for idx, col in enumerate(row):
            widths[idx] = max(widths.get(idx, 0), term_len(col))
    return tuple(y for x, y in sorted(widths.items()))


def iter_rows(rows, col_count):
    # Yields each row padded with empty strings up to `col_count` columns.
    for row in rows:
        row = tuple(row)
        yield row + ('',) * (col_count - len(row))


def wrap_text(text, width=78, initial_indent='', subsequent_indent='',
              preserve_paragraphs=False):
    """A helper function that intelligently wraps text.  By default, it
    assumes that it operates on a single paragraph of text but if the
    `preserve_paragraphs` parameter is provided it will intelligently
    handle paragraphs (defined by two empty lines).

    If paragraphs are handled, a paragraph can be prefixed with an empty
    line containing the ``\\b`` character (``\\x08``) to indicate that
    no rewrapping should happen in that block.

    :param text: the text that should be rewrapped.
    :param width: the maximum width for the text.
    :param initial_indent: the initial indent that should be placed on the
                           first line as a string.
    :param subsequent_indent: the indent string that should be placed on
                              each consecutive line.
    :param preserve_paragraphs: if this flag is set then the wrapping will
                                intelligently handle paragraphs.
    """
    from ._textwrap import TextWrapper
    text = text.expandtabs()
    wrapper = TextWrapper(width, initial_indent=initial_indent,
                          subsequent_indent=subsequent_indent,
                          replace_whitespace=False)
    if not preserve_paragraphs:
        return wrapper.fill(text)

    # p collects (indent, is_raw, text) triples, one per paragraph.
    p = []
    buf = []
    indent = None

    def _flush_par():
        # Turn the buffered lines into one paragraph entry; a leading
        # '\b' line marks the paragraph as "raw" (no rewrapping).
        if not buf:
            return
        if buf[0].strip() == '\b':
            p.append((indent or 0, True, '\n'.join(buf[1:])))
        else:
            p.append((indent or 0, False, ' '.join(buf)))
        del buf[:]

    for line in text.splitlines():
        if not line:
            _flush_par()
            indent = None
        else:
            if indent is None:
                # First line of a paragraph determines its indent.
                orig_len = term_len(line)
                line = line.lstrip()
                indent = orig_len - term_len(line)
            buf.append(line)
    _flush_par()

    rv = []
    for indent, raw, text in p:
        with wrapper.extra_indent(' ' * indent):
            if raw:
                rv.append(wrapper.indent_only(text))
            else:
                rv.append(wrapper.fill(text))

    return '\n\n'.join(rv)


class HelpFormatter(object):
    """This class helps with formatting text-based help pages.  It's
    usually just needed for very special internal cases, but it's also
    exposed so that developers can write their own fancy outputs.

    At present, it always writes into memory.

    :param indent_increment: the additional increment for each level.
    :param width: the width for the text.  This defaults to the terminal
                  width clamped to a maximum of 78.
    """

    def __init__(self, indent_increment=2, width=None, max_width=None):
        self.indent_increment = indent_increment
        if max_width is None:
            max_width = 80
        if width is None:
            width = FORCED_WIDTH
            if width is None:
                # Clamp to terminal width (minus a margin) but never
                # go below 50 columns.
                width = max(min(get_terminal_size()[0], max_width) - 2, 50)
        self.width = width
        self.current_indent = 0
        self.buffer = []

    def write(self, string):
        """Writes a unicode string into the internal buffer."""
        self.buffer.append(string)

    def indent(self):
        """Increases the indentation."""
        self.current_indent += self.indent_increment

    def dedent(self):
        """Decreases the indentation."""
        self.current_indent -= self.indent_increment

    def write_usage(self, prog, args='', prefix='Usage: '):
        """Writes a usage line into the buffer.

        :param prog: the program name.
        :param args: whitespace separated list of arguments.
        :param prefix: the prefix for the first line.
        """
        usage_prefix = '%*s%s ' % (self.current_indent, prefix, prog)
        text_width = self.width - self.current_indent

        if text_width >= (term_len(usage_prefix) + 20):
            # The arguments will fit to the right of the prefix.
            indent = ' ' * term_len(usage_prefix)
            self.write(wrap_text(args, text_width,
                                 initial_indent=usage_prefix,
                                 subsequent_indent=indent))
        else:
            # The prefix is too long, put the arguments on the next line.
            self.write(usage_prefix)
            self.write('\n')
            indent = ' ' * (max(self.current_indent, term_len(prefix)) + 4)
            self.write(wrap_text(args, text_width,
                                 initial_indent=indent,
                                 subsequent_indent=indent))

        self.write('\n')

    def write_heading(self, heading):
        """Writes a heading into the buffer."""
        self.write('%*s%s:\n' % (self.current_indent, '', heading))

    def write_paragraph(self):
        """Writes a paragraph into the buffer."""
        if self.buffer:
            self.write('\n')

    def write_text(self, text):
        """Writes re-indented text into the buffer.  This rewraps and
        preserves paragraphs.
        """
        text_width = max(self.width - self.current_indent, 11)
        indent = ' ' * self.current_indent
        self.write(wrap_text(text, text_width,
                             initial_indent=indent,
                             subsequent_indent=indent,
                             preserve_paragraphs=True))
        self.write('\n')

    def write_dl(self, rows, col_max=30, col_spacing=2):
        """Writes a definition list into the buffer.  This is how options
        and commands are usually formatted.

        :param rows: a list of two item tuples for the terms and values.
        :param col_max: the maximum width of the first column.
        :param col_spacing: the number of spaces between the first and
                            second column.
        """
        rows = list(rows)
        widths = measure_table(rows)
        if len(widths) != 2:
            raise TypeError('Expected two columns for definition list')

        first_col = min(widths[0], col_max) + col_spacing

        for first, second in iter_rows(rows, len(widths)):
            self.write('%*s%s' % (self.current_indent, '', first))
            if not second:
                self.write('\n')
                continue
            if term_len(first) <= first_col - col_spacing:
                # Term fits in the first column: pad to the second column.
                self.write(' ' * (first_col - term_len(first)))
            else:
                # Term is too wide: start the definition on the next line.
                self.write('\n')
                self.write(' ' * (first_col + self.current_indent))

            text_width = max(self.width - first_col - 2, 10)
            lines = iter(wrap_text(second, text_width).splitlines())
            # NOTE(review): `lines` is an iterator, so this truthiness test
            # is always True and the `else` branch below looks unreachable
            # — behavior kept as-is; confirm before changing.
            if lines:
                self.write(next(lines) + '\n')
                for line in lines:
                    self.write('%*s%s\n' % (
                        first_col + self.current_indent, '', line))
            else:
                self.write('\n')

    @contextmanager
    def section(self, name):
        """Helpful context manager that writes a paragraph, a heading,
        and the indents.

        :param name: the section name that is written as heading.
        """
        self.write_paragraph()
        self.write_heading(name)
        self.indent()
        try:
            yield
        finally:
            self.dedent()

    @contextmanager
    def indentation(self):
        """A context manager that increases the indentation."""
        self.indent()
        try:
            yield
        finally:
            self.dedent()

    def getvalue(self):
        """Returns the buffer contents."""
        return ''.join(self.buffer)


def join_options(options):
    """Given a list of option strings this joins them in the most appropriate
    way and returns them in the form ``(formatted_string,
    any_prefix_is_slash)`` where the second item in the tuple is a flag that
    indicates if any of the option prefixes was a slash.
    """
    rv = []
    any_prefix_is_slash = False
    for opt in options:
        prefix = split_opt(opt)[0]
        if prefix == '/':
            any_prefix_is_slash = True
        # Sort by prefix length so short options come before long ones.
        rv.append((len(prefix), opt))

    rv.sort(key=lambda x: x[0])

    rv = ', '.join(x[1] for x in rv)
    return rv, any_prefix_is_slash
from ._compat import PY2, filename_to_ui, get_text_stderr
from .utils import echo


class ClickException(Exception):
    """An exception that Click can handle and show to the user."""

    #: The exit code for this exception
    exit_code = 1

    def __init__(self, message):
        if PY2:
            # On Python 2 Exception messages must be byte strings.
            if message is not None:
                message = message.encode('utf-8')
        Exception.__init__(self, message)
        self.message = message

    def format_message(self):
        return self.message

    def show(self, file=None):
        # Print the formatted error to stderr (or the given file).
        if file is None:
            file = get_text_stderr()
        echo('Error: %s' % self.format_message(), file=file)


class UsageError(ClickException):
    """An internal exception that signals a usage error.  This typically
    aborts any further handling.

    :param message: the error message to display.
    :param ctx: optionally the context that caused this error.  Click will
                fill in the context automatically in some situations.
    """
    exit_code = 2

    def __init__(self, message, ctx=None):
        ClickException.__init__(self, message)
        self.ctx = ctx

    def show(self, file=None):
        if file is None:
            file = get_text_stderr()
        color = None
        if self.ctx is not None:
            color = self.ctx.color
            # Prepend the usage line so the user sees correct invocation.
            echo(self.ctx.get_usage() + '\n', file=file, color=color)
        echo('Error: %s' % self.format_message(), file=file, color=color)


class BadParameter(UsageError):
    """An exception that formats out a standardized error message for a
    bad parameter.  This is useful when thrown from a callback or type as
    Click will attach contextual information to it (for instance, which
    parameter it is).

    .. versionadded:: 2.0

    :param param: the parameter object that caused this error.  This can
                  be left out, and Click will attach this info itself
                  if possible.
    :param param_hint: a string that shows up as parameter name.  This
                       can be used as alternative to `param` in cases
                       where custom validation should happen.  If it is
                       a string it's used as such, if it's a list then
                       each item is quoted and separated.
    """

    def __init__(self, message, ctx=None, param=None,
                 param_hint=None):
        UsageError.__init__(self, message, ctx)
        self.param = param
        self.param_hint = param_hint

    def format_message(self):
        if self.param_hint is not None:
            param_hint = self.param_hint
        elif self.param is not None:
            param_hint = self.param.opts or [self.param.human_readable_name]
        else:
            return 'Invalid value: %s' % self.message
        if isinstance(param_hint, (tuple, list)):
            param_hint = ' / '.join('"%s"' % x for x in param_hint)
        return 'Invalid value for %s: %s' % (param_hint, self.message)


class MissingParameter(BadParameter):
    """Raised if click required an option or argument but it was not
    provided when invoking the script.

    .. versionadded:: 4.0

    :param param_type: a string that indicates the type of the parameter.
                       The default is to inherit the parameter type from
                       the given `param`.  Valid values are ``'parameter'``,
                       ``'option'`` or ``'argument'``.
    """

    def __init__(self, message=None, ctx=None, param=None,
                 param_hint=None, param_type=None):
        BadParameter.__init__(self, message, ctx, param, param_hint)
        self.param_type = param_type

    def format_message(self):
        if self.param_hint is not None:
            param_hint = self.param_hint
        elif self.param is not None:
            param_hint = self.param.opts or [self.param.human_readable_name]
        else:
            param_hint = None
        if isinstance(param_hint, (tuple, list)):
            param_hint = ' / '.join('"%s"' % x for x in param_hint)

        param_type = self.param_type
        if param_type is None and self.param is not None:
            param_type = self.param.param_type_name

        msg = self.message
        if self.param is not None:
            # Let the parameter type contribute extra detail (e.g. choices).
            msg_extra = self.param.type.get_missing_message(self.param)
            if msg_extra:
                if msg:
                    msg += '. ' + msg_extra
                else:
                    msg = msg_extra

        return 'Missing %s%s%s%s' % (
            param_type,
            param_hint and ' %s' % param_hint or '',
            msg and '. ' or '.',
            msg or '',
        )


class NoSuchOption(UsageError):
    """Raised if click attempted to handle an option that does not
    exist.

    .. versionadded:: 4.0
    """

    def __init__(self, option_name, message=None, possibilities=None,
                 ctx=None):
        if message is None:
            message = 'no such option: %s' % option_name
        UsageError.__init__(self, message, ctx)
        self.option_name = option_name
        self.possibilities = possibilities

    def format_message(self):
        bits = [self.message]
        if self.possibilities:
            if len(self.possibilities) == 1:
                bits.append('Did you mean %s?' % self.possibilities[0])
            else:
                possibilities = sorted(self.possibilities)
                bits.append('(Possible options: %s)'
                            % ', '.join(possibilities))
        return '  '.join(bits)


class BadOptionUsage(UsageError):
    """Raised if an option is generally supplied but the use of the option
    was incorrect.  This is for instance raised if the number of arguments
    for an option is not correct.

    .. versionadded:: 4.0
    """

    def __init__(self, message, ctx=None):
        UsageError.__init__(self, message, ctx)


class BadArgumentUsage(UsageError):
    """Raised if an argument is generally supplied but the use of the argument
    was incorrect.  This is for instance raised if the number of values
    for an argument is not correct.

    .. versionadded:: 6.0
    """

    def __init__(self, message, ctx=None):
        UsageError.__init__(self, message, ctx)


class FileError(ClickException):
    """Raised if a file cannot be opened."""

    def __init__(self, filename, hint=None):
        ui_filename = filename_to_ui(filename)
        if hint is None:
            hint = 'unknown error'
        ClickException.__init__(self, hint)
        self.ui_filename = ui_filename
        self.filename = filename

    def format_message(self):
        return 'Could not open file %s: %s' % (self.ui_filename, self.message)


class Abort(RuntimeError):
    """An internal signalling exception that signals Click to abort."""
import sys
import inspect

from functools import update_wrapper

from ._compat import iteritems
from ._unicodefun import _check_for_unicode_literals
from .utils import echo
from .globals import get_current_context


def pass_context(f):
    """Marks a callback as wanting to receive the current context
    object as first argument.
    """
    def new_func(*args, **kwargs):
        return f(get_current_context(), *args, **kwargs)
    return update_wrapper(new_func, f)


def pass_obj(f):
    """Similar to :func:`pass_context`, but only pass the object on the
    context onwards (:attr:`Context.obj`).  This is useful if that object
    represents the state of a nested system.
    """
    def new_func(*args, **kwargs):
        return f(get_current_context().obj, *args, **kwargs)
    return update_wrapper(new_func, f)


def make_pass_decorator(object_type, ensure=False):
    """Given an object type this creates a decorator that will work
    similar to :func:`pass_obj` but instead of passing the object of the
    current context, it will find the innermost context of type
    :func:`object_type`.

    This generates a decorator that works roughly like this::

        from functools import update_wrapper

        def decorator(f):
            @pass_context
            def new_func(ctx, *args, **kwargs):
                obj = ctx.find_object(object_type)
                return ctx.invoke(f, obj, *args, **kwargs)
            return update_wrapper(new_func, f)
        return decorator

    :param object_type: the type of the object to pass.
    :param ensure: if set to `True`, a new object will be created and
                   remembered on the context if it's not there yet.
    """
    def decorator(f):
        def new_func(*args, **kwargs):
            ctx = get_current_context()
            if ensure:
                obj = ctx.ensure_object(object_type)
            else:
                obj = ctx.find_object(object_type)
            if obj is None:
                raise RuntimeError('Managed to invoke callback without a '
                                   'context object of type %r existing'
                                   % object_type.__name__)
            return ctx.invoke(f, obj, *args[1:], **kwargs)
        return update_wrapper(new_func, f)
    return decorator


def _make_command(f, name, attrs, cls):
    # Build a command instance of `cls` from the decorated function,
    # collecting parameters previously memoized by option()/argument().
    if isinstance(f, Command):
        raise TypeError('Attempted to convert a callback into a '
                        'command twice.')
    try:
        params = f.__click_params__
        # Decorators are applied bottom-up, so reverse to get the
        # declaration order.
        params.reverse()
        del f.__click_params__
    except AttributeError:
        params = []
    help = attrs.get('help')
    if help is None:
        help = inspect.getdoc(f)
        if isinstance(help, bytes):
            help = help.decode('utf-8')
    else:
        help = inspect.cleandoc(help)
    attrs['help'] = help
    _check_for_unicode_literals()
    return cls(name=name or f.__name__.lower(),
               callback=f, params=params, **attrs)


def command(name=None, cls=None, **attrs):
    """Creates a new :class:`Command` and uses the decorated function as
    callback.  This will also automatically attach all decorated
    :func:`option`\\s and :func:`argument`\\s as parameters to the command.

    The name of the command defaults to the name of the function.  If you
    want to change that, you can pass the intended name as the first
    argument.

    All keyword arguments are forwarded to the underlying command class.

    Once decorated the function turns into a :class:`Command` instance
    that can be invoked as a command line utility or be attached to a
    command :class:`Group`.

    :param name: the name of the command.  This defaults to the function
                 name.
    :param cls: the command class to instantiate.  This defaults to
                :class:`Command`.
    """
    if cls is None:
        cls = Command
    def decorator(f):
        cmd = _make_command(f, name, attrs, cls)
        cmd.__doc__ = f.__doc__
        return cmd
    return decorator


def group(name=None, **attrs):
    """Creates a new :class:`Group` with a function as callback.  This
    works otherwise the same as :func:`command` just that the `cls`
    parameter is set to :class:`Group`.
    """
    attrs.setdefault('cls', Group)
    return command(name, **attrs)


def _param_memo(f, param):
    # Attach the parameter either directly to an existing Command or
    # memoize it on the function for _make_command to pick up later.
    if isinstance(f, Command):
        f.params.append(param)
    else:
        if not hasattr(f, '__click_params__'):
            f.__click_params__ = []
        f.__click_params__.append(param)


def argument(*param_decls, **attrs):
    """Attaches an argument to the command.  All positional arguments are
    passed as parameter declarations to :class:`Argument`; all keyword
    arguments are forwarded unchanged (except ``cls``).
    This is equivalent to creating an :class:`Argument` instance manually
    and attaching it to the :attr:`Command.params` list.

    :param cls: the argument class to instantiate.  This defaults to
                :class:`Argument`.
    """
    def decorator(f):
        ArgumentClass = attrs.pop('cls', Argument)
        _param_memo(f, ArgumentClass(param_decls, **attrs))
        return f
    return decorator


def option(*param_decls, **attrs):
    """Attaches an option to the command.  All positional arguments are
    passed as parameter declarations to :class:`Option`; all keyword
    arguments are forwarded unchanged (except ``cls``).
    This is equivalent to creating an :class:`Option` instance manually
    and attaching it to the :attr:`Command.params` list.

    :param cls: the option class to instantiate.  This defaults to
                :class:`Option`.
    """
    def decorator(f):
        if 'help' in attrs:
            attrs['help'] = inspect.cleandoc(attrs['help'])
        OptionClass = attrs.pop('cls', Option)
        _param_memo(f, OptionClass(param_decls, **attrs))
        return f
    return decorator


def confirmation_option(*param_decls, **attrs):
    """Shortcut for confirmation prompts that can be ignored by passing
    ``--yes`` as parameter.

    This is equivalent to decorating a function with :func:`option` with
    the following parameters::

        def callback(ctx, param, value):
            if not value:
                ctx.abort()

        @click.command()
        @click.option('--yes', is_flag=True, callback=callback,
                      expose_value=False, prompt='Do you want to continue?')
        def dropdb():
            pass
    """
    def decorator(f):
        def callback(ctx, param, value):
            if not value:
                ctx.abort()
        attrs.setdefault('is_flag', True)
        attrs.setdefault('callback', callback)
        attrs.setdefault('expose_value', False)
        attrs.setdefault('prompt', 'Do you want to continue?')
        attrs.setdefault('help', 'Confirm the action without prompting.')
        return option(*(param_decls or ('--yes',)), **attrs)(f)
    return decorator


def password_option(*param_decls, **attrs):
    """Shortcut for password prompts.

    This is equivalent to decorating a function with :func:`option` with
    the following parameters::

        @click.command()
        @click.option('--password', prompt=True, confirmation_prompt=True,
                      hide_input=True)
        def changeadmin(password):
            pass
    """
    def decorator(f):
        attrs.setdefault('prompt', True)
        attrs.setdefault('confirmation_prompt', True)
        attrs.setdefault('hide_input', True)
        return option(*(param_decls or ('--password',)), **attrs)(f)
    return decorator


def version_option(version=None, *param_decls, **attrs):
    """Adds a ``--version`` option which immediately ends the program
    printing out the version number.  This is implemented as an eager
    option that prints the version and exits the program in the callback.

    :param version: the version number to show.  If not provided Click
                    attempts an auto discovery via setuptools.
    :param prog_name: the name of the program (defaults to autodetection)
    :param message: custom message to show instead of the default
                    (``'%(prog)s, version %(version)s'``)
    :param others: everything else is forwarded to :func:`option`.
    """
    if version is None:
        # Remember the calling module for setuptools-based auto discovery.
        module = sys._getframe(1).f_globals.get('__name__')
    def decorator(f):
        prog_name = attrs.pop('prog_name', None)
        message = attrs.pop('message', '%(prog)s, version %(version)s')

        def callback(ctx, param, value):
            if not value or ctx.resilient_parsing:
                return
            prog = prog_name
            if prog is None:
                prog = ctx.find_root().info_name
            ver = version
            if ver is None:
                try:
                    import pkg_resources
                except ImportError:
                    pass
                else:
                    # Look up the installed distribution that provides a
                    # console script pointing at the calling module.
                    for dist in pkg_resources.working_set:
                        scripts = dist.get_entry_map().get('console_scripts') or {}
                        for script_name, entry_point in iteritems(scripts):
                            if entry_point.module_name == module:
                                ver = dist.version
                                break
                if ver is None:
                    raise RuntimeError('Could not determine version')
            echo(message % {
                'prog': prog,
                'version': ver,
            }, color=ctx.color)
            ctx.exit()

        attrs.setdefault('is_flag', True)
        attrs.setdefault('expose_value', False)
        attrs.setdefault('is_eager', True)
        attrs.setdefault('help', 'Show the version and exit.')
        attrs['callback'] = callback
        return option(*(param_decls or ('--version',)), **attrs)(f)
    return decorator


def help_option(*param_decls, **attrs):
    """Adds a ``--help`` option which immediately ends the program
    printing out the help page.  This is usually unnecessary to add as
    this is added by default to all commands unless suppressed.

    Like :func:`version_option`, this is implemented as eager option that
    prints in the callback and exits.

    All arguments are forwarded to :func:`option`.
    """
    def decorator(f):
        def callback(ctx, param, value):
            if value and not ctx.resilient_parsing:
                echo(ctx.get_help(), color=ctx.color)
                ctx.exit()
        attrs.setdefault('is_flag', True)
        attrs.setdefault('expose_value', False)
        attrs.setdefault('help', 'Show this message and exit.')
        attrs.setdefault('is_eager', True)
        attrs['callback'] = callback
        return option(*(param_decls or ('--help',)), **attrs)(f)
    return decorator


# Circular dependencies between core and decorators
from .core import Command, Group, Argument, Option
import errno
import os
import sys
from contextlib import contextmanager
from itertools import repeat
from functools import update_wrapper

from .types import convert_type, IntRange, BOOL
from .utils import make_str, make_default_short_help, echo, get_os_args
from .exceptions import ClickException, UsageError, BadParameter, Abort, \
     MissingParameter
from .termui import prompt, confirm
from .formatting import HelpFormatter, join_options
from .parser import OptionParser, split_opt
from .globals import push_context, pop_context
from ._compat import PY2, isidentifier, iteritems
from ._unicodefun import _check_for_unicode_literals, _verify_python3_env


# Sentinel used where ``None`` is a legitimate user-supplied value.
_missing = object()


SUBCOMMAND_METAVAR = 'COMMAND [ARGS]...'
SUBCOMMANDS_METAVAR = 'COMMAND1 [ARGS]... [COMMAND2 [ARGS]...]...'


def _bashcomplete(cmd, prog_name, complete_var=None):
    """Internal handler for the bash completion support."""
    if complete_var is None:
        complete_var = '_%s_COMPLETE' % (prog_name.replace('-', '_')).upper()
    complete_instr = os.environ.get(complete_var)
    if not complete_instr:
        return

    from ._bashcomplete import bashcomplete
    if bashcomplete(cmd, prog_name, complete_var, complete_instr):
        sys.exit(1)


def _check_multicommand(base_command, cmd_name, cmd, register=False):
    # A multi command in chain mode must not contain another multi command;
    # raise with a hint tailored to registration vs. dispatch time.
    if not base_command.chain or not isinstance(cmd, MultiCommand):
        return
    if register:
        hint = 'It is not possible to add multi commands as children to ' \
               'another multi command that is in chain mode'
    else:
        hint = 'Found a multi command as subcommand to a multi command ' \
               'that is in chain mode.  This is not supported'
    raise RuntimeError('%s.  Command "%s" is set to chain and "%s" was '
                       'added as subcommand but it in itself is a '
                       'multi command.  ("%s" is a %s within a chained '
                       '%s named "%s").  This restriction was supposed to '
                       'be lifted in 6.0 but the fix was flawed.  This '
                       'will be fixed in Click 7.0' % (
                           hint, base_command.name, cmd_name,
                           cmd_name, cmd.__class__.__name__,
                           base_command.__class__.__name__,
                           base_command.name))


def batch(iterable, batch_size):
    # zip() over batch_size references to the *same* iterator slices the
    # iterable into tuples of batch_size items (any remainder is dropped).
    return list(zip(*repeat(iter(iterable), batch_size)))


def invoke_param_callback(callback, ctx, param, value):
    # Legacy (pre-2.0) callbacks accepted only (ctx, value); detect them
    # by argument count and warn before invoking with the old signature.
    code = getattr(callback, '__code__', None)
    args = getattr(code, 'co_argcount', 3)

    if args < 3:
        # This will become a warning in Click 3.0:
        from warnings import warn
        warn(Warning('Invoked legacy parameter callback "%s".  The new '
                     'signature for such callbacks starting with '
                     'click 2.0 is (ctx, param, value).'
                     % callback), stacklevel=3)
        return callback(ctx, value)
    return callback(ctx, param, value)


@contextmanager
def augment_usage_errors(ctx, param=None):
    """Context manager that attaches extra information to exceptions that
    fly.
    """
    try:
        yield
    except BadParameter as e:
        if e.ctx is None:
            e.ctx = ctx
        if param is not None and e.param is None:
            e.param = param
        raise
    except UsageError as e:
        if e.ctx is None:
            e.ctx = ctx
        raise


def iter_params_for_processing(invocation_order, declaration_order):
    """Given a sequence of parameters in the order as should be considered
    for processing and an iterable of parameters that exist, this returns
    a list in the correct order as they should be processed.
    """
    def sort_key(item):
        try:
            idx = invocation_order.index(item)
        except ValueError:
            # Not present on the command line: sort after everything else.
            idx = float('inf')
        # Eager parameters first, then command-line position.
        return (not item.is_eager, idx)

    return sorted(declaration_order, key=sort_key)


class Context(object):
    """The context is a special internal object that holds state relevant
    for the script execution at every single level.  It's normally invisible
    to commands unless they opt-in to getting access to it.

    The context is useful as it can pass internal objects around and can
    control special execution features such as reading data from
    environment variables.

    A context can be used as context manager in which case it will call
    :meth:`close` on teardown.
versionadded:: 2.0Added the `resilient_parsing`, `help_option_names`,`token_normalize_func` parameters... versionadded:: 3.0Added the `allow_extra_args` and `allow_interspersed_args`parameters... versionadded:: 4.0Added the `color`, `ignore_unknown_options`, and`max_content_width` parameters.:param command: the command class for this context.:param parent: the parent context.:param info_name: the info name for this invocation. Generally thisis the most descriptive name for the script orcommand. For the toplevel script it is usuallythe name of the script, for commands below it it'sthe name of the script.:param obj: an arbitrary object of user data.:param auto_envvar_prefix: the prefix to use for automatic environmentvariables. If this is `None` then readingfrom environment variables is disabled. Thisdoes not affect manually set environmentvariables which are always read.:param default_map: a dictionary (like object) with default valuesfor parameters.:param terminal_width: the width of the terminal. The default isinherit from parent context. If no contextdefines the terminal width then autodetection will be applied.:param max_content_width: the maximum width for content rendered byClick (this currently only affects helppages). This defaults to 80 characters ifnot overridden. In other words: even if theterminal is larger than that, Click will notformat things wider than 80 characters bydefault. In addition to that, formatters mightadd some safety mapping on the right.:param resilient_parsing: if this flag is enabled then Click willparse without any interactivity or callbackinvocation. This is useful for implementingthings such as completion support.:param allow_extra_args: if this is set to `True` then extra argumentsat the end will not raise an error and will bekept on the context. The default is to inheritfrom the command.:param allow_interspersed_args: if this is set to `False` then optionsand arguments cannot be mixed. 
Thedefault is to inherit from the command.:param ignore_unknown_options: instructs click to ignore options it doesnot know and keeps them for laterprocessing.:param help_option_names: optionally a list of strings that define howthe default help parameter is named. Thedefault is ``['--help']``.:param token_normalize_func: an optional function that is used tonormalize tokens (options, choices,etc.). This for instance can be used toimplement case insensitive behavior.:param color: controls if the terminal supports ANSI colors or not. Thedefault is autodetection. This is only needed if ANSIcodes are used in texts that Click prints which is bydefault not the case. This for instance would affecthelp output."""def __init__(self, command, parent=None, info_name=None, obj=None,auto_envvar_prefix=None, default_map=None,terminal_width=None, max_content_width=None,resilient_parsing=False, allow_extra_args=None,allow_interspersed_args=None,ignore_unknown_options=None, help_option_names=None,token_normalize_func=None, color=None):#: the parent context or `None` if none exists.self.parent = parent#: the :class:`Command` for this context.self.command = command#: the descriptive information nameself.info_name = info_name#: the parsed parameters except if the value is hidden in which#: case it's not remembered.self.params = {}#: the leftover arguments.self.args = []#: protected arguments. These are arguments that are prepended#: to `args` when certain parsing scenarios are encountered but#: must be never propagated to another arguments. 
This is used#: to implement nested parsing.self.protected_args = []if obj is None and parent is not None:obj = parent.obj#: the user object stored.self.obj = objself._meta = getattr(parent, 'meta', {})#: A dictionary (-like object) with defaults for parameters.if default_map is None \and parent is not None \and parent.default_map is not None:default_map = parent.default_map.get(info_name)self.default_map = default_map#: This flag indicates if a subcommand is going to be executed. A#: group callback can use this information to figure out if it's#: being executed directly or because the execution flow passes#: onwards to a subcommand. By default it's None, but it can be#: the name of the subcommand to execute.#:#: If chaining is enabled this will be set to ``'*'`` in case#: any commands are executed. It is however not possible to#: figure out which ones. If you require this knowledge you#: should use a :func:`resultcallback`.self.invoked_subcommand = Noneif terminal_width is None and parent is not None:terminal_width = parent.terminal_width#: The width of the terminal (None is autodetection).self.terminal_width = terminal_widthif max_content_width is None and parent is not None:max_content_width = parent.max_content_width#: The maximum width of formatted content (None implies a sensible#: default which is 80 for most things).self.max_content_width = max_content_widthif allow_extra_args is None:allow_extra_args = command.allow_extra_args#: Indicates if the context allows extra args or if it should#: fail on parsing.#:#: .. versionadded:: 3.0self.allow_extra_args = allow_extra_argsif allow_interspersed_args is None:allow_interspersed_args = command.allow_interspersed_args#: Indicates if the context allows mixing of arguments and#: options or not.#:#: .. 
versionadded:: 3.0self.allow_interspersed_args = allow_interspersed_argsif ignore_unknown_options is None:ignore_unknown_options = command.ignore_unknown_options#: Instructs click to ignore options that a command does not#: understand and will store it on the context for later#: processing. This is primarily useful for situations where you#: want to call into external programs. Generally this pattern is#: strongly discouraged because it's not possibly to losslessly#: forward all arguments.#:#: .. versionadded:: 4.0self.ignore_unknown_options = ignore_unknown_optionsif help_option_names is None:if parent is not None:help_option_names = parent.help_option_nameselse:help_option_names = ['--help']#: The names for the help options.self.help_option_names = help_option_namesif token_normalize_func is None and parent is not None:token_normalize_func = parent.token_normalize_func#: An optional normalization function for tokens. This is#: options, choices, commands etc.self.token_normalize_func = token_normalize_func#: Indicates if resilient parsing is enabled. 
In that case Click#: will do its best to not cause any failures.self.resilient_parsing = resilient_parsing# If there is no envvar prefix yet, but the parent has one and# the command on this level has a name, we can expand the envvar# prefix automatically.if auto_envvar_prefix is None:if parent is not None \and parent.auto_envvar_prefix is not None and \self.info_name is not None:auto_envvar_prefix = '%s_%s' % (parent.auto_envvar_prefix,self.info_name.upper())else:self.auto_envvar_prefix = auto_envvar_prefix.upper()self.auto_envvar_prefix = auto_envvar_prefixif color is None and parent is not None:color = parent.color#: Controls if styling output is wanted or not.self.color = colorself._close_callbacks = []self._depth = 0def __enter__(self):self._depth += 1push_context(self)return selfdef __exit__(self, exc_type, exc_value, tb):self._depth -= 1if self._depth == 0:self.close()pop_context()@contextmanagerdef scope(self, cleanup=True):"""This helper method can be used with the context object to promoteit to the current thread local (see :func:`get_current_context`).The default behavior of this is to invoke the cleanup functions whichcan be disabled by setting `cleanup` to `False`. The cleanupfunctions are typically used for things such as closing file handles.If the cleanup is intended the context object can also be directlyused as a context manager.Example usage::with ctx.scope():assert get_current_context() is ctxThis is equivalent::with ctx:assert get_current_context() is ctx.. versionadded:: 5.0:param cleanup: controls if the cleanup functions should be run ornot. The default is to run these functions. Insome situations the context only wants to betemporarily pushed in which case this can be disabled.Nested pushes automatically defer the cleanup."""if not cleanup:self._depth += 1try:with self as rv:yield rvfinally:if not cleanup:self._depth -= 1@propertydef meta(self):"""This is a dictionary which is shared with all the contextsthat are nested. 
It exists so that click utiltiies can store somestate here if they need to. It is however the responsibility ofthat code to manage this dictionary well.The keys are supposed to be unique dotted strings. For instancemodule paths are a good choice for it. What is stored in there isirrelevant for the operation of click. However what is important isthat code that places data here adheres to the general semantics ofthe system.Example usage::LANG_KEY = __name__ + '.lang'def set_language(value):ctx = get_current_context()ctx.meta[LANG_KEY] = valuedef get_language():return get_current_context().meta.get(LANG_KEY, 'en_US').. versionadded:: 5.0"""return self._metadef make_formatter(self):"""Creates the formatter for the help and usage output."""return HelpFormatter(width=self.terminal_width,max_width=self.max_content_width)def call_on_close(self, f):"""This decorator remembers a function as callback that should beexecuted when the context tears down. This is most useful to bindresource handling to the script execution. For instance, file objectsopened by the :class:`File` type will register their close callbackshere.:param f: the function to execute on teardown."""self._close_callbacks.append(f)return fdef close(self):"""Invokes all close callbacks."""for cb in self._close_callbacks:cb()self._close_callbacks = []@propertydef command_path(self):"""The computed command path. This is used for the ``usage``information on the help page. 
It's automatically created bycombining the info names of the chain of contexts to the root."""rv = ''if self.info_name is not None:rv = self.info_nameif self.parent is not None:rv = self.parent.command_path + ' ' + rvreturn rv.lstrip()def find_root(self):"""Finds the outermost context."""node = selfwhile node.parent is not None:node = node.parentreturn nodedef find_object(self, object_type):"""Finds the closest object of a given type."""node = selfwhile node is not None:if isinstance(node.obj, object_type):return node.objnode = node.parentdef ensure_object(self, object_type):"""Like :meth:`find_object` but sets the innermost object to anew instance of `object_type` if it does not exist."""rv = self.find_object(object_type)if rv is None:self.obj = rv = object_type()return rvdef lookup_default(self, name):"""Looks up the default for a parameter name. This by defaultlooks into the :attr:`default_map` if available."""if self.default_map is not None:rv = self.default_map.get(name)if callable(rv):rv = rv()return rvdef fail(self, message):"""Aborts the execution of the program with a specific errormessage.:param message: the error message to fail with."""raise UsageError(message, self)def abort(self):"""Aborts the script."""raise Abort()def exit(self, code=0):"""Exits the application with a given exit code."""sys.exit(code)def get_usage(self):"""Helper method to get formatted usage string for the currentcontext and command."""return self.command.get_usage(self)def get_help(self):"""Helper method to get formatted help page for the currentcontext and command."""return self.command.get_help(self)def invoke(*args, **kwargs):"""Invokes a command callback in exactly the way it expects. Thereare two ways to invoke this method:1. the first argument can be a callback and all other arguments andkeyword arguments are forwarded directly to the function.2. the first argument is a click command object. 
In that case allarguments are forwarded as well but proper click parameters(options and click arguments) must be keyword arguments and Clickwill fill in defaults.Note that before Click 3.2 keyword arguments were not properly filledin against the intention of this code and no context was created. Formore information about this change and why it was done in a bugfixrelease see :ref:`upgrade-to-3.2`."""self, callback = args[:2]ctx = self# It's also possible to invoke another command which might or# might not have a callback. In that case we also fill# in defaults and make a new context for this command.if isinstance(callback, Command):other_cmd = callbackcallback = other_cmd.callbackctx = Context(other_cmd, info_name=other_cmd.name, parent=self)if callback is None:raise TypeError('The given command does not have a ''callback that can be invoked.')for param in other_cmd.params:if param.name not in kwargs and param.expose_value:kwargs[param.name] = param.get_default(ctx)args = args[2:]with augment_usage_errors(self):with ctx:return callback(*args, **kwargs)def forward(*args, **kwargs):"""Similar to :meth:`invoke` but fills in default keywordarguments from the current context if the other command expectsit. 
This cannot invoke callbacks directly, only other commands."""self, cmd = args[:2]# It's also possible to invoke another command which might or# might not have a callback.if not isinstance(cmd, Command):raise TypeError('Callback is not a command.')for param in self.params:if param not in kwargs:kwargs[param] = self.params[param]return self.invoke(cmd, **kwargs)class BaseCommand(object):"""The base command implements the minimal API contract of commands.Most code will never use this as it does not implement a lot of usefulfunctionality but it can act as the direct subclass of alternativeparsing methods that do not depend on the Click parser.For instance, this can be used to bridge Click and other systems likeargparse or docopt.Because base commands do not implement a lot of the API that otherparts of Click take for granted, they are not supported for alloperations. For instance, they cannot be used with the decoratorsusually and they have no built-in callback system... versionchanged:: 2.0Added the `context_settings` parameter.:param name: the name of the command to use unless a group overrides it.:param context_settings: an optional dictionary with defaults that arepassed to the context object."""#: the default for the :attr:`Context.allow_extra_args` flag.allow_extra_args = False#: the default for the :attr:`Context.allow_interspersed_args` flag.allow_interspersed_args = True#: the default for the :attr:`Context.ignore_unknown_options` flag.ignore_unknown_options = Falsedef __init__(self, name, context_settings=None):#: the name the command thinks it has. Upon registering a command#: on a :class:`Group` the group will default the command name#: with this information. 
        #: You should instead use the
        #: :class:`Context`\'s :attr:`~Context.info_name` attribute.
        self.name = name

        if context_settings is None:
            context_settings = {}

        #: an optional dictionary with defaults passed to the context.
        self.context_settings = context_settings

    def get_usage(self, ctx):
        raise NotImplementedError('Base commands cannot get usage')

    def get_help(self, ctx):
        raise NotImplementedError('Base commands cannot get help')

    def make_context(self, info_name, args, parent=None, **extra):
        """This function when given an info name and arguments will kick
        off the parsing and create a new :class:`Context`.  It does not
        invoke the actual command callback though.

        :param info_name: the info name for this invokation.  Generally this
                          is the most descriptive name for the script or
                          command.  For the toplevel script it's usually
                          the name of the script, for commands below it it's
                          the name of the script.
        :param args: the arguments to parse as list of strings.
        :param parent: the parent context if available.
        :param extra: extra keyword arguments forwarded to the context
                      constructor.
        """
        # Explicit keyword arguments win over context_settings defaults.
        for key, value in iteritems(self.context_settings):
            if key not in extra:
                extra[key] = value
        ctx = Context(self, info_name=info_name, parent=parent, **extra)
        # cleanup=False: the context stays usable after parsing; callers
        # are responsible for closing it (usually via ``with ctx:``).
        with ctx.scope(cleanup=False):
            self.parse_args(ctx, args)
        return ctx

    def parse_args(self, ctx, args):
        """Given a context and a list of arguments this creates the parser
        and parses the arguments, then modifies the context as necessary.
        This is automatically invoked by :meth:`make_context`.
        """
        raise NotImplementedError('Base commands do not know how to parse '
                                  'arguments.')

    def invoke(self, ctx):
        """Given a context, this invokes the command.  The default
        implementation is raising a not implemented error.
        """
        raise NotImplementedError('Base commands are not invokable by default')

    def main(self, args=None, prog_name=None, complete_var=None,
             standalone_mode=True, **extra):
        """This is the way to invoke a script with all the bells and
        whistles as a command line application.  This will always terminate
        the application after a call.  If this is not wanted, ``SystemExit``
        needs to be caught.

        This method is also available by directly calling the instance of
        a :class:`Command`.

        .. versionadded:: 3.0
           Added the `standalone_mode` flag to control the standalone mode.

        :param args: the arguments that should be used for parsing.  If not
                     provided, ``sys.argv[1:]`` is used.
        :param prog_name: the program name that should be used.  By default
                          the program name is constructed by taking the file
                          name from ``sys.argv[0]``.
        :param complete_var: the environment variable that controls the
                             bash completion support.  The default is
                             ``"_<prog_name>_COMPLETE"`` with prog name in
                             uppercase.
        :param standalone_mode: the default behavior is to invoke the script
                                in standalone mode.  Click will then
                                handle exceptions and convert them into
                                error messages and the function will never
                                return but shut down the interpreter.  If
                                this is set to `False` they will be
                                propagated to the caller and the return
                                value of this function is the return value
                                of :meth:`invoke`.
        :param extra: extra keyword arguments are forwarded to the context
                      constructor.  See :class:`Context` for more information.
        """
        # If we are in Python 3, we will verify that the environment is
        # sane at this point or reject further execution to avoid a
        # broken script.
        if not PY2:
            _verify_python3_env()
        else:
            _check_for_unicode_literals()

        if args is None:
            args = get_os_args()
        else:
            args = list(args)

        if prog_name is None:
            prog_name = make_str(os.path.basename(
                sys.argv and sys.argv[0] or __file__))

        # Hook for the Bash completion.  This only activates if the Bash
        # completion is actually enabled, otherwise this is quite a fast
        # noop.
        _bashcomplete(self, prog_name, complete_var)

        try:
            try:
                with self.make_context(prog_name, args, **extra) as ctx:
                    rv = self.invoke(ctx)
                    if not standalone_mode:
                        return rv
                    ctx.exit()
            except (EOFError, KeyboardInterrupt):
                echo(file=sys.stderr)
                raise Abort()
            except ClickException as e:
                if not standalone_mode:
                    raise
                e.show()
                sys.exit(e.exit_code)
            except IOError as e:
                # A broken pipe (e.g. piping into ``head``) is not an error
                # worth reporting; anything else propagates.
                if e.errno == errno.EPIPE:
                    sys.exit(1)
                else:
                    raise
        except Abort:
            if not standalone_mode:
                raise
            echo('Aborted!', file=sys.stderr)
            sys.exit(1)

    def __call__(self, *args, **kwargs):
        """Alias for :meth:`main`."""
        return self.main(*args, **kwargs)


class Command(BaseCommand):
    """Commands are the basic building block of command line interfaces in
    Click.  A basic command handles command line parsing and might dispatch
    more parsing to commands nested below it.

    .. versionchanged:: 2.0
       Added the `context_settings` parameter.

    :param name: the name of the command to use unless a group overrides it.
    :param context_settings: an optional dictionary with defaults that are
                             passed to the context object.
    :param callback: the callback to invoke.  This is optional.
    :param params: the parameters to register with this command.  This can
                   be either :class:`Option` or :class:`Argument` objects.
    :param help: the help string to use for this command.
    :param epilog: like the help string but it's printed at the end of the
                   help page after everything else.
    :param short_help: the short help to use for this command.  This is
                       shown on the command listing of the parent command.
    :param add_help_option: by default each command registers a ``--help``
                            option.  This can be disabled by this parameter.
    """

    def __init__(self, name, context_settings=None, callback=None,
                 params=None, help=None, epilog=None, short_help=None,
                 options_metavar='[OPTIONS]', add_help_option=True):
        BaseCommand.__init__(self, name, context_settings)
        #: the callback to execute when the command fires.  This might be
This might be#: `None` in which case nothing happens.self.callback = callback#: the list of parameters for this command in the order they#: should show up in the help page and execute. Eager parameters#: will automatically be handled before non eager ones.self.params = params or []self.help = helpself.epilog = epilogself.options_metavar = options_metavarif short_help is None and help:short_help = make_default_short_help(help)self.short_help = short_helpself.add_help_option = add_help_optiondef get_usage(self, ctx):formatter = ctx.make_formatter()self.format_usage(ctx, formatter)return formatter.getvalue().rstrip('\n')def get_params(self, ctx):rv = self.paramshelp_option = self.get_help_option(ctx)if help_option is not None:rv = rv + [help_option]return rvdef format_usage(self, ctx, formatter):"""Writes the usage line into the formatter."""pieces = self.collect_usage_pieces(ctx)formatter.write_usage(ctx.command_path, ' '.join(pieces))def collect_usage_pieces(self, ctx):"""Returns all the pieces that go into the usage line and returnsit as a list of strings."""rv = [self.options_metavar]for param in self.get_params(ctx):rv.extend(param.get_usage_pieces(ctx))return rvdef get_help_option_names(self, ctx):"""Returns the names for the help option."""all_names = set(ctx.help_option_names)for param in self.params:all_names.difference_update(param.opts)all_names.difference_update(param.secondary_opts)return all_namesdef get_help_option(self, ctx):"""Returns the help option object."""help_options = self.get_help_option_names(ctx)if not help_options or not self.add_help_option:returndef show_help(ctx, param, value):if value and not ctx.resilient_parsing:echo(ctx.get_help(), color=ctx.color)ctx.exit()return Option(help_options, is_flag=True,is_eager=True, expose_value=False,callback=show_help,help='Show this message and exit.')def make_parser(self, ctx):"""Creates the underlying option parser for this command."""parser = OptionParser(ctx)parser.allow_interspersed_args = 
ctx.allow_interspersed_argsparser.ignore_unknown_options = ctx.ignore_unknown_optionsfor param in self.get_params(ctx):param.add_to_parser(parser, ctx)return parserdef get_help(self, ctx):"""Formats the help into a string and returns it. This creates aformatter and will call into the following formatting methods:"""formatter = ctx.make_formatter()self.format_help(ctx, formatter)return formatter.getvalue().rstrip('\n')def format_help(self, ctx, formatter):"""Writes the help into the formatter if it exists.This calls into the following methods:- :meth:`format_usage`- :meth:`format_help_text`- :meth:`format_options`- :meth:`format_epilog`"""self.format_usage(ctx, formatter)self.format_help_text(ctx, formatter)self.format_options(ctx, formatter)self.format_epilog(ctx, formatter)def format_help_text(self, ctx, formatter):"""Writes the help text to the formatter if it exists."""if self.help:formatter.write_paragraph()with formatter.indentation():formatter.write_text(self.help)def format_options(self, ctx, formatter):"""Writes all the options into the formatter if they exist."""opts = []for param in self.get_params(ctx):rv = param.get_help_record(ctx)if rv is not None:opts.append(rv)if opts:with formatter.section('Options'):formatter.write_dl(opts)def format_epilog(self, ctx, formatter):"""Writes the epilog into the formatter if it exists."""if self.epilog:formatter.write_paragraph()with formatter.indentation():formatter.write_text(self.epilog)def parse_args(self, ctx, args):parser = self.make_parser(ctx)opts, args, param_order = parser.parse_args(args=args)for param in iter_params_for_processing(param_order, self.get_params(ctx)):value, args = param.handle_parse_result(ctx, opts, args)if args and not ctx.allow_extra_args and not ctx.resilient_parsing:ctx.fail('Got unexpected extra argument%s (%s)'% (len(args) != 1 and 's' or '',' '.join(map(make_str, args))))ctx.args = argsreturn argsdef invoke(self, ctx):"""Given a context, this invokes the attached callback (if it 
        exists) in the right way.
        """
        if self.callback is not None:
            return ctx.invoke(self.callback, **ctx.params)


class MultiCommand(Command):
    """A multi command is the basic implementation of a command that
    dispatches to subcommands.  The most common version is the
    :class:`Group`.

    :param invoke_without_command: this controls how the multi command itself
                                   is invoked.  By default it's only invoked
                                   if a subcommand is provided.
    :param no_args_is_help: this controls what happens if no arguments are
                            provided.  This option is enabled by default if
                            `invoke_without_command` is disabled or disabled
                            if it's enabled.  If enabled this will add
                            ``--help`` as argument if no arguments are
                            passed.
    :param subcommand_metavar: the string that is used in the documentation
                               to indicate the subcommand place.
    :param chain: if this is set to `True` chaining of multiple subcommands
                  is enabled.  This restricts the form of commands in that
                  they cannot have optional arguments but it allows
                  multiple commands to be chained together.
    :param result_callback: the result callback to attach to this multi
                            command.
    """
    allow_extra_args = True
    allow_interspersed_args = False

    def __init__(self, name=None, invoke_without_command=False,
                 no_args_is_help=None, subcommand_metavar=None,
                 chain=False, result_callback=None, **attrs):
        Command.__init__(self, name, **attrs)
        if no_args_is_help is None:
            no_args_is_help = not invoke_without_command
        self.no_args_is_help = no_args_is_help
        self.invoke_without_command = invoke_without_command
        if subcommand_metavar is None:
            if chain:
                subcommand_metavar = SUBCOMMANDS_METAVAR
            else:
                subcommand_metavar = SUBCOMMAND_METAVAR
        self.subcommand_metavar = subcommand_metavar
        self.chain = chain
        #: The result callback that is stored.  This can be set or
        #: overridden with the :func:`resultcallback` decorator.
        self.result_callback = result_callback

        if self.chain:
            # Optional arguments are ambiguous in chain mode: the parser
            # could not tell them apart from the next subcommand's name.
            for param in self.params:
                if isinstance(param, Argument) and not param.required:
                    raise RuntimeError('Multi commands in chain mode cannot '
                                       'have optional arguments.')

    def collect_usage_pieces(self, ctx):
        rv = Command.collect_usage_pieces(self, ctx)
        rv.append(self.subcommand_metavar)
        return rv

    def format_options(self, ctx, formatter):
        Command.format_options(self, ctx, formatter)
        self.format_commands(ctx, formatter)

    def resultcallback(self, replace=False):
        """Adds a result callback to the chain command.  By default if a
        result callback is already registered this will chain them but
        this can be disabled with the `replace` parameter.  The result
        callback is invoked with the return value of the subcommand
        (or the list of return values from all subcommands if chaining
        is enabled) as well as the parameters as they would be passed
        to the main callback.

        Example::

            @click.group()
            @click.option('-i', '--input', default=23)
            def cli(input):
                return 42

            @cli.resultcallback()
            def process_result(result, input):
                return result + input

        .. versionadded:: 3.0

        :param replace: if set to `True` an already existing result
                        callback will be removed.
        """
        def decorator(f):
            old_callback = self.result_callback
            if old_callback is None or replace:
                self.result_callback = f
                return f
            def function(__value, *args, **kwargs):
                # Chain: feed the old callback's result into the new one.
                return f(old_callback(__value, *args, **kwargs),
                         *args, **kwargs)
            self.result_callback = rv = update_wrapper(function, f)
            return rv
        return decorator

    def format_commands(self, ctx, formatter):
        """Extra format methods for multi methods that adds all the commands
        after the options.
        """
        rows = []
        for subcommand in self.list_commands(ctx):
            cmd = self.get_command(ctx, subcommand)
            # What is this, the tool lied about a command.  Ignore it
            if cmd is None:
                continue

            help = cmd.short_help or ''
            rows.append((subcommand, help))

        if rows:
            with formatter.section('Commands'):
                formatter.write_dl(rows)

    def parse_args(self, ctx, args):
        if not args and self.no_args_is_help and not ctx.resilient_parsing:
            echo(ctx.get_help(), color=ctx.color)
            ctx.exit()

        rest = Command.parse_args(self, ctx, args)
        if self.chain:
            ctx.protected_args = rest
            ctx.args = []
        elif rest:
            # First leftover token is the subcommand name; protect it from
            # being consumed by further parsing.
            ctx.protected_args, ctx.args = rest[:1], rest[1:]

        return ctx.args

    def invoke(self, ctx):
        def _process_result(value):
            if self.result_callback is not None:
                value = ctx.invoke(self.result_callback, value,
                                   **ctx.params)
            return value

        if not ctx.protected_args:
            # If we are invoked without command the chain flag controls
            # how this happens.  If we are not in chain mode, the return
            # value here is the return value of the command.
            # If however we are in chain mode, the return value is the
            # return value of the result processor invoked with an empty
            # list (which means that no subcommand actually was executed).
            if self.invoke_without_command:
                if not self.chain:
                    return Command.invoke(self, ctx)
                with ctx:
                    Command.invoke(self, ctx)
                    return _process_result([])
            ctx.fail('Missing command.')

        # Fetch args back out
        args = ctx.protected_args + ctx.args
        ctx.args = []
        ctx.protected_args = []

        # If we're not in chain mode, we only allow the invocation of a
        # single command but we also inform the current context about the
        # name of the command to invoke.
        if not self.chain:
            # Make sure the context is entered so we do not clean up
            # resources until the result processor has worked.
            with ctx:
                cmd_name, cmd, args = self.resolve_command(ctx, args)
                ctx.invoked_subcommand = cmd_name
                Command.invoke(self, ctx)
                sub_ctx = cmd.make_context(cmd_name, args, parent=ctx)
                with sub_ctx:
                    return _process_result(sub_ctx.command.invoke(sub_ctx))

        # In chain mode we create the contexts step by step, but after the
        # base command has been invoked.  Because at that point we do not
        # know the subcommands yet, the invoked subcommand attribute is
        # set to ``*`` to inform the command that subcommands are executed
        # but nothing else.
        with ctx:
            ctx.invoked_subcommand = args and '*' or None
            Command.invoke(self, ctx)

            # Otherwise we make every single context and invoke them in a
            # chain.  In that case the return value to the result processor
            # is the list of all invoked subcommand's results.
            contexts = []
            while args:
                cmd_name, cmd, args = self.resolve_command(ctx, args)
                sub_ctx = cmd.make_context(cmd_name, args, parent=ctx,
                                           allow_extra_args=True,
                                           allow_interspersed_args=False)
                contexts.append(sub_ctx)
                # Whatever the subcommand did not consume feeds the next
                # subcommand in the chain.
                args, sub_ctx.args = sub_ctx.args, []

            rv = []
            for sub_ctx in contexts:
                with sub_ctx:
                    rv.append(sub_ctx.command.invoke(sub_ctx))
            return _process_result(rv)

    def resolve_command(self, ctx, args):
        cmd_name = make_str(args[0])
        original_cmd_name = cmd_name

        # Get the command
        cmd = self.get_command(ctx, cmd_name)

        # If we can't find the command but there is a normalization
        # function available, we try with that one.
        if cmd is None and ctx.token_normalize_func is not None:
            cmd_name = ctx.token_normalize_func(cmd_name)
            cmd = self.get_command(ctx, cmd_name)

        # If we don't find the command we want to show an error message
        # to the user that it was not provided.  However, there is
        # something else we should do: if the first argument looks like
        # an option we want to kick off parsing again for arguments to
        # resolve things like --help which now should go to the main
        # place.
        if cmd is None:
            if split_opt(cmd_name)[0]:
                self.parse_args(ctx, ctx.args)
            ctx.fail('No such command "%s".'
% original_cmd_name)return cmd_name, cmd, args[1:]def get_command(self, ctx, cmd_name):"""Given a context and a command name, this returns a:class:`Command` object if it exists or returns `None`."""raise NotImplementedError()def list_commands(self, ctx):"""Returns a list of subcommand names in the order they shouldappear."""return []class Group(MultiCommand):"""A group allows a command to have subcommands attached. This is themost common way to implement nesting in Click.:param commands: a dictionary of commands."""def __init__(self, name=None, commands=None, **attrs):MultiCommand.__init__(self, name, **attrs)#: the registered subcommands by their exported names.self.commands = commands or {}def add_command(self, cmd, name=None):"""Registers another :class:`Command` with this group. If the nameis not provided, the name of the command is used."""name = name or cmd.nameif name is None:raise TypeError('Command has no name.')_check_multicommand(self, name, cmd, register=True)self.commands[name] = cmddef command(self, *args, **kwargs):"""A shortcut decorator for declaring and attaching a command tothe group. This takes the same arguments as :func:`command` butimmediately registers the created command with this instance bycalling into :meth:`add_command`."""def decorator(f):cmd = command(*args, **kwargs)(f)self.add_command(cmd)return cmdreturn decoratordef group(self, *args, **kwargs):"""A shortcut decorator for declaring and attaching a group tothe group. This takes the same arguments as :func:`group` butimmediately registers the created command with this instance bycalling into :meth:`add_command`."""def decorator(f):cmd = group(*args, **kwargs)(f)self.add_command(cmd)return cmdreturn decoratordef get_command(self, ctx, cmd_name):return self.commands.get(cmd_name)def list_commands(self, ctx):return sorted(self.commands)class CommandCollection(MultiCommand):"""A command collection is a multi command that merges multiple multicommands together into one. 
This is a straightforward implementationthat accepts a list of different multi commands as sources andprovides all the commands for each of them."""def __init__(self, name=None, sources=None, **attrs):MultiCommand.__init__(self, name, **attrs)#: The list of registered multi commands.self.sources = sources or []def add_source(self, multi_cmd):"""Adds a new multi command to the chain dispatcher."""self.sources.append(multi_cmd)def get_command(self, ctx, cmd_name):for source in self.sources:rv = source.get_command(ctx, cmd_name)if rv is not None:if self.chain:_check_multicommand(self, cmd_name, rv)return rvdef list_commands(self, ctx):rv = set()for source in self.sources:rv.update(source.list_commands(ctx))return sorted(rv)class Parameter(object):"""A parameter to a command comes in two versions: they are either:class:`Option`\s or :class:`Argument`\s. Other subclasses are currentlynot supported by design as some of the internals for parsing areintentionally not finalized.Some settings are supported by both options and arguments... versionchanged:: 2.0Changed signature for parameter callback to also be passed theparameter. In Click 2.0, the old callback format will still work,but it will raise a warning to give you change to migrate thecode easier.:param param_decls: the parameter declarations for this option orargument. This is a list of flags or argumentnames.:param type: the type that should be used. Either a :class:`ParamType`or a Python type. The later is converted into the formerautomatically if supported.:param required: controls if this is optional or not.:param default: the default value if omitted. This can also be a callable,in which case it's invoked when the default is neededwithout any arguments.:param callback: a callback that should be executed after the parameterwas matched. This is called as ``fn(ctx, param,value)`` and needs to return the value. Before Click2.0, the signature was ``(ctx, value)``.:param nargs: the number of arguments to match. 
If not ``1`` the returnvalue is a tuple instead of single value. The default fornargs is ``1`` (except if the type is a tuple, then it'sthe arity of the tuple).:param metavar: how the value is represented in the help page.:param expose_value: if this is `True` then the value is passed onwardsto the command callback and stored on the context,otherwise it's skipped.:param is_eager: eager values are processed before non eager ones. Thisshould not be set for arguments or it will inverse theorder of processing.:param envvar: a string or list of strings that are environment variablesthat should be checked."""param_type_name = 'parameter'def __init__(self, param_decls=None, type=None, required=False,default=None, callback=None, nargs=None, metavar=None,expose_value=True, is_eager=False, envvar=None):self.name, self.opts, self.secondary_opts = \self._parse_decls(param_decls or (), expose_value)self.type = convert_type(type, default)# Default nargs to what the type tells us if we have that# information available.if nargs is None:if self.type.is_composite:nargs = self.type.arityelse:nargs = 1self.required = requiredself.callback = callbackself.nargs = nargsself.multiple = Falseself.expose_value = expose_valueself.default = defaultself.is_eager = is_eagerself.metavar = metavarself.envvar = envvar@propertydef human_readable_name(self):"""Returns the human readable name of this parameter. 
This is thesame as the name for options, but the metavar for arguments."""return self.namedef make_metavar(self):if self.metavar is not None:return self.metavarmetavar = self.type.get_metavar(self)if metavar is None:metavar = self.type.name.upper()if self.nargs != 1:metavar += '...'return metavardef get_default(self, ctx):"""Given a context variable this calculates the default value."""# Otherwise go with the regular default.if callable(self.default):rv = self.default()else:rv = self.defaultreturn self.type_cast_value(ctx, rv)def add_to_parser(self, parser, ctx):passdef consume_value(self, ctx, opts):value = opts.get(self.name)if value is None:value = ctx.lookup_default(self.name)if value is None:value = self.value_from_envvar(ctx)return valuedef type_cast_value(self, ctx, value):"""Given a value this runs it properly through the type system.This automatically handles things like `nargs` and `multiple` aswell as composite types."""if self.type.is_composite:if self.nargs <= 1:raise TypeError('Attempted to invoke composite type ''but nargs has been set to %s. This is ''not supported; nargs needs to be set to ''a fixed value > 1.' % self.nargs)if self.multiple:return tuple(self.type(x or (), self, ctx) for x in value or ())return self.type(value or (), self, ctx)def _convert(value, level):if level == 0:return self.type(value, self, ctx)return tuple(_convert(x, level - 1) for x in value or ())return _convert(value, (self.nargs != 1) + bool(self.multiple))def process_value(self, ctx, value):"""Given a value and context this runs the logic to convert thevalue as necessary."""# If the value we were given is None we do nothing. This way# code that calls this can easily figure out if something was# not provided. 
Otherwise it would be converted into an empty# tuple for multiple invocations which is inconvenient.if value is not None:return self.type_cast_value(ctx, value)def value_is_missing(self, value):if value is None:return Trueif (self.nargs != 1 or self.multiple) and value == ():return Truereturn Falsedef full_process_value(self, ctx, value):value = self.process_value(ctx, value)if value is None:value = self.get_default(ctx)if self.required and self.value_is_missing(value):raise MissingParameter(ctx=ctx, param=self)return valuedef resolve_envvar_value(self, ctx):if self.envvar is None:returnif isinstance(self.envvar, (tuple, list)):for envvar in self.envvar:rv = os.environ.get(envvar)if rv is not None:return rvelse:return os.environ.get(self.envvar)def value_from_envvar(self, ctx):rv = self.resolve_envvar_value(ctx)if rv is not None and self.nargs != 1:rv = self.type.split_envvar_value(rv)return rvdef handle_parse_result(self, ctx, opts, args):with augment_usage_errors(ctx, param=self):value = self.consume_value(ctx, opts)try:value = self.full_process_value(ctx, value)except Exception:if not ctx.resilient_parsing:raisevalue = Noneif self.callback is not None:try:value = invoke_param_callback(self.callback, ctx, self, value)except Exception:if not ctx.resilient_parsing:raiseif self.expose_value:ctx.params[self.name] = valuereturn value, argsdef get_help_record(self, ctx):passdef get_usage_pieces(self, ctx):return []class Option(Parameter):"""Options are usually optional values on the command line andhave some extra features that arguments don't have.All other parameters are passed onwards to the parameter constructor.:param show_default: controls if the default value should be shown on thehelp page. Normally, defaults are not shown.:param prompt: if set to `True` or a non empty string then the user willbe prompted for input if not set. 
If set to `True` theprompt will be the option name capitalized.:param confirmation_prompt: if set then the value will need to be confirmedif it was prompted for.:param hide_input: if this is `True` then the input on the prompt will behidden from the user. This is useful for passwordinput.:param is_flag: forces this option to act as a flag. The default isauto detection.:param flag_value: which value should be used for this flag if it'senabled. This is set to a boolean automatically ifthe option string contains a slash to mark two options.:param multiple: if this is set to `True` then the argument is acceptedmultiple times and recorded. This is similar to ``nargs``in how it works but supports arbitrary number ofarguments.:param count: this flag makes an option increment an integer.:param allow_from_autoenv: if this is enabled then the value of thisparameter will be pulled from an environmentvariable in case a prefix is defined on thecontext.:param help: the help string."""param_type_name = 'option'def __init__(self, param_decls=None, show_default=False,prompt=False, confirmation_prompt=False,hide_input=False, is_flag=None, flag_value=None,multiple=False, count=False, allow_from_autoenv=True,type=None, help=None, **attrs):default_is_missing = attrs.get('default', _missing) is _missingParameter.__init__(self, param_decls, type=type, **attrs)if prompt is True:prompt_text = self.name.replace('_', ' ').capitalize()elif prompt is False:prompt_text = Noneelse:prompt_text = promptself.prompt = prompt_textself.confirmation_prompt = confirmation_promptself.hide_input = hide_input# Flagsif is_flag is None:if flag_value is not None:is_flag = Trueelse:is_flag = bool(self.secondary_opts)if is_flag and default_is_missing:self.default = Falseif flag_value is None:flag_value = not self.defaultself.is_flag = is_flagself.flag_value = flag_valueif self.is_flag and isinstance(self.flag_value, bool) \and type is None:self.type = BOOLself.is_bool_flag = Trueelse:self.is_bool_flag = False# 
Countingself.count = countif count:if type is None:self.type = IntRange(min=0)if default_is_missing:self.default = 0self.multiple = multipleself.allow_from_autoenv = allow_from_autoenvself.help = helpself.show_default = show_default# Sanity check for stuff we don't supportif __debug__:if self.nargs < 0:raise TypeError('Options cannot have nargs < 0')if self.prompt and self.is_flag and not self.is_bool_flag:raise TypeError('Cannot prompt for flags that are not bools.')if not self.is_bool_flag and self.secondary_opts:raise TypeError('Got secondary option for non boolean flag.')if self.is_bool_flag and self.hide_input \and self.prompt is not None:raise TypeError('Hidden input does not work with boolean ''flag prompts.')if self.count:if self.multiple:raise TypeError('Options cannot be multiple and count ''at the same time.')elif self.is_flag:raise TypeError('Options cannot be count and flags at ''the same time.')def _parse_decls(self, decls, expose_value):opts = []secondary_opts = []name = Nonepossible_names = []for decl in decls:if isidentifier(decl):if name is not None:raise TypeError('Name defined twice')name = declelse:split_char = decl[:1] == '/' and ';' or '/'if split_char in decl:first, second = decl.split(split_char, 1)first = first.rstrip()if first:possible_names.append(split_opt(first))opts.append(first)second = second.lstrip()if second:secondary_opts.append(second.lstrip())else:possible_names.append(split_opt(decl))opts.append(decl)if name is None and possible_names:possible_names.sort(key=lambda x: len(x[0]))name = possible_names[-1][1].replace('-', '_').lower()if not isidentifier(name):name = Noneif name is None:if not expose_value:return None, opts, secondary_optsraise TypeError('Could not determine name for option')if not opts and not secondary_opts:raise TypeError('No options defined but a name was passed (%s). ''Did you mean to declare an argument instead ''of an option?' 
% name)return name, opts, secondary_optsdef add_to_parser(self, parser, ctx):kwargs = {'dest': self.name,'nargs': self.nargs,'obj': self,}if self.multiple:action = 'append'elif self.count:action = 'count'else:action = 'store'if self.is_flag:kwargs.pop('nargs', None)if self.is_bool_flag and self.secondary_opts:parser.add_option(self.opts, action=action + '_const',const=True, **kwargs)parser.add_option(self.secondary_opts, action=action +'_const', const=False, **kwargs)else:parser.add_option(self.opts, action=action + '_const',const=self.flag_value,**kwargs)else:kwargs['action'] = actionparser.add_option(self.opts, **kwargs)def get_help_record(self, ctx):any_prefix_is_slash = []def _write_opts(opts):rv, any_slashes = join_options(opts)if any_slashes:any_prefix_is_slash[:] = [True]if not self.is_flag and not self.count:rv += ' ' + self.make_metavar()return rvrv = [_write_opts(self.opts)]if self.secondary_opts:rv.append(_write_opts(self.secondary_opts))help = self.help or ''extra = []if self.default is not None and self.show_default:extra.append('default: %s' % (', '.join('%s' % d for d in self.default)if isinstance(self.default, (list, tuple))else self.default, ))if self.required:extra.append('required')if extra:help = '%s[%s]' % (help and help + ' ' or '', '; '.join(extra))return ((any_prefix_is_slash and '; ' or ' / ').join(rv), help)def get_default(self, ctx):# If we're a non boolean flag out default is more complex because# we need to look at all flags in the same group to figure out# if we're the the default one in which case we return the flag# value as default.if self.is_flag and not self.is_bool_flag:for param in ctx.command.params:if param.name == self.name and param.default:return param.flag_valuereturn Nonereturn Parameter.get_default(self, ctx)def prompt_for_value(self, ctx):"""This is an alternative flow that can be activated in the fullvalue processing if a value does not exist. 
It will prompt theuser until a valid value exists and then returns the processedvalue as result."""# Calculate the default before prompting anything to be stable.default = self.get_default(ctx)# If this is a prompt for a flag we need to handle this# differently.if self.is_bool_flag:return confirm(self.prompt, default)return prompt(self.prompt, default=default,hide_input=self.hide_input,confirmation_prompt=self.confirmation_prompt,value_proc=lambda x: self.process_value(ctx, x))def resolve_envvar_value(self, ctx):rv = Parameter.resolve_envvar_value(self, ctx)if rv is not None:return rvif self.allow_from_autoenv and \ctx.auto_envvar_prefix is not None:envvar = '%s_%s' % (ctx.auto_envvar_prefix, self.name.upper())return os.environ.get(envvar)def value_from_envvar(self, ctx):rv = self.resolve_envvar_value(ctx)if rv is None:return Nonevalue_depth = (self.nargs != 1) + bool(self.multiple)if value_depth > 0 and rv is not None:rv = self.type.split_envvar_value(rv)if self.multiple and self.nargs != 1:rv = batch(rv, self.nargs)return rvdef full_process_value(self, ctx, value):if value is None and self.prompt is not None \and not ctx.resilient_parsing:return self.prompt_for_value(ctx)return Parameter.full_process_value(self, ctx, value)class Argument(Parameter):"""Arguments are positional parameters to a command. 
They generallyprovide fewer features than options but can have infinite ``nargs``and are required by default.All parameters are passed onwards to the parameter constructor."""param_type_name = 'argument'def __init__(self, param_decls, required=None, **attrs):if required is None:if attrs.get('default') is not None:required = Falseelse:required = attrs.get('nargs', 1) > 0Parameter.__init__(self, param_decls, required=required, **attrs)if self.default is not None and self.nargs < 0:raise TypeError('nargs=-1 in combination with a default value ''is not supported.')@propertydef human_readable_name(self):if self.metavar is not None:return self.metavarreturn self.name.upper()def make_metavar(self):if self.metavar is not None:return self.metavarvar = self.name.upper()if not self.required:var = '[%s]' % varif self.nargs != 1:var += '...'return vardef _parse_decls(self, decls, expose_value):if not decls:if not expose_value:return None, [], []raise TypeError('Could not determine name for argument')if len(decls) == 1:name = arg = decls[0]name = name.replace('-', '_').lower()elif len(decls) == 2:name, arg = declselse:raise TypeError('Arguments take exactly one or two ''parameter declarations, got %d' % len(decls))return name, [arg], []def get_usage_pieces(self, ctx):return [self.make_metavar()]def add_to_parser(self, parser, ctx):parser.add_argument(dest=self.name, nargs=self.nargs,obj=self)# Circular dependency between decorators and corefrom .decorators import command, group
# -*- coding: utf-8 -*-
# This module is based on the excellent work by Adam Bartoš who
# provided a lot of what went into the implementation here in
# the discussion to issue1602 in the Python bug tracker.
#
# There are some general differences in regards to how this works
# compared to the original patches as we do not need to patch
# the entire interpreter but just work in our little world of
# echo and prompt.
import io
import os
import sys
import zlib
import time
import ctypes
import msvcrt
from click._compat import _NonClosingTextIOWrapper, text_type, PY2
from ctypes import byref, POINTER, c_int, c_char, c_char_p, \
     c_void_p, py_object, c_ssize_t, c_ulong, windll, WINFUNCTYPE
try:
    from ctypes import pythonapi
    PyObject_GetBuffer = pythonapi.PyObject_GetBuffer
    PyBuffer_Release = pythonapi.PyBuffer_Release
except ImportError:
    # No pythonapi (e.g. PyPy): `get_buffer` stays None below and the
    # console stream support degrades gracefully.
    pythonapi = None
from ctypes.wintypes import LPWSTR, LPCWSTR


c_ssize_p = POINTER(c_ssize_t)

# Win32 API entry points used to talk to the real console in
# UTF-16-LE, bypassing the bytes-oriented C runtime streams.
kernel32 = windll.kernel32
GetStdHandle = kernel32.GetStdHandle
ReadConsoleW = kernel32.ReadConsoleW
WriteConsoleW = kernel32.WriteConsoleW
GetLastError = kernel32.GetLastError
GetCommandLineW = WINFUNCTYPE(LPWSTR)(
    ('GetCommandLineW', windll.kernel32))
CommandLineToArgvW = WINFUNCTYPE(
    POINTER(LPWSTR), LPCWSTR, POINTER(c_int))(
        ('CommandLineToArgvW', windll.shell32))


# Standard device handles; the magic numbers are the documented
# STD_INPUT_HANDLE / STD_OUTPUT_HANDLE / STD_ERROR_HANDLE constants.
STDIN_HANDLE = GetStdHandle(-10)
STDOUT_HANDLE = GetStdHandle(-11)
STDERR_HANDLE = GetStdHandle(-12)


# Flags for PyObject_GetBuffer (buffer protocol request kinds).
PyBUF_SIMPLE = 0
PyBUF_WRITABLE = 1

# Win32 error codes this module distinguishes.
ERROR_SUCCESS = 0
ERROR_NOT_ENOUGH_MEMORY = 8
ERROR_OPERATION_ABORTED = 995

STDIN_FILENO = 0
STDOUT_FILENO = 1
STDERR_FILENO = 2

# Ctrl-Z byte; typed at the console it signals end of input.
EOF = b'\x1a'
# Upper bound for a single WriteConsoleW call; writes are capped at
# this size and the buffered layer retries the remainder.
MAX_BYTES_WRITTEN = 32767


class Py_buffer(ctypes.Structure):
    # Mirror of CPython's Py_buffer struct so the C-level buffer
    # protocol can be driven through ctypes.
    _fields_ = [
        ('buf', c_void_p),
        ('obj', py_object),
        ('len', c_ssize_t),
        ('itemsize', c_ssize_t),
        ('readonly', c_int),
        ('ndim', c_int),
        ('format', c_char_p),
        ('shape', c_ssize_p),
        ('strides', c_ssize_p),
        ('suboffsets', c_ssize_p),
        ('internal', c_void_p)
    ]

    # Python 2's Py_buffer carries an extra smalltable member before
    # `internal`; insert it to keep the layout in sync.
    if PY2:
        _fields_.insert(-1, ('smalltable', c_ssize_t * 2))


# On PyPy we cannot get buffers so our ability to operate here is
# severely limited.
if pythonapi is None:
    get_buffer = None
else:
    def get_buffer(obj, writable=False):
        """Return a ctypes char-array view over the memory of *obj*
        via the buffer protocol.

        :param obj: any object supporting the buffer protocol
            (e.g. ``bytes``, ``bytearray``).
        :param writable: request a writable buffer; raises if *obj*
            is read-only.
        """
        buf = Py_buffer()
        flags = PyBUF_WRITABLE if writable else PyBUF_SIMPLE
        PyObject_GetBuffer(py_object(obj), byref(buf), flags)
        try:
            buffer_type = c_char * buf.len
            # NOTE(review): the Py_buffer is released in the finally
            # block before the returned view is used by callers —
            # presumably safe only while *obj* stays alive; confirm.
            return buffer_type.from_address(buf.buf)
        finally:
            PyBuffer_Release(byref(buf))


class _WindowsConsoleRawIOBase(io.RawIOBase):
    """Shared raw-IO base holding a Win32 console handle."""

    def __init__(self, handle):
        self.handle = handle

    def isatty(self):
        # Delegate first so the base class can reject a closed stream;
        # a console handle is by definition a tty.
        io.RawIOBase.isatty(self)
        return True


class _WindowsConsoleReader(_WindowsConsoleRawIOBase):
    """Raw reader that pulls UTF-16-LE data from the console."""

    def readable(self):
        return True

    def readinto(self, b):
        """Fill buffer *b* from the console; returns bytes read."""
        bytes_to_be_read = len(b)
        if not bytes_to_be_read:
            return 0
        elif bytes_to_be_read % 2:
            # UTF-16 code units are two bytes each.
            raise ValueError('cannot read odd number of bytes from '
                             'UTF-16-LE encoded console')

        buffer = get_buffer(b, writable=True)
        code_units_to_be_read = bytes_to_be_read // 2
        code_units_read = c_ulong()

        rv = ReadConsoleW(self.handle, buffer, code_units_to_be_read,
                          byref(code_units_read), None)
        if GetLastError() == ERROR_OPERATION_ABORTED:
            # Read was aborted (Ctrl-C); pause briefly so the pending
            # KeyboardInterrupt can be delivered.
            # wait for KeyboardInterrupt
            time.sleep(0.1)
        if not rv:
            raise OSError('Windows error: %s' % GetLastError())

        # A leading Ctrl-Z means end of input.
        if buffer[0] == EOF:
            return 0
        return 2 * code_units_read.value


class _WindowsConsoleWriter(_WindowsConsoleRawIOBase):
    """Raw writer that pushes UTF-16-LE data to the console."""

    def writable(self):
        return True

    @staticmethod
    def _get_error_message(errno):
        # Map the error codes we expect to symbolic names.
        if errno == ERROR_SUCCESS:
            return 'ERROR_SUCCESS'
        elif errno == ERROR_NOT_ENOUGH_MEMORY:
            return 'ERROR_NOT_ENOUGH_MEMORY'
        return 'Windows error %s' % errno

    def write(self, b):
        """Write bytes *b* to the console; returns bytes written.

        May write less than requested (capped at MAX_BYTES_WRITTEN);
        the buffered layer above is expected to retry the rest.
        """
        bytes_to_be_written = len(b)
        buf = get_buffer(b)
        code_units_to_be_written = min(bytes_to_be_written,
                                       MAX_BYTES_WRITTEN) // 2
        code_units_written = c_ulong()

        WriteConsoleW(self.handle, buf, code_units_to_be_written,
                      byref(code_units_written), None)
        bytes_written = 2 * code_units_written.value

        # Nothing written although something was requested: report the
        # underlying Win32 error instead of silently looping.
        if bytes_written == 0 and bytes_to_be_written > 0:
            raise OSError(self._get_error_message(GetLastError()))
        return bytes_written


class ConsoleStream(object):
    """Hybrid stream: text goes to the UTF-16 console text stream,
    bytes go to the underlying byte stream; everything else is
    forwarded to the text stream via ``__getattr__``."""

    def __init__(self, text_stream, byte_stream):
        self._text_stream = text_stream
        self.buffer = byte_stream

    @property
    def name(self):
        return self.buffer.name

    def write(self, x):
        if isinstance(x, text_type):
            return self._text_stream.write(x)
        # Byte writes: best-effort flush of the text side first so
        # output ordering is preserved.
        try:
            self.flush()
        except Exception:
            pass
        return self.buffer.write(x)

    def writelines(self, lines):
        for line in lines:
            self.write(line)

    def __getattr__(self, name):
        return getattr(self._text_stream, name)

    def isatty(self):
        return self.buffer.isatty()

    def __repr__(self):
        return '<ConsoleStream name=%r encoding=%r>' % (
            self.name,
            self.encoding,
        )


def _get_text_stdin(buffer_stream):
    # Console stdin wrapped as a buffered UTF-16-LE text reader.
    text_stream = _NonClosingTextIOWrapper(
        io.BufferedReader(_WindowsConsoleReader(STDIN_HANDLE)),
        'utf-16-le', 'strict', line_buffering=True)
    return ConsoleStream(text_stream, buffer_stream)


def _get_text_stdout(buffer_stream):
    # Console stdout wrapped as a UTF-16-LE text writer.
    text_stream = _NonClosingTextIOWrapper(
        _WindowsConsoleWriter(STDOUT_HANDLE),
        'utf-16-le', 'strict', line_buffering=True)
    return ConsoleStream(text_stream, buffer_stream)


def _get_text_stderr(buffer_stream):
    # Console stderr wrapped as a UTF-16-LE text writer.
    text_stream = _NonClosingTextIOWrapper(
        _WindowsConsoleWriter(STDERR_HANDLE),
        'utf-16-le', 'strict', line_buffering=True)
    return ConsoleStream(text_stream, buffer_stream)


if PY2:
    def _hash_py_argv():
        # Checksum of the interpreter-level argv; presumably compared
        # against _initial_argv_hash by callers to detect later
        # sys.argv mutation — verify against the consuming module.
        return zlib.crc32('\x00'.join(sys.argv[1:]))

    _initial_argv_hash = _hash_py_argv()

    def _get_windows_argv():
        """Recover the process argv as unicode strings via
        CommandLineToArgvW (Python 2's sys.argv is bytes-only)."""
        argc = c_int(0)
        argv_unicode = CommandLineToArgvW(GetCommandLineW(), byref(argc))

        argv = [argv_unicode[i] for i in range(0, argc.value)]

        # Unless frozen, drop the interpreter executable and any
        # interpreter options that precede the script; '-c'/'-m'
        # terminate option scanning.
        if not hasattr(sys, 'frozen'):
            argv = argv[1:]
            while len(argv) > 0:
                arg = argv[0]
                if not arg.startswith('-') or arg == '-':
                    break
                argv = argv[1:]
                if arg.startswith(('-c', '-m')):
                    break

        return argv[1:]


# fileno -> factory for the matching console text stream.
_stream_factories = {
    0: _get_text_stdin,
    1: _get_text_stdout,
    2: _get_text_stderr,
}


def _get_windows_console_stream(f, encoding, errors):
    """Return a console-backed replacement stream for *f*, or None.

    Only applies when buffers are available, *f* is one of the three
    standard streams attached to a real console, and the requested
    encoding/errors are compatible with UTF-16-LE/strict.
    """
    if get_buffer is not None and \
       encoding in ('utf-16-le', None) \
       and errors in ('strict', None) and \
       hasattr(f, 'isatty') and f.isatty():
        func = _stream_factories.get(f.fileno())
        if func is not None:
            if not PY2:
                # On Python 3 we wrap the underlying binary buffer.
                f = getattr(f, 'buffer')
                if f is None:
                    return None
            else:
                # If we are on Python 2 we need to set the stream that we
                # deal with to binary mode as otherwise the exercise is a
                # bit moot.  The same problems apply as for
                # get_binary_stdin and friends from _compat.
                msvcrt.setmode(f.fileno(), os.O_BINARY)
            return func(f)
import os
import sys
import codecs

from ._compat import PY2


# If someone wants to vendor click, we want to ensure the
# correct package is discovered.  Ideally we could use a
# relative import here but unfortunately Python does not
# support that.
click = sys.modules[__name__.rsplit('.', 1)[0]]


def _find_unicode_literals_frame():
    """Walk up the call stack, skipping click-internal frames, and
    return the 1-based stack depth of the first caller frame that was
    compiled with ``unicode_literals``; 0 if there is none."""
    import __future__
    frm = sys._getframe(1)
    idx = 1
    while frm is not None:
        if frm.f_globals.get('__name__', '').startswith('click.'):
            # Still inside click itself; keep walking outward.
            frm = frm.f_back
            idx += 1
        elif frm.f_code.co_flags & __future__.unicode_literals.compiler_flag:
            return idx
        else:
            break
    return 0


def _check_for_unicode_literals():
    """On Python 2, warn when the calling code was compiled with the
    ``unicode_literals`` future import.

    Skipped entirely under ``-O`` (``__debug__`` false), on Python 3,
    or when ``click.disable_unicode_literals_warning`` is set.
    """
    if not __debug__:
        return
    if not PY2 or click.disable_unicode_literals_warning:
        return
    bad_frame = _find_unicode_literals_frame()
    if bad_frame <= 0:
        return
    from warnings import warn
    # stacklevel points the warning at the offending caller frame.
    warn(Warning('Click detected the use of the unicode_literals '
                 '__future__ import. This is heavily discouraged '
                 'because it can introduce subtle bugs in your '
                 'code. You should instead use explicit u"" literals '
                 'for your unicode strings. For more information see '
                 'http://click.pocoo.org/python3/'),
         stacklevel=bad_frame)


def _verify_python3_env():
    """Ensures that the environment is good for unicode on Python 3.

    Raises a :exc:`RuntimeError` when Python 3 is running with an
    ASCII preferred encoding; on POSIX the error message additionally
    suggests suitable locales discovered via ``locale -a``.
    """
    if PY2:
        return
    try:
        import locale
        fs_enc = codecs.lookup(locale.getpreferredencoding()).name
    except Exception:
        # Treat any lookup failure as the worst case.
        fs_enc = 'ascii'
    if fs_enc != 'ascii':
        return

    extra = ''
    if os.name == 'posix':
        import subprocess
        rv = subprocess.Popen(['locale', '-a'], stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE).communicate()[0]
        good_locales = set()
        has_c_utf8 = False

        # Make sure we're operating on text here.
        if isinstance(rv, bytes):
            rv = rv.decode('ascii', 'replace')

        # Collect every UTF-8 capable locale; remember whether the
        # distribution-independent C.UTF-8 is among them.
        for line in rv.splitlines():
            locale = line.strip()
            if locale.lower().endswith(('.utf-8', '.utf8')):
                good_locales.add(locale)
                if locale.lower() in ('c.utf8', 'c.utf-8'):
                    has_c_utf8 = True

        extra += '\n\n'
        if not good_locales:
            extra += (
                'Additional information: on this system no suitable UTF-8\n'
                'locales were discovered. This most likely requires resolving\n'
                'by reconfiguring the locale system.'
            )
        elif has_c_utf8:
            extra += (
                'This system supports the C.UTF-8 locale which is recommended.\n'
                'You might be able to resolve your issue by exporting the\n'
                'following environment variables:\n\n'
                ' export LC_ALL=C.UTF-8\n'
                ' export LANG=C.UTF-8'
            )
        else:
            extra += (
                'This system lists a couple of UTF-8 supporting locales that\n'
                'you can pick from. The following suitable locales where\n'
                'discovered: %s'
            ) % ', '.join(sorted(good_locales))

        # LC_ALL overrides LANG, so only the first variable that is
        # actually set is inspected.
        bad_locale = None
        for locale in os.environ.get('LC_ALL'), os.environ.get('LANG'):
            if locale and locale.lower().endswith(('.utf-8', '.utf8')):
                bad_locale = locale
            if locale is not None:
                break
        if bad_locale is not None:
            extra += (
                '\n\nClick discovered that you exported a UTF-8 locale\n'
                'but the locale system could not pick up from it because\n'
                'it does not exist. The exported locale is "%s" but it\n'
                'is not supported') % bad_locale

    raise RuntimeError('Click will abort further execution because Python 3 '
                       'was configured to use ASCII as encoding for the '
                       'environment. Consult http://click.pocoo.org/python3/'
                       'for mitigation steps.' + extra)
import textwrapfrom contextlib import contextmanagerclass TextWrapper(textwrap.TextWrapper):def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):space_left = max(width - cur_len, 1)if self.break_long_words:last = reversed_chunks[-1]cut = last[:space_left]res = last[space_left:]cur_line.append(cut)reversed_chunks[-1] = reselif not cur_line:cur_line.append(reversed_chunks.pop())@contextmanagerdef extra_indent(self, indent):old_initial_indent = self.initial_indentold_subsequent_indent = self.subsequent_indentself.initial_indent += indentself.subsequent_indent += indenttry:yieldfinally:self.initial_indent = old_initial_indentself.subsequent_indent = old_subsequent_indentdef indent_only(self, text):rv = []for idx, line in enumerate(text.splitlines()):indent = self.initial_indentif idx > 0:indent = self.subsequent_indentrv.append(indent + line)return '\n'.join(rv)
"""click._termui_impl~~~~~~~~~~~~~~~~~~This module contains implementations for the termui module. To keep theimport time of Click down, some infrequently used functionality is placedin this module and only imported as needed.:copyright: (c) 2014 by Armin Ronacher.:license: BSD, see LICENSE for more details."""import osimport sysimport timeimport mathfrom ._compat import _default_text_stdout, range_type, PY2, isatty, \open_stream, strip_ansi, term_len, get_best_encoding, WINfrom .utils import echofrom .exceptions import ClickExceptionif os.name == 'nt':BEFORE_BAR = '\r'AFTER_BAR = '\n'else:BEFORE_BAR = '\r\033[?25l'AFTER_BAR = '\033[?25h\n'def _length_hint(obj):"""Returns the length hint of an object."""try:return len(obj)except (AttributeError, TypeError):try:get_hint = type(obj).__length_hint__except AttributeError:return Nonetry:hint = get_hint(obj)except TypeError:return Noneif hint is NotImplemented or \not isinstance(hint, (int, long)) or \hint < 0:return Nonereturn hintclass ProgressBar(object):def __init__(self, iterable, length=None, fill_char='#', empty_char=' ',bar_template='%(bar)s', info_sep=' ', show_eta=True,show_percent=None, show_pos=False, item_show_func=None,label=None, file=None, color=None, width=30):self.fill_char = fill_charself.empty_char = empty_charself.bar_template = bar_templateself.info_sep = info_sepself.show_eta = show_etaself.show_percent = show_percentself.show_pos = show_posself.item_show_func = item_show_funcself.label = label or ''if file is None:file = _default_text_stdout()self.file = fileself.color = colorself.width = widthself.autowidth = width == 0if length is None:length = _length_hint(iterable)if iterable is None:if length is None:raise TypeError('iterable or length is required')iterable = range_type(length)self.iter = iter(iterable)self.length = lengthself.length_known = length is not Noneself.pos = 0self.avg = []self.start = self.last_eta = time.time()self.eta_known = Falseself.finished = Falseself.max_width = 
Noneself.entered = Falseself.current_item = Noneself.is_hidden = not isatty(self.file)self._last_line = Nonedef __enter__(self):self.entered = Trueself.render_progress()return selfdef __exit__(self, exc_type, exc_value, tb):self.render_finish()def __iter__(self):if not self.entered:raise RuntimeError('You need to use progress bars in a with block.')self.render_progress()return selfdef render_finish(self):if self.is_hidden:returnself.file.write(AFTER_BAR)self.file.flush()@propertydef pct(self):if self.finished:return 1.0return min(self.pos / (float(self.length) or 1), 1.0)@propertydef time_per_iteration(self):if not self.avg:return 0.0return sum(self.avg) / float(len(self.avg))@propertydef eta(self):if self.length_known and not self.finished:return self.time_per_iteration * (self.length - self.pos)return 0.0def format_eta(self):if self.eta_known:t = self.eta + 1seconds = t % 60t /= 60minutes = t % 60t /= 60hours = t % 24t /= 24if t > 0:days = treturn '%dd %02d:%02d:%02d' % (days, hours, minutes, seconds)else:return '%02d:%02d:%02d' % (hours, minutes, seconds)return ''def format_pos(self):pos = str(self.pos)if self.length_known:pos += '/%s' % self.lengthreturn posdef format_pct(self):return ('% 4d%%' % int(self.pct * 100))[1:]def format_progress_line(self):show_percent = self.show_percentinfo_bits = []if self.length_known:bar_length = int(self.pct * self.width)bar = self.fill_char * bar_lengthbar += self.empty_char * (self.width - bar_length)if show_percent is None:show_percent = not self.show_poselse:if self.finished:bar = self.fill_char * self.widthelse:bar = list(self.empty_char * (self.width or 1))if self.time_per_iteration != 0:bar[int((math.cos(self.pos * self.time_per_iteration)/ 2.0 + 0.5) * self.width)] = self.fill_charbar = ''.join(bar)if self.show_pos:info_bits.append(self.format_pos())if show_percent:info_bits.append(self.format_pct())if self.show_eta and self.eta_known and not self.finished:info_bits.append(self.format_eta())if self.item_show_func is not 
None:item_info = self.item_show_func(self.current_item)if item_info is not None:info_bits.append(item_info)return (self.bar_template % {'label': self.label,'bar': bar,'info': self.info_sep.join(info_bits)}).rstrip()def render_progress(self):from .termui import get_terminal_sizenl = Falseif self.is_hidden:buf = [self.label]nl = Trueelse:buf = []# Update width in case the terminal has been resizedif self.autowidth:old_width = self.widthself.width = 0clutter_length = term_len(self.format_progress_line())new_width = max(0, get_terminal_size()[0] - clutter_length)if new_width < old_width:buf.append(BEFORE_BAR)buf.append(' ' * self.max_width)self.max_width = new_widthself.width = new_widthclear_width = self.widthif self.max_width is not None:clear_width = self.max_widthbuf.append(BEFORE_BAR)line = self.format_progress_line()line_len = term_len(line)if self.max_width is None or self.max_width < line_len:self.max_width = line_lenbuf.append(line)buf.append(' ' * (clear_width - line_len))line = ''.join(buf)# Render the line only if it changed.if line != self._last_line:self._last_line = lineecho(line, file=self.file, color=self.color, nl=nl)self.file.flush()def make_step(self, n_steps):self.pos += n_stepsif self.length_known and self.pos >= self.length:self.finished = Trueif (time.time() - self.last_eta) < 1.0:returnself.last_eta = time.time()self.avg = self.avg[-6:] + [-(self.start - time.time()) / (self.pos)]self.eta_known = self.length_knowndef update(self, n_steps):self.make_step(n_steps)self.render_progress()def finish(self):self.eta_known = 0self.current_item = Noneself.finished = Truedef next(self):if self.is_hidden:return next(self.iter)try:rv = next(self.iter)self.current_item = rvexcept StopIteration:self.finish()self.render_progress()raise StopIteration()else:self.update(1)return rvif not PY2:__next__ = nextdel nextdef pager(text, color=None):"""Decide what method to use for paging through text."""stdout = _default_text_stdout()if not isatty(sys.stdin) or not 
isatty(stdout):return _nullpager(stdout, text, color)pager_cmd = (os.environ.get('PAGER', None) or '').strip()if pager_cmd:if WIN:return _tempfilepager(text, pager_cmd, color)return _pipepager(text, pager_cmd, color)if os.environ.get('TERM') in ('dumb', 'emacs'):return _nullpager(stdout, text, color)if WIN or sys.platform.startswith('os2'):return _tempfilepager(text, 'more <', color)if hasattr(os, 'system') and os.system('(less) 2>/dev/null') == 0:return _pipepager(text, 'less', color)import tempfilefd, filename = tempfile.mkstemp()os.close(fd)try:if hasattr(os, 'system') and os.system('more "%s"' % filename) == 0:return _pipepager(text, 'more', color)return _nullpager(stdout, text, color)finally:os.unlink(filename)def _pipepager(text, cmd, color):"""Page through text by feeding it to another program. Invoking apager through this might support colors."""import subprocessenv = dict(os.environ)# If we're piping to less we might support colors under the# condition thatcmd_detail = cmd.rsplit('/', 1)[-1].split()if color is None and cmd_detail[0] == 'less':less_flags = os.environ.get('LESS', '') + ' '.join(cmd_detail[1:])if not less_flags:env['LESS'] = '-R'color = Trueelif 'r' in less_flags or 'R' in less_flags:color = Trueif not color:text = strip_ansi(text)c = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE,env=env)encoding = get_best_encoding(c.stdin)try:c.stdin.write(text.encode(encoding, 'replace'))c.stdin.close()except (IOError, KeyboardInterrupt):pass# Less doesn't respect ^C, but catches it for its own UI purposes (aborting# search or other commands inside less).## That means when the user hits ^C, the parent process (click) terminates,# but less is still alive, paging the output and messing up the terminal.## If the user wants to make the pager exit on ^C, they should set# `LESS='-K'`. 
It's not our decision to make.while True:try:c.wait()except KeyboardInterrupt:passelse:breakdef _tempfilepager(text, cmd, color):"""Page through text by invoking a program on a temporary file."""import tempfilefilename = tempfile.mktemp()if not color:text = strip_ansi(text)encoding = get_best_encoding(sys.stdout)with open_stream(filename, 'wb')[0] as f:f.write(text.encode(encoding))try:os.system(cmd + ' "' + filename + '"')finally:os.unlink(filename)def _nullpager(stream, text, color):"""Simply print unformatted text. This is the ultimate fallback."""if not color:text = strip_ansi(text)stream.write(text)class Editor(object):def __init__(self, editor=None, env=None, require_save=True,extension='.txt'):self.editor = editorself.env = envself.require_save = require_saveself.extension = extensiondef get_editor(self):if self.editor is not None:return self.editorfor key in 'VISUAL', 'EDITOR':rv = os.environ.get(key)if rv:return rvif WIN:return 'notepad'for editor in 'vim', 'nano':if os.system('which %s >/dev/null 2>&1' % editor) == 0:return editorreturn 'vi'def edit_file(self, filename):import subprocesseditor = self.get_editor()if self.env:environ = os.environ.copy()environ.update(self.env)else:environ = Nonetry:c = subprocess.Popen('%s "%s"' % (editor, filename),env=environ, shell=True)exit_code = c.wait()if exit_code != 0:raise ClickException('%s: Editing failed!' 
% editor)except OSError as e:raise ClickException('%s: Editing failed: %s' % (editor, e))def edit(self, text):import tempfiletext = text or ''if text and not text.endswith('\n'):text += '\n'fd, name = tempfile.mkstemp(prefix='editor-', suffix=self.extension)try:if WIN:encoding = 'utf-8-sig'text = text.replace('\n', '\r\n')else:encoding = 'utf-8'text = text.encode(encoding)f = os.fdopen(fd, 'wb')f.write(text)f.close()timestamp = os.path.getmtime(name)self.edit_file(name)if self.require_save \and os.path.getmtime(name) == timestamp:return Nonef = open(name, 'rb')try:rv = f.read()finally:f.close()return rv.decode('utf-8-sig').replace('\r\n', '\n')finally:os.unlink(name)def open_url(url, wait=False, locate=False):import subprocessdef _unquote_file(url):try:import urllibexcept ImportError:import urllibif url.startswith('file://'):url = urllib.unquote(url[7:])return urlif sys.platform == 'darwin':args = ['open']if wait:args.append('-W')if locate:args.append('-R')args.append(_unquote_file(url))null = open('/dev/null', 'w')try:return subprocess.Popen(args, stderr=null).wait()finally:null.close()elif WIN:if locate:url = _unquote_file(url)args = 'explorer /select,"%s"' % _unquote_file(url.replace('"', ''))else:args = 'start %s "" "%s"' % (wait and '/WAIT' or '', url.replace('"', ''))return os.system(args)try:if locate:url = os.path.dirname(_unquote_file(url)) or '.'else:url = _unquote_file(url)c = subprocess.Popen(['xdg-open', url])if wait:return c.wait()return 0except OSError:if url.startswith(('http://', 'https://')) and not locate and not wait:import webbrowserwebbrowser.open(url)return 0return 1def _translate_ch_to_exc(ch):if ch == '\x03':raise KeyboardInterrupt()if ch == '\x04':raise EOFError()if WIN:import msvcrtdef getchar(echo):rv = msvcrt.getch()if echo:msvcrt.putchar(rv)_translate_ch_to_exc(rv)if PY2:enc = getattr(sys.stdin, 'encoding', None)if enc is not None:rv = rv.decode(enc, 'replace')else:rv = rv.decode('cp1252', 'replace')return rvelse:import ttyimport 
termiosdef getchar(echo):if not isatty(sys.stdin):f = open('/dev/tty')fd = f.fileno()else:fd = sys.stdin.fileno()f = Nonetry:old_settings = termios.tcgetattr(fd)try:tty.setraw(fd)ch = os.read(fd, 32)if echo and isatty(sys.stdout):sys.stdout.write(ch)finally:termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)sys.stdout.flush()if f is not None:f.close()except termios.error:pass_translate_ch_to_exc(ch)return ch.decode(get_best_encoding(sys.stdin), 'replace')
"""Compatibility shims between Python 2 and Python 3 plus Windows
stream fix-ups used throughout click.  Code is unchanged; only
documentation has been added."""
import re
import io
import os
import sys
import codecs
from weakref import WeakKeyDictionary


PY2 = sys.version_info[0] == 2
WIN = sys.platform.startswith('win')
DEFAULT_COLUMNS = 80


# Matches an ANSI CSI escape sequence (parameters + final letter).
_ansi_re = re.compile('\033\[((?:\d|;)*)([a-zA-Z])')


def get_filesystem_encoding():
    # Falls back to the default encoding when the platform reports none.
    return sys.getfilesystemencoding() or sys.getdefaultencoding()


def _make_text_stream(stream, encoding, errors):
    # Wrap a binary stream in a line-buffered text wrapper that does not
    # close the underlying stream when garbage collected.
    if encoding is None:
        encoding = get_best_encoding(stream)
    if errors is None:
        errors = 'replace'
    return _NonClosingTextIOWrapper(stream, encoding, errors,
                                    line_buffering=True)


def is_ascii_encoding(encoding):
    """Checks if a given encoding is ascii."""
    try:
        return codecs.lookup(encoding).name == 'ascii'
    except LookupError:
        return False


def get_best_encoding(stream):
    """Returns the default stream encoding if not found."""
    rv = getattr(stream, 'encoding', None) or sys.getdefaultencoding()
    # An ASCII-configured stream is almost certainly misconfigured;
    # prefer UTF-8 in that case.
    if is_ascii_encoding(rv):
        return 'utf-8'
    return rv


class _NonClosingTextIOWrapper(io.TextIOWrapper):
    # A TextIOWrapper that detaches (rather than closes) the underlying
    # stream on destruction, so wrapping stdio does not close it.

    def __init__(self, stream, encoding, errors, **extra):
        self._stream = stream = _FixupStream(stream)
        io.TextIOWrapper.__init__(self, stream, encoding, errors, **extra)

    # The io module is a place where the Python 3 text behavior
    # was forced upon Python 2, so we need to unbreak
    # it to look like Python 2.
    if PY2:
        def write(self, x):
            if isinstance(x, str) or is_bytes(x):
                try:
                    self.flush()
                except Exception:
                    pass
                return self.buffer.write(str(x))
            return io.TextIOWrapper.write(self, x)

        def writelines(self, lines):
            for line in lines:
                self.write(line)

    def __del__(self):
        try:
            self.detach()
        except Exception:
            pass

    def isatty(self):
        # https://bitbucket.org/pypy/pypy/issue/1803
        return self._stream.isatty()


class _FixupStream(object):
    """The new io interface needs more from streams than streams
    traditionally implement. As such, this fix-up code is necessary in
    some circumstances.
    """

    def __init__(self, stream):
        self._stream = stream

    def __getattr__(self, name):
        # Everything not explicitly fixed up is delegated untouched.
        return getattr(self._stream, name)

    def read1(self, size):
        f = getattr(self._stream, 'read1', None)
        if f is not None:
            return f(size)
        # We only dispatch to readline instead of read in Python 2 as we
        # do not want cause problems with the different implementation
        # of line buffering.
        if PY2:
            return self._stream.readline(size)
        return self._stream.read(size)

    def readable(self):
        x = getattr(self._stream, 'readable', None)
        if x is not None:
            return x()
        # Probe by attempting a zero-byte read.
        try:
            self._stream.read(0)
        except Exception:
            return False
        return True

    def writable(self):
        x = getattr(self._stream, 'writable', None)
        if x is not None:
            return x()
        # Probe with an empty text write, then an empty bytes write.
        try:
            self._stream.write('')
        except Exception:
            try:
                self._stream.write(b'')
            except Exception:
                return False
        return True

    def seekable(self):
        x = getattr(self._stream, 'seekable', None)
        if x is not None:
            return x()
        try:
            self._stream.seek(self._stream.tell())
        except Exception:
            return False
        return True


if PY2:
    text_type = unicode
    bytes = str
    raw_input = raw_input
    string_types = (str, unicode)
    iteritems = lambda x: x.iteritems()
    range_type = xrange

    def is_bytes(x):
        return isinstance(x, (buffer, bytearray))

    _identifier_re = re.compile(r'^[a-zA-Z_][a-zA-Z0-9_]*$')

    # For Windows, we need to force stdout/stdin/stderr to binary if it's
    # fetched for that. This obviously is not the most correct way to do
    # it as it changes global state. Unfortunately, there does not seem to
    # be a clear better way to do it as just reopening the file in binary
    # mode does not change anything.
    #
    # An option would be to do what Python 3 does and to open the file as
    # binary only, patch it back to the system, and then use a wrapper
    # stream that converts newlines. It's not quite clear what's the
    # correct option here.
    #
    # This code also lives in _winconsole for the fallback to the console
    # emulation stream.
    #
    # There are also Windows environments where the `msvcrt` module is not
    # available (which is why we use try-catch instead of the WIN variable
    # here), such as the Google App Engine development server on Windows. In
    # those cases there is just nothing we can do.
    try:
        import msvcrt
    except ImportError:
        set_binary_mode = lambda x: x
    else:
        def set_binary_mode(f):
            try:
                fileno = f.fileno()
            except Exception:
                pass
            else:
                msvcrt.setmode(fileno, os.O_BINARY)
            return f

    def isidentifier(x):
        return _identifier_re.search(x) is not None

    def get_binary_stdin():
        return set_binary_mode(sys.stdin)

    def get_binary_stdout():
        return set_binary_mode(sys.stdout)

    def get_binary_stderr():
        return set_binary_mode(sys.stderr)

    def get_text_stdin(encoding=None, errors=None):
        # The Windows console stream (if any) takes precedence.
        rv = _get_windows_console_stream(sys.stdin, encoding, errors)
        if rv is not None:
            return rv
        return _make_text_stream(sys.stdin, encoding, errors)

    def get_text_stdout(encoding=None, errors=None):
        rv = _get_windows_console_stream(sys.stdout, encoding, errors)
        if rv is not None:
            return rv
        return _make_text_stream(sys.stdout, encoding, errors)

    def get_text_stderr(encoding=None, errors=None):
        rv = _get_windows_console_stream(sys.stderr, encoding, errors)
        if rv is not None:
            return rv
        return _make_text_stream(sys.stderr, encoding, errors)

    def filename_to_ui(value):
        # Decode byte filenames for display; never raises.
        if isinstance(value, bytes):
            value = value.decode(get_filesystem_encoding(), 'replace')
        return value
else:
    import io
    text_type = str
    raw_input = input
    string_types = (str,)
    range_type = range
    isidentifier = lambda x: x.isidentifier()
    iteritems = lambda x: iter(x.items())

    def is_bytes(x):
        return isinstance(x, (bytes, memoryview, bytearray))

    def _is_binary_reader(stream, default=False):
        # A reader is binary if a zero-byte read yields bytes.
        try:
            return isinstance(stream.read(0), bytes)
        except Exception:
            return default
            # This happens in some cases where the stream was already
            # closed. In this case, we assume the default.

    def _is_binary_writer(stream, default=False):
        # A writer is binary if writing b'' succeeds; if writing ''
        # succeeds instead it is definitely a text writer.
        try:
            stream.write(b'')
        except Exception:
            try:
                stream.write('')
                return False
            except Exception:
                pass
            return default
        return True

    def _find_binary_reader(stream):
        # We need to figure out if the given stream is already binary.
        # This can happen because the official docs recommend detaching
        # the streams to get binary streams. Some code might do this, so
        # we need to deal with this case explicitly.
        if _is_binary_reader(stream, False):
            return stream

        buf = getattr(stream, 'buffer', None)

        # Same situation here; this time we assume that the buffer is
        # actually binary in case it's closed.
        if buf is not None and _is_binary_reader(buf, True):
            return buf

    def _find_binary_writer(stream):
        # We need to figure out if the given stream is already binary.
        # This can happen because the official docs recommend detatching
        # the streams to get binary streams. Some code might do this, so
        # we need to deal with this case explicitly.
        if _is_binary_writer(stream, False):
            return stream

        buf = getattr(stream, 'buffer', None)

        # Same situation here; this time we assume that the buffer is
        # actually binary in case it's closed.
        if buf is not None and _is_binary_writer(buf, True):
            return buf

    def _stream_is_misconfigured(stream):
        """A stream is misconfigured if its encoding is ASCII."""
        # If the stream does not have an encoding set, we assume it's set
        # to ASCII. This appears to happen in certain unittest
        # environments. It's not quite clear what the correct behavior is
        # but this at least will force Click to recover somehow.
        return is_ascii_encoding(getattr(stream, 'encoding', None) or 'ascii')

    def _is_compatible_text_stream(stream, encoding, errors):
        stream_encoding = getattr(stream, 'encoding', None)
        stream_errors = getattr(stream, 'errors', None)

        # Perfect match.
        if stream_encoding == encoding and stream_errors == errors:
            return True

        # Otherwise, it's only a compatible stream if we did not ask for
        # an encoding.
        if encoding is None:
            return stream_encoding is not None

        return False

    def _force_correct_text_reader(text_reader, encoding, errors):
        # Returns a text reader guaranteed to honor the requested
        # encoding/errors, rewrapping the underlying binary stream when
        # the existing configuration is unsuitable.
        if _is_binary_reader(text_reader, False):
            binary_reader = text_reader
        else:
            # If there is no target encoding set, we need to verify that the
            # reader is not actually misconfigured.
            if encoding is None and not _stream_is_misconfigured(text_reader):
                return text_reader

            if _is_compatible_text_stream(text_reader, encoding, errors):
                return text_reader

            # If the reader has no encoding, we try to find the underlying
            # binary reader for it. If that fails because the environment is
            # misconfigured, we silently go with the same reader because this
            # is too common to happen. In that case, mojibake is better than
            # exceptions.
            binary_reader = _find_binary_reader(text_reader)
            if binary_reader is None:
                return text_reader

        # At this point, we default the errors to replace instead of strict
        # because nobody handles those errors anyways and at this point
        # we're so fundamentally fucked that nothing can repair it.
        if errors is None:
            errors = 'replace'
        return _make_text_stream(binary_reader, encoding, errors)

    def _force_correct_text_writer(text_writer, encoding, errors):
        # Mirror of _force_correct_text_reader for the write side.
        if _is_binary_writer(text_writer, False):
            binary_writer = text_writer
        else:
            # If there is no target encoding set, we need to verify that the
            # writer is not actually misconfigured.
            if encoding is None and not _stream_is_misconfigured(text_writer):
                return text_writer

            if _is_compatible_text_stream(text_writer, encoding, errors):
                return text_writer

            # If the writer has no encoding, we try to find the underlying
            # binary writer for it. If that fails because the environment is
            # misconfigured, we silently go with the same writer because this
            # is too common to happen. In that case, mojibake is better than
            # exceptions.
            binary_writer = _find_binary_writer(text_writer)
            if binary_writer is None:
                return text_writer

        # At this point, we default the errors to replace instead of strict
        # because nobody handles those errors anyways and at this point
        # we're so fundamentally fucked that nothing can repair it.
        if errors is None:
            errors = 'replace'
        return _make_text_stream(binary_writer, encoding, errors)

    def get_binary_stdin():
        reader = _find_binary_reader(sys.stdin)
        if reader is None:
            raise RuntimeError('Was not able to determine binary '
                               'stream for sys.stdin.')
        return reader

    def get_binary_stdout():
        writer = _find_binary_writer(sys.stdout)
        if writer is None:
            raise RuntimeError('Was not able to determine binary '
                               'stream for sys.stdout.')
        return writer

    def get_binary_stderr():
        writer = _find_binary_writer(sys.stderr)
        if writer is None:
            raise RuntimeError('Was not able to determine binary '
                               'stream for sys.stderr.')
        return writer

    def get_text_stdin(encoding=None, errors=None):
        rv = _get_windows_console_stream(sys.stdin, encoding, errors)
        if rv is not None:
            return rv
        return _force_correct_text_reader(sys.stdin, encoding, errors)

    def get_text_stdout(encoding=None, errors=None):
        rv = _get_windows_console_stream(sys.stdout, encoding, errors)
        if rv is not None:
            return rv
        return _force_correct_text_writer(sys.stdout, encoding, errors)

    def get_text_stderr(encoding=None, errors=None):
        rv = _get_windows_console_stream(sys.stderr, encoding, errors)
        if rv is not None:
            return rv
        return _force_correct_text_writer(sys.stderr, encoding, errors)

    def filename_to_ui(value):
        # Round-trip through surrogateescape so undecodable filename
        # bytes become replacement characters for display.
        if isinstance(value, bytes):
            value = value.decode(get_filesystem_encoding(), 'replace')
        else:
            value = value.encode('utf-8', 'surrogateescape') \
                .decode('utf-8', 'replace')
        return value


def get_streerror(e, default=None):
    # Extract a human-readable message from an OS-level exception.
    if hasattr(e, 'strerror'):
        msg = e.strerror
    else:
        if default is not None:
            msg = default
        else:
            msg = str(e)
    if isinstance(msg, bytes):
        msg = msg.decode('utf-8', 'replace')
    return msg


def open_stream(filename, mode='r', encoding=None, errors='strict',
                atomic=False):
    # Returns ``(stream, should_close)``; '-' selects the matching
    # standard stream which must not be closed by the caller.
    # Standard streams first. These are simple because they don't need
    # special handling for the atomic flag. It's entirely ignored.
    if filename == '-':
        if 'w' in mode:
            if 'b' in mode:
                return get_binary_stdout(), False
            return get_text_stdout(encoding=encoding, errors=errors), False
        if 'b' in mode:
            return get_binary_stdin(), False
        return get_text_stdin(encoding=encoding, errors=errors), False

    # Non-atomic writes directly go out through the regular open functions.
    if not atomic:
        if encoding is None:
            return open(filename, mode), True
        return io.open(filename, mode, encoding=encoding, errors=errors), True

    # Some usability stuff for atomic writes
    if 'a' in mode:
        raise ValueError(
            'Appending to an existing file is not supported, because that '
            'would involve an expensive `copy`-operation to a temporary '
            'file. Open the file in normal `w`-mode and copy explicitly '
            'if that\'s what you\'re after.'
        )
    if 'x' in mode:
        raise ValueError('Use the `overwrite`-parameter instead.')
    if 'w' not in mode:
        raise ValueError('Atomic writes only make sense with `w`-mode.')

    # Atomic writes are more complicated. They work by opening a file
    # as a proxy in the same folder and then using the fdopen
    # functionality to wrap it in a Python file. Then we wrap it in an
    # atomic file that moves the file over on close.
    import tempfile
    fd, tmp_filename = tempfile.mkstemp(dir=os.path.dirname(filename),
                                        prefix='.__atomic-write')
    if encoding is not None:
        f = io.open(fd, mode, encoding=encoding, errors=errors)
    else:
        f = os.fdopen(fd, mode)
    return _AtomicFile(f, tmp_filename, filename), True


# Used in a destructor call, needs extra protection from interpreter cleanup.
if hasattr(os, 'replace'):
    _replace = os.replace
    _can_replace = True
else:
    _replace = os.rename
    _can_replace = not WIN


class _AtomicFile(object):
    # Proxy around a temp file that is moved over the real target on close.

    def __init__(self, f, tmp_filename, real_filename):
        self._f = f
        self._tmp_filename = tmp_filename
        self._real_filename = real_filename
        self.closed = False

    @property
    def name(self):
        return self._real_filename

    def close(self, delete=False):
        if self.closed:
            return
        self._f.close()
        if not _can_replace:
            # os.rename cannot overwrite on this platform; remove first.
            try:
                os.remove(self._real_filename)
            except OSError:
                pass
        _replace(self._tmp_filename, self._real_filename)
        self.closed = True

    def __getattr__(self, name):
        return getattr(self._f, name)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, tb):
        self.close(delete=exc_type is not None)

    def __repr__(self):
        return repr(self._f)


auto_wrap_for_ansi = None
colorama = None
get_winterm_size = None


def strip_ansi(value):
    # Remove all ANSI escape sequences from *value*.
    return _ansi_re.sub('', value)


def should_strip_ansi(stream=None, color=None):
    # With no explicit color preference, strip unless writing to a tty.
    if color is None:
        if stream is None:
            stream = sys.stdin
        return not isatty(stream)
    return not color


# If we're on Windows, we provide transparent integration through
# colorama. This will make ANSI colors through the echo function
# work automatically.
if WIN:
    # Windows has a smaller terminal
    DEFAULT_COLUMNS = 79

    from ._winconsole import _get_windows_console_stream

    def _get_argv_encoding():
        import locale
        return locale.getpreferredencoding()

    if PY2:
        def raw_input(prompt=''):
            sys.stderr.flush()
            if prompt:
                stdout = _default_text_stdout()
                stdout.write(prompt)
            stdin = _default_text_stdin()
            return stdin.readline().rstrip('\r\n')

    try:
        import colorama
    except ImportError:
        pass
    else:
        _ansi_stream_wrappers = WeakKeyDictionary()

        def auto_wrap_for_ansi(stream, color=None):
            """This function wraps a stream so that calls through colorama
            are issued to the win32 console API to recolor on demand. It
            also ensures to reset the colors if a write call is interrupted
            to not destroy the console afterwards.
            """
            try:
                cached = _ansi_stream_wrappers.get(stream)
            except Exception:
                cached = None
            if cached is not None:
                return cached
            strip = should_strip_ansi(stream, color)
            ansi_wrapper = colorama.AnsiToWin32(stream, strip=strip)
            rv = ansi_wrapper.stream
            _write = rv.write

            def _safe_write(s):
                try:
                    return _write(s)
                except:
                    # Restore console state before propagating.
                    ansi_wrapper.reset_all()
                    raise

            rv.write = _safe_write
            try:
                _ansi_stream_wrappers[stream] = rv
            except Exception:
                pass
            return rv

        def get_winterm_size():
            win = colorama.win32.GetConsoleScreenBufferInfo(
                colorama.win32.STDOUT).srWindow
            return win.Right - win.Left, win.Bottom - win.Top
else:
    def _get_argv_encoding():
        return getattr(sys.stdin, 'encoding', None) or get_filesystem_encoding()

    _get_windows_console_stream = lambda *x: None


def term_len(x):
    # Visible length of *x* once ANSI sequences are removed.
    return len(strip_ansi(x))


def isatty(stream):
    # Safe isatty that never raises.
    try:
        return stream.isatty()
    except Exception:
        return False


def _make_cached_stream_func(src_func, wrapper_func):
    # Build a zero-argument function returning a wrapped stream, cached
    # per underlying stream object (weakly, so replaced streams expire).
    cache = WeakKeyDictionary()

    def func():
        stream = src_func()
        try:
            rv = cache.get(stream)
        except Exception:
            rv = None
        if rv is not None:
            return rv
        rv = wrapper_func()
        try:
            cache[stream] = rv
        except Exception:
            pass
        return rv
    return func


_default_text_stdin = _make_cached_stream_func(
    lambda: sys.stdin, get_text_stdin)
_default_text_stdout = _make_cached_stream_func(
    lambda: sys.stdout, get_text_stdout)
_default_text_stderr = _make_cached_stream_func(
    lambda: sys.stderr, get_text_stderr)


binary_streams = {
    'stdin': get_binary_stdin,
    'stdout': get_binary_stdout,
    'stderr': get_binary_stderr,
}

text_streams = {
    'stdin': get_text_stdin,
    'stdout': get_text_stdout,
    'stderr': get_text_stderr,
}
import os
import re

from .utils import echo
from .parser import split_arg_string
from .core import MultiCommand, Option

COMPLETION_SCRIPT = '''
%(complete_func)s() {
    COMPREPLY=( $( env COMP_WORDS="${COMP_WORDS[*]}" \\
                   COMP_CWORD=$COMP_CWORD \\
                   %(autocomplete_var)s=complete $1 ) )
    return 0
}

complete -F %(complete_func)s -o default %(script_names)s
'''

# Characters that may not appear in a bash function identifier.
_invalid_ident_char_re = re.compile(r'[^a-zA-Z0-9_]')


def get_completion_script(prog_name, complete_var):
    """Render the bash snippet that wires *prog_name* up for completion."""
    func_name = _invalid_ident_char_re.sub('', prog_name.replace('-', '_'))
    script = COMPLETION_SCRIPT % {
        'complete_func': '_%s_completion' % func_name,
        'script_names': prog_name,
        'autocomplete_var': complete_var,
    }
    return script.strip() + ';'


def resolve_ctx(cli, prog_name, args):
    """Walk down the command tree for *args* and return the innermost
    reachable context, or None when a subcommand cannot be resolved."""
    ctx = cli.make_context(prog_name, args, resilient_parsing=True)
    while isinstance(ctx.command, MultiCommand) and (ctx.protected_args + ctx.args):
        remaining = ctx.protected_args + ctx.args
        cmd = ctx.command.get_command(ctx, remaining[0])
        if cmd is None:
            return None
        ctx = cmd.make_context(remaining[0], remaining[1:], parent=ctx,
                               resilient_parsing=True)
    return ctx


def get_choices(cli, prog_name, args, incomplete):
    """Yield completion candidates that start with *incomplete*."""
    ctx = resolve_ctx(cli, prog_name, args)
    if ctx is None:
        return
    candidates = []
    if incomplete and not incomplete[:1].isalnum():
        # The token looks like an option: offer option names instead of
        # subcommand names.
        for param in ctx.command.params:
            if isinstance(param, Option):
                candidates.extend(param.opts)
                candidates.extend(param.secondary_opts)
    elif isinstance(ctx.command, MultiCommand):
        candidates.extend(ctx.command.list_commands(ctx))
    for candidate in candidates:
        if candidate.startswith(incomplete):
            yield candidate


def do_complete(cli, prog_name):
    """Answer one completion request posed through the COMP_* env vars."""
    cwords = split_arg_string(os.environ['COMP_WORDS'])
    cword = int(os.environ['COMP_CWORD'])
    args = cwords[1:cword]
    try:
        incomplete = cwords[cword]
    except IndexError:
        # Cursor sits after the last word; complete from scratch.
        incomplete = ''
    for choice in get_choices(cli, prog_name, args, incomplete):
        echo(choice)
    return True


def bashcomplete(cli, prog_name, complete_var, complete_instr):
    """Dispatch on the completion instruction read from the environment."""
    if complete_instr == 'source':
        echo(get_completion_script(prog_name, complete_var))
        return True
    if complete_instr == 'complete':
        return do_complete(cli, prog_name)
    return False
# -*- coding: utf-8 -*-
"""
    click
    ~~~~~

    Click is a simple Python module that wraps the stdlib's optparse to make
    writing command line scripts fun.  Unlike other modules, it's based
    around a simple API that does not come with too much magic and is
    composable.

    In case optparse ever gets removed from the stdlib, it will be shipped by
    this module.

    :copyright: (c) 2014 by Armin Ronacher.
    :license: BSD, see LICENSE for more details.
"""

# Core classes
from .core import Context, BaseCommand, Command, MultiCommand, Group, \
     CommandCollection, Parameter, Option, Argument

# Globals
from .globals import get_current_context

# Decorators
from .decorators import pass_context, pass_obj, make_pass_decorator, \
     command, group, argument, option, confirmation_option, \
     password_option, version_option, help_option

# Types
from .types import ParamType, File, Path, Choice, IntRange, Tuple, \
     STRING, INT, FLOAT, BOOL, UUID, UNPROCESSED

# Utilities
from .utils import echo, get_binary_stream, get_text_stream, open_file, \
     format_filename, get_app_dir, get_os_args

# Terminal functions
from .termui import prompt, confirm, get_terminal_size, echo_via_pager, \
     progressbar, clear, style, unstyle, secho, edit, launch, getchar, \
     pause

# Exceptions
from .exceptions import ClickException, UsageError, BadParameter, \
     FileError, Abort, NoSuchOption, BadOptionUsage, BadArgumentUsage, \
     MissingParameter

# Formatting
from .formatting import HelpFormatter, wrap_text

# Parsing
from .parser import OptionParser

# The public API, mirroring the import groups above.
__all__ = [
    # Core classes
    'Context', 'BaseCommand', 'Command', 'MultiCommand', 'Group',
    'CommandCollection', 'Parameter', 'Option', 'Argument',

    # Globals
    'get_current_context',

    # Decorators
    'pass_context', 'pass_obj', 'make_pass_decorator', 'command', 'group',
    'argument', 'option', 'confirmation_option', 'password_option',
    'version_option', 'help_option',

    # Types
    'ParamType', 'File', 'Path', 'Choice', 'IntRange', 'Tuple', 'STRING',
    'INT', 'FLOAT', 'BOOL', 'UUID', 'UNPROCESSED',

    # Utilities
    'echo', 'get_binary_stream', 'get_text_stream', 'open_file',
    'format_filename', 'get_app_dir', 'get_os_args',

    # Terminal functions
    'prompt', 'confirm', 'get_terminal_size', 'echo_via_pager',
    'progressbar', 'clear', 'style', 'unstyle', 'secho', 'edit', 'launch',
    'getchar', 'pause',

    # Exceptions
    'ClickException', 'UsageError', 'BadParameter', 'FileError',
    'Abort', 'NoSuchOption', 'BadOptionUsage', 'BadArgumentUsage',
    'MissingParameter',

    # Formatting
    'HelpFormatter', 'wrap_text',

    # Parsing
    'OptionParser',
]


# Controls if click should emit the warning about the use of unicode
# literals.
disable_unicode_literals_warning = False


__version__ = '6.7'
/Users/ccummings/.pyenv/versions/2.7.13
#!/Users/ccummings/eventbrite_github/triage_projects/csv_validator/pysrc/venv/bin/python2.7
# -*- coding: utf-8 -*-
"""Generated console-script shim: runs the ``wheel`` command-line tool."""
import re
import sys

from wheel.tool import main

if __name__ == '__main__':
    # Strip the setuptools wrapper suffix so argv[0] shows the bare command.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
#!/Users/ccummings/eventbrite_github/triage_projects/csv_validator/pysrc/venv/bin/python
"""python-config replacement: print compiler/linker flags for embedding Python."""
import getopt
import sys
import sysconfig

# Options accepted on the command line; extended per interpreter version.
valid_opts = ['prefix', 'exec-prefix', 'includes', 'libs', 'cflags',
              'ldflags', 'help']
if sys.version_info >= (3, 2):
    valid_opts.insert(-1, 'extension-suffix')
    valid_opts.append('abiflags')
if sys.version_info >= (3, 3):
    valid_opts.append('configdir')


def exit_with_usage(code=1):
    """Print the usage line on stderr and terminate with *code*."""
    sys.stderr.write("Usage: {0} [{1}]\n".format(
        sys.argv[0], '|'.join('--'+opt for opt in valid_opts)))
    sys.exit(code)


try:
    opts, args = getopt.getopt(sys.argv[1:], '', valid_opts)
except getopt.error:
    exit_with_usage()

if not opts:
    exit_with_usage()

pyver = sysconfig.get_config_var('VERSION')
getvar = sysconfig.get_config_var

opt_flags = [flag for (flag, val) in opts]

if '--help' in opt_flags:
    exit_with_usage(code=0)

for opt in opt_flags:
    if opt == '--prefix':
        print(sysconfig.get_config_var('prefix'))
    elif opt == '--exec-prefix':
        print(sysconfig.get_config_var('exec_prefix'))
    elif opt in ('--includes', '--cflags'):
        flags = ['-I' + sysconfig.get_path('include'),
                 '-I' + sysconfig.get_path('platinclude')]
        if opt == '--cflags':
            flags.extend(getvar('CFLAGS').split())
        print(' '.join(flags))
    elif opt in ('--libs', '--ldflags'):
        abiflags = getattr(sys, 'abiflags', '')
        libs = ['-lpython' + pyver + abiflags]
        libs += getvar('LIBS').split()
        libs += getvar('SYSLIBS').split()
        # add the prefix/lib/pythonX.Y/config dir, but only if there is no
        # shared library in prefix/lib/.
        if opt == '--ldflags':
            if not getvar('Py_ENABLE_SHARED'):
                libs.insert(0, '-L' + getvar('LIBPL'))
            if not getvar('PYTHONFRAMEWORK'):
                libs.extend(getvar('LINKFORSHARED').split())
        print(' '.join(libs))
    elif opt == '--extension-suffix':
        ext_suffix = sysconfig.get_config_var('EXT_SUFFIX')
        if ext_suffix is None:
            # Older interpreters spell this config variable 'SO'.
            ext_suffix = sysconfig.get_config_var('SO')
        print(ext_suffix)
    elif opt == '--abiflags':
        if not getattr(sys, 'abiflags', None):
            exit_with_usage()
        print(sys.abiflags)
    elif opt == '--configdir':
        print(sysconfig.get_config_var('LIBPL'))
#!/Users/ccummings/eventbrite_github/triage_projects/csv_validator/pysrc/venv/bin/python2.7
# -*- coding: utf-8 -*-
"""Generated console-script shim for ``pip``."""
import re
import sys

from pip import main

if __name__ == '__main__':
    # Drop the '-script.py'/'.exe' wrapper suffix from the program name.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
#!/Users/ccummings/eventbrite_github/triage_projects/csv_validator/pysrc/venv/bin/python2.7
# -*- coding: utf-8 -*-
"""Generated console-script shim for ``pip`` (versioned alias)."""
import re
import sys

from pip import main

# Suffixes appended by the script generator that should not appear in argv[0].
_WRAPPER_SUFFIX = r'(-script\.pyw?|\.exe)?$'

if __name__ == '__main__':
    sys.argv[0] = re.sub(_WRAPPER_SUFFIX, '', sys.argv[0])
    sys.exit(main())
#!/Users/ccummings/eventbrite_github/triage_projects/csv_validator/pysrc/venv/bin/python2.7
# -*- coding: utf-8 -*-
"""Generated console-script shim for ``pip`` (second versioned alias)."""
import re
import sys

from pip import main

if __name__ == '__main__':
    # Normalize the program name before handing control to pip.
    prog = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.argv[0] = prog
    sys.exit(main())
#!/Users/ccummings/eventbrite_github/triage_projects/csv_validator/pysrc/venv/bin/python2.7
# -*- coding: utf-8 -*-
"""Generated console-script shim for ``easy_install``."""
import re
import sys

from setuptools.command.easy_install import main

if __name__ == '__main__':
    # Strip the wrapper suffix so argv[0] is the plain command name.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
#!/Users/ccummings/eventbrite_github/triage_projects/csv_validator/pysrc/venv/bin/python2.7
# -*- coding: utf-8 -*-
"""Generated console-script shim for ``easy_install`` (versioned alias)."""
import re
import sys

from setuptools.command.easy_install import main

if __name__ == '__main__':
    prog_name = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.argv[0] = prog_name
    sys.exit(main())
#!/Users/ccummings/eventbrite_github/triage_projects/csv_validator/pysrc/venv/bin/python2.7
# EASY-INSTALL-ENTRY-SCRIPT: 'csvcheck','console_scripts','csvcheck'
"""Generated entry-point script for the ``csvcheck`` console command."""
# NOTE: __requires__ must be set before pkg_resources is imported so the
# correct distribution is activated.
__requires__ = 'csvcheck'
import re
import sys

from pkg_resources import load_entry_point

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(load_entry_point('csvcheck', 'console_scripts', 'csvcheck')())
"""By using execfile(this_file, dict(__file__=this_file)) you willactivate this virtualenv environment.This can be used when you must use an existing Python interpreter, notthe virtualenv bin/python"""try:__file__except NameError:raise AssertionError("You must run this like execfile('path/to/activate_this.py', dict(__file__='path/to/activate_this.py'))")import sysimport osold_os_path = os.environ.get('PATH', '')os.environ['PATH'] = os.path.dirname(os.path.abspath(__file__)) + os.pathsep + old_os_pathbase = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))if sys.platform == 'win32':site_packages = os.path.join(base, 'Lib', 'site-packages')else:site_packages = os.path.join(base, 'lib', 'python%s' % sys.version[:3], 'site-packages')prev_sys_path = list(sys.path)import sitesite.addsitedir(site_packages)sys.real_prefix = sys.prefixsys.prefix = base# Move the added items to the front of the path:new_sys_path = []for item in list(sys.path):if item not in prev_sys_path:new_sys_path.append(item)sys.path.remove(item)sys.path[:0] = new_sys_path
# This file must be used using `. bin/activate.fish` *within a running fish ( http://fishshell.com ) session*.
# Do not run it directly.

function deactivate -d 'Exit virtualenv mode and return to the normal environment.'
    # reset old environment variables
    if test -n "$_OLD_VIRTUAL_PATH"
        set -gx PATH $_OLD_VIRTUAL_PATH
        set -e _OLD_VIRTUAL_PATH
    end

    if test -n "$_OLD_VIRTUAL_PYTHONHOME"
        set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
        set -e _OLD_VIRTUAL_PYTHONHOME
    end

    if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
        # Set an empty local `$fish_function_path` to allow the removal of `fish_prompt` using `functions -e`.
        set -l fish_function_path

        # Erase virtualenv's `fish_prompt` and restore the original.
        functions -e fish_prompt
        functions -c _old_fish_prompt fish_prompt
        functions -e _old_fish_prompt
        set -e _OLD_FISH_PROMPT_OVERRIDE
    end

    set -e VIRTUAL_ENV

    if test "$argv[1]" != 'nondestructive'
        # Self-destruct!
        functions -e pydoc
        functions -e deactivate
    end
end

# Unset irrelevant variables.
deactivate nondestructive

set -gx VIRTUAL_ENV "/Users/ccummings/eventbrite_github/triage_projects/csv_validator/pysrc/venv"

set -gx _OLD_VIRTUAL_PATH $PATH
set -gx PATH "$VIRTUAL_ENV/bin" $PATH

# Unset `$PYTHONHOME` if set.
if set -q PYTHONHOME
    set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
    set -e PYTHONHOME
end

function pydoc
    python -m pydoc $argv
end

if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
    # Copy the current `fish_prompt` function as `_old_fish_prompt`.
    functions -c fish_prompt _old_fish_prompt

    function fish_prompt
        # Save the current $status, for fish_prompts that display it.
        set -l old_status $status

        # Prompt override provided?
        # If not, just prepend the environment name.
        if test -n ""
            printf '%s%s' "" (set_color normal)
        else
            printf '%s(%s) ' (set_color normal) (basename "$VIRTUAL_ENV")
        end

        # Restore the original $status
        echo "exit $old_status" | source
        _old_fish_prompt
    end

    set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
end
# This file must be used with "source bin/activate.csh" *from csh*.
# You cannot run it directly.
# Created by Davide Di Blasi <davidedb@gmail.com>.

alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate && unalias pydoc'

# Unset irrelevant variables.
deactivate nondestructive

setenv VIRTUAL_ENV "/Users/ccummings/eventbrite_github/triage_projects/csv_validator/pysrc/venv"

set _OLD_VIRTUAL_PATH="$PATH"
setenv PATH "$VIRTUAL_ENV/bin:$PATH"

# The empty string below is the (unset) prompt-override template.
if ("" != "") then
    set env_name = ""
else
    set env_name = `basename "$VIRTUAL_ENV"`
endif

# Could be in a non-interactive environment,
# in which case, $prompt is undefined and we wouldn't
# care about the prompt anyway.
if ( $?prompt ) then
    set _OLD_VIRTUAL_PROMPT="$prompt"
    set prompt = "[$env_name] $prompt"
endif

unset env_name

alias pydoc python -m pydoc

rehash
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    unset -f pydoc >/dev/null 2>&1

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands.  Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
        hash -r 2>/dev/null
    fi

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "${1-}" = "nondestructive" ] ; then
        # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="/Users/ccummings/eventbrite_github/triage_projects/csv_validator/pysrc/venv"
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="$PS1"
    if [ "x" != x ] ; then
        PS1="$PS1"
    else
        PS1="(`basename \"$VIRTUAL_ENV\"`) $PS1"
    fi
    export PS1
fi

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc

pydoc () {
    python -m pydoc "$@"
}

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands.  Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
    hash -r 2>/dev/null
fi
from setuptools import setup

# Packaging definition for the csvcheck CLI; exposes `csvcheck` as a
# console entry point backed by main.run.
setup(
    name='csvcheck',
    version='0.0.1',
    py_modules=['main', 'lib'],
    install_requires=['Click', 'six'],
    entry_points='''
[console_scripts]
csvcheck=main:run
''',
)
'''Command line front end for csvcheck: validate a csv file against rules
and optionally write a repaired copy.'''
from traceback import format_exc

import click

from lib import Validator, RuleDoesNotExist, RuleCannotBeParsed, FileNewLineError


@click.group(invoke_without_command=True)
@click.option('--rules', type=click.Path(exists=True), default='rules.json')
@click.option('--rules-string', type=click.STRING)
@click.argument('csv', type=click.File('r'))
@click.argument('rule_name', type=click.STRING, required=False)
@click.pass_context
def run(context, csv, rule_name, rules, rules_string):
    '''csvcheck accepts a path to a csv file and will validate it against rules'''
    # An inline JSON string takes precedence over the rules file path.
    if rules_string:
        rules = rules_string
    validator = get_validator(csv, rules, rule_name)
    context.obj = {
        'file': csv,
        'validator': validator,
    }
    if not context.invoked_subcommand:
        # BUG FIX: get_validator returns None after a fatal error; the old
        # code then crashed with AttributeError on .errors.
        if validator is None:
            return
        errors = pretty_errors(validator.errors, validator.csv_data)
        if not errors:
            click.echo('there are no errors in {}'.format(csv.name))
        else:
            print_csv_errors(errors)


@run.command()
@click.option('-l', '--line-endings/--no-line-endings', default=False)
@click.option('-e', '--remove-errors/--no-remove-errors', default=False)
@click.pass_context
def fix(context, line_endings, remove_errors):
    '''Write a repaired copy of the csv to the current directory.'''
    handle_fix(line_endings, remove_errors)


@click.pass_context
def handle_fix(context, line_endings, remove_errors):
    '''Dispatch to the requested fix strategy and report the output file.

    ``context`` is injected by ``click.pass_context``; callers pass only the
    two boolean flags.
    '''
    fixed_file_name = ''
    if line_endings:
        fixed_file_name = fix_line_endings(context.obj['file'])
    elif remove_errors:
        fixed_file_name = _fix_remove_errors(context)
    # BUG FIX: with neither flag set the old code echoed
    # "' saved to current directory'" with an empty file name.
    if fixed_file_name:
        click.echo('{} saved to current directory'.format(fixed_file_name))


def _fix_remove_errors(context):
    '''Interactively delete/strip invalid lines; returns the new file name.'''
    csv_data = context.obj['validator'].csv_data
    errors = context.obj['validator'].errors
    err_explanation = '''
CONTENTS OF LINE: {line}
INVALID CHARACTERS: {invalid}
'''
    initial_msg = '''There are {count} errors in {file_name}.
The first error is:
{first_err}
'''.format(count=len(errors),
           file_name=context.obj['file'].name,
           first_err=err_explanation.format(
               line=csv_data[errors[0]['line_num']],
               invalid=errors[0]['errors']))
    strategy_prompt = '''Choose a strategy:
delete - remove the line completely
fix - remove the invalid characters, but keep the valid ones
skip - skip this line
'''
    invalid_input_msg = '"{}" is not a valid strategy. Please choose from above.'

    # Strategy codes keyed by the user's (abbreviated) answer.
    REMOVE_LINE = 1
    STRIP_ERRORS = 2
    SKIP_LINE = 3
    allowed_inputs = {
        'd': REMOVE_LINE,
        'delete': REMOVE_LINE,
        'f': STRIP_ERRORS,
        'fix': STRIP_ERRORS,
        's': SKIP_LINE,
        'skip': SKIP_LINE,
    }

    strategy = None
    block_on_each_iteration = True
    click.echo(initial_msg)
    for index, err in enumerate(errors):
        line_contents = csv_data[err['line_num']]
        if block_on_each_iteration:
            if index != 0:
                click.echo(err_explanation.format(line=line_contents,
                                                  invalid=err['errors']))
            click.echo(strategy_prompt)
            strategy = get_user_input(allowed_map=allowed_inputs,
                                      invalid_msg=invalid_input_msg)
            if index == 0:
                # Offer to reuse the first answer for every remaining error.
                block_on_each_iteration = not click.confirm(
                    'Do you want to do this for the rest of the errors?')
        if strategy == STRIP_ERRORS:
            csv_data[err['line_num']] = fix_line(line_contents, err, strip=True)
        elif strategy == REMOVE_LINE:
            csv_data[err['line_num']] = fix_line(line_contents, err)
        click.echo('--------------------')

    fixed_file_name = 'fixed-{}'.format(context.obj['file'].name)
    with open(fixed_file_name, 'w') as f:
        # Lines emptied by the delete strategy are dropped entirely.
        f.write(''.join('{}\n'.format(l) for l in csv_data if len(l) > 0))
    return fixed_file_name


def fix_line(line_contents, error, strip=False):
    '''returns the line without invalid characters or an empty string if strategy == "line"'''
    if not strip:
        return ''
    invalid_chars = error['errors']
    return ''.join(c for c in line_contents if c not in invalid_chars)


def get_validator(csv, rules, rule_name):
    '''Build a Validator, translating lib errors into click-friendly failures.

    Returns None when a fatal, unclassified error occurred (already reported).
    '''
    csv_data = csv.readlines()
    try:
        return Validator(csv_data, rules, rule_name=rule_name)
    except FileNewLineError:
        # BUG FIX: was handle_file_new_line_error(promt_fix=True) — a
        # misspelled keyword that raised TypeError instead of helping.
        handle_file_new_line_error(prompt_fix=True)
    except RuleDoesNotExist:
        handle_rule_does_not_exist_error(rule_name)
    except RuleCannotBeParsed:
        handle_rule_cannot_be_parsed_error()
    except Exception:
        print_prog_error('There was a fatal error, the file was not processed.',
                         tb=format_exc())


def handle_file_new_line_error(context=None, prompt_fix=False):
    '''Explain bad line endings and optionally offer to write a fixed file.

    ``context`` is unused; it is kept (defaulted) for backward compatibility
    with the original signature.
    '''
    err_message = 'CSV file has non-standard line endings and cannot be parsed as csv.'
    suggestion = 'Use the fix command to fix line endings.'
    example = 'csvcheck /path/to/file.csv rule_name fix -l'
    # BUG FIX: click.echo's ``color`` flag is a boolean toggle for ANSI
    # handling, not a color name; secho(fg=...) is the colored-output API.
    click.secho(err_message, err=True, fg='red')
    if prompt_fix:
        if click.confirm('Save new file with fixed line endings to the current directory?'):
            handle_fix(True, False)
    else:
        click.secho(suggestion, err=True, fg='yellow')
        click.echo(example, err=True)
    raise click.BadParameter('csv file contains malformed line ending characters')


def handle_rule_does_not_exist_error(rule_name):
    '''Abort with a usage error naming the missing rule.'''
    err_message = 'Rule "{}" does not exist in rules json.'.format(rule_name)
    raise click.BadArgumentUsage(err_message)


def handle_rule_cannot_be_parsed_error():
    '''Abort when the rules json cannot be parsed.'''
    raise click.BadOptionUsage('invalid json passed into --rules-string')


def pretty_errors(errors, csv_data):
    '''returns a list of end-user friendly formatted strings describing the errors '''
    err_lines = []
    for error in errors:
        line_number = error['line_num']
        column_number = error['col_num']
        contents = csv_data[line_number]
        err_lines.append('L{line}:{col} ({errors}) --> "{contents}"'.format(
            line=line_number + 1,
            col='' if column_number == 0 else 'C{}:'.format(column_number),
            contents=contents,
            errors=error['errors']))
    return err_lines


def fix_line_endings(csv_file, out_path=None):
    '''accept file object, remove bad line endings, write to a file of the same
    basename with fixed- as a prefix

    options
        out_path: write file to a specific path instead of the same path as csv

    windows line endings can have a carriage return in addition
    to the \\n char - csv has trouble parsing these line endings
    so just remove them here instead of opening the file in
    universal line endings mode since it's a big reason uploads fail
    '''
    # BUG FIX: replacing bare '\r' turned every CRLF into two newlines,
    # doubling the line count; normalize CRLF first, then stray CRs.
    contents = ''.join(line.replace('\r\n', '\n').replace('\r', '\n')
                       for line in csv_file.readlines())
    # FIX: honor the documented out_path option (previously ignored).
    out_file_name = out_path or 'fixed-{}'.format(csv_file.name)
    with open(out_file_name, mode='w') as out_file:
        out_file.write(contents)
    return out_file_name


def get_user_input(prompt_str='>>> ',
                   allowed_map=None,
                   invalid_msg='{} is invalid input please try again.'):
    '''Prompt until the input maps to an allowed value; returns that value.'''
    choice = None
    while not choice:
        user_input = click.prompt(prompt_str, prompt_suffix='').lower().strip()
        choice = allowed_map.get(user_input) if allowed_map else user_input
        if not choice:
            # BUG FIX: previously formatted ``choice`` (always falsy here);
            # show the text the user actually typed.
            formatted = invalid_msg.format(user_input) if '{}' in invalid_msg else invalid_msg
            click.echo(formatted)
    return choice


def print_csv_errors(errors):
    '''Echo each pre-formatted error line to stdout.'''
    for error in errors:
        click.echo(error)


def print_prog_error(msg, tb=None):
    '''Report a fatal error (and optional traceback text) on stderr.'''
    click.echo(msg, err=True)
    if tb:
        # BUG FIX: ``tb`` is a string; iterating it directly echoed one
        # CHARACTER per line. Split into lines first.
        for line in tb.splitlines():
            click.echo(line)
from __future__ import division, absolute_import, print_functionfrom collections import OrderedDictimport csvimport jsonimport osimport reimport sixclass FileNewLineError(Exception):passclass RuleDoesNotExist(Exception):passclass RuleCannotBeParsed(Exception):passclass RuleField():def __init__(self, field):# TODO set these attributes in stone and make this explicitfor k, v in six.iteritems(field):setattr(self, k, v)if not self.caseSensitive:self.pattern = self.pattern.lower()def errors(self, check):return [e for e in self.errors_iter(check)]def errors_iter(self, check):if self.type in ('whitelist', 'blacklist'):check = check if self.caseSensitive else check.lower()return self._iter_chars(check)elif self.type in ('regex', 'regular expression'):return self._iter_regex(check)def is_valid(self, check):'''returns true on the first error encountered'''return not any(self.errors_iter(check))def _iter_chars(self, check):for c in check:matches = c in self.patternif self.type == 'whitelist' and not matches:yield celif self.type == 'blacklist' and matches:yield cdef _iter_regex(self, check):flags = 0if not self.caseSensitive:flags = re.IGNORECASEpattern = re.compile(self.pattern, flags)does_not_match = pattern.sub('', check)if does_not_match != '':yield does_not_matchdef __repr__(self):return 'RuleField({}) -> Pattern: {}'.format(self.name, self.pattern)class Rule():def __init__(self,rule_name=None,rules='rules.json',required_rule_fields=None):if required_rule_fields is None:required_rule_fields = set(['name', 'pattern', 'caseSensitive', 'maxLength', 'minLength','type'])self.name = rule_name or ''self._field_map = self.create_field_map(self.parse_json(rules), rule_name, required_rule_fields)def __getitem__(self, key):index = Nonetry:index = int(key)except ValueError:return self._field_map[key]keys = self._field_map.keys()return self._field_map[keys[index]]def __iter__(self):for rule_field in six.itervalues(self._field_map):yield rule_fielddef __len__(self):return 
len(self._field_map)def __repr__(self):return '<Rule({}) -> fields: {}>'.format(self.name,self._field_map.keys())@staticmethoddef create_field_map(rules_object, rule_name, required_rule_fields):field_map = OrderedDict()field_list = rules_objecttry:field_list = rules_object[rule_name]['fields']except KeyError:raise RuleDoesNotExist()except AttributeError:# assume a single rule - object is a list of the fieldspassfor field in field_list:# assure each field has everything needed to validateif set(field.keys()) < required_rule_fields:raise ValueError('Rules must contain these keys for validation: %s' %required_rule_fields)field_map[field['name']] = RuleField(field)return field_map@staticmethoddef parse_json(rules):'''returns parsed json from string or file'''if os.path.exists(rules):with open(rules) as f:rules = f.read()try:return json.loads(rules)except ValueError:raise RuleCannotBeParsed()class Validator():'''Validates the contents of a csv file against a set of rules. The rules should be in json format andcan be read in a string or file.PARAMS:csv_file_path: STRING a filepath to a csv file to validaterules: (Optional) STRING a string or filepath to json data that contain rules. 
see rules.json for formattingrule_name: (Optional) STRING select a rule from the rules json to use for validationfix_line_endings: (Optional) BOOL write a fixed version of the file if it contains non-unix line endingsNOTES:arbitrary key word arguments are passed to the csv readerUSAGE:Validator('~/Desktop/discounts.csv').errorsv = Validator('~/Desktop/discounts.csv', rule_name='Event Upload', rules='~/Desktop/my_rules.json')errs = v.errorsfor err in errs:print(err)print(v.line(err['line_num']))'''def __init__(self, csv_data, rules, rule_name=None, **kwargs):self.rule = Rule(rule_name=rule_name, rules=rules)self.csv_data = self._clean_lines(csv_data)self.csv_iter = csv.reader(self.csv_data, **kwargs)self.errors = self._all_errors()def line(self, num):'''returns the contents of the line at the given index (not 0 indexed) and error inormation for that line'''num = num - 1line = self.csv_data[num]errors = filter(lambda e: e.get('line_num') == num, self.errors)return (line, errors)def _all_errors(self):'''iterates through all of the csv data and computes the errors, returns a list dict objects containingerror information'''return [{'line_num': line_num,'col_num': col_num,'errors': errs} for line_num, col_num, errs in self._errors_iter()]def _errors_iter(self):'''lazily iterates through the csv data yielding errors for each column'''for line_num, row in enumerate(self.csv_iter):for col_num, (col, rule) in enumerate(zip(row, self.rule)):errors = rule.errors(col)if errors:yield line_num, col_num, errorsdef _clean_lines(self, csv_data):non_unix_line_ending_char = '\r'if non_unix_line_ending_char in csv_data:raise FileNewLineError('File has a mix of non-unix line endings which cannot be parsed as csv')else:# remove literal newline from each linereturn [l.replace('\n', '') for l in csv_data]def __repr__(self):return '<Validator(csv) Rule({}) -> {} Errors>'.format(self.rule.name, len(self.errors))
libmain
Clicksix
[console_scripts]csvcheck=main:run
lib.pymain.pycsvcheck.egg-info/PKG-INFOcsvcheck.egg-info/SOURCES.txtcsvcheck.egg-info/dependency_links.txtcsvcheck.egg-info/entry_points.txtcsvcheck.egg-info/requires.txtcsvcheck.egg-info/top_level.txt
Metadata-Version: 1.0Name: csvcheckVersion: 0.0.1Summary: UNKNOWNHome-page: UNKNOWNAuthor: UNKNOWNAuthor-email: UNKNOWNLicense: UNKNOWNDescription-Content-Type: UNKNOWNDescription: UNKNOWNPlatform: UNKNOWN
[flake8]max-line-length = 120
{"discount": {"fields": [{"name": "code","pattern": "@.-_+!abcdefghijklmnopqrstuvwxyz1234567890","caseSensitive": false,"maxLength": 0,"minLength": 3,"type": "whitelist"}]},"regex-example-rule": {"fields": [{"name": "first","pattern": "\\w+","caseSensitive": false,"maxLength": 0,"minLength": 1,"type": "regex"},{"name": "last","pattern": "\\w+","caseSensitive": false,"maxLength": 0,"minLength": 1,"type": "regex"},{"name": "email","pattern": "[a-z]+@[a-z]+\\.[a-z]+","caseSensitive": false,"maxLength": 0,"minLength": 6,"type": "regex"}]}}
.DS_Store