From 591f73721a416e854c4b5efc3d613df7c7835eef Mon Sep 17 00:00:00 2001 From: Michael Lee Date: Mon, 26 Jun 2017 17:05:03 -0700 Subject: [PATCH 01/23] Add typing_extensions subfolder This pull request adds a 'typing_extensions' subproject to 'typing'. The 'typing_extensions' module backports any new additions to 'typing' for Python 3.5+ users who are using older versions of 'typing' that were bundled with their standard library (and so can't update to the latest versions). See https://github.com/python/typing/issues/435 for motivation and additional context. --- typing_extensions/.gitignore | 11 + typing_extensions/README.md | 79 + typing_extensions/run_tests.py | 110 + typing_extensions/setup.py | 71 + .../src_py2/test_typing_extensions.py | 159 ++ .../src_py2/typing_extensions.py | 118 + .../src_py3/test_typing_extensions.py | 666 +++++ .../src_py3/typing_extensions.py | 618 +++++ .../test_data/python-2.7.13/_abcoll.py | 695 +++++ .../test_data/python-2.7.13/abc.py | 185 ++ .../test_data/python-2.7.13/collections.py | 742 ++++++ .../python-3.4.6/_collections_abc.py | 748 ++++++ .../test_data/python-3.4.6/abc.py | 248 ++ .../python-3.4.6/collections/__init__.py | 1161 ++++++++ .../python-3.4.6/collections/__main__.py | 38 + .../test_data/python-3.4.6/collections/abc.py | 2 + .../python-3.5.0/_collections_abc.py | 939 +++++++ .../test_data/python-3.5.0/abc.py | 248 ++ .../python-3.5.0/collections/__init__.py | 1217 +++++++++ .../python-3.5.0/collections/__main__.py | 38 + .../test_data/python-3.5.0/collections/abc.py | 2 + .../test_data/python-3.5.0/typing.py | 1648 ++++++++++++ .../python-3.5.1/_collections_abc.py | 939 +++++++ .../test_data/python-3.5.1/abc.py | 248 ++ .../python-3.5.1/collections/__init__.py | 1222 +++++++++ .../python-3.5.1/collections/__main__.py | 38 + .../test_data/python-3.5.1/collections/abc.py | 2 + .../test_data/python-3.5.1/typing.py | 1656 ++++++++++++ .../python-3.5.2/_collections_abc.py | 939 +++++++ .../test_data/python-3.5.2/abc.py | 248 ++ .../python-3.5.2/collections/__init__.py | 1239 +++++++++ .../python-3.5.2/collections/__main__.py | 38 + .../test_data/python-3.5.2/collections/abc.py | 2 + .../test_data/python-3.5.2/typing.py | 1843 +++++++++++++ .../python-3.5.3/_collections_abc.py | 941 +++++++ .../test_data/python-3.5.3/abc.py | 248 ++ .../python-3.5.3/collections/__init__.py | 1242 +++++++++ .../python-3.5.3/collections/__main__.py | 38 + .../test_data/python-3.5.3/collections/abc.py | 2 + .../test_data/python-3.5.3/typing.py | 2160 +++++++++++++++ .../python-3.6.0/_collections_abc.py | 1007 +++++++ .../test_data/python-3.6.0/abc.py | 248 ++ .../python-3.6.0/collections/__init__.py | 1243 +++++++++ .../test_data/python-3.6.0/collections/abc.py | 2 + .../test_data/python-3.6.0/typing.py | 2160 +++++++++++++++ .../python-3.6.1/_collections_abc.py | 1007 +++++++ .../test_data/python-3.6.1/abc.py | 248 ++ .../python-3.6.1/collections/__init__.py | 1246 +++++++++ .../test_data/python-3.6.1/collections/abc.py | 2 + .../test_data/python-3.6.1/typing.py | 2335 +++++++++++++++++ 50 files changed, 32286 insertions(+) create mode 100644 typing_extensions/.gitignore create mode 100644 typing_extensions/README.md create mode 100644 typing_extensions/run_tests.py create mode 100644 typing_extensions/setup.py create mode 100644 typing_extensions/src_py2/test_typing_extensions.py create mode 100644 typing_extensions/src_py2/typing_extensions.py create mode 100644 typing_extensions/src_py3/test_typing_extensions.py create mode 100644 
typing_extensions/src_py3/typing_extensions.py create mode 100644 typing_extensions/test_data/python-2.7.13/_abcoll.py create mode 100644 typing_extensions/test_data/python-2.7.13/abc.py create mode 100644 typing_extensions/test_data/python-2.7.13/collections.py create mode 100644 typing_extensions/test_data/python-3.4.6/_collections_abc.py create mode 100644 typing_extensions/test_data/python-3.4.6/abc.py create mode 100644 typing_extensions/test_data/python-3.4.6/collections/__init__.py create mode 100644 typing_extensions/test_data/python-3.4.6/collections/__main__.py create mode 100644 typing_extensions/test_data/python-3.4.6/collections/abc.py create mode 100644 typing_extensions/test_data/python-3.5.0/_collections_abc.py create mode 100644 typing_extensions/test_data/python-3.5.0/abc.py create mode 100644 typing_extensions/test_data/python-3.5.0/collections/__init__.py create mode 100644 typing_extensions/test_data/python-3.5.0/collections/__main__.py create mode 100644 typing_extensions/test_data/python-3.5.0/collections/abc.py create mode 100644 typing_extensions/test_data/python-3.5.0/typing.py create mode 100644 typing_extensions/test_data/python-3.5.1/_collections_abc.py create mode 100644 typing_extensions/test_data/python-3.5.1/abc.py create mode 100644 typing_extensions/test_data/python-3.5.1/collections/__init__.py create mode 100644 typing_extensions/test_data/python-3.5.1/collections/__main__.py create mode 100644 typing_extensions/test_data/python-3.5.1/collections/abc.py create mode 100644 typing_extensions/test_data/python-3.5.1/typing.py create mode 100644 typing_extensions/test_data/python-3.5.2/_collections_abc.py create mode 100644 typing_extensions/test_data/python-3.5.2/abc.py create mode 100644 typing_extensions/test_data/python-3.5.2/collections/__init__.py create mode 100644 typing_extensions/test_data/python-3.5.2/collections/__main__.py create mode 100644 typing_extensions/test_data/python-3.5.2/collections/abc.py create mode 100644 typing_extensions/test_data/python-3.5.2/typing.py create mode 100644 typing_extensions/test_data/python-3.5.3/_collections_abc.py create mode 100644 typing_extensions/test_data/python-3.5.3/abc.py create mode 100644 typing_extensions/test_data/python-3.5.3/collections/__init__.py create mode 100644 typing_extensions/test_data/python-3.5.3/collections/__main__.py create mode 100644 typing_extensions/test_data/python-3.5.3/collections/abc.py create mode 100644 typing_extensions/test_data/python-3.5.3/typing.py create mode 100644 typing_extensions/test_data/python-3.6.0/_collections_abc.py create mode 100644 typing_extensions/test_data/python-3.6.0/abc.py create mode 100644 typing_extensions/test_data/python-3.6.0/collections/__init__.py create mode 100644 typing_extensions/test_data/python-3.6.0/collections/abc.py create mode 100644 typing_extensions/test_data/python-3.6.0/typing.py create mode 100644 typing_extensions/test_data/python-3.6.1/_collections_abc.py create mode 100644 typing_extensions/test_data/python-3.6.1/abc.py create mode 100644 typing_extensions/test_data/python-3.6.1/collections/__init__.py create mode 100644 typing_extensions/test_data/python-3.6.1/collections/abc.py create mode 100644 typing_extensions/test_data/python-3.6.1/typing.py diff --git a/typing_extensions/.gitignore b/typing_extensions/.gitignore new file mode 100644 index 00000000..46b620c1 --- /dev/null +++ b/typing_extensions/.gitignore @@ -0,0 +1,11 @@ +MANIFEST +build/ +dist/ +.tox/ +.idea/ +.cache/ +__pycache__/ +.mypy_cache/ +tmp/ +*.swp +*.pyc 
diff --git a/typing_extensions/README.md b/typing_extensions/README.md new file mode 100644 index 00000000..77c937f2 --- /dev/null +++ b/typing_extensions/README.md @@ -0,0 +1,79 @@ +# Typing Extensions + +The `typing_extensions` module contains backports of recent changes +to the `typing` module that were not present in older versions of +`typing`. + +This module is intended to be used mainly by people who are using +Python 3.5+, where the `typing` module is a part of the standard +library and cannot be updated to the latest version on PyPI. + +Users of other Python versions should continue to install and +use the `typing` module from PyPI instead of using this one, unless +they are specifically writing code intended to be compatible with +multiple versions of Python. + +## Backported items + +This module contains the following backported items: + +### All Python versions: + +- `ClassVar` +- `Collection` +- `ContextManager` +- `Counter` +- `DefaultDict` +- `Deque` +- `NewType` +- `NoReturn` +- `overload` (note that older versions of `typing` only let you use `overload` in stubs) +- `Text` +- `Type` +- `TYPE_CHECKING` + +### Python 3.3+ only: + +- `ChainMap` + +### Python 3.5+ only: + +- `AsyncIterable` +- `AsyncIterator` +- `AsyncContextManager` +- `Awaitable` +- `Coroutine` + +### Python 3.6+ only: + +- `AsyncGenerator` + +## Other Notes and Limitations + +There are a few types whose interfaces were modified between different +versions of typing. For example, `typing.Sequence` was modified to +subclass `typing.Reversible` as of Python 3.5.3. + +These changes are _not_ backported to prevent subtle compatibility +issues when mixing the differing implementations of modified classes. + +## Running tests + +There are two different ways to test this module. The first is to simply run +each individual Python interpreter against `test_typing_extensions.py` in the +`src_py2` and `src_py3` folders. + +However, because installing a separate Python interpreter for each individual release +can be onerous, you can instead run `run_tests.py` using a single Python +interpreter. The `run_tests.py` file will essentially "modify" the standard +library by changing `PYTHONPATH` to point to individual folders in the +`test_data` directory. + +Each individual folder contains a snapshot of the source code for the +`collections`, `typing`, and `abc` modules for that given release, letting us +test `typing` against those particular implementations. + +`run_tests.py` will assume that you have Python 3.6.1 and a reasonably +modern version of Python 2.7 installed on your system, aliased to +`py -2.7` and `py -3.6` on Windows, and `python` and `python3` on Linux and +Mac. 
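As a quick illustration of the `PYTHONPATH` mechanism the README describes, here is a minimal, hedged sketch (not part of the patch itself): prepending a snapshot folder to `PYTHONPATH` makes the child interpreter resolve `typing`, `abc`, and `collections` from that folder instead of its own standard library. The `python-3.5.0` folder name is just one example of the snapshot directories under `test_data`.

```python
# Minimal sketch (not part of this patch): run a child interpreter with a
# stdlib snapshot prepended to PYTHONPATH, the same trick run_tests.py uses.
# The "python-3.5.0" folder is just one example snapshot under test_data/.
import os
import subprocess
import sys

snapshot = os.path.abspath(os.path.join("test_data", "python-3.5.0"))

env = os.environ.copy()
# PYTHONPATH entries are searched before the standard library directories,
# so the snapshot's typing.py, abc.py, and collections/ shadow the real ones.
env["PYTHONPATH"] = os.pathsep.join([snapshot, env.get("PYTHONPATH", "")])

out = subprocess.run(
    [sys.executable, "-c", "import typing; print(typing.__file__)"],
    stdout=subprocess.PIPE,
    env=env,
)
print(out.stdout.decode())  # should point into test_data/python-3.5.0
```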
diff --git a/typing_extensions/run_tests.py b/typing_extensions/run_tests.py new file mode 100644 index 00000000..b051f1ef --- /dev/null +++ b/typing_extensions/run_tests.py @@ -0,0 +1,110 @@ +#!/usr/bin/env python + +from typing import List, Iterator, Tuple +from contextlib import contextmanager +import glob +import os +import os.path +import shutil +import subprocess +import sys +import textwrap + +CORE_FILES_2 = [ + "./src_py2/typing_extensions.py", + "./src_py2/test_typing_extensions.py" +] +CORE_FILES_3 = [ + "./src_py3/typing_extensions.py", + "./src_py3/test_typing_extensions.py" ] +TEST_DIR = "test_data" + +if sys.platform.startswith('win32'): + PYTHON2 = "py -2.7" + PYTHON3 = "py -3.6" +else: + PYTHON2 = "python" + PYTHON3 = "python3" + + +def get_test_dirs() -> List[str]: + """Get all folders to test inside TEST_DIR.""" + return list(glob.glob(os.path.join(TEST_DIR, "*"))) + + +@contextmanager +def temp_copy(src_files: List[str], dest_dir: str) -> Iterator[None]: + """ + A context manager that temporarily copies the given files to the + given destination directory, and deletes those temp files upon + exiting. + """ + # Copy + for src_path in src_files: + shutil.copy(src_path, dest_dir) + + yield + + # Delete + for src_path in src_files: + dst_path = os.path.join(dest_dir, os.path.basename(src_path)) + os.remove(dst_path) + + +@contextmanager +def change_directory(dir_path: str) -> Iterator[None]: + """ + A context manager that temporarily changes the working directory + to the specified directory, and changes back to the original + upon exiting. + """ + original = os.getcwd() + os.chdir(dir_path) + + yield + + os.chdir(original) + + +def run_shell(command: str) -> Tuple[bool, str]: + env = os.environ.copy() + env["PYTHONPATH"] = os.pathsep.join([os.getcwd(), env.get("PYTHONPATH", "")]) + out = subprocess.run( + command, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + shell=True, + env=env) + success = out.returncode == 0 + stdout = '' if out.stdout is None else out.stdout.decode('utf-8') + return (success, stdout) + + +def main() -> int: + test_dirs = get_test_dirs() + exit_code = 0 + for test_dir in test_dirs: + _, version_number = test_dir.split('-') + py2 = version_number.startswith("2") + print("Testing Python {}".format(version_number)) + + core_files = CORE_FILES_2 if py2 else CORE_FILES_3 + python_exe = PYTHON2 if py2 else PYTHON3 + + with temp_copy(core_files, test_dir), change_directory(test_dir): + success, output = run_shell("{} {} {}".format( + python_exe, + "test_typing_extensions.py", + version_number)) + if success: + print(" All tests passed!") + else: + print(textwrap.indent(output, " ")) + exit_code = 1 + return exit_code + + +if __name__ == '__main__': + sys.exit(main()) + diff --git a/typing_extensions/setup.py b/typing_extensions/setup.py new file mode 100644 index 00000000..afd66262 --- /dev/null +++ b/typing_extensions/setup.py @@ -0,0 +1,71 @@ +#!/usr/bin/env python +# coding: utf-8 + +import sys +from distutils.core import setup + +if sys.version_info < (2, 7, 0) or (3, 0, 0) <= sys.version_info < (3, 3, 0): + sys.stderr.write('ERROR: You need Python 2.7 or 3.3+ ' + 'to install the typing_extensions package.\n') + exit(1) + +version = '3.6.1' +description = 'Type Hint backports for Python 3.5+' +long_description = '''\ +Typing -- Type Hints for Python + +This is a backport of the standard library typing module to Python +versions 3.5+. 
The typing module has seen several changes since it was +first added in Python 3.5.0, which means people who are using 3.5.0+ +but are unable to upgrade to the latest version of Python cannot +take advantage of some new features of the typing module, such as +typing.Type or typing.Coroutine. + +This module allows those users to use the latest additions to the typing +module without worrying about naming conflicts with the standard library. +Users of Python 2.7, 3.3, and 3.4 should install the typing module +from PyPI and use that directly, except when writing code that needs to +be compatible across multiple versions of Python. +''' + +classifiers = [ + 'Development Status :: 5 - Production/Stable', + 'Environment :: Console', + 'Intended Audience :: Developers', + 'License :: OSI Approved :: Python Software Foundation License', + 'Operating System :: OS Independent', + 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3.3', + 'Programming Language :: Python :: 3.4', + 'Programming Language :: Python :: 3.5', + 'Programming Language :: Python :: 3.6', + 'Topic :: Software Development', +] + +if sys.version_info.major == 2: + package_dir = 'src_py2' +elif sys.version_info.major == 3: + package_dir = 'src_py3' +else: + raise AssertionError() + +install_requires = [] +if sys.version_info < (3, 5): + install_requires.append('typing >= 3.6.1') + +setup(name='typing_extensions', + version=version, + description=description, + long_description=long_description, + author='Guido van Rossum, Jukka Lehtosalo, Lukasz Langa, Michael Lee', + author_email='jukka.lehtosalo@iki.fi', + # TODO: Change URL + url='https://github.com/michael0x2a/typing_extensions', + license='PSF', + keywords='typing function annotations type hints hinting checking ' + 'checker typehints typehinting typechecking backport', + package_dir={'': package_dir}, + py_modules=['typing_extensions'], + classifiers=classifiers, + install_requires=install_requires, +) diff --git a/typing_extensions/src_py2/test_typing_extensions.py b/typing_extensions/src_py2/test_typing_extensions.py new file mode 100644 index 00000000..2b86cb28 --- /dev/null +++ b/typing_extensions/src_py2/test_typing_extensions.py @@ -0,0 +1,159 @@ +# Override version info +import sys +ORIGINAL_VERSION = sys.version_info +if len(sys.argv) >= 2: + PYTHON_VERSION = tuple(map(int, sys.argv[1].split('.'))) + sys.version_info = PYTHON_VERSION + OVERRIDING_VERSION = True +else: + PYTHON_VERSION = ORIGINAL_VERSION + OVERRIDING_VERSION = False + +import os +import abc +import contextlib +import collections +from unittest import TestCase, main, skipUnless, SkipTest + +from typing_extensions import NoReturn, ClassVar, Type, NewType +import typing +import typing_extensions + + +class BaseTestCase(TestCase): + def assertIsSubclass(self, cls, class_or_tuple, msg=None): + if not issubclass(cls, class_or_tuple): + message = '%r is not a subclass of %r' % (cls, class_or_tuple) + if msg is not None: + message += ' : %s' % msg + raise self.failureException(message) + + def assertNotIsSubclass(self, cls, class_or_tuple, msg=None): + if issubclass(cls, class_or_tuple): + message = '%r is a subclass of %r' % (cls, class_or_tuple) + if msg is not None: + message += ' : %s' % msg + raise self.failureException(message) + + def clear_caches(self): + for f in typing._cleanups: + f() + + +class EnvironmentTest(BaseTestCase): + @skipUnless(OVERRIDING_VERSION, "Environment tests apply only when overriding") + def test_environment_is_ok(self): + cwd = 
os.path.abspath(os.getcwd()) + def correct_dir(module): + return os.path.abspath(module.__file__).startswith(cwd) + + self.assertTrue(correct_dir(abc)) + self.assertTrue(correct_dir(collections)) + self.assertTrue(correct_dir(typing_extensions)) + + def test_python_version_is_ok(self): + self.assertTrue(sys.version_info == PYTHON_VERSION) + self.assertTrue(ORIGINAL_VERSION[0] == 2) + + +class Employee: + pass + + +class NoReturnTests(BaseTestCase): + + def test_noreturn_instance_type_error(self): + with self.assertRaises(TypeError): + isinstance(42, NoReturn) + + def test_noreturn_subclass_type_error(self): + with self.assertRaises(TypeError): + issubclass(Employee, NoReturn) + with self.assertRaises(TypeError): + issubclass(NoReturn, Employee) + + def test_repr(self): + if hasattr(typing, 'NoReturn'): + self.assertEqual(repr(NoReturn), 'typing.NoReturn') + else: + self.assertEqual(repr(NoReturn), 'typing_extensions.NoReturn') + + def test_not_generic(self): + with self.assertRaises(TypeError): + NoReturn[int] + + def test_cannot_subclass(self): + with self.assertRaises(TypeError): + class A(NoReturn): + pass + with self.assertRaises(TypeError): + class A(type(NoReturn)): + pass + + def test_cannot_instantiate(self): + with self.assertRaises(TypeError): + NoReturn() + with self.assertRaises(TypeError): + type(NoReturn)() + + +class CollectionsAbcTests(BaseTestCase): + + def test_collection(self): + self.assertIsInstance(tuple(), typing_extensions.Collection) + self.assertIsInstance(frozenset(), typing_extensions.Collection) + self.assertIsSubclass(dict, typing_extensions.Collection) + self.assertNotIsInstance(42, typing_extensions.Collection) + + def test_collection_instantiation(self): + class MyAbstractCollection(typing_extensions.Collection[int]): + pass + class MyCollection(typing_extensions.Collection[int]): + def __contains__(self, item): pass + def __iter__(self): pass + def __len__(self): pass + + self.assertIsSubclass( + type(MyCollection()), + typing_extensions.Collection) + self.assertIsSubclass( + MyCollection, + typing_extensions.Collection) + with self.assertRaises(TypeError): + MyAbstractCollection() + + def test_contextmanager(self): + @contextlib.contextmanager + def manager(): + yield 42 + + cm = manager() + self.assertIsInstance(cm, typing_extensions.ContextManager) + self.assertNotIsInstance(42, typing_extensions.ContextManager) + + +class AllTests(BaseTestCase): + def test_typing_extensions_includes_standard(self): + a = typing_extensions.__all__ + self.assertIn('ClassVar', a) + self.assertIn('Type', a) + self.assertIn('Counter', a) + self.assertIn('DefaultDict', a) + self.assertIn('Deque', a) + self.assertIn('NewType', a) + self.assertIn('overload', a) + self.assertIn('Text', a) + self.assertIn('TYPE_CHECKING', a) + + self.assertIn('Collection', a) + + def test_typing_extensions_defers_when_possible(self): + exclude = {'overload', 'Text', 'TYPE_CHECKING'} + for item in typing_extensions.__all__: + if item not in exclude and hasattr(typing, item): + self.assertIs( + getattr(typing_extensions, item), + getattr(typing, item)) + +if __name__ == '__main__': + main(argv=[sys.argv[0]] + sys.argv[2:]) diff --git a/typing_extensions/src_py2/typing_extensions.py b/typing_extensions/src_py2/typing_extensions.py new file mode 100644 index 00000000..96dd61c1 --- /dev/null +++ b/typing_extensions/src_py2/typing_extensions.py @@ -0,0 +1,118 @@ +import abc +from abc import abstractmethod, abstractproperty +import collections +import typing +from typing import ( + ClassVar, Type, + 
Counter, DefaultDict, Deque, + NewType, overload, Text, TYPE_CHECKING, +) + +# Please keep __all__ alphabetized within each category. +__all__ = [ + # Super-special typing primitives. + 'ClassVar', + 'Type', + + # Concrete collection types. + 'Collection', + 'ContextManager', + 'Counter', + 'DefaultDict', + 'Deque', + + # One-off things. + 'NewType', + 'overload', + 'Text', + 'TYPE_CHECKING', +] + + +if hasattr(typing, 'NoReturn'): + NoReturn = typing.NoReturn +else: + # TODO: Remove once typing.py has been updated + class NoReturnMeta(typing.TypingMeta): + """Metaclass for NoReturn.""" + + def __new__(cls, name, bases, namespace): + cls.assert_no_subclassing(bases) + self = super(NoReturnMeta, cls).__new__(cls, name, bases, namespace) + return self + + + class _NoReturn(typing._FinalTypingBase): + """Special type indicating functions that never return. + Example:: + from typing import NoReturn + def stop() -> NoReturn: + raise Exception('no way') + This type is invalid in other positions, e.g., ``List[NoReturn]`` + will fail in static type checkers. + """ + __metaclass__ = NoReturnMeta + __slots__ = () + + def __instancecheck__(self, obj): + raise TypeError("NoReturn cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + raise TypeError("NoReturn cannot be used with issubclass().") + + + NoReturn = _NoReturn(_root=True) + + +T_co = typing.TypeVar('T_co', covariant=True) + +if hasattr(typing, 'ContextManager'): + ContextManager = typing.ContextManager +else: + # TODO: Remove once typing.py has been updated + class ContextManager(typing.Generic[T_co]): + __slots__ = () + + def __enter__(self): + return self + + @abc.abstractmethod + def __exit__(self, exc_type, exc_value, traceback): + return None + + @classmethod + def __subclasshook__(cls, C): + if cls is ContextManager: + # In Python 3.6+, it is possible to set a method to None to + # explicitly indicate that the class does not implement an ABC + # (https://bugs.python.org/issue25958), but we do not support + # that pattern here because this fallback class is only used + # in Python 2. + if (any("__enter__" in B.__dict__ for B in C.__mro__) and + any("__exit__" in B.__dict__ for B in C.__mro__)): + return True + return NotImplemented + + +# Backport collections.Collection +class _CollectionAbc(collections.Sized, + collections.Iterable, + collections.Container): + pass + +_CollectionAbc.register(list) +_CollectionAbc.register(tuple) +_CollectionAbc.register(set) +_CollectionAbc.register(frozenset) +_CollectionAbc.register(basestring) +_CollectionAbc.register(buffer) +_CollectionAbc.register(dict) + + +class Collection(typing.Sized, + typing.Iterable[T_co], + typing.Container[T_co]): + __slots__ = () + __extra__ = _CollectionAbc + + diff --git a/typing_extensions/src_py3/test_typing_extensions.py b/typing_extensions/src_py3/test_typing_extensions.py new file mode 100644 index 00000000..6292f443 --- /dev/null +++ b/typing_extensions/src_py3/test_typing_extensions.py @@ -0,0 +1,666 @@ +# Override version info +import sys +ORIGINAL_VERSION = sys.version_info +if len(sys.argv) >= 2: + PYTHON_VERSION = tuple(map(int, sys.argv[1].split('.'))) + sys.version_info = PYTHON_VERSION + OVERRIDING_VERSION = True +else: + PYTHON_VERSION = ORIGINAL_VERSION + OVERRIDING_VERSION = False + +import os +import abc +import contextlib +import collections +from unittest import TestCase, main, skipUnless, SkipTest + +from typing import TypeVar, Optional +from typing import T, KT, VT # Not in __all__. 
+from typing import Tuple, List, MutableMapping +from typing import Generic +from typing import get_type_hints +from typing import no_type_check, no_type_check_decorator +from typing import NamedTuple +from typing_extensions import NoReturn, ClassVar, Type, NewType +import typing +import typing_extensions +import collections.abc as collections_abc +import _collections_abc + +TYPING_V2 = sys.version_info >= (3, 5, 1) +TYPING_V3 = sys.version_info >= (3, 5, 3) +TYPING_V4 = sys.version_info >= (3, 6, 1) + + +class BaseTestCase(TestCase): + def assertIsSubclass(self, cls, class_or_tuple, msg=None): + if not issubclass(cls, class_or_tuple): + message = '%r is not a subclass of %r' % (cls, class_or_tuple) + if msg is not None: + message += ' : %s' % msg + raise self.failureException(message) + + def assertNotIsSubclass(self, cls, class_or_tuple, msg=None): + if issubclass(cls, class_or_tuple): + message = '%r is a subclass of %r' % (cls, class_or_tuple) + if msg is not None: + message += ' : %s' % msg + raise self.failureException(message) + + def clear_caches(self): + for f in typing._cleanups: + f() + + +class EnvironmentTest(BaseTestCase): + @skipUnless(OVERRIDING_VERSION, "Environment tests apply only when overriding") + def test_environment_is_ok(self): + cwd = os.path.abspath(os.getcwd()) + def correct_dir(module): + return os.path.abspath(module.__file__).startswith(cwd) + + self.assertTrue(correct_dir(abc)) + self.assertTrue(correct_dir(collections)) + self.assertTrue(correct_dir(collections_abc)) + self.assertTrue(correct_dir(_collections_abc)) + self.assertTrue(correct_dir(typing_extensions)) + + # Use system-installed version for other Python versions + if sys.version_info >= (3, 5, 0): + self.assertTrue(correct_dir(typing)) + + def test_python_version_is_ok(self): + self.assertTrue(sys.version_info == PYTHON_VERSION) + self.assertTrue(sys.version_info <= ORIGINAL_VERSION) + self.assertTrue(ORIGINAL_VERSION[0] == 3) + + +class Employee: + pass + + +class NoReturnTests(BaseTestCase): + + def test_noreturn_instance_type_error(self): + with self.assertRaises(TypeError): + isinstance(42, NoReturn) + + def test_noreturn_subclass_type_error_1(self): + with self.assertRaises(TypeError): + issubclass(Employee, NoReturn) + + @skipUnless(TYPING_V3, "Behavior added in typing v3") + def test_noreturn_subclass_type_error_2(self): + with self.assertRaises(TypeError): + issubclass(NoReturn, Employee) + + def test_repr(self): + if hasattr(typing, 'NoReturn'): + self.assertEqual(repr(NoReturn), 'typing.NoReturn') + else: + self.assertEqual(repr(NoReturn), 'typing_extensions.NoReturn') + + def test_not_generic(self): + with self.assertRaises(TypeError): + NoReturn[int] + + def test_cannot_subclass(self): + with self.assertRaises(TypeError): + class A(NoReturn): + pass + if TYPING_V3: + with self.assertRaises(TypeError): + class A(type(NoReturn)): + pass + + def test_cannot_instantiate(self): + with self.assertRaises(TypeError): + NoReturn() + with self.assertRaises(TypeError): + type(NoReturn)() + + +class ClassVarTests(BaseTestCase): + + def test_basics(self): + with self.assertRaises(TypeError): + ClassVar[1] + with self.assertRaises(TypeError): + ClassVar[int, str] + with self.assertRaises(TypeError): + ClassVar[int][str] + + def test_repr(self): + if hasattr(typing, 'ClassVar'): + mod_name = 'typing' + else: + mod_name = 'typing_extensions' + self.assertEqual(repr(ClassVar), mod_name + '.ClassVar') + cv = ClassVar[int] + self.assertEqual(repr(cv), mod_name + '.ClassVar[int]') + cv = 
ClassVar[Employee] + self.assertEqual(repr(cv), mod_name + '.ClassVar[%s.Employee]' % __name__) + + @skipUnless(TYPING_V3, "Behavior added in typing v3") + def test_cannot_subclass(self): + with self.assertRaises(TypeError): + class C(type(ClassVar)): + pass + with self.assertRaises(TypeError): + class C(type(ClassVar[int])): + pass + + def test_cannot_init(self): + with self.assertRaises(TypeError): + ClassVar() + with self.assertRaises(TypeError): + type(ClassVar)() + with self.assertRaises(TypeError): + type(ClassVar[Optional[int]])() + + def test_no_isinstance(self): + with self.assertRaises(TypeError): + isinstance(1, ClassVar[int]) + with self.assertRaises(TypeError): + issubclass(int, ClassVar) + + +class OverloadTests(BaseTestCase): + + def test_overload_fails(self): + from typing_extensions import overload + + with self.assertRaises(RuntimeError): + + @overload + def blah(): + pass + + blah() + + def test_overload_succeeds(self): + from typing_extensions import overload + + @overload + def blah(): + pass + + def blah(): + pass + + blah() + + +ASYNCIO = sys.version_info[:2] >= (3, 5) + +ASYNCIO_TESTS = """ +import asyncio +from typing import Iterable +from typing_extensions import Awaitable, AsyncIterator + +T_a = TypeVar('T_a') + +class AwaitableWrapper(Awaitable[T_a]): + + def __init__(self, value): + self.value = value + + def __await__(self) -> typing.Iterator[T_a]: + yield + return self.value + +class AsyncIteratorWrapper(AsyncIterator[T_a]): + + def __init__(self, value: Iterable[T_a]): + self.value = value + + def __aiter__(self) -> AsyncIterator[T_a]: + return self + + @asyncio.coroutine + def __anext__(self) -> T_a: + data = yield from self.value + if data: + return data + else: + raise StopAsyncIteration + +class ACM: + async def __aenter__(self) -> int: + return 42 + async def __aexit__(self, etype, eval, tb): + return None +""" + +if ASYNCIO: + try: + exec(ASYNCIO_TESTS) + except ImportError: + ASYNCIO = False +else: + # fake names for the sake of static analysis + asyncio = None + AwaitableWrapper = AsyncIteratorWrapper = ACM = object + +PY36 = sys.version_info[:2] >= (3, 6) + +PY36_TESTS = """ +from test import ann_module, ann_module2, ann_module3 +from typing_extensions import AsyncContextManager + +class A: + y: float +class B(A): + x: ClassVar[Optional['B']] = None + y: int + b: int +class CSub(B): + z: ClassVar['CSub'] = B() +class G(Generic[T]): + lst: ClassVar[List[T]] = [] + +class NoneAndForward: + parent: 'NoneAndForward' + meaning: None + +class XRepr(NamedTuple): + x: int + y: int = 1 + def __str__(self): + return f'{self.x} -> {self.y}' + def __add__(self, other): + return 0 + +async def g_with(am: AsyncContextManager[int]): + x: int + async with am as x: + return x + +try: + g_with(ACM()).send(None) +except StopIteration as e: + assert e.args[0] == 42 +""" + +if PY36: + exec(PY36_TESTS) +else: + # fake names for the sake of static analysis + ann_module = ann_module2 = ann_module3 = None + A = B = CSub = G = CoolEmployee = CoolEmployeeWithDefault = object + XMeth = XRepr = NoneAndForward = object + +gth = get_type_hints + + +class GetTypeHintTests(BaseTestCase): + @skipUnless(PY36, 'Python 3.6 required') + def test_get_type_hints_modules(self): + ann_module_type_hints = {1: 2, 'f': Tuple[int, int], 'x': int, 'y': str} + self.assertEqual(gth(ann_module), ann_module_type_hints) + self.assertEqual(gth(ann_module2), {}) + self.assertEqual(gth(ann_module3), {}) + + @skipUnless(PY36, 'Python 3.6 required') + def test_get_type_hints_classes(self): + 
self.assertEqual(gth(ann_module.C, ann_module.__dict__), + {'y': Optional[ann_module.C]}) + self.assertIsInstance(gth(ann_module.j_class), dict) + self.assertEqual(gth(ann_module.M), {'123': 123, 'o': type}) + self.assertEqual(gth(ann_module.D), + {'j': str, 'k': str, 'y': Optional[ann_module.C]}) + self.assertEqual(gth(ann_module.Y), {'z': int}) + self.assertEqual(gth(ann_module.h_class), + {'y': Optional[ann_module.C]}) + self.assertEqual(gth(ann_module.S), {'x': str, 'y': str}) + self.assertEqual(gth(ann_module.foo), {'x': int}) + self.assertEqual(gth(NoneAndForward, globals()), + {'parent': NoneAndForward, 'meaning': type(None)}) + + @skipUnless(PY36, 'Python 3.6 required') + def test_respect_no_type_check(self): + @no_type_check + class NoTpCheck: + class Inn: + def __init__(self, x: 'not a type'): ... + self.assertTrue(NoTpCheck.__no_type_check__) + self.assertTrue(NoTpCheck.Inn.__init__.__no_type_check__) + self.assertEqual(gth(ann_module2.NTC.meth), {}) + class ABase(Generic[T]): + def meth(x: int): ... + @no_type_check + class Der(ABase): ... + self.assertEqual(gth(ABase.meth), {'x': int}) + + @skipUnless(PY36, 'Python 3.6 required') + def test_get_type_hints_ClassVar(self): + self.assertEqual(gth(ann_module2.CV, ann_module2.__dict__), + {'var': ClassVar[ann_module2.CV]}) + self.assertEqual(gth(B, globals()), + {'y': int, 'x': ClassVar[Optional[B]], 'b': int}) + self.assertEqual(gth(CSub, globals()), + {'z': ClassVar[CSub], 'y': int, 'b': int, + 'x': ClassVar[Optional[B]]}) + self.assertEqual(gth(G), {'lst': ClassVar[List[T]]}) + + +class CollectionsAbcTests(BaseTestCase): + + @skipUnless(ASYNCIO, 'Python 3.5 and multithreading required') + def test_awaitable(self): + ns = {} + exec( + "async def foo() -> typing_extensions.Awaitable[int]:\n" + " return await AwaitableWrapper(42)\n", + globals(), ns) + foo = ns['foo'] + g = foo() + self.assertIsInstance(g, typing_extensions.Awaitable) + self.assertNotIsInstance(foo, typing_extensions.Awaitable) + g.send(None) # Run foo() till completion, to avoid warning. 
+ + @skipUnless(ASYNCIO, 'Python 3.5 and multithreading required') + def test_coroutine(self): + ns = {} + exec( + "async def foo():\n" + " return\n", + globals(), ns) + foo = ns['foo'] + g = foo() + self.assertIsInstance(g, typing_extensions.Coroutine) + with self.assertRaises(TypeError): + isinstance(g, typing_extensions.Coroutine[int]) + self.assertNotIsInstance(foo, typing_extensions.Coroutine) + try: + g.send(None) + except StopIteration: + pass + + @skipUnless(ASYNCIO, 'Python 3.5 and multithreading required') + def test_async_iterable(self): + base_it = range(10) # type: Iterator[int] + it = AsyncIteratorWrapper(base_it) + self.assertIsInstance(it, typing_extensions.AsyncIterable) + self.assertIsInstance(it, typing_extensions.AsyncIterable) + self.assertNotIsInstance(42, typing_extensions.AsyncIterable) + + @skipUnless(ASYNCIO, 'Python 3.5 and multithreading required') + def test_async_iterator(self): + base_it = range(10) # type: Iterator[int] + it = AsyncIteratorWrapper(base_it) + self.assertIsInstance(it, typing_extensions.AsyncIterator) + self.assertNotIsInstance(42, typing_extensions.AsyncIterator) + + def test_collection(self): + self.assertIsInstance(tuple(), typing_extensions.Collection) + self.assertIsInstance(frozenset(), typing_extensions.Collection) + self.assertIsSubclass(dict, typing_extensions.Collection) + self.assertNotIsInstance(42, typing_extensions.Collection) + + @skipUnless(TYPING_V2, "Behavior added in typing v2") + def test_collection_instantiation(self): + class MyCollection(typing_extensions.Collection[int]): + def __contains__(self, item): ... + def __iter__(self): ... + def __len__(self): ... + + self.assertIsSubclass( + type(MyCollection()), + typing_extensions.Collection) + self.assertIsSubclass( + MyCollection, + typing_extensions.Collection) + + def test_deque(self): + self.assertIsSubclass(collections.deque, typing_extensions.Deque) + class MyDeque(typing_extensions.Deque[int]): ... + self.assertIsInstance(MyDeque(), collections.deque) + + def test_counter(self): + self.assertIsSubclass(collections.Counter, typing_extensions.Counter) + + @skipUnless(TYPING_V4, "Behavior added in typing v4") + def test_defaultdict_instantiation(self): + self.assertIs( + type(typing_extensions.DefaultDict()), + collections.defaultdict) + self.assertIs( + type(typing_extensions.DefaultDict[KT, VT]()), + collections.defaultdict) + self.assertIs( + type(typing_extensions.DefaultDict[str, int]()), + collections.defaultdict) + + def test_defaultdict_subclass(self): + + class MyDefDict(typing_extensions.DefaultDict[str, int]): + pass + + dd = MyDefDict() + self.assertIsInstance(dd, MyDefDict) + + self.assertIsSubclass(MyDefDict, collections.defaultdict) + if TYPING_V3: + self.assertNotIsSubclass(collections.defaultdict, MyDefDict) + + def test_chainmap_instantiation(self): + self.assertIs(type(typing_extensions.ChainMap()), collections.ChainMap) + self.assertIs(type(typing_extensions.ChainMap[KT, VT]()), collections.ChainMap) + self.assertIs(type(typing_extensions.ChainMap[str, int]()), collections.ChainMap) + class CM(typing_extensions.ChainMap[KT, VT]): ... 
+ if TYPING_V3: + self.assertIs(type(CM[int, str]()), CM) + + def test_chainmap_subclass(self): + + class MyChainMap(typing_extensions.ChainMap[str, int]): + pass + + cm = MyChainMap() + self.assertIsInstance(cm, MyChainMap) + + self.assertIsSubclass(MyChainMap, collections.ChainMap) + if TYPING_V3: + self.assertNotIsSubclass(collections.ChainMap, MyChainMap) + + def test_deque_instantiation(self): + self.assertIs(type(typing_extensions.Deque()), collections.deque) + self.assertIs(type(typing_extensions.Deque[T]()), collections.deque) + self.assertIs(type(typing_extensions.Deque[int]()), collections.deque) + class D(typing_extensions.Deque[T]): ... + if TYPING_V3: + self.assertIs(type(D[int]()), D) + + def test_counter_instantiation(self): + self.assertIs(type(typing_extensions.Counter()), collections.Counter) + self.assertIs(type(typing_extensions.Counter[T]()), collections.Counter) + self.assertIs(type(typing_extensions.Counter[int]()), collections.Counter) + class C(typing_extensions.Counter[T]): ... + if TYPING_V3: + self.assertIs(type(C[int]()), C) + + def test_counter_subclass_instantiation(self): + + class MyCounter(typing_extensions.Counter[int]): + pass + + d = MyCounter() + self.assertIsInstance(d, MyCounter) + self.assertIsInstance(d, collections.Counter) + if TYPING_V2: + self.assertIsInstance(d, typing_extensions.Counter) + + @skipUnless(PY36, 'Python 3.6 required') + def test_async_generator(self): + ns = {} + exec("async def f():\n" + " yield 42\n", globals(), ns) + g = ns['f']() + self.assertIsSubclass(type(g), typing_extensions.AsyncGenerator) + + @skipUnless(PY36, 'Python 3.6 required') + def test_no_async_generator_instantiation(self): + with self.assertRaises(TypeError): + typing_extensions.AsyncGenerator() + with self.assertRaises(TypeError): + typing_extensions.AsyncGenerator[T, T]() + with self.assertRaises(TypeError): + typing_extensions.AsyncGenerator[int, int]() + + + @skipUnless(PY36, 'Python 3.6 required') + def test_subclassing_async_generator(self): + class G(typing_extensions.AsyncGenerator[int, int]): + def asend(self, value): + pass + def athrow(self, typ, val=None, tb=None): + pass + + ns = {} + exec('async def g(): yield 0', globals(), ns) + g = ns['g'] + self.assertIsSubclass(G, typing_extensions.AsyncGenerator) + self.assertIsSubclass(G, typing_extensions.AsyncIterable) + self.assertIsSubclass(G, collections_abc.AsyncGenerator) + self.assertIsSubclass(G, collections_abc.AsyncIterable) + self.assertNotIsSubclass(type(g), G) + + instance = G() + self.assertIsInstance(instance, typing_extensions.AsyncGenerator) + self.assertIsInstance(instance, typing_extensions.AsyncIterable) + self.assertIsInstance(instance, collections_abc.AsyncGenerator) + self.assertIsInstance(instance, collections_abc.AsyncIterable) + self.assertNotIsInstance(type(g), G) + self.assertNotIsInstance(g, G) + + +class OtherABCTests(BaseTestCase): + + def test_contextmanager(self): + @contextlib.contextmanager + def manager(): + yield 42 + + cm = manager() + self.assertIsInstance(cm, typing_extensions.ContextManager) + self.assertNotIsInstance(42, typing_extensions.ContextManager) + + @skipUnless(ASYNCIO, 'Python 3.5 required') + def test_async_contextmanager(self): + class NotACM: + pass + self.assertIsInstance(ACM(), typing_extensions.AsyncContextManager) + self.assertNotIsInstance(NotACM(), typing_extensions.AsyncContextManager) + @contextlib.contextmanager + def manager(): + yield 42 + + cm = manager() + self.assertNotIsInstance(cm, typing_extensions.AsyncContextManager) + if 
TYPING_V3: + self.assertEqual(typing_extensions.AsyncContextManager[int].__args__, (int,)) + if TYPING_V4: + with self.assertRaises(TypeError): + isinstance(42, typing_extensions.AsyncContextManager[int]) + with self.assertRaises(TypeError): + typing_extensions.AsyncContextManager[int, str] + + +class TypeTests(BaseTestCase): + + def test_type_basic(self): + + class User: pass + class BasicUser(User): pass + class ProUser(User): pass + + def new_user(user_class: Type[User]) -> User: + return user_class() + + new_user(BasicUser) + + def test_type_typevar(self): + + class User: pass + class BasicUser(User): pass + class ProUser(User): pass + + U = TypeVar('U', bound=User) + + def new_user(user_class: Type[U]) -> U: + return user_class() + + new_user(BasicUser) + + @skipUnless(sys.version_info[:3] != (3, 5, 2), 'Python 3.5.2 has a somewhat buggy Type impl') + def test_type_optional(self): + A = Optional[Type[BaseException]] + + def foo(a: A) -> Optional[BaseException]: + if a is None: + return None + else: + return a() + + assert isinstance(foo(KeyboardInterrupt), KeyboardInterrupt) + assert foo(None) is None + + +class NewTypeTests(BaseTestCase): + + def test_basic(self): + UserId = NewType('UserId', int) + UserName = NewType('UserName', str) + self.assertIsInstance(UserId(5), int) + self.assertIsInstance(UserName('Joe'), str) + self.assertEqual(UserId(5) + 1, 6) + + def test_errors(self): + UserId = NewType('UserId', int) + UserName = NewType('UserName', str) + with self.assertRaises(TypeError): + issubclass(UserId, int) + with self.assertRaises(TypeError): + class D(UserName): + pass + +class AllTests(BaseTestCase): + def test_typing_extensions_includes_standard(self): + a = typing_extensions.__all__ + self.assertIn('ClassVar', a) + self.assertIn('Type', a) + self.assertIn('ChainMap', a) + self.assertIn('ContextManager', a) + self.assertIn('Counter', a) + self.assertIn('DefaultDict', a) + self.assertIn('Deque', a) + self.assertIn('NewType', a) + self.assertIn('overload', a) + self.assertIn('Text', a) + self.assertIn('TYPE_CHECKING', a) + + if ASYNCIO: + self.assertIn('Awaitable', a) + self.assertIn('AsyncIterator', a) + self.assertIn('AsyncIterable', a) + self.assertIn('Coroutine', a) + self.assertIn('AsyncContextManager', a) + + if PY36: + self.assertIn('AsyncGenerator', a) + if hasattr(collections_abc, 'Collection'): + self.assertIn('Collection', a) + + def test_typing_extensions_defers_when_possible(self): + exclude = {'overload', 'Text', 'TYPE_CHECKING'} + for item in typing_extensions.__all__: + if item not in exclude and hasattr(typing, item): + self.assertIs( + getattr(typing_extensions, item), + getattr(typing, item)) + +if __name__ == '__main__': + main(argv=[sys.argv[0]] + sys.argv[2:]) diff --git a/typing_extensions/src_py3/typing_extensions.py b/typing_extensions/src_py3/typing_extensions.py new file mode 100644 index 00000000..907866f3 --- /dev/null +++ b/typing_extensions/src_py3/typing_extensions.py @@ -0,0 +1,618 @@ +import abc +from abc import abstractmethod, abstractproperty +import collections +import contextlib +import sys +import typing +try: + import collections.abc as collections_abc +except ImportError: + import collections as collections_abc # Fallback for PY3.2. 
+ +if hasattr(typing, '_generic_new'): + _generic_new = typing._generic_new +else: + def _generic_new(base_cls, cls, *args, **kwargs): + return base_cls.__new__(cls, *args, **kwargs) + +if sys.version_info[:2] >= (3, 6): + import _collections_abc + _check_methods_in_mro = _collections_abc._check_methods +else: + def _check_methods_in_mro(C, *methods): + mro = C.__mro__ + for method in methods: + for B in mro: + if method in B.__dict__: + if B.__dict__[method] is None: + return NotImplemented + break + else: + return NotImplemented + return True + + +# Please keep __all__ alphabetized within each category. +__all__ = [ + # Super-special typing primitives. + 'ClassVar', + 'Type', + + # ABCs (from collections.abc). + # The following are added depending on presence + # of their non-generic counterparts in stdlib: + #'Awaitable', + #'AsyncIterator', + #'AsyncIterable', + #'Coroutine', + #'AsyncGenerator', + #'AsyncContextManager', + #'ChainMap', + + # Concrete collection types. + 'Collection', + 'ContextManager', + 'Counter', + 'DefaultDict', + 'Deque', + + # One-off things. + 'NewType', + 'overload', + 'Text', + 'TYPE_CHECKING', +] + + +# TODO: Remove once typing.py has been updated +if hasattr(typing, 'NoReturn'): + NoReturn = typing.NoReturn +elif hasattr(typing, '_FinalTypingBase'): + class _NoReturn(typing._FinalTypingBase, _root=True): + """Special type indicating functions that never return. + Example:: + + from typing import NoReturn + + def stop() -> NoReturn: + raise Exception('no way') + + This type is invalid in other positions, e.g., ``List[NoReturn]`` + will fail in static type checkers. + """ + __slots__ = () + + def __instancecheck__(self, obj): + raise TypeError("NoReturn cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + raise TypeError("NoReturn cannot be used with issubclass().") + + NoReturn = _NoReturn(_root=True) +else: + class NoReturnMeta(typing.TypingMeta): + """Metaclass for NoReturn""" + def __new__(cls, name, bases, namespace, _root=False): + return super().__new__(cls, name, bases, namespace, _root=_root) + + def __instancecheck__(self, obj): + raise TypeError("NoReturn cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + raise TypeError("NoReturn cannot be used with issubclass().") + + class NoReturn(typing.Final, metaclass=NoReturnMeta, _root=True): + """Special type indicating functions that never return. + Example:: + + from typing import NoReturn + + def stop() -> NoReturn: + raise Exception('no way') + + This type is invalid in other positions, e.g., ``List[NoReturn]`` + will fail in static type checkers. + """ + __slots__ = () + + +# Some unconstrained type variables. These are used by the container types. +# (These are not for export.) +T = typing.TypeVar('T') # Any type. +KT = typing.TypeVar('KT') # Key type. +VT = typing.TypeVar('VT') # Value type. +T_co = typing.TypeVar('T_co', covariant=True) # Any type covariant containers. +V_co = typing.TypeVar('V_co', covariant=True) # Any type covariant containers. +VT_co = typing.TypeVar('VT_co', covariant=True) # Value type covariant containers. +T_contra = typing.TypeVar('T_contra', contravariant=True) # Ditto contravariant. + + +def _gorg(a): + """Return the farthest origin of a generic class (internal helper).""" + assert isinstance(a, typing.GenericMeta) + while a.__origin__ is not None: + a = a.__origin__ + return a + + +def _geqv(a, b): + """Return whether two generic classes are equivalent (internal helper). 
+ + The intention is to consider generic class X and any of its + parameterized forms (X[T], X[int], etc.) as equivalent. + + However, X is not equivalent to a subclass of X. + + The relation is reflexive, symmetric and transitive. + """ + assert isinstance(a, typing.GenericMeta) and isinstance(b, typing.GenericMeta) + # Reduce each to its origin. + return _gorg(a) is _gorg(b) + + +if hasattr(typing, 'ClassVar'): + ClassVar = typing.ClassVar +elif hasattr(typing, '_FinalTypingBase'): + class _ClassVar(typing._FinalTypingBase, _root=True): + """Special type construct to mark class variables. + + An annotation wrapped in ClassVar indicates that a given + attribute is intended to be used as a class variable and + should not be set on instances of that class. Usage:: + + class Starship: + stats: ClassVar[Dict[str, int]] = {} # class variable + damage: int = 10 # instance variable + + ClassVar accepts only types and cannot be further subscribed. + + Note that ClassVar is not a class itself, and should not + be used with isinstance() or issubclass(). + """ + + __slots__ = ('__type__',) + + def __init__(self, tp=None, **kwds): + self.__type__ = tp + + def __getitem__(self, item): + cls = type(self) + if self.__type__ is None: + return cls(typing._type_check(item, + '{} accepts only single type.'.format(cls.__name__[1:])), + _root=True) + raise TypeError('{} cannot be further subscripted' + .format(cls.__name__[1:])) + + def _eval_type(self, globalns, localns): + new_tp = typing._eval_type(self.__type__, globalns, localns) + if new_tp == self.__type__: + return self + return type(self)(new_tp, _root=True) + + def __repr__(self): + r = super().__repr__() + if self.__type__ is not None: + r += '[{}]'.format(typing._type_repr(self.__type__)) + return r + + def __hash__(self): + return hash((type(self).__name__, self.__type__)) + + def __eq__(self, other): + if not isinstance(other, _ClassVar): + return NotImplemented + if self.__type__ is not None: + return self.__type__ == other.__type__ + return self is other + + + ClassVar = _ClassVar(_root=True) +else: + class ClassVarMeta(typing.TypingMeta): + """Metaclass for ClassVar""" + + def __new__(cls, name, bases, namespace, tp=None, _root=False): + self = super().__new__(cls, name, bases, namespace, _root=_root) + if tp is not None: + self.__type__ = tp + return self + + def __instancecheck__(self, obj): + raise TypeError("ClassVar cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + raise TypeError("ClassVar cannot be used with issubclass().") + + def __getitem__(self, item): + cls = type(self) + if self.__type__ is not None: + raise TypeError('{} cannot be further subscripted' + .format(cls.__name__[1:])) + + param = typing._type_check( + item, + '{} accepts only single type.'.format(cls.__name__[1:])) + return cls(self.__name__, self.__bases__, + dict(self.__dict__), tp=param, _root=True) + + def _eval_type(self, globalns, localns): + new_tp = typing._eval_type(self.__type__, globalns, localns) + if new_tp == self.__type__: + return self + return type(self)(self.__name__, self.__bases__, + dict(self.__dict__), tp=new_tp, + _root=True) + + def __repr__(self): + r = super().__repr__() + if self.__type__ is not None: + r += '[{}]'.format(typing._type_repr(self.__type__)) + return r + + def __hash__(self): + return hash((type(self).__name__, self.__type__)) + + def __eq__(self, other): + if not isinstance(other, ClassVar): + return NotImplemented + if self.__type__ is not None: + return self.__type__ == other.__type__ + return self is other + + 
+ class ClassVar(typing.Final, metaclass=ClassVarMeta, _root=True): + """Special type construct to mark class variables. + + An annotation wrapped in ClassVar indicates that a given + attribute is intended to be used as a class variable and + should not be set on instances of that class. Usage:: + + class Starship: + stats: ClassVar[Dict[str, int]] = {} # class variable + damage: int = 10 # instance variable + + ClassVar accepts only types and cannot be further subscribed. + + Note that ClassVar is not a class itself, and should not + be used with isinstance() or issubclass(). + """ + + __type__ = None + + +def _overload_dummy(*args, **kwds): + """Helper for @overload to raise when called.""" + raise NotImplementedError( + "You should not call an overloaded function. " + "A series of @overload-decorated functions " + "outside a stub module should always be followed " + "by an implementation that is not @overload-ed.") + + +def overload(func): + """Decorator for overloaded functions/methods. + + In a stub file, place two or more stub definitions for the same + function in a row, each decorated with @overload. For example: + + @overload + def utf8(value: None) -> None: ... + @overload + def utf8(value: bytes) -> bytes: ... + @overload + def utf8(value: str) -> bytes: ... + + In a non-stub file (i.e. a regular .py file), do the same but + follow it with an implementation. The implementation should *not* + be decorated with @overload. For example: + + @overload + def utf8(value: None) -> None: ... + @overload + def utf8(value: bytes) -> bytes: ... + @overload + def utf8(value: str) -> bytes: ... + def utf8(value): + # implementation goes here + """ + return _overload_dummy + + + + +# This is not a real generic class. Don't use outside annotations. +if hasattr(typing, 'Type'): + Type = typing.Type +else: + # Internal type variable used for Type[]. + CT_co = typing.TypeVar('CT_co', covariant=True, bound=type) + + class Type(typing.Generic[CT_co], extra=type): + """A special construct usable to annotate class objects. + + For example, suppose we have the following classes:: + + class User: ... # Abstract base for User classes + class BasicUser(User): ... + class ProUser(User): ... + class TeamUser(User): ... + + And a function that takes a class argument that's a subclass of + User and returns an instance of the corresponding class:: + + U = TypeVar('U', bound=User) + def new_user(user_class: Type[U]) -> U: + user = user_class() + # (Here we could write the user object to a database) + return user + joe = new_user(BasicUser) + + At this point the type checker knows that joe has type BasicUser. + """ + + __slots__ = () + + +# Various ABCs mimicking those in collections.abc. +# A few are simply re-exported for completeness. + +def _define_guard(type_name): + """ + Returns True if the given type isn't defined in typing but + is defined in collections_abc. + + Adds the type to __all__ if the type is found in either + typing or collections_abc. 
+ """ + if hasattr(typing, type_name): + __all__.append(type_name) + globals()[type_name] = getattr(typing, type_name) + return False + elif hasattr(collections_abc, type_name): + __all__.append(type_name) + return True + else: + return False + + +if _define_guard('Awaitable'): + class Awaitable(typing.Generic[T_co], extra=collections_abc.Awaitable): + __slots__ = () + + +if _define_guard('Coroutine'): + class Coroutine(Awaitable[V_co], typing.Generic[T_co, T_contra, V_co], + extra=collections_abc.Coroutine): + __slots__ = () + + +if _define_guard('AsyncIterable'): + class AsyncIterable(typing.Generic[T_co], + extra=collections_abc.AsyncIterable): + __slots__ = () + + +if _define_guard('AsyncIterator'): + class AsyncIterator(AsyncIterable[T_co], + extra=collections_abc.AsyncIterator): + __slots__ = () + + +if hasattr(typing, 'Collection'): + Collection = typing.Collection +else: + __all__.append('Collection') + + # Backport collections.abc.Collections + class _CollectionAbc(collections.Sized, + collections.Iterable, + collections.Container): + __slots__ = () + + @classmethod + def __subclasshook__(cls, C): + if cls is _CollectionAbc: + return _check_methods_in_mro( + C, "__len__", "__iter__", "__contains__") + return NotImplemented + + extra = getattr(collections_abc, 'Collection', _CollectionAbc) + + class Collection(typing.Sized, + typing.Iterable[T_co], + typing.Container[T_co], + extra=extra): + __slots__ = () + + +if hasattr(typing, 'Deque'): + Deque = typing.Deque +else: + class Deque(collections.deque, typing.MutableSequence[T], + extra=collections.deque): + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, Deque): + return collections.deque(*args, **kwds) + return _generic_new(collections.deque, cls, *args, **kwds) + + +if hasattr(typing, 'ContextManager'): + ContextManager = typing.ContextManager +elif hasattr(contextlib, 'AbstractContextManager'): + class ContextManager(typing.Generic[T_co], + extra=contextlib.AbstractContextManager): + __slots__ = () +else: + class ContextManager(typing.Generic[T_co]): + __slots__ = () + + def __enter__(self): + return self + + @abc.abstractmethod + def __exit__(self, exc_type, exc_value, traceback): + return None + + @classmethod + def __subclasshook__(cls, C): + if cls is ContextManager: + # In Python 3.6+, it is possible to set a method to None to + # explicitly indicate that the class does not implement an ABC + # (https://bugs.python.org/issue25958), but we do not support + # that pattern here because this fallback class is only used + # in Python 3.5 and earlier. 
+ if (any("__enter__" in B.__dict__ for B in C.__mro__) and + any("__exit__" in B.__dict__ for B in C.__mro__)): + return True + return NotImplemented + + +if hasattr(typing, 'AsyncContextManager'): + AsyncContextManager = typing.AsyncContextManager + __all__.append('AsyncContextManager') +elif hasattr(contextlib, 'AbstractAsyncContextManager'): + class AsyncContextManager(typing.Generic[T_co], + extra=contextlib.AbstractAsyncContextManager): + __slots__ = () + + __all__.append('AsyncContextManager') +elif sys.version_info[:2] >= (3, 5): + exec(""" +class AsyncContextManager(typing.Generic[T_co]): + __slots__ = () + + async def __aenter__(self): + return self + + @abc.abstractmethod + async def __aexit__(self, exc_type, exc_value, traceback): + return None + + @classmethod + def __subclasshook__(cls, C): + if cls is AsyncContextManager: + return _check_methods_in_mro(C, "__aenter__", "__aexit__") + return NotImplemented + +__all__.append('AsyncContextManager') +""") + + +if hasattr(typing, 'DefaultDict'): + DefaultDict = typing.DefaultDict +else: + class DefaultDict(collections.defaultdict, typing.MutableMapping[KT, VT], + extra=collections.defaultdict): + + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, DefaultDict): + return collections.defaultdict(*args, **kwds) + return _generic_new(collections.defaultdict, cls, *args, **kwds) + + +if hasattr(typing, 'Counter'): + Counter = typing.Counter +elif (3, 5, 0) <= sys.version_info[:3] <= (3, 5, 1): + _TInt = typing.TypeVar('_TInt') + + class CounterMeta(typing.GenericMeta): + """Metaclass for Counter""" + def __getitem__(self, item): + return super().__getitem__((item, int)) + + class Counter(collections.Counter, + typing.Dict[T, int], + metaclass=CounterMeta, + extra=collections.Counter): + + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, Counter): + return collections.Counter(*args, **kwds) + return _generic_new(collections.Counter, cls, *args, **kwds) + +else: + class Counter(collections.Counter, + typing.Dict[T, int], + extra=collections.Counter): + + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, Counter): + return collections.Counter(*args, **kwds) + return _generic_new(collections.Counter, cls, *args, **kwds) + + +if hasattr(typing, 'ChainMap'): + ChainMap = typing.ChainMap + __all__.append('ChainMap') +elif hasattr(collections, 'ChainMap'): + # ChainMap only exists in 3.3+ + class ChainMap(collections.ChainMap, typing.MutableMapping[KT, VT], + extra=collections.ChainMap): + + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, ChainMap): + return collections.ChainMap(*args, **kwds) + return _generic_new(collections.ChainMap, cls, *args, **kwds) + + __all__.append('ChainMap') + + +if _define_guard('AsyncGenerator'): + class AsyncGenerator(AsyncIterator[T_co], typing.Generic[T_co, T_contra], + extra=collections_abc.AsyncGenerator): + __slots__ = () + + +if hasattr(typing, 'NewType'): + NewType = typing.NewType +else: + def NewType(name, tp): + """NewType creates simple unique types with almost zero + runtime overhead. NewType(name, tp) is considered a subtype of tp + by static type checkers. At runtime, NewType(name, tp) returns + a dummy function that simply returns its argument. Usage:: + + UserId = NewType('UserId', int) + + def name_by_id(user_id: UserId) -> str: + ... 
+
+            UserId('user')          # Fails type check
+
+            name_by_id(42)          # Fails type check
+            name_by_id(UserId(42))  # OK
+
+            num = UserId(5) + 1     # type: int
+        """
+
+        def new_type(x):
+            return x
+
+        new_type.__name__ = name
+        new_type.__supertype__ = tp
+        return new_type
+
+
+if hasattr(typing, 'Text'):
+    Text = typing.Text
+else:
+    Text = str
+
+
+if hasattr(typing, 'TYPE_CHECKING'):
+    TYPE_CHECKING = typing.TYPE_CHECKING
+else:
+    # Constant that's True when type checking, but False here.
+    TYPE_CHECKING = False
+
diff --git a/typing_extensions/test_data/python-2.7.13/_abcoll.py b/typing_extensions/test_data/python-2.7.13/_abcoll.py
new file mode 100644
index 00000000..b643692e
--- /dev/null
+++ b/typing_extensions/test_data/python-2.7.13/_abcoll.py
@@ -0,0 +1,695 @@
+# Copyright 2007 Google, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Abstract Base Classes (ABCs) for collections, according to PEP 3119.
+
+DON'T USE THIS MODULE DIRECTLY! The classes here should be imported
+via collections; they are defined here only to alleviate certain
+bootstrapping issues. Unit tests are in test_collections.
+"""
+
+from abc import ABCMeta, abstractmethod
+import sys
+
+__all__ = ["Hashable", "Iterable", "Iterator",
+           "Sized", "Container", "Callable",
+           "Set", "MutableSet",
+           "Mapping", "MutableMapping",
+           "MappingView", "KeysView", "ItemsView", "ValuesView",
+           "Sequence", "MutableSequence",
+           ]
+
+### ONE-TRICK PONIES ###
+
+def _hasattr(C, attr):
+    try:
+        return any(attr in B.__dict__ for B in C.__mro__)
+    except AttributeError:
+        # Old-style class
+        return hasattr(C, attr)
+
+
+class Hashable:
+    __metaclass__ = ABCMeta
+
+    @abstractmethod
+    def __hash__(self):
+        return 0
+
+    @classmethod
+    def __subclasshook__(cls, C):
+        if cls is Hashable:
+            try:
+                for B in C.__mro__:
+                    if "__hash__" in B.__dict__:
+                        if B.__dict__["__hash__"]:
+                            return True
+                        break
+            except AttributeError:
+                # Old-style class
+                if getattr(C, "__hash__", None):
+                    return True
+        return NotImplemented
+
+
+class Iterable:
+    __metaclass__ = ABCMeta
+
+    @abstractmethod
+    def __iter__(self):
+        while False:
+            yield None
+
+    @classmethod
+    def __subclasshook__(cls, C):
+        if cls is Iterable:
+            if _hasattr(C, "__iter__"):
+                return True
+        return NotImplemented
+
+Iterable.register(str)
+
+
+class Iterator(Iterable):
+
+    @abstractmethod
+    def next(self):
+        'Return the next item from the iterator. When exhausted, raise StopIteration'
+        raise StopIteration
+
+    def __iter__(self):
+        return self
+
+    @classmethod
+    def __subclasshook__(cls, C):
+        if cls is Iterator:
+            if _hasattr(C, "next") and _hasattr(C, "__iter__"):
+                return True
+        return NotImplemented
+
+
+class Sized:
+    __metaclass__ = ABCMeta
+
+    @abstractmethod
+    def __len__(self):
+        return 0
+
+    @classmethod
+    def __subclasshook__(cls, C):
+        if cls is Sized:
+            if _hasattr(C, "__len__"):
+                return True
+        return NotImplemented
+
+
+class Container:
+    __metaclass__ = ABCMeta
+
+    @abstractmethod
+    def __contains__(self, x):
+        return False
+
+    @classmethod
+    def __subclasshook__(cls, C):
+        if cls is Container:
+            if _hasattr(C, "__contains__"):
+                return True
+        return NotImplemented
+
+
+class Callable:
+    __metaclass__ = ABCMeta
+
+    @abstractmethod
+    def __call__(self, *args, **kwds):
+        return False
+
+    @classmethod
+    def __subclasshook__(cls, C):
+        if cls is Callable:
+            if _hasattr(C, "__call__"):
+                return True
+        return NotImplemented
+
+
+### SETS ###
+
+
+class Set(Sized, Iterable, Container):
+    """A set is a finite, iterable container.
+ + This class provides concrete generic implementations of all + methods except for __contains__, __iter__ and __len__. + + To override the comparisons (presumably for speed, as the + semantics are fixed), redefine __le__ and __ge__, + then the other operations will automatically follow suit. + """ + + def __le__(self, other): + if not isinstance(other, Set): + return NotImplemented + if len(self) > len(other): + return False + for elem in self: + if elem not in other: + return False + return True + + def __lt__(self, other): + if not isinstance(other, Set): + return NotImplemented + return len(self) < len(other) and self.__le__(other) + + def __gt__(self, other): + if not isinstance(other, Set): + return NotImplemented + return len(self) > len(other) and self.__ge__(other) + + def __ge__(self, other): + if not isinstance(other, Set): + return NotImplemented + if len(self) < len(other): + return False + for elem in other: + if elem not in self: + return False + return True + + def __eq__(self, other): + if not isinstance(other, Set): + return NotImplemented + return len(self) == len(other) and self.__le__(other) + + def __ne__(self, other): + return not (self == other) + + @classmethod + def _from_iterable(cls, it): + '''Construct an instance of the class from any iterable input. + + Must override this method if the class constructor signature + does not accept an iterable for an input. + ''' + return cls(it) + + def __and__(self, other): + if not isinstance(other, Iterable): + return NotImplemented + return self._from_iterable(value for value in other if value in self) + + __rand__ = __and__ + + def isdisjoint(self, other): + 'Return True if two sets have a null intersection.' + for value in other: + if value in self: + return False + return True + + def __or__(self, other): + if not isinstance(other, Iterable): + return NotImplemented + chain = (e for s in (self, other) for e in s) + return self._from_iterable(chain) + + __ror__ = __or__ + + def __sub__(self, other): + if not isinstance(other, Set): + if not isinstance(other, Iterable): + return NotImplemented + other = self._from_iterable(other) + return self._from_iterable(value for value in self + if value not in other) + + def __rsub__(self, other): + if not isinstance(other, Set): + if not isinstance(other, Iterable): + return NotImplemented + other = self._from_iterable(other) + return self._from_iterable(value for value in other + if value not in self) + + def __xor__(self, other): + if not isinstance(other, Set): + if not isinstance(other, Iterable): + return NotImplemented + other = self._from_iterable(other) + return (self - other) | (other - self) + + __rxor__ = __xor__ + + # Sets are not hashable by default, but subclasses can change this + __hash__ = None + + def _hash(self): + """Compute the hash value of a set. + + Note that we don't define __hash__: not all sets are hashable. + But if you define a hashable set type, its __hash__ should + call this function. + + This must be compatible __eq__. + + All sets ought to compare equal if they contain the same + elements, regardless of how they are implemented, and + regardless of the order of the elements; so there's not much + freedom for __eq__ or __hash__. We match the algorithm used + by the built-in frozenset type. 
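+
+        As a quick sanity check, a hashable subclass that routes
+        __hash__ through _hash should agree with the builtin, e.g.
+        hash(MySet([1, 2])) == hash(frozenset([1, 2])), where 'MySet'
+        is a hypothetical Set subclass.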
+ """ + MAX = sys.maxint + MASK = 2 * MAX + 1 + n = len(self) + h = 1927868237 * (n + 1) + h &= MASK + for x in self: + hx = hash(x) + h ^= (hx ^ (hx << 16) ^ 89869747) * 3644798167 + h &= MASK + h = h * 69069 + 907133923 + h &= MASK + if h > MAX: + h -= MASK + 1 + if h == -1: + h = 590923713 + return h + +Set.register(frozenset) + + +class MutableSet(Set): + """A mutable set is a finite, iterable container. + + This class provides concrete generic implementations of all + methods except for __contains__, __iter__, __len__, + add(), and discard(). + + To override the comparisons (presumably for speed, as the + semantics are fixed), all you have to do is redefine __le__ and + then the other operations will automatically follow suit. + """ + + @abstractmethod + def add(self, value): + """Add an element.""" + raise NotImplementedError + + @abstractmethod + def discard(self, value): + """Remove an element. Do not raise an exception if absent.""" + raise NotImplementedError + + def remove(self, value): + """Remove an element. If not a member, raise a KeyError.""" + if value not in self: + raise KeyError(value) + self.discard(value) + + def pop(self): + """Return the popped value. Raise KeyError if empty.""" + it = iter(self) + try: + value = next(it) + except StopIteration: + raise KeyError + self.discard(value) + return value + + def clear(self): + """This is slow (creates N new iterators!) but effective.""" + try: + while True: + self.pop() + except KeyError: + pass + + def __ior__(self, it): + for value in it: + self.add(value) + return self + + def __iand__(self, it): + for value in (self - it): + self.discard(value) + return self + + def __ixor__(self, it): + if it is self: + self.clear() + else: + if not isinstance(it, Set): + it = self._from_iterable(it) + for value in it: + if value in self: + self.discard(value) + else: + self.add(value) + return self + + def __isub__(self, it): + if it is self: + self.clear() + else: + for value in it: + self.discard(value) + return self + +MutableSet.register(set) + + +### MAPPINGS ### + + +class Mapping(Sized, Iterable, Container): + + """A Mapping is a generic container for associating key/value + pairs. + + This class provides concrete generic implementations of all + methods except for __getitem__, __iter__, and __len__. + + """ + + @abstractmethod + def __getitem__(self, key): + raise KeyError + + def get(self, key, default=None): + 'D.get(k[,d]) -> D[k] if k in D, else d. d defaults to None.' 
+ try: + return self[key] + except KeyError: + return default + + def __contains__(self, key): + try: + self[key] + except KeyError: + return False + else: + return True + + def iterkeys(self): + 'D.iterkeys() -> an iterator over the keys of D' + return iter(self) + + def itervalues(self): + 'D.itervalues() -> an iterator over the values of D' + for key in self: + yield self[key] + + def iteritems(self): + 'D.iteritems() -> an iterator over the (key, value) items of D' + for key in self: + yield (key, self[key]) + + def keys(self): + "D.keys() -> list of D's keys" + return list(self) + + def items(self): + "D.items() -> list of D's (key, value) pairs, as 2-tuples" + return [(key, self[key]) for key in self] + + def values(self): + "D.values() -> list of D's values" + return [self[key] for key in self] + + # Mappings are not hashable by default, but subclasses can change this + __hash__ = None + + def __eq__(self, other): + if not isinstance(other, Mapping): + return NotImplemented + return dict(self.items()) == dict(other.items()) + + def __ne__(self, other): + return not (self == other) + +class MappingView(Sized): + + def __init__(self, mapping): + self._mapping = mapping + + def __len__(self): + return len(self._mapping) + + def __repr__(self): + return '{0.__class__.__name__}({0._mapping!r})'.format(self) + + +class KeysView(MappingView, Set): + + @classmethod + def _from_iterable(self, it): + return set(it) + + def __contains__(self, key): + return key in self._mapping + + def __iter__(self): + for key in self._mapping: + yield key + +KeysView.register(type({}.viewkeys())) + +class ItemsView(MappingView, Set): + + @classmethod + def _from_iterable(self, it): + return set(it) + + def __contains__(self, item): + key, value = item + try: + v = self._mapping[key] + except KeyError: + return False + else: + return v == value + + def __iter__(self): + for key in self._mapping: + yield (key, self._mapping[key]) + +ItemsView.register(type({}.viewitems())) + +class ValuesView(MappingView): + + def __contains__(self, value): + for key in self._mapping: + if value == self._mapping[key]: + return True + return False + + def __iter__(self): + for key in self._mapping: + yield self._mapping[key] + +ValuesView.register(type({}.viewvalues())) + +class MutableMapping(Mapping): + + """A MutableMapping is a generic container for associating + key/value pairs. + + This class provides concrete generic implementations of all + methods except for __getitem__, __setitem__, __delitem__, + __iter__, and __len__. + + """ + + @abstractmethod + def __setitem__(self, key, value): + raise KeyError + + @abstractmethod + def __delitem__(self, key): + raise KeyError + + __marker = object() + + def pop(self, key, default=__marker): + '''D.pop(k[,d]) -> v, remove specified key and return the corresponding value. + If key is not found, d is returned if given, otherwise KeyError is raised. + ''' + try: + value = self[key] + except KeyError: + if default is self.__marker: + raise + return default + else: + del self[key] + return value + + def popitem(self): + '''D.popitem() -> (k, v), remove and return some (key, value) pair + as a 2-tuple; but raise KeyError if D is empty. + ''' + try: + key = next(iter(self)) + except StopIteration: + raise KeyError + value = self[key] + del self[key] + return key, value + + def clear(self): + 'D.clear() -> None. Remove all items from D.' + try: + while True: + self.popitem() + except KeyError: + pass + + def update(*args, **kwds): + ''' D.update([E, ]**F) -> None. 
Update D from mapping/iterable E and F. + If E present and has a .keys() method, does: for k in E: D[k] = E[k] + If E present and lacks .keys() method, does: for (k, v) in E: D[k] = v + In either case, this is followed by: for k, v in F.items(): D[k] = v + ''' + if not args: + raise TypeError("descriptor 'update' of 'MutableMapping' object " + "needs an argument") + self = args[0] + args = args[1:] + if len(args) > 1: + raise TypeError('update expected at most 1 arguments, got %d' % + len(args)) + if args: + other = args[0] + if isinstance(other, Mapping): + for key in other: + self[key] = other[key] + elif hasattr(other, "keys"): + for key in other.keys(): + self[key] = other[key] + else: + for key, value in other: + self[key] = value + for key, value in kwds.items(): + self[key] = value + + def setdefault(self, key, default=None): + 'D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D' + try: + return self[key] + except KeyError: + self[key] = default + return default + +MutableMapping.register(dict) + + +### SEQUENCES ### + + +class Sequence(Sized, Iterable, Container): + """All the operations on a read-only sequence. + + Concrete subclasses must override __new__ or __init__, + __getitem__, and __len__. + """ + + @abstractmethod + def __getitem__(self, index): + raise IndexError + + def __iter__(self): + i = 0 + try: + while True: + v = self[i] + yield v + i += 1 + except IndexError: + return + + def __contains__(self, value): + for v in self: + if v == value: + return True + return False + + def __reversed__(self): + for i in reversed(range(len(self))): + yield self[i] + + def index(self, value): + '''S.index(value) -> integer -- return first index of value. + Raises ValueError if the value is not present. + ''' + for i, v in enumerate(self): + if v == value: + return i + raise ValueError + + def count(self, value): + 'S.count(value) -> integer -- return number of occurrences of value' + return sum(1 for v in self if v == value) + +Sequence.register(tuple) +Sequence.register(basestring) +Sequence.register(buffer) +Sequence.register(xrange) + + +class MutableSequence(Sequence): + + """All the operations on a read-only sequence. + + Concrete subclasses must provide __new__ or __init__, + __getitem__, __setitem__, __delitem__, __len__, and insert(). + + """ + + @abstractmethod + def __setitem__(self, index, value): + raise IndexError + + @abstractmethod + def __delitem__(self, index): + raise IndexError + + @abstractmethod + def insert(self, index, value): + 'S.insert(index, object) -- insert object before index' + raise IndexError + + def append(self, value): + 'S.append(object) -- append object to the end of the sequence' + self.insert(len(self), value) + + def reverse(self): + 'S.reverse() -- reverse *IN PLACE*' + n = len(self) + for i in range(n//2): + self[i], self[n-i-1] = self[n-i-1], self[i] + + def extend(self, values): + 'S.extend(iterable) -- extend sequence by appending elements from the iterable' + for v in values: + self.append(v) + + def pop(self, index=-1): + '''S.pop([index]) -> item -- remove and return item at index (default last). + Raise IndexError if list is empty or index is out of range. + ''' + v = self[index] + del self[index] + return v + + def remove(self, value): + '''S.remove(value) -- remove first occurrence of value. + Raise ValueError if the value is not present. 
+ ''' + del self[self.index(value)] + + def __iadd__(self, values): + self.extend(values) + return self + +MutableSequence.register(list) diff --git a/typing_extensions/test_data/python-2.7.13/abc.py b/typing_extensions/test_data/python-2.7.13/abc.py new file mode 100644 index 00000000..02e48a1b --- /dev/null +++ b/typing_extensions/test_data/python-2.7.13/abc.py @@ -0,0 +1,185 @@ +# Copyright 2007 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Abstract Base Classes (ABCs) according to PEP 3119.""" + +import types + +from _weakrefset import WeakSet + +# Instance of old-style class +class _C: pass +_InstanceType = type(_C()) + + +def abstractmethod(funcobj): + """A decorator indicating abstract methods. + + Requires that the metaclass is ABCMeta or derived from it. A + class that has a metaclass derived from ABCMeta cannot be + instantiated unless all of its abstract methods are overridden. + The abstract methods can be called using any of the normal + 'super' call mechanisms. + + Usage: + + class C: + __metaclass__ = ABCMeta + @abstractmethod + def my_abstract_method(self, ...): + ... + """ + funcobj.__isabstractmethod__ = True + return funcobj + + +class abstractproperty(property): + """A decorator indicating abstract properties. + + Requires that the metaclass is ABCMeta or derived from it. A + class that has a metaclass derived from ABCMeta cannot be + instantiated unless all of its abstract properties are overridden. + The abstract properties can be called using any of the normal + 'super' call mechanisms. + + Usage: + + class C: + __metaclass__ = ABCMeta + @abstractproperty + def my_abstract_property(self): + ... + + This defines a read-only property; you can also define a read-write + abstract property using the 'long' form of property declaration: + + class C: + __metaclass__ = ABCMeta + def getx(self): ... + def setx(self, value): ... + x = abstractproperty(getx, setx) + """ + __isabstractmethod__ = True + + +class ABCMeta(type): + + """Metaclass for defining Abstract Base Classes (ABCs). + + Use this metaclass to create an ABC. An ABC can be subclassed + directly, and then acts as a mix-in class. You can also register + unrelated concrete classes (even built-in classes) and unrelated + ABCs as 'virtual subclasses' -- these and their descendants will + be considered subclasses of the registering ABC by the built-in + issubclass() function, but the registering ABC won't show up in + their MRO (Method Resolution Order) nor will method + implementations defined by the registering ABC be callable (not + even via super()). + + """ + + # A global counter that is incremented each time a class is + # registered as a virtual subclass of anything. It forces the + # negative cache to be cleared before its next use. 
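+    # A sketch of the interplay (using Sized from _abcoll as an example):
+    #
+    #     >>> issubclass(int, Sized)   # miss; recorded in the negative cache
+    #     False
+    #     >>> Sized.register(int)      # bumps the invalidation counter
+    #     >>> issubclass(int, Sized)   # stale negative cache is discarded
+    #     True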
+ _abc_invalidation_counter = 0 + + def __new__(mcls, name, bases, namespace): + cls = super(ABCMeta, mcls).__new__(mcls, name, bases, namespace) + # Compute set of abstract method names + abstracts = set(name + for name, value in namespace.items() + if getattr(value, "__isabstractmethod__", False)) + for base in bases: + for name in getattr(base, "__abstractmethods__", set()): + value = getattr(cls, name, None) + if getattr(value, "__isabstractmethod__", False): + abstracts.add(name) + cls.__abstractmethods__ = frozenset(abstracts) + # Set up inheritance registry + cls._abc_registry = WeakSet() + cls._abc_cache = WeakSet() + cls._abc_negative_cache = WeakSet() + cls._abc_negative_cache_version = ABCMeta._abc_invalidation_counter + return cls + + def register(cls, subclass): + """Register a virtual subclass of an ABC.""" + if not isinstance(subclass, (type, types.ClassType)): + raise TypeError("Can only register classes") + if issubclass(subclass, cls): + return # Already a subclass + # Subtle: test for cycles *after* testing for "already a subclass"; + # this means we allow X.register(X) and interpret it as a no-op. + if issubclass(cls, subclass): + # This would create a cycle, which is bad for the algorithm below + raise RuntimeError("Refusing to create an inheritance cycle") + cls._abc_registry.add(subclass) + ABCMeta._abc_invalidation_counter += 1 # Invalidate negative cache + + def _dump_registry(cls, file=None): + """Debug helper to print the ABC registry.""" + print >> file, "Class: %s.%s" % (cls.__module__, cls.__name__) + print >> file, "Inv.counter: %s" % ABCMeta._abc_invalidation_counter + for name in sorted(cls.__dict__.keys()): + if name.startswith("_abc_"): + value = getattr(cls, name) + print >> file, "%s: %r" % (name, value) + + def __instancecheck__(cls, instance): + """Override for isinstance(instance, cls).""" + # Inline the cache checking when it's simple. + subclass = getattr(instance, '__class__', None) + if subclass is not None and subclass in cls._abc_cache: + return True + subtype = type(instance) + # Old-style instances + if subtype is _InstanceType: + subtype = subclass + if subtype is subclass or subclass is None: + if (cls._abc_negative_cache_version == + ABCMeta._abc_invalidation_counter and + subtype in cls._abc_negative_cache): + return False + # Fall back to the subclass check. 
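+            # (__subclasscheck__ consults the hook, the registry and real
+            # subclasses, repopulating the caches as a side effect.)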
+ return cls.__subclasscheck__(subtype) + return (cls.__subclasscheck__(subclass) or + cls.__subclasscheck__(subtype)) + + def __subclasscheck__(cls, subclass): + """Override for issubclass(subclass, cls).""" + # Check cache + if subclass in cls._abc_cache: + return True + # Check negative cache; may have to invalidate + if cls._abc_negative_cache_version < ABCMeta._abc_invalidation_counter: + # Invalidate the negative cache + cls._abc_negative_cache = WeakSet() + cls._abc_negative_cache_version = ABCMeta._abc_invalidation_counter + elif subclass in cls._abc_negative_cache: + return False + # Check the subclass hook + ok = cls.__subclasshook__(subclass) + if ok is not NotImplemented: + assert isinstance(ok, bool) + if ok: + cls._abc_cache.add(subclass) + else: + cls._abc_negative_cache.add(subclass) + return ok + # Check if it's a direct subclass + if cls in getattr(subclass, '__mro__', ()): + cls._abc_cache.add(subclass) + return True + # Check if it's a subclass of a registered class (recursive) + for rcls in cls._abc_registry: + if issubclass(subclass, rcls): + cls._abc_cache.add(subclass) + return True + # Check if it's a subclass of a subclass (recursive) + for scls in cls.__subclasses__(): + if issubclass(subclass, scls): + cls._abc_cache.add(subclass) + return True + # No dice; update negative cache + cls._abc_negative_cache.add(subclass) + return False diff --git a/typing_extensions/test_data/python-2.7.13/collections.py b/typing_extensions/test_data/python-2.7.13/collections.py new file mode 100644 index 00000000..f2ad9726 --- /dev/null +++ b/typing_extensions/test_data/python-2.7.13/collections.py @@ -0,0 +1,742 @@ +'''This module implements specialized container datatypes providing +alternatives to Python's general purpose built-in containers, dict, +list, set, and tuple. + +* namedtuple factory function for creating tuple subclasses with named fields +* deque list-like container with fast appends and pops on either end +* Counter dict subclass for counting hashable objects +* OrderedDict dict subclass that remembers the order entries were added +* defaultdict dict subclass that calls a factory function to supply missing values + +''' + +__all__ = ['Counter', 'deque', 'defaultdict', 'namedtuple', 'OrderedDict'] +# For bootstrapping reasons, the collection ABCs are defined in _abcoll.py. +# They should however be considered an integral part of collections.py. +from _abcoll import * +import _abcoll +__all__ += _abcoll.__all__ + +from _collections import deque, defaultdict +from operator import itemgetter as _itemgetter, eq as _eq +from keyword import iskeyword as _iskeyword +import sys as _sys +import heapq as _heapq +from itertools import repeat as _repeat, chain as _chain, starmap as _starmap +from itertools import imap as _imap + +try: + from thread import get_ident as _get_ident +except ImportError: + from dummy_thread import get_ident as _get_ident + + +################################################################################ +### OrderedDict +################################################################################ + +class OrderedDict(dict): + 'Dictionary that remembers insertion order' + # An inherited dict maps keys to values. + # The inherited dict provides __getitem__, __len__, __contains__, and get. + # The remaining methods are order-aware. + # Big-O running times for all methods are the same as regular dictionaries. + + # The internal self.__map dict maps keys to links in a doubly linked list. 
+ # The circular doubly linked list starts and ends with a sentinel element. + # The sentinel element never gets deleted (this simplifies the algorithm). + # Each link is stored as a list of length three: [PREV, NEXT, KEY]. + + def __init__(*args, **kwds): + '''Initialize an ordered dictionary. The signature is the same as + regular dictionaries, but keyword arguments are not recommended because + their insertion order is arbitrary. + + ''' + if not args: + raise TypeError("descriptor '__init__' of 'OrderedDict' object " + "needs an argument") + self = args[0] + args = args[1:] + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + try: + self.__root + except AttributeError: + self.__root = root = [] # sentinel node + root[:] = [root, root, None] + self.__map = {} + self.__update(*args, **kwds) + + def __setitem__(self, key, value, dict_setitem=dict.__setitem__): + 'od.__setitem__(i, y) <==> od[i]=y' + # Setting a new item creates a new link at the end of the linked list, + # and the inherited dictionary is updated with the new key/value pair. + if key not in self: + root = self.__root + last = root[0] + last[1] = root[0] = self.__map[key] = [last, root, key] + return dict_setitem(self, key, value) + + def __delitem__(self, key, dict_delitem=dict.__delitem__): + 'od.__delitem__(y) <==> del od[y]' + # Deleting an existing item uses self.__map to find the link which gets + # removed by updating the links in the predecessor and successor nodes. + dict_delitem(self, key) + link_prev, link_next, _ = self.__map.pop(key) + link_prev[1] = link_next # update link_prev[NEXT] + link_next[0] = link_prev # update link_next[PREV] + + def __iter__(self): + 'od.__iter__() <==> iter(od)' + # Traverse the linked list in order. + root = self.__root + curr = root[1] # start at the first node + while curr is not root: + yield curr[2] # yield the curr[KEY] + curr = curr[1] # move to next node + + def __reversed__(self): + 'od.__reversed__() <==> reversed(od)' + # Traverse the linked list in reverse order. + root = self.__root + curr = root[0] # start at the last node + while curr is not root: + yield curr[2] # yield the curr[KEY] + curr = curr[0] # move to previous node + + def clear(self): + 'od.clear() -> None. Remove all items from od.' + root = self.__root + root[:] = [root, root, None] + self.__map.clear() + dict.clear(self) + + # -- the following methods do not depend on the internal structure -- + + def keys(self): + 'od.keys() -> list of keys in od' + return list(self) + + def values(self): + 'od.values() -> list of values in od' + return [self[key] for key in self] + + def items(self): + 'od.items() -> list of (key, value) pairs in od' + return [(key, self[key]) for key in self] + + def iterkeys(self): + 'od.iterkeys() -> an iterator over the keys in od' + return iter(self) + + def itervalues(self): + 'od.itervalues -> an iterator over the values in od' + for k in self: + yield self[k] + + def iteritems(self): + 'od.iteritems -> an iterator over the (key, value) pairs in od' + for k in self: + yield (k, self[k]) + + update = MutableMapping.update + + __update = update # let subclasses override update without breaking __init__ + + __marker = object() + + def pop(self, key, default=__marker): + '''od.pop(k[,d]) -> v, remove specified key and return the corresponding + value. If key is not found, d is returned if given, otherwise KeyError + is raised. 
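+
+        For example:
+
+        >>> od = OrderedDict([('a', 1)])
+        >>> od.pop('a')
+        1
+        >>> od.pop('missing', 0)
+        0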
+ + ''' + if key in self: + result = self[key] + del self[key] + return result + if default is self.__marker: + raise KeyError(key) + return default + + def setdefault(self, key, default=None): + 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' + if key in self: + return self[key] + self[key] = default + return default + + def popitem(self, last=True): + '''od.popitem() -> (k, v), return and remove a (key, value) pair. + Pairs are returned in LIFO order if last is true or FIFO order if false. + + ''' + if not self: + raise KeyError('dictionary is empty') + key = next(reversed(self) if last else iter(self)) + value = self.pop(key) + return key, value + + def __repr__(self, _repr_running={}): + 'od.__repr__() <==> repr(od)' + call_key = id(self), _get_ident() + if call_key in _repr_running: + return '...' + _repr_running[call_key] = 1 + try: + if not self: + return '%s()' % (self.__class__.__name__,) + return '%s(%r)' % (self.__class__.__name__, self.items()) + finally: + del _repr_running[call_key] + + def __reduce__(self): + 'Return state information for pickling' + items = [[k, self[k]] for k in self] + inst_dict = vars(self).copy() + for k in vars(OrderedDict()): + inst_dict.pop(k, None) + if inst_dict: + return (self.__class__, (items,), inst_dict) + return self.__class__, (items,) + + def copy(self): + 'od.copy() -> a shallow copy of od' + return self.__class__(self) + + @classmethod + def fromkeys(cls, iterable, value=None): + '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S. + If not specified, the value defaults to None. + + ''' + self = cls() + for key in iterable: + self[key] = value + return self + + def __eq__(self, other): + '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive + while comparison to a regular mapping is order-insensitive. 
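+
+        For example:
+
+        >>> od = OrderedDict([('a', 1), ('b', 2)])
+        >>> od == OrderedDict([('b', 2), ('a', 1)])
+        False
+        >>> od == {'b': 2, 'a': 1}
+        True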
+ + ''' + if isinstance(other, OrderedDict): + return dict.__eq__(self, other) and all(_imap(_eq, self, other)) + return dict.__eq__(self, other) + + def __ne__(self, other): + 'od.__ne__(y) <==> od!=y' + return not self == other + + # -- the following methods support python 3.x style dictionary views -- + + def viewkeys(self): + "od.viewkeys() -> a set-like object providing a view on od's keys" + return KeysView(self) + + def viewvalues(self): + "od.viewvalues() -> an object providing a view on od's values" + return ValuesView(self) + + def viewitems(self): + "od.viewitems() -> a set-like object providing a view on od's items" + return ItemsView(self) + + +################################################################################ +### namedtuple +################################################################################ + +_class_template = '''\ +class {typename}(tuple): + '{typename}({arg_list})' + + __slots__ = () + + _fields = {field_names!r} + + def __new__(_cls, {arg_list}): + 'Create new instance of {typename}({arg_list})' + return _tuple.__new__(_cls, ({arg_list})) + + @classmethod + def _make(cls, iterable, new=tuple.__new__, len=len): + 'Make a new {typename} object from a sequence or iterable' + result = new(cls, iterable) + if len(result) != {num_fields:d}: + raise TypeError('Expected {num_fields:d} arguments, got %d' % len(result)) + return result + + def __repr__(self): + 'Return a nicely formatted representation string' + return '{typename}({repr_fmt})' % self + + def _asdict(self): + 'Return a new OrderedDict which maps field names to their values' + return OrderedDict(zip(self._fields, self)) + + def _replace(_self, **kwds): + 'Return a new {typename} object replacing specified fields with new values' + result = _self._make(map(kwds.pop, {field_names!r}, _self)) + if kwds: + raise ValueError('Got unexpected field names: %r' % kwds.keys()) + return result + + def __getnewargs__(self): + 'Return self as a plain tuple. Used by copy and pickle.' + return tuple(self) + + __dict__ = _property(_asdict) + + def __getstate__(self): + 'Exclude the OrderedDict from pickling' + pass + +{field_defs} +''' + +_repr_template = '{name}=%r' + +_field_template = '''\ + {name} = _property(_itemgetter({index:d}), doc='Alias for field number {index:d}') +''' + +def namedtuple(typename, field_names, verbose=False, rename=False): + """Returns a new subclass of tuple with named fields. + + >>> Point = namedtuple('Point', ['x', 'y']) + >>> Point.__doc__ # docstring for the new class + 'Point(x, y)' + >>> p = Point(11, y=22) # instantiate with positional args or keywords + >>> p[0] + p[1] # indexable like a plain tuple + 33 + >>> x, y = p # unpack like a regular tuple + >>> x, y + (11, 22) + >>> p.x + p.y # fields also accessible by name + 33 + >>> d = p._asdict() # convert to a dictionary + >>> d['x'] + 11 + >>> Point(**d) # convert from a dictionary + Point(x=11, y=22) + >>> p._replace(x=100) # _replace() is like str.replace() but targets named fields + Point(x=100, y=22) + + """ + + # Validate the field names. At the user's option, either generate an error + # message or automatically replace the field name with a valid name. 
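+    # For example, with rename=True the spec ['abc', 'def', 'abc'] becomes
+    # ('abc', '_1', '_2'): 'def' is a keyword and the second 'abc' is a
+    # duplicate, so both are replaced positionally.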
+ if isinstance(field_names, basestring): + field_names = field_names.replace(',', ' ').split() + field_names = map(str, field_names) + typename = str(typename) + if rename: + seen = set() + for index, name in enumerate(field_names): + if (not all(c.isalnum() or c=='_' for c in name) + or _iskeyword(name) + or not name + or name[0].isdigit() + or name.startswith('_') + or name in seen): + field_names[index] = '_%d' % index + seen.add(name) + for name in [typename] + field_names: + if type(name) != str: + raise TypeError('Type names and field names must be strings') + if not all(c.isalnum() or c=='_' for c in name): + raise ValueError('Type names and field names can only contain ' + 'alphanumeric characters and underscores: %r' % name) + if _iskeyword(name): + raise ValueError('Type names and field names cannot be a ' + 'keyword: %r' % name) + if name[0].isdigit(): + raise ValueError('Type names and field names cannot start with ' + 'a number: %r' % name) + seen = set() + for name in field_names: + if name.startswith('_') and not rename: + raise ValueError('Field names cannot start with an underscore: ' + '%r' % name) + if name in seen: + raise ValueError('Encountered duplicate field name: %r' % name) + seen.add(name) + + # Fill-in the class template + class_definition = _class_template.format( + typename = typename, + field_names = tuple(field_names), + num_fields = len(field_names), + arg_list = repr(tuple(field_names)).replace("'", "")[1:-1], + repr_fmt = ', '.join(_repr_template.format(name=name) + for name in field_names), + field_defs = '\n'.join(_field_template.format(index=index, name=name) + for index, name in enumerate(field_names)) + ) + if verbose: + print class_definition + + # Execute the template string in a temporary namespace and support + # tracing utilities by setting a value for frame.f_globals['__name__'] + namespace = dict(_itemgetter=_itemgetter, __name__='namedtuple_%s' % typename, + OrderedDict=OrderedDict, _property=property, _tuple=tuple) + try: + exec class_definition in namespace + except SyntaxError as e: + raise SyntaxError(e.message + ':\n' + class_definition) + result = namespace[typename] + + # For pickling to work, the __module__ variable needs to be set to the frame + # where the named tuple is created. Bypass this step in environments where + # sys._getframe is not defined (Jython for example) or sys._getframe is not + # defined for arguments greater than 0 (IronPython). + try: + result.__module__ = _sys._getframe(1).f_globals.get('__name__', '__main__') + except (AttributeError, ValueError): + pass + + return result + + +######################################################################## +### Counter +######################################################################## + +class Counter(dict): + '''Dict subclass for counting hashable items. Sometimes called a bag + or multiset. Elements are stored as dictionary keys and their counts + are stored as dictionary values. + + >>> c = Counter('abcdeabcdabcaba') # count elements from a string + + >>> c.most_common(3) # three most common elements + [('a', 5), ('b', 4), ('c', 3)] + >>> sorted(c) # list all unique elements + ['a', 'b', 'c', 'd', 'e'] + >>> ''.join(sorted(c.elements())) # list elements with repetitions + 'aaaaabbbbcccdde' + >>> sum(c.values()) # total of all counts + 15 + + >>> c['a'] # count of letter 'a' + 5 + >>> for elem in 'shazam': # update counts from an iterable + ... 
c[elem] += 1 # by adding 1 to each element's count + >>> c['a'] # now there are seven 'a' + 7 + >>> del c['b'] # remove all 'b' + >>> c['b'] # now there are zero 'b' + 0 + + >>> d = Counter('simsalabim') # make another counter + >>> c.update(d) # add in the second counter + >>> c['a'] # now there are nine 'a' + 9 + + >>> c.clear() # empty the counter + >>> c + Counter() + + Note: If a count is set to zero or reduced to zero, it will remain + in the counter until the entry is deleted or the counter is cleared: + + >>> c = Counter('aaabbc') + >>> c['b'] -= 2 # reduce the count of 'b' by two + >>> c.most_common() # 'b' is still in, but its count is zero + [('a', 3), ('c', 1), ('b', 0)] + + ''' + # References: + # http://en.wikipedia.org/wiki/Multiset + # http://www.gnu.org/software/smalltalk/manual-base/html_node/Bag.html + # http://www.demo2s.com/Tutorial/Cpp/0380__set-multiset/Catalog0380__set-multiset.htm + # http://code.activestate.com/recipes/259174/ + # Knuth, TAOCP Vol. II section 4.6.3 + + def __init__(*args, **kwds): + '''Create a new, empty Counter object. And if given, count elements + from an input iterable. Or, initialize the count from another mapping + of elements to their counts. + + >>> c = Counter() # a new, empty counter + >>> c = Counter('gallahad') # a new counter from an iterable + >>> c = Counter({'a': 4, 'b': 2}) # a new counter from a mapping + >>> c = Counter(a=4, b=2) # a new counter from keyword args + + ''' + if not args: + raise TypeError("descriptor '__init__' of 'Counter' object " + "needs an argument") + self = args[0] + args = args[1:] + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + super(Counter, self).__init__() + self.update(*args, **kwds) + + def __missing__(self, key): + 'The count of elements not in the Counter is zero.' + # Needed so that self[missing_item] does not raise KeyError + return 0 + + def most_common(self, n=None): + '''List the n most common elements and their counts from the most + common to the least. If n is None, then list all element counts. + + >>> Counter('abcdeabcdabcaba').most_common(3) + [('a', 5), ('b', 4), ('c', 3)] + + ''' + # Emulate Bag.sortedByCount from Smalltalk + if n is None: + return sorted(self.iteritems(), key=_itemgetter(1), reverse=True) + return _heapq.nlargest(n, self.iteritems(), key=_itemgetter(1)) + + def elements(self): + '''Iterator over elements repeating each as many times as its count. + + >>> c = Counter('ABCABC') + >>> sorted(c.elements()) + ['A', 'A', 'B', 'B', 'C', 'C'] + + # Knuth's example for prime factors of 1836: 2**2 * 3**3 * 17**1 + >>> prime_factors = Counter({2: 2, 3: 3, 17: 1}) + >>> product = 1 + >>> for factor in prime_factors.elements(): # loop over factors + ... product *= factor # and multiply them + >>> product + 1836 + + Note, if an element's count has been set to zero or is a negative + number, elements() will ignore it. + + ''' + # Emulate Bag.do from Smalltalk and Multiset.begin from C++. + return _chain.from_iterable(_starmap(_repeat, self.iteritems())) + + # Override dict methods where necessary + + @classmethod + def fromkeys(cls, iterable, v=None): + # There is no equivalent method for counters because setting v=1 + # means that no element can have a count greater than one. + raise NotImplementedError( + 'Counter.fromkeys() is undefined. Use Counter(iterable) instead.') + + def update(*args, **kwds): + '''Like dict.update() but add counts instead of replacing them. 
+ + Source can be an iterable, a dictionary, or another Counter instance. + + >>> c = Counter('which') + >>> c.update('witch') # add elements from another iterable + >>> d = Counter('watch') + >>> c.update(d) # add elements from another counter + >>> c['h'] # four 'h' in which, witch, and watch + 4 + + ''' + # The regular dict.update() operation makes no sense here because the + # replace behavior results in the some of original untouched counts + # being mixed-in with all of the other counts for a mismash that + # doesn't have a straight-forward interpretation in most counting + # contexts. Instead, we implement straight-addition. Both the inputs + # and outputs are allowed to contain zero and negative counts. + + if not args: + raise TypeError("descriptor 'update' of 'Counter' object " + "needs an argument") + self = args[0] + args = args[1:] + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + iterable = args[0] if args else None + if iterable is not None: + if isinstance(iterable, Mapping): + if self: + self_get = self.get + for elem, count in iterable.iteritems(): + self[elem] = self_get(elem, 0) + count + else: + super(Counter, self).update(iterable) # fast path when counter is empty + else: + self_get = self.get + for elem in iterable: + self[elem] = self_get(elem, 0) + 1 + if kwds: + self.update(kwds) + + def subtract(*args, **kwds): + '''Like dict.update() but subtracts counts instead of replacing them. + Counts can be reduced below zero. Both the inputs and outputs are + allowed to contain zero and negative counts. + + Source can be an iterable, a dictionary, or another Counter instance. + + >>> c = Counter('which') + >>> c.subtract('witch') # subtract elements from another iterable + >>> c.subtract(Counter('watch')) # subtract elements from another counter + >>> c['h'] # 2 in which, minus 1 in witch, minus 1 in watch + 0 + >>> c['w'] # 1 in which, minus 1 in witch, minus 1 in watch + -1 + + ''' + if not args: + raise TypeError("descriptor 'subtract' of 'Counter' object " + "needs an argument") + self = args[0] + args = args[1:] + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + iterable = args[0] if args else None + if iterable is not None: + self_get = self.get + if isinstance(iterable, Mapping): + for elem, count in iterable.items(): + self[elem] = self_get(elem, 0) - count + else: + for elem in iterable: + self[elem] = self_get(elem, 0) - 1 + if kwds: + self.subtract(kwds) + + def copy(self): + 'Return a shallow copy.' + return self.__class__(self) + + def __reduce__(self): + return self.__class__, (dict(self),) + + def __delitem__(self, elem): + 'Like dict.__delitem__() but does not raise KeyError for missing values.' + if elem in self: + super(Counter, self).__delitem__(elem) + + def __repr__(self): + if not self: + return '%s()' % self.__class__.__name__ + items = ', '.join(map('%r: %r'.__mod__, self.most_common())) + return '%s({%s})' % (self.__class__.__name__, items) + + # Multiset-style mathematical operations discussed in: + # Knuth TAOCP Volume II section 4.6.3 exercise 19 + # and at http://en.wikipedia.org/wiki/Multiset + # + # Outputs guaranteed to only include positive counts. + # + # To strip negative and zero counts, add-in an empty counter: + # c += Counter() + + def __add__(self, other): + '''Add counts from two counters. 
+ + >>> Counter('abbb') + Counter('bcc') + Counter({'b': 4, 'c': 2, 'a': 1}) + + ''' + if not isinstance(other, Counter): + return NotImplemented + result = Counter() + for elem, count in self.items(): + newcount = count + other[elem] + if newcount > 0: + result[elem] = newcount + for elem, count in other.items(): + if elem not in self and count > 0: + result[elem] = count + return result + + def __sub__(self, other): + ''' Subtract count, but keep only results with positive counts. + + >>> Counter('abbbc') - Counter('bccd') + Counter({'b': 2, 'a': 1}) + + ''' + if not isinstance(other, Counter): + return NotImplemented + result = Counter() + for elem, count in self.items(): + newcount = count - other[elem] + if newcount > 0: + result[elem] = newcount + for elem, count in other.items(): + if elem not in self and count < 0: + result[elem] = 0 - count + return result + + def __or__(self, other): + '''Union is the maximum of value in either of the input counters. + + >>> Counter('abbb') | Counter('bcc') + Counter({'b': 3, 'c': 2, 'a': 1}) + + ''' + if not isinstance(other, Counter): + return NotImplemented + result = Counter() + for elem, count in self.items(): + other_count = other[elem] + newcount = other_count if count < other_count else count + if newcount > 0: + result[elem] = newcount + for elem, count in other.items(): + if elem not in self and count > 0: + result[elem] = count + return result + + def __and__(self, other): + ''' Intersection is the minimum of corresponding counts. + + >>> Counter('abbb') & Counter('bcc') + Counter({'b': 1}) + + ''' + if not isinstance(other, Counter): + return NotImplemented + result = Counter() + for elem, count in self.items(): + other_count = other[elem] + newcount = count if count < other_count else other_count + if newcount > 0: + result[elem] = newcount + return result + + +if __name__ == '__main__': + # verify that instances can be pickled + from cPickle import loads, dumps + Point = namedtuple('Point', 'x, y', True) + p = Point(x=10, y=20) + assert p == loads(dumps(p)) + + # test and demonstrate ability to override methods + class Point(namedtuple('Point', 'x y')): + __slots__ = () + @property + def hypot(self): + return (self.x ** 2 + self.y ** 2) ** 0.5 + def __str__(self): + return 'Point: x=%6.3f y=%6.3f hypot=%6.3f' % (self.x, self.y, self.hypot) + + for p in Point(3, 4), Point(14, 5/7.): + print p + + class Point(namedtuple('Point', 'x y')): + 'Point class with optimized _make() and _replace() without error-checking' + __slots__ = () + _make = classmethod(tuple.__new__) + def _replace(self, _map=map, **kwds): + return self._make(_map(kwds.get, ('x', 'y'), self)) + + print Point(11, 22)._replace(x=100) + + Point3D = namedtuple('Point3D', Point._fields + ('z',)) + print Point3D.__doc__ + + import doctest + TestResults = namedtuple('TestResults', 'failed attempted') + print TestResults(*doctest.testmod()) diff --git a/typing_extensions/test_data/python-3.4.6/_collections_abc.py b/typing_extensions/test_data/python-3.4.6/_collections_abc.py new file mode 100644 index 00000000..33b59aba --- /dev/null +++ b/typing_extensions/test_data/python-3.4.6/_collections_abc.py @@ -0,0 +1,748 @@ +# Copyright 2007 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Abstract Base Classes (ABCs) for collections, according to PEP 3119. + +Unit tests are in test_collections. 
+""" + +from abc import ABCMeta, abstractmethod +import sys + +__all__ = ["Hashable", "Iterable", "Iterator", + "Sized", "Container", "Callable", + "Set", "MutableSet", + "Mapping", "MutableMapping", + "MappingView", "KeysView", "ItemsView", "ValuesView", + "Sequence", "MutableSequence", + "ByteString", + ] + +# This module has been renamed from collections.abc to _collections_abc to +# speed up interpreter startup. Some of the types such as MutableMapping are +# required early but collections module imports a lot of other modules. +# See issue #19218 +__name__ = "collections.abc" + +# Private list of types that we want to register with the various ABCs +# so that they will pass tests like: +# it = iter(somebytearray) +# assert isinstance(it, Iterable) +# Note: in other implementations, these types many not be distinct +# and they make have their own implementation specific types that +# are not included on this list. +bytes_iterator = type(iter(b'')) +bytearray_iterator = type(iter(bytearray())) +#callable_iterator = ??? +dict_keyiterator = type(iter({}.keys())) +dict_valueiterator = type(iter({}.values())) +dict_itemiterator = type(iter({}.items())) +list_iterator = type(iter([])) +list_reverseiterator = type(iter(reversed([]))) +range_iterator = type(iter(range(0))) +set_iterator = type(iter(set())) +str_iterator = type(iter("")) +tuple_iterator = type(iter(())) +zip_iterator = type(iter(zip())) +## views ## +dict_keys = type({}.keys()) +dict_values = type({}.values()) +dict_items = type({}.items()) +## misc ## +mappingproxy = type(type.__dict__) + + +### ONE-TRICK PONIES ### + +class Hashable(metaclass=ABCMeta): + + __slots__ = () + + @abstractmethod + def __hash__(self): + return 0 + + @classmethod + def __subclasshook__(cls, C): + if cls is Hashable: + for B in C.__mro__: + if "__hash__" in B.__dict__: + if B.__dict__["__hash__"]: + return True + break + return NotImplemented + + +class Iterable(metaclass=ABCMeta): + + __slots__ = () + + @abstractmethod + def __iter__(self): + while False: + yield None + + @classmethod + def __subclasshook__(cls, C): + if cls is Iterable: + if any("__iter__" in B.__dict__ for B in C.__mro__): + return True + return NotImplemented + + +class Iterator(Iterable): + + __slots__ = () + + @abstractmethod + def __next__(self): + 'Return the next item from the iterator. 
When exhausted, raise StopIteration' + raise StopIteration + + def __iter__(self): + return self + + @classmethod + def __subclasshook__(cls, C): + if cls is Iterator: + if (any("__next__" in B.__dict__ for B in C.__mro__) and + any("__iter__" in B.__dict__ for B in C.__mro__)): + return True + return NotImplemented + +Iterator.register(bytes_iterator) +Iterator.register(bytearray_iterator) +#Iterator.register(callable_iterator) +Iterator.register(dict_keyiterator) +Iterator.register(dict_valueiterator) +Iterator.register(dict_itemiterator) +Iterator.register(list_iterator) +Iterator.register(list_reverseiterator) +Iterator.register(range_iterator) +Iterator.register(set_iterator) +Iterator.register(str_iterator) +Iterator.register(tuple_iterator) +Iterator.register(zip_iterator) + +class Sized(metaclass=ABCMeta): + + __slots__ = () + + @abstractmethod + def __len__(self): + return 0 + + @classmethod + def __subclasshook__(cls, C): + if cls is Sized: + if any("__len__" in B.__dict__ for B in C.__mro__): + return True + return NotImplemented + + +class Container(metaclass=ABCMeta): + + __slots__ = () + + @abstractmethod + def __contains__(self, x): + return False + + @classmethod + def __subclasshook__(cls, C): + if cls is Container: + if any("__contains__" in B.__dict__ for B in C.__mro__): + return True + return NotImplemented + + +class Callable(metaclass=ABCMeta): + + __slots__ = () + + @abstractmethod + def __call__(self, *args, **kwds): + return False + + @classmethod + def __subclasshook__(cls, C): + if cls is Callable: + if any("__call__" in B.__dict__ for B in C.__mro__): + return True + return NotImplemented + + +### SETS ### + + +class Set(Sized, Iterable, Container): + + """A set is a finite, iterable container. + + This class provides concrete generic implementations of all + methods except for __contains__, __iter__ and __len__. + + To override the comparisons (presumably for speed, as the + semantics are fixed), redefine __le__ and __ge__, + then the other operations will automatically follow suit. + """ + + __slots__ = () + + def __le__(self, other): + if not isinstance(other, Set): + return NotImplemented + if len(self) > len(other): + return False + for elem in self: + if elem not in other: + return False + return True + + def __lt__(self, other): + if not isinstance(other, Set): + return NotImplemented + return len(self) < len(other) and self.__le__(other) + + def __gt__(self, other): + if not isinstance(other, Set): + return NotImplemented + return len(self) > len(other) and self.__ge__(other) + + def __ge__(self, other): + if not isinstance(other, Set): + return NotImplemented + if len(self) < len(other): + return False + for elem in other: + if elem not in self: + return False + return True + + def __eq__(self, other): + if not isinstance(other, Set): + return NotImplemented + return len(self) == len(other) and self.__le__(other) + + @classmethod + def _from_iterable(cls, it): + '''Construct an instance of the class from any iterable input. + + Must override this method if the class constructor signature + does not accept an iterable for an input. + ''' + return cls(it) + + def __and__(self, other): + if not isinstance(other, Iterable): + return NotImplemented + return self._from_iterable(value for value in other if value in self) + + __rand__ = __and__ + + def isdisjoint(self, other): + 'Return True if two sets have a null intersection.' 
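+        # Scan the other iterable and fail fast on the first shared
+        # element; completing the loop means the intersection is empty.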
+ for value in other: + if value in self: + return False + return True + + def __or__(self, other): + if not isinstance(other, Iterable): + return NotImplemented + chain = (e for s in (self, other) for e in s) + return self._from_iterable(chain) + + __ror__ = __or__ + + def __sub__(self, other): + if not isinstance(other, Set): + if not isinstance(other, Iterable): + return NotImplemented + other = self._from_iterable(other) + return self._from_iterable(value for value in self + if value not in other) + + def __rsub__(self, other): + if not isinstance(other, Set): + if not isinstance(other, Iterable): + return NotImplemented + other = self._from_iterable(other) + return self._from_iterable(value for value in other + if value not in self) + + def __xor__(self, other): + if not isinstance(other, Set): + if not isinstance(other, Iterable): + return NotImplemented + other = self._from_iterable(other) + return (self - other) | (other - self) + + __rxor__ = __xor__ + + def _hash(self): + """Compute the hash value of a set. + + Note that we don't define __hash__: not all sets are hashable. + But if you define a hashable set type, its __hash__ should + call this function. + + This must be compatible __eq__. + + All sets ought to compare equal if they contain the same + elements, regardless of how they are implemented, and + regardless of the order of the elements; so there's not much + freedom for __eq__ or __hash__. We match the algorithm used + by the built-in frozenset type. + """ + MAX = sys.maxsize + MASK = 2 * MAX + 1 + n = len(self) + h = 1927868237 * (n + 1) + h &= MASK + for x in self: + hx = hash(x) + h ^= (hx ^ (hx << 16) ^ 89869747) * 3644798167 + h &= MASK + h = h * 69069 + 907133923 + h &= MASK + if h > MAX: + h -= MASK + 1 + if h == -1: + h = 590923713 + return h + +Set.register(frozenset) + + +class MutableSet(Set): + """A mutable set is a finite, iterable container. + + This class provides concrete generic implementations of all + methods except for __contains__, __iter__, __len__, + add(), and discard(). + + To override the comparisons (presumably for speed, as the + semantics are fixed), all you have to do is redefine __le__ and + then the other operations will automatically follow suit. + """ + + __slots__ = () + + @abstractmethod + def add(self, value): + """Add an element.""" + raise NotImplementedError + + @abstractmethod + def discard(self, value): + """Remove an element. Do not raise an exception if absent.""" + raise NotImplementedError + + def remove(self, value): + """Remove an element. If not a member, raise a KeyError.""" + if value not in self: + raise KeyError(value) + self.discard(value) + + def pop(self): + """Return the popped value. Raise KeyError if empty.""" + it = iter(self) + try: + value = next(it) + except StopIteration: + raise KeyError + self.discard(value) + return value + + def clear(self): + """This is slow (creates N new iterators!) 
but effective.""" + try: + while True: + self.pop() + except KeyError: + pass + + def __ior__(self, it): + for value in it: + self.add(value) + return self + + def __iand__(self, it): + for value in (self - it): + self.discard(value) + return self + + def __ixor__(self, it): + if it is self: + self.clear() + else: + if not isinstance(it, Set): + it = self._from_iterable(it) + for value in it: + if value in self: + self.discard(value) + else: + self.add(value) + return self + + def __isub__(self, it): + if it is self: + self.clear() + else: + for value in it: + self.discard(value) + return self + +MutableSet.register(set) + + +### MAPPINGS ### + + +class Mapping(Sized, Iterable, Container): + + __slots__ = () + + """A Mapping is a generic container for associating key/value + pairs. + + This class provides concrete generic implementations of all + methods except for __getitem__, __iter__, and __len__. + + """ + + @abstractmethod + def __getitem__(self, key): + raise KeyError + + def get(self, key, default=None): + 'D.get(k[,d]) -> D[k] if k in D, else d. d defaults to None.' + try: + return self[key] + except KeyError: + return default + + def __contains__(self, key): + try: + self[key] + except KeyError: + return False + else: + return True + + def keys(self): + "D.keys() -> a set-like object providing a view on D's keys" + return KeysView(self) + + def items(self): + "D.items() -> a set-like object providing a view on D's items" + return ItemsView(self) + + def values(self): + "D.values() -> an object providing a view on D's values" + return ValuesView(self) + + def __eq__(self, other): + if not isinstance(other, Mapping): + return NotImplemented + return dict(self.items()) == dict(other.items()) + +Mapping.register(mappingproxy) + + +class MappingView(Sized): + + def __init__(self, mapping): + self._mapping = mapping + + def __len__(self): + return len(self._mapping) + + def __repr__(self): + return '{0.__class__.__name__}({0._mapping!r})'.format(self) + + +class KeysView(MappingView, Set): + + @classmethod + def _from_iterable(self, it): + return set(it) + + def __contains__(self, key): + return key in self._mapping + + def __iter__(self): + yield from self._mapping + +KeysView.register(dict_keys) + + +class ItemsView(MappingView, Set): + + @classmethod + def _from_iterable(self, it): + return set(it) + + def __contains__(self, item): + key, value = item + try: + v = self._mapping[key] + except KeyError: + return False + else: + return v == value + + def __iter__(self): + for key in self._mapping: + yield (key, self._mapping[key]) + +ItemsView.register(dict_items) + + +class ValuesView(MappingView): + + def __contains__(self, value): + for key in self._mapping: + if value == self._mapping[key]: + return True + return False + + def __iter__(self): + for key in self._mapping: + yield self._mapping[key] + +ValuesView.register(dict_values) + + +class MutableMapping(Mapping): + + __slots__ = () + + """A MutableMapping is a generic container for associating + key/value pairs. + + This class provides concrete generic implementations of all + methods except for __getitem__, __setitem__, __delitem__, + __iter__, and __len__. + + """ + + @abstractmethod + def __setitem__(self, key, value): + raise KeyError + + @abstractmethod + def __delitem__(self, key): + raise KeyError + + __marker = object() + + def pop(self, key, default=__marker): + '''D.pop(k[,d]) -> v, remove specified key and return the corresponding value. + If key is not found, d is returned if given, otherwise KeyError is raised. 
+ ''' + try: + value = self[key] + except KeyError: + if default is self.__marker: + raise + return default + else: + del self[key] + return value + + def popitem(self): + '''D.popitem() -> (k, v), remove and return some (key, value) pair + as a 2-tuple; but raise KeyError if D is empty. + ''' + try: + key = next(iter(self)) + except StopIteration: + raise KeyError + value = self[key] + del self[key] + return key, value + + def clear(self): + 'D.clear() -> None. Remove all items from D.' + try: + while True: + self.popitem() + except KeyError: + pass + + def update(*args, **kwds): + ''' D.update([E, ]**F) -> None. Update D from mapping/iterable E and F. + If E present and has a .keys() method, does: for k in E: D[k] = E[k] + If E present and lacks .keys() method, does: for (k, v) in E: D[k] = v + In either case, this is followed by: for k, v in F.items(): D[k] = v + ''' + if not args: + raise TypeError("descriptor 'update' of 'MutableMapping' object " + "needs an argument") + self, *args = args + if len(args) > 1: + raise TypeError('update expected at most 1 arguments, got %d' % + len(args)) + if args: + other = args[0] + if isinstance(other, Mapping): + for key in other: + self[key] = other[key] + elif hasattr(other, "keys"): + for key in other.keys(): + self[key] = other[key] + else: + for key, value in other: + self[key] = value + for key, value in kwds.items(): + self[key] = value + + def setdefault(self, key, default=None): + 'D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D' + try: + return self[key] + except KeyError: + self[key] = default + return default + +MutableMapping.register(dict) + + +### SEQUENCES ### + + +class Sequence(Sized, Iterable, Container): + + """All the operations on a read-only sequence. + + Concrete subclasses must override __new__ or __init__, + __getitem__, and __len__. + """ + + __slots__ = () + + @abstractmethod + def __getitem__(self, index): + raise IndexError + + def __iter__(self): + i = 0 + try: + while True: + v = self[i] + yield v + i += 1 + except IndexError: + return + + def __contains__(self, value): + for v in self: + if v == value: + return True + return False + + def __reversed__(self): + for i in reversed(range(len(self))): + yield self[i] + + def index(self, value): + '''S.index(value) -> integer -- return first index of value. + Raises ValueError if the value is not present. + ''' + for i, v in enumerate(self): + if v == value: + return i + raise ValueError + + def count(self, value): + 'S.count(value) -> integer -- return number of occurrences of value' + return sum(1 for v in self if v == value) + +Sequence.register(tuple) +Sequence.register(str) +Sequence.register(range) +Sequence.register(memoryview) + + +class ByteString(Sequence): + + """This unifies bytes and bytearray. + + XXX Should add all their methods. + """ + + __slots__ = () + +ByteString.register(bytes) +ByteString.register(bytearray) + + +class MutableSequence(Sequence): + + __slots__ = () + + """All the operations on a read-write sequence. + + Concrete subclasses must provide __new__ or __init__, + __getitem__, __setitem__, __delitem__, __len__, and insert(). 
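+
+    The other mutating methods (append, extend, pop, remove, clear,
+    reverse and __iadd__) are all built on top of those five.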
+ + """ + + @abstractmethod + def __setitem__(self, index, value): + raise IndexError + + @abstractmethod + def __delitem__(self, index): + raise IndexError + + @abstractmethod + def insert(self, index, value): + 'S.insert(index, value) -- insert value before index' + raise IndexError + + def append(self, value): + 'S.append(value) -- append value to the end of the sequence' + self.insert(len(self), value) + + def clear(self): + 'S.clear() -> None -- remove all items from S' + try: + while True: + self.pop() + except IndexError: + pass + + def reverse(self): + 'S.reverse() -- reverse *IN PLACE*' + n = len(self) + for i in range(n//2): + self[i], self[n-i-1] = self[n-i-1], self[i] + + def extend(self, values): + 'S.extend(iterable) -- extend sequence by appending elements from the iterable' + for v in values: + self.append(v) + + def pop(self, index=-1): + '''S.pop([index]) -> item -- remove and return item at index (default last). + Raise IndexError if list is empty or index is out of range. + ''' + v = self[index] + del self[index] + return v + + def remove(self, value): + '''S.remove(value) -- remove first occurrence of value. + Raise ValueError if the value is not present. + ''' + del self[self.index(value)] + + def __iadd__(self, values): + self.extend(values) + return self + +MutableSequence.register(list) +MutableSequence.register(bytearray) # Multiply inheriting, see ByteString diff --git a/typing_extensions/test_data/python-3.4.6/abc.py b/typing_extensions/test_data/python-3.4.6/abc.py new file mode 100644 index 00000000..0358a469 --- /dev/null +++ b/typing_extensions/test_data/python-3.4.6/abc.py @@ -0,0 +1,248 @@ +# Copyright 2007 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Abstract Base Classes (ABCs) according to PEP 3119.""" + +from _weakrefset import WeakSet + + +def abstractmethod(funcobj): + """A decorator indicating abstract methods. + + Requires that the metaclass is ABCMeta or derived from it. A + class that has a metaclass derived from ABCMeta cannot be + instantiated unless all of its abstract methods are overridden. + The abstract methods can be called using any of the normal + 'super' call mechanisms. + + Usage: + + class C(metaclass=ABCMeta): + @abstractmethod + def my_abstract_method(self, ...): + ... + """ + funcobj.__isabstractmethod__ = True + return funcobj + + +class abstractclassmethod(classmethod): + """ + A decorator indicating abstract classmethods. + + Similar to abstractmethod. + + Usage: + + class C(metaclass=ABCMeta): + @abstractclassmethod + def my_abstract_classmethod(cls, ...): + ... + + 'abstractclassmethod' is deprecated. Use 'classmethod' with + 'abstractmethod' instead. + """ + + __isabstractmethod__ = True + + def __init__(self, callable): + callable.__isabstractmethod__ = True + super().__init__(callable) + + +class abstractstaticmethod(staticmethod): + """ + A decorator indicating abstract staticmethods. + + Similar to abstractmethod. + + Usage: + + class C(metaclass=ABCMeta): + @abstractstaticmethod + def my_abstract_staticmethod(...): + ... + + 'abstractstaticmethod' is deprecated. Use 'staticmethod' with + 'abstractmethod' instead. + """ + + __isabstractmethod__ = True + + def __init__(self, callable): + callable.__isabstractmethod__ = True + super().__init__(callable) + + +class abstractproperty(property): + """ + A decorator indicating abstract properties. + + Requires that the metaclass is ABCMeta or derived from it. 
A + class that has a metaclass derived from ABCMeta cannot be + instantiated unless all of its abstract properties are overridden. + The abstract properties can be called using any of the normal + 'super' call mechanisms. + + Usage: + + class C(metaclass=ABCMeta): + @abstractproperty + def my_abstract_property(self): + ... + + This defines a read-only property; you can also define a read-write + abstract property using the 'long' form of property declaration: + + class C(metaclass=ABCMeta): + def getx(self): ... + def setx(self, value): ... + x = abstractproperty(getx, setx) + + 'abstractproperty' is deprecated. Use 'property' with 'abstractmethod' + instead. + """ + + __isabstractmethod__ = True + + +class ABCMeta(type): + + """Metaclass for defining Abstract Base Classes (ABCs). + + Use this metaclass to create an ABC. An ABC can be subclassed + directly, and then acts as a mix-in class. You can also register + unrelated concrete classes (even built-in classes) and unrelated + ABCs as 'virtual subclasses' -- these and their descendants will + be considered subclasses of the registering ABC by the built-in + issubclass() function, but the registering ABC won't show up in + their MRO (Method Resolution Order) nor will method + implementations defined by the registering ABC be callable (not + even via super()). + + """ + + # A global counter that is incremented each time a class is + # registered as a virtual subclass of anything. It forces the + # negative cache to be cleared before its next use. + # Note: this counter is private. Use `abc.get_cache_token()` for + # external code. + _abc_invalidation_counter = 0 + + def __new__(mcls, name, bases, namespace): + cls = super().__new__(mcls, name, bases, namespace) + # Compute set of abstract method names + abstracts = {name + for name, value in namespace.items() + if getattr(value, "__isabstractmethod__", False)} + for base in bases: + for name in getattr(base, "__abstractmethods__", set()): + value = getattr(cls, name, None) + if getattr(value, "__isabstractmethod__", False): + abstracts.add(name) + cls.__abstractmethods__ = frozenset(abstracts) + # Set up inheritance registry + cls._abc_registry = WeakSet() + cls._abc_cache = WeakSet() + cls._abc_negative_cache = WeakSet() + cls._abc_negative_cache_version = ABCMeta._abc_invalidation_counter + return cls + + def register(cls, subclass): + """Register a virtual subclass of an ABC. + + Returns the subclass, to allow usage as a class decorator. + """ + if not isinstance(subclass, type): + raise TypeError("Can only register classes") + if issubclass(subclass, cls): + return subclass # Already a subclass + # Subtle: test for cycles *after* testing for "already a subclass"; + # this means we allow X.register(X) and interpret it as a no-op. 
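+        # Without this check, A.register(B) followed by B.register(A)
+        # would send the recursive registry walk in __subclasscheck__
+        # into infinite recursion.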
+ if issubclass(cls, subclass): + # This would create a cycle, which is bad for the algorithm below + raise RuntimeError("Refusing to create an inheritance cycle") + cls._abc_registry.add(subclass) + ABCMeta._abc_invalidation_counter += 1 # Invalidate negative cache + return subclass + + def _dump_registry(cls, file=None): + """Debug helper to print the ABC registry.""" + print("Class: %s.%s" % (cls.__module__, cls.__name__), file=file) + print("Inv.counter: %s" % ABCMeta._abc_invalidation_counter, file=file) + for name in sorted(cls.__dict__.keys()): + if name.startswith("_abc_"): + value = getattr(cls, name) + print("%s: %r" % (name, value), file=file) + + def __instancecheck__(cls, instance): + """Override for isinstance(instance, cls).""" + # Inline the cache checking + subclass = instance.__class__ + if subclass in cls._abc_cache: + return True + subtype = type(instance) + if subtype is subclass: + if (cls._abc_negative_cache_version == + ABCMeta._abc_invalidation_counter and + subclass in cls._abc_negative_cache): + return False + # Fall back to the subclass check. + return cls.__subclasscheck__(subclass) + return any(cls.__subclasscheck__(c) for c in {subclass, subtype}) + + def __subclasscheck__(cls, subclass): + """Override for issubclass(subclass, cls).""" + # Check cache + if subclass in cls._abc_cache: + return True + # Check negative cache; may have to invalidate + if cls._abc_negative_cache_version < ABCMeta._abc_invalidation_counter: + # Invalidate the negative cache + cls._abc_negative_cache = WeakSet() + cls._abc_negative_cache_version = ABCMeta._abc_invalidation_counter + elif subclass in cls._abc_negative_cache: + return False + # Check the subclass hook + ok = cls.__subclasshook__(subclass) + if ok is not NotImplemented: + assert isinstance(ok, bool) + if ok: + cls._abc_cache.add(subclass) + else: + cls._abc_negative_cache.add(subclass) + return ok + # Check if it's a direct subclass + if cls in getattr(subclass, '__mro__', ()): + cls._abc_cache.add(subclass) + return True + # Check if it's a subclass of a registered class (recursive) + for rcls in cls._abc_registry: + if issubclass(subclass, rcls): + cls._abc_cache.add(subclass) + return True + # Check if it's a subclass of a subclass (recursive) + for scls in cls.__subclasses__(): + if issubclass(subclass, scls): + cls._abc_cache.add(subclass) + return True + # No dice; update negative cache + cls._abc_negative_cache.add(subclass) + return False + + +class ABC(metaclass=ABCMeta): + """Helper class that provides a standard way to create an ABC using + inheritance. + """ + pass + + +def get_cache_token(): + """Returns the current ABC cache token. + + The token is an opaque object (supporting equality testing) identifying the + current version of the ABC cache for virtual subclasses. The token changes + with every call to ``register()`` on any ABC. + """ + return ABCMeta._abc_invalidation_counter diff --git a/typing_extensions/test_data/python-3.4.6/collections/__init__.py b/typing_extensions/test_data/python-3.4.6/collections/__init__.py new file mode 100644 index 00000000..f94210ef --- /dev/null +++ b/typing_extensions/test_data/python-3.4.6/collections/__init__.py @@ -0,0 +1,1161 @@ +__all__ = ['deque', 'defaultdict', 'namedtuple', 'UserDict', 'UserList', + 'UserString', 'Counter', 'OrderedDict', 'ChainMap'] + +# For backwards compatibility, continue to make the collections ABCs +# available through the collections module. 
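+# e.g. "from collections import MutableMapping" keeps working even
+# though the ABCs now live in collections.abc (_collections_abc).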
+from _collections_abc import * +import _collections_abc +__all__ += _collections_abc.__all__ + +from _collections import deque, defaultdict +from operator import itemgetter as _itemgetter, eq as _eq +from keyword import iskeyword as _iskeyword +import sys as _sys +import heapq as _heapq +from _weakref import proxy as _proxy +from itertools import repeat as _repeat, chain as _chain, starmap as _starmap +from reprlib import recursive_repr as _recursive_repr + +################################################################################ +### OrderedDict +################################################################################ + +class _Link(object): + __slots__ = 'prev', 'next', 'key', '__weakref__' + +class OrderedDict(dict): + 'Dictionary that remembers insertion order' + # An inherited dict maps keys to values. + # The inherited dict provides __getitem__, __len__, __contains__, and get. + # The remaining methods are order-aware. + # Big-O running times for all methods are the same as regular dictionaries. + + # The internal self.__map dict maps keys to links in a doubly linked list. + # The circular doubly linked list starts and ends with a sentinel element. + # The sentinel element never gets deleted (this simplifies the algorithm). + # The sentinel is in self.__hardroot with a weakref proxy in self.__root. + # The prev links are weakref proxies (to prevent circular references). + # Individual links are kept alive by the hard reference in self.__map. + # Those hard references disappear when a key is deleted from an OrderedDict. + + def __init__(*args, **kwds): + '''Initialize an ordered dictionary. The signature is the same as + regular dictionaries, but keyword arguments are not recommended because + their insertion order is arbitrary. + + ''' + if not args: + raise TypeError("descriptor '__init__' of 'OrderedDict' object " + "needs an argument") + self, *args = args + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + try: + self.__root + except AttributeError: + self.__hardroot = _Link() + self.__root = root = _proxy(self.__hardroot) + root.prev = root.next = root + self.__map = {} + self.__update(*args, **kwds) + + def __setitem__(self, key, value, + dict_setitem=dict.__setitem__, proxy=_proxy, Link=_Link): + 'od.__setitem__(i, y) <==> od[i]=y' + # Setting a new item creates a new link at the end of the linked list, + # and the inherited dictionary is updated with the new key/value pair. + if key not in self: + self.__map[key] = link = Link() + root = self.__root + last = root.prev + link.prev, link.next, link.key = last, root, key + last.next = link + root.prev = proxy(link) + dict_setitem(self, key, value) + + def __delitem__(self, key, dict_delitem=dict.__delitem__): + 'od.__delitem__(y) <==> del od[y]' + # Deleting an existing item uses self.__map to find the link which gets + # removed by updating the links in the predecessor and successor nodes. + dict_delitem(self, key) + link = self.__map.pop(key) + link_prev = link.prev + link_next = link.next + link_prev.next = link_next + link_next.prev = link_prev + + def __iter__(self): + 'od.__iter__() <==> iter(od)' + # Traverse the linked list in order. + root = self.__root + curr = root.next + while curr is not root: + yield curr.key + curr = curr.next + + def __reversed__(self): + 'od.__reversed__() <==> reversed(od)' + # Traverse the linked list in reverse order. 
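+        # Following .prev from the root sentinel yields keys from the
+        # most recently inserted back to the oldest.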
+ root = self.__root + curr = root.prev + while curr is not root: + yield curr.key + curr = curr.prev + + def clear(self): + 'od.clear() -> None. Remove all items from od.' + root = self.__root + root.prev = root.next = root + self.__map.clear() + dict.clear(self) + + def popitem(self, last=True): + '''od.popitem() -> (k, v), return and remove a (key, value) pair. + Pairs are returned in LIFO order if last is true or FIFO order if false. + + ''' + if not self: + raise KeyError('dictionary is empty') + root = self.__root + if last: + link = root.prev + link_prev = link.prev + link_prev.next = root + root.prev = link_prev + else: + link = root.next + link_next = link.next + root.next = link_next + link_next.prev = root + key = link.key + del self.__map[key] + value = dict.pop(self, key) + return key, value + + def move_to_end(self, key, last=True): + '''Move an existing element to the end (or beginning if last==False). + + Raises KeyError if the element does not exist. + When last=True, acts like a fast version of self[key]=self.pop(key). + + ''' + link = self.__map[key] + link_prev = link.prev + link_next = link.next + link_prev.next = link_next + link_next.prev = link_prev + root = self.__root + if last: + last = root.prev + link.prev = last + link.next = root + last.next = root.prev = link + else: + first = root.next + link.prev = root + link.next = first + root.next = first.prev = link + + def __sizeof__(self): + sizeof = _sys.getsizeof + n = len(self) + 1 # number of links including root + size = sizeof(self.__dict__) # instance dictionary + size += sizeof(self.__map) * 2 # internal dict and inherited dict + size += sizeof(self.__hardroot) * n # link objects + size += sizeof(self.__root) * n # proxy objects + return size + + update = __update = MutableMapping.update + keys = MutableMapping.keys + values = MutableMapping.values + items = MutableMapping.items + __ne__ = MutableMapping.__ne__ + + __marker = object() + + def pop(self, key, default=__marker): + '''od.pop(k[,d]) -> v, remove specified key and return the corresponding + value. If key is not found, d is returned if given, otherwise KeyError + is raised. + + ''' + if key in self: + result = self[key] + del self[key] + return result + if default is self.__marker: + raise KeyError(key) + return default + + def setdefault(self, key, default=None): + 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' + if key in self: + return self[key] + self[key] = default + return default + + @_recursive_repr() + def __repr__(self): + 'od.__repr__() <==> repr(od)' + if not self: + return '%s()' % (self.__class__.__name__,) + return '%s(%r)' % (self.__class__.__name__, list(self.items())) + + def __reduce__(self): + 'Return state information for pickling' + inst_dict = vars(self).copy() + for k in vars(OrderedDict()): + inst_dict.pop(k, None) + return self.__class__, (), inst_dict or None, None, iter(self.items()) + + def copy(self): + 'od.copy() -> a shallow copy of od' + return self.__class__(self) + + @classmethod + def fromkeys(cls, iterable, value=None): + '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S. + If not specified, the value defaults to None. + + ''' + self = cls() + for key in iterable: + self[key] = value + return self + + def __eq__(self, other): + '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive + while comparison to a regular mapping is order-insensitive. 
+ + ''' + if isinstance(other, OrderedDict): + return dict.__eq__(self, other) and all(map(_eq, self, other)) + return dict.__eq__(self, other) + + +################################################################################ +### namedtuple +################################################################################ + +_class_template = """\ +from builtins import property as _property, tuple as _tuple +from operator import itemgetter as _itemgetter +from collections import OrderedDict + +class {typename}(tuple): + '{typename}({arg_list})' + + __slots__ = () + + _fields = {field_names!r} + + def __new__(_cls, {arg_list}): + 'Create new instance of {typename}({arg_list})' + return _tuple.__new__(_cls, ({arg_list})) + + @classmethod + def _make(cls, iterable, new=tuple.__new__, len=len): + 'Make a new {typename} object from a sequence or iterable' + result = new(cls, iterable) + if len(result) != {num_fields:d}: + raise TypeError('Expected {num_fields:d} arguments, got %d' % len(result)) + return result + + def _replace(_self, **kwds): + 'Return a new {typename} object replacing specified fields with new values' + result = _self._make(map(kwds.pop, {field_names!r}, _self)) + if kwds: + raise ValueError('Got unexpected field names: %r' % list(kwds)) + return result + + def __repr__(self): + 'Return a nicely formatted representation string' + return self.__class__.__name__ + '({repr_fmt})' % self + + def _asdict(self): + 'Return a new OrderedDict which maps field names to their values.' + return OrderedDict(zip(self._fields, self)) + + def __getnewargs__(self): + 'Return self as a plain tuple. Used by copy and pickle.' + return tuple(self) + +{field_defs} +""" + +_repr_template = '{name}=%r' + +_field_template = '''\ + {name} = _property(_itemgetter({index:d}), doc='Alias for field number {index:d}') +''' + +def namedtuple(typename, field_names, verbose=False, rename=False): + """Returns a new subclass of tuple with named fields. + + >>> Point = namedtuple('Point', ['x', 'y']) + >>> Point.__doc__ # docstring for the new class + 'Point(x, y)' + >>> p = Point(11, y=22) # instantiate with positional args or keywords + >>> p[0] + p[1] # indexable like a plain tuple + 33 + >>> x, y = p # unpack like a regular tuple + >>> x, y + (11, 22) + >>> p.x + p.y # fields also accessable by name + 33 + >>> d = p._asdict() # convert to a dictionary + >>> d['x'] + 11 + >>> Point(**d) # convert from a dictionary + Point(x=11, y=22) + >>> p._replace(x=100) # _replace() is like str.replace() but targets named fields + Point(x=100, y=22) + + """ + + # Validate the field names. At the user's option, either generate an error + # message or automatically replace the field name with a valid name. 
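+    # e.g. namedtuple('Row', 'id, name') and namedtuple('Row', ['id', 'name'])
+    # are equivalent; with rename=True, an invalid name such as the
+    # keyword 'class' is silently replaced by a positional name like '_1'.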
+ if isinstance(field_names, str): + field_names = field_names.replace(',', ' ').split() + field_names = list(map(str, field_names)) + typename = str(typename) + if rename: + seen = set() + for index, name in enumerate(field_names): + if (not name.isidentifier() + or _iskeyword(name) + or name.startswith('_') + or name in seen): + field_names[index] = '_%d' % index + seen.add(name) + for name in [typename] + field_names: + if type(name) != str: + raise TypeError('Type names and field names must be strings') + if not name.isidentifier(): + raise ValueError('Type names and field names must be valid ' + 'identifiers: %r' % name) + if _iskeyword(name): + raise ValueError('Type names and field names cannot be a ' + 'keyword: %r' % name) + seen = set() + for name in field_names: + if name.startswith('_') and not rename: + raise ValueError('Field names cannot start with an underscore: ' + '%r' % name) + if name in seen: + raise ValueError('Encountered duplicate field name: %r' % name) + seen.add(name) + + # Fill-in the class template + class_definition = _class_template.format( + typename = typename, + field_names = tuple(field_names), + num_fields = len(field_names), + arg_list = repr(tuple(field_names)).replace("'", "")[1:-1], + repr_fmt = ', '.join(_repr_template.format(name=name) + for name in field_names), + field_defs = '\n'.join(_field_template.format(index=index, name=name) + for index, name in enumerate(field_names)) + ) + + # Execute the template string in a temporary namespace and support + # tracing utilities by setting a value for frame.f_globals['__name__'] + namespace = dict(__name__='namedtuple_%s' % typename) + exec(class_definition, namespace) + result = namespace[typename] + result._source = class_definition + if verbose: + print(result._source) + + # For pickling to work, the __module__ variable needs to be set to the frame + # where the named tuple is created. Bypass this step in environments where + # sys._getframe is not defined (Jython for example) or sys._getframe is not + # defined for arguments greater than 0 (IronPython). + try: + result.__module__ = _sys._getframe(1).f_globals.get('__name__', '__main__') + except (AttributeError, ValueError): + pass + + return result + + +######################################################################## +### Counter +######################################################################## + +def _count_elements(mapping, iterable): + 'Tally elements from the iterable.' + mapping_get = mapping.get + for elem in iterable: + mapping[elem] = mapping_get(elem, 0) + 1 + +try: # Load C helper function if available + from _collections import _count_elements +except ImportError: + pass + +class Counter(dict): + '''Dict subclass for counting hashable items. Sometimes called a bag + or multiset. Elements are stored as dictionary keys and their counts + are stored as dictionary values. + + >>> c = Counter('abcdeabcdabcaba') # count elements from a string + + >>> c.most_common(3) # three most common elements + [('a', 5), ('b', 4), ('c', 3)] + >>> sorted(c) # list all unique elements + ['a', 'b', 'c', 'd', 'e'] + >>> ''.join(sorted(c.elements())) # list elements with repetitions + 'aaaaabbbbcccdde' + >>> sum(c.values()) # total of all counts + 15 + + >>> c['a'] # count of letter 'a' + 5 + >>> for elem in 'shazam': # update counts from an iterable + ... 
c[elem] += 1 # by adding 1 to each element's count + >>> c['a'] # now there are seven 'a' + 7 + >>> del c['b'] # remove all 'b' + >>> c['b'] # now there are zero 'b' + 0 + + >>> d = Counter('simsalabim') # make another counter + >>> c.update(d) # add in the second counter + >>> c['a'] # now there are nine 'a' + 9 + + >>> c.clear() # empty the counter + >>> c + Counter() + + Note: If a count is set to zero or reduced to zero, it will remain + in the counter until the entry is deleted or the counter is cleared: + + >>> c = Counter('aaabbc') + >>> c['b'] -= 2 # reduce the count of 'b' by two + >>> c.most_common() # 'b' is still in, but its count is zero + [('a', 3), ('c', 1), ('b', 0)] + + ''' + # References: + # http://en.wikipedia.org/wiki/Multiset + # http://www.gnu.org/software/smalltalk/manual-base/html_node/Bag.html + # http://www.demo2s.com/Tutorial/Cpp/0380__set-multiset/Catalog0380__set-multiset.htm + # http://code.activestate.com/recipes/259174/ + # Knuth, TAOCP Vol. II section 4.6.3 + + def __init__(*args, **kwds): + '''Create a new, empty Counter object. And if given, count elements + from an input iterable. Or, initialize the count from another mapping + of elements to their counts. + + >>> c = Counter() # a new, empty counter + >>> c = Counter('gallahad') # a new counter from an iterable + >>> c = Counter({'a': 4, 'b': 2}) # a new counter from a mapping + >>> c = Counter(a=4, b=2) # a new counter from keyword args + + ''' + if not args: + raise TypeError("descriptor '__init__' of 'Counter' object " + "needs an argument") + self, *args = args + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + super(Counter, self).__init__() + self.update(*args, **kwds) + + def __missing__(self, key): + 'The count of elements not in the Counter is zero.' + # Needed so that self[missing_item] does not raise KeyError + return 0 + + def most_common(self, n=None): + '''List the n most common elements and their counts from the most + common to the least. If n is None, then list all element counts. + + >>> Counter('abcdeabcdabcaba').most_common(3) + [('a', 5), ('b', 4), ('c', 3)] + + ''' + # Emulate Bag.sortedByCount from Smalltalk + if n is None: + return sorted(self.items(), key=_itemgetter(1), reverse=True) + return _heapq.nlargest(n, self.items(), key=_itemgetter(1)) + + def elements(self): + '''Iterator over elements repeating each as many times as its count. + + >>> c = Counter('ABCABC') + >>> sorted(c.elements()) + ['A', 'A', 'B', 'B', 'C', 'C'] + + # Knuth's example for prime factors of 1836: 2**2 * 3**3 * 17**1 + >>> prime_factors = Counter({2: 2, 3: 3, 17: 1}) + >>> product = 1 + >>> for factor in prime_factors.elements(): # loop over factors + ... product *= factor # and multiply them + >>> product + 1836 + + Note, if an element's count has been set to zero or is a negative + number, elements() will ignore it. + + ''' + # Emulate Bag.do from Smalltalk and Multiset.begin from C++. + return _chain.from_iterable(_starmap(_repeat, self.items())) + + # Override dict methods where necessary + + @classmethod + def fromkeys(cls, iterable, v=None): + # There is no equivalent method for counters because setting v=1 + # means that no element can have a count greater than one. + raise NotImplementedError( + 'Counter.fromkeys() is undefined. Use Counter(iterable) instead.') + + def update(*args, **kwds): + '''Like dict.update() but add counts instead of replacing them. + + Source can be an iterable, a dictionary, or another Counter instance. 
+ + >>> c = Counter('which') + >>> c.update('witch') # add elements from another iterable + >>> d = Counter('watch') + >>> c.update(d) # add elements from another counter + >>> c['h'] # four 'h' in which, witch, and watch + 4 + + ''' + # The regular dict.update() operation makes no sense here because the + # replace behavior results in the some of original untouched counts + # being mixed-in with all of the other counts for a mismash that + # doesn't have a straight-forward interpretation in most counting + # contexts. Instead, we implement straight-addition. Both the inputs + # and outputs are allowed to contain zero and negative counts. + + if not args: + raise TypeError("descriptor 'update' of 'Counter' object " + "needs an argument") + self, *args = args + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + iterable = args[0] if args else None + if iterable is not None: + if isinstance(iterable, Mapping): + if self: + self_get = self.get + for elem, count in iterable.items(): + self[elem] = count + self_get(elem, 0) + else: + super(Counter, self).update(iterable) # fast path when counter is empty + else: + _count_elements(self, iterable) + if kwds: + self.update(kwds) + + def subtract(*args, **kwds): + '''Like dict.update() but subtracts counts instead of replacing them. + Counts can be reduced below zero. Both the inputs and outputs are + allowed to contain zero and negative counts. + + Source can be an iterable, a dictionary, or another Counter instance. + + >>> c = Counter('which') + >>> c.subtract('witch') # subtract elements from another iterable + >>> c.subtract(Counter('watch')) # subtract elements from another counter + >>> c['h'] # 2 in which, minus 1 in witch, minus 1 in watch + 0 + >>> c['w'] # 1 in which, minus 1 in witch, minus 1 in watch + -1 + + ''' + if not args: + raise TypeError("descriptor 'subtract' of 'Counter' object " + "needs an argument") + self, *args = args + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + iterable = args[0] if args else None + if iterable is not None: + self_get = self.get + if isinstance(iterable, Mapping): + for elem, count in iterable.items(): + self[elem] = self_get(elem, 0) - count + else: + for elem in iterable: + self[elem] = self_get(elem, 0) - 1 + if kwds: + self.subtract(kwds) + + def copy(self): + 'Return a shallow copy.' + return self.__class__(self) + + def __reduce__(self): + return self.__class__, (dict(self),) + + def __delitem__(self, elem): + 'Like dict.__delitem__() but does not raise KeyError for missing values.' + if elem in self: + super().__delitem__(elem) + + def __repr__(self): + if not self: + return '%s()' % self.__class__.__name__ + try: + items = ', '.join(map('%r: %r'.__mod__, self.most_common())) + return '%s({%s})' % (self.__class__.__name__, items) + except TypeError: + # handle case where values are not orderable + return '{0}({1!r})'.format(self.__class__.__name__, dict(self)) + + # Multiset-style mathematical operations discussed in: + # Knuth TAOCP Volume II section 4.6.3 exercise 19 + # and at http://en.wikipedia.org/wiki/Multiset + # + # Outputs guaranteed to only include positive counts. + # + # To strip negative and zero counts, add-in an empty counter: + # c += Counter() + + def __add__(self, other): + '''Add counts from two counters. 
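+        Only positive results are kept, so e.g.
+        Counter(a=1) + Counter(a=-1) == Counter().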
+ + >>> Counter('abbb') + Counter('bcc') + Counter({'b': 4, 'c': 2, 'a': 1}) + + ''' + if not isinstance(other, Counter): + return NotImplemented + result = Counter() + for elem, count in self.items(): + newcount = count + other[elem] + if newcount > 0: + result[elem] = newcount + for elem, count in other.items(): + if elem not in self and count > 0: + result[elem] = count + return result + + def __sub__(self, other): + ''' Subtract count, but keep only results with positive counts. + + >>> Counter('abbbc') - Counter('bccd') + Counter({'b': 2, 'a': 1}) + + ''' + if not isinstance(other, Counter): + return NotImplemented + result = Counter() + for elem, count in self.items(): + newcount = count - other[elem] + if newcount > 0: + result[elem] = newcount + for elem, count in other.items(): + if elem not in self and count < 0: + result[elem] = 0 - count + return result + + def __or__(self, other): + '''Union is the maximum of value in either of the input counters. + + >>> Counter('abbb') | Counter('bcc') + Counter({'b': 3, 'c': 2, 'a': 1}) + + ''' + if not isinstance(other, Counter): + return NotImplemented + result = Counter() + for elem, count in self.items(): + other_count = other[elem] + newcount = other_count if count < other_count else count + if newcount > 0: + result[elem] = newcount + for elem, count in other.items(): + if elem not in self and count > 0: + result[elem] = count + return result + + def __and__(self, other): + ''' Intersection is the minimum of corresponding counts. + + >>> Counter('abbb') & Counter('bcc') + Counter({'b': 1}) + + ''' + if not isinstance(other, Counter): + return NotImplemented + result = Counter() + for elem, count in self.items(): + other_count = other[elem] + newcount = count if count < other_count else other_count + if newcount > 0: + result[elem] = newcount + return result + + def __pos__(self): + 'Adds an empty counter, effectively stripping negative and zero counts' + return self + Counter() + + def __neg__(self): + '''Subtracts from an empty counter. Strips positive and zero counts, + and flips the sign on negative counts. + + ''' + return Counter() - self + + def _keep_positive(self): + '''Internal method to strip elements with a negative or zero count''' + nonpositive = [elem for elem, count in self.items() if not count > 0] + for elem in nonpositive: + del self[elem] + return self + + def __iadd__(self, other): + '''Inplace add from another counter, keeping only positive counts. + + >>> c = Counter('abbb') + >>> c += Counter('bcc') + >>> c + Counter({'b': 4, 'c': 2, 'a': 1}) + + ''' + for elem, count in other.items(): + self[elem] += count + return self._keep_positive() + + def __isub__(self, other): + '''Inplace subtract counter, but keep only results with positive counts. + + >>> c = Counter('abbbc') + >>> c -= Counter('bccd') + >>> c + Counter({'b': 2, 'a': 1}) + + ''' + for elem, count in other.items(): + self[elem] -= count + return self._keep_positive() + + def __ior__(self, other): + '''Inplace union is the maximum of value from either counter. + + >>> c = Counter('abbb') + >>> c |= Counter('bcc') + >>> c + Counter({'b': 3, 'c': 2, 'a': 1}) + + ''' + for elem, other_count in other.items(): + count = self[elem] + if other_count > count: + self[elem] = other_count + return self._keep_positive() + + def __iand__(self, other): + '''Inplace intersection is the minimum of corresponding counts. 
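+        Keys missing from *other* drop to zero and are then removed
+        by _keep_positive().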
+ + >>> c = Counter('abbb') + >>> c &= Counter('bcc') + >>> c + Counter({'b': 1}) + + ''' + for elem, count in self.items(): + other_count = other[elem] + if other_count < count: + self[elem] = other_count + return self._keep_positive() + + +######################################################################## +### ChainMap (helper for configparser and string.Template) +######################################################################## + +class ChainMap(MutableMapping): + ''' A ChainMap groups multiple dicts (or other mappings) together + to create a single, updateable view. + + The underlying mappings are stored in a list. That list is public and can + accessed or updated using the *maps* attribute. There is no other state. + + Lookups search the underlying mappings successively until a key is found. + In contrast, writes, updates, and deletions only operate on the first + mapping. + + ''' + + def __init__(self, *maps): + '''Initialize a ChainMap by setting *maps* to the given mappings. + If no mappings are provided, a single empty dictionary is used. + + ''' + self.maps = list(maps) or [{}] # always at least one map + + def __missing__(self, key): + raise KeyError(key) + + def __getitem__(self, key): + for mapping in self.maps: + try: + return mapping[key] # can't use 'key in mapping' with defaultdict + except KeyError: + pass + return self.__missing__(key) # support subclasses that define __missing__ + + def get(self, key, default=None): + return self[key] if key in self else default + + def __len__(self): + return len(set().union(*self.maps)) # reuses stored hash values if possible + + def __iter__(self): + return iter(set().union(*self.maps)) + + def __contains__(self, key): + return any(key in m for m in self.maps) + + def __bool__(self): + return any(self.maps) + + @_recursive_repr() + def __repr__(self): + return '{0.__class__.__name__}({1})'.format( + self, ', '.join(map(repr, self.maps))) + + @classmethod + def fromkeys(cls, iterable, *args): + 'Create a ChainMap with a single dict created from the iterable.' + return cls(dict.fromkeys(iterable, *args)) + + def copy(self): + 'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]' + return self.__class__(self.maps[0].copy(), *self.maps[1:]) + + __copy__ = copy + + def new_child(self, m=None): # like Django's Context.push() + '''New ChainMap with a new map followed by all previous maps. + If no map is provided, an empty dict is used. + ''' + if m is None: + m = {} + return self.__class__(m, *self.maps) + + @property + def parents(self): # like Django's Context.pop() + 'New ChainMap from maps[1:].' + return self.__class__(*self.maps[1:]) + + def __setitem__(self, key, value): + self.maps[0][key] = value + + def __delitem__(self, key): + try: + del self.maps[0][key] + except KeyError: + raise KeyError('Key not found in the first mapping: {!r}'.format(key)) + + def popitem(self): + 'Remove and return an item pair from maps[0]. Raise KeyError is maps[0] is empty.' + try: + return self.maps[0].popitem() + except KeyError: + raise KeyError('No keys found in the first mapping.') + + def pop(self, key, *args): + 'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].' + try: + return self.maps[0].pop(key, *args) + except KeyError: + raise KeyError('Key not found in the first mapping: {!r}'.format(key)) + + def clear(self): + 'Clear maps[0], leaving maps[1:] intact.' 
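+        # Keys shadowed by maps[0] become visible again once the
+        # writable front map is emptied.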
+ self.maps[0].clear() + + +################################################################################ +### UserDict +################################################################################ + +class UserDict(MutableMapping): + + # Start by filling-out the abstract methods + def __init__(*args, **kwargs): + if not args: + raise TypeError("descriptor '__init__' of 'UserDict' object " + "needs an argument") + self, *args = args + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + if args: + dict = args[0] + elif 'dict' in kwargs: + dict = kwargs.pop('dict') + import warnings + warnings.warn("Passing 'dict' as keyword argument is deprecated", + PendingDeprecationWarning, stacklevel=2) + else: + dict = None + self.data = {} + if dict is not None: + self.update(dict) + if len(kwargs): + self.update(kwargs) + def __len__(self): return len(self.data) + def __getitem__(self, key): + if key in self.data: + return self.data[key] + if hasattr(self.__class__, "__missing__"): + return self.__class__.__missing__(self, key) + raise KeyError(key) + def __setitem__(self, key, item): self.data[key] = item + def __delitem__(self, key): del self.data[key] + def __iter__(self): + return iter(self.data) + + # Modify __contains__ to work correctly when __missing__ is present + def __contains__(self, key): + return key in self.data + + # Now, add the methods in dicts but not in MutableMapping + def __repr__(self): return repr(self.data) + def copy(self): + if self.__class__ is UserDict: + return UserDict(self.data.copy()) + import copy + data = self.data + try: + self.data = {} + c = copy.copy(self) + finally: + self.data = data + c.update(self) + return c + @classmethod + def fromkeys(cls, iterable, value=None): + d = cls() + for key in iterable: + d[key] = value + return d + + + +################################################################################ +### UserList +################################################################################ + +class UserList(MutableSequence): + """A more or less complete user-defined wrapper around list objects.""" + def __init__(self, initlist=None): + self.data = [] + if initlist is not None: + # XXX should this accept an arbitrary sequence? 
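+            # In practice it does: a plain list is copied by slice
+            # assignment, a UserList is unwrapped via .data, and any
+            # other iterable falls through to list().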
+ if type(initlist) == type(self.data): + self.data[:] = initlist + elif isinstance(initlist, UserList): + self.data[:] = initlist.data[:] + else: + self.data = list(initlist) + def __repr__(self): return repr(self.data) + def __lt__(self, other): return self.data < self.__cast(other) + def __le__(self, other): return self.data <= self.__cast(other) + def __eq__(self, other): return self.data == self.__cast(other) + def __ne__(self, other): return self.data != self.__cast(other) + def __gt__(self, other): return self.data > self.__cast(other) + def __ge__(self, other): return self.data >= self.__cast(other) + def __cast(self, other): + return other.data if isinstance(other, UserList) else other + def __contains__(self, item): return item in self.data + def __len__(self): return len(self.data) + def __getitem__(self, i): return self.data[i] + def __setitem__(self, i, item): self.data[i] = item + def __delitem__(self, i): del self.data[i] + def __add__(self, other): + if isinstance(other, UserList): + return self.__class__(self.data + other.data) + elif isinstance(other, type(self.data)): + return self.__class__(self.data + other) + return self.__class__(self.data + list(other)) + def __radd__(self, other): + if isinstance(other, UserList): + return self.__class__(other.data + self.data) + elif isinstance(other, type(self.data)): + return self.__class__(other + self.data) + return self.__class__(list(other) + self.data) + def __iadd__(self, other): + if isinstance(other, UserList): + self.data += other.data + elif isinstance(other, type(self.data)): + self.data += other + else: + self.data += list(other) + return self + def __mul__(self, n): + return self.__class__(self.data*n) + __rmul__ = __mul__ + def __imul__(self, n): + self.data *= n + return self + def append(self, item): self.data.append(item) + def insert(self, i, item): self.data.insert(i, item) + def pop(self, i=-1): return self.data.pop(i) + def remove(self, item): self.data.remove(item) + def clear(self): self.data.clear() + def copy(self): return self.__class__(self) + def count(self, item): return self.data.count(item) + def index(self, item, *args): return self.data.index(item, *args) + def reverse(self): self.data.reverse() + def sort(self, *args, **kwds): self.data.sort(*args, **kwds) + def extend(self, other): + if isinstance(other, UserList): + self.data.extend(other.data) + else: + self.data.extend(other) + + + +################################################################################ +### UserString +################################################################################ + +class UserString(Sequence): + def __init__(self, seq): + if isinstance(seq, str): + self.data = seq + elif isinstance(seq, UserString): + self.data = seq.data[:] + else: + self.data = str(seq) + def __str__(self): return str(self.data) + def __repr__(self): return repr(self.data) + def __int__(self): return int(self.data) + def __float__(self): return float(self.data) + def __complex__(self): return complex(self.data) + def __hash__(self): return hash(self.data) + + def __eq__(self, string): + if isinstance(string, UserString): + return self.data == string.data + return self.data == string + def __ne__(self, string): + if isinstance(string, UserString): + return self.data != string.data + return self.data != string + def __lt__(self, string): + if isinstance(string, UserString): + return self.data < string.data + return self.data < string + def __le__(self, string): + if isinstance(string, UserString): + return self.data <= 
string.data + return self.data <= string + def __gt__(self, string): + if isinstance(string, UserString): + return self.data > string.data + return self.data > string + def __ge__(self, string): + if isinstance(string, UserString): + return self.data >= string.data + return self.data >= string + + def __contains__(self, char): + if isinstance(char, UserString): + char = char.data + return char in self.data + + def __len__(self): return len(self.data) + def __getitem__(self, index): return self.__class__(self.data[index]) + def __add__(self, other): + if isinstance(other, UserString): + return self.__class__(self.data + other.data) + elif isinstance(other, str): + return self.__class__(self.data + other) + return self.__class__(self.data + str(other)) + def __radd__(self, other): + if isinstance(other, str): + return self.__class__(other + self.data) + return self.__class__(str(other) + self.data) + def __mul__(self, n): + return self.__class__(self.data*n) + __rmul__ = __mul__ + def __mod__(self, args): + return self.__class__(self.data % args) + + # the following methods are defined in alphabetical order: + def capitalize(self): return self.__class__(self.data.capitalize()) + def center(self, width, *args): + return self.__class__(self.data.center(width, *args)) + def count(self, sub, start=0, end=_sys.maxsize): + if isinstance(sub, UserString): + sub = sub.data + return self.data.count(sub, start, end) + def encode(self, encoding=None, errors=None): # XXX improve this? + if encoding: + if errors: + return self.__class__(self.data.encode(encoding, errors)) + return self.__class__(self.data.encode(encoding)) + return self.__class__(self.data.encode()) + def endswith(self, suffix, start=0, end=_sys.maxsize): + return self.data.endswith(suffix, start, end) + def expandtabs(self, tabsize=8): + return self.__class__(self.data.expandtabs(tabsize)) + def find(self, sub, start=0, end=_sys.maxsize): + if isinstance(sub, UserString): + sub = sub.data + return self.data.find(sub, start, end) + def format(self, *args, **kwds): + return self.data.format(*args, **kwds) + def index(self, sub, start=0, end=_sys.maxsize): + return self.data.index(sub, start, end) + def isalpha(self): return self.data.isalpha() + def isalnum(self): return self.data.isalnum() + def isdecimal(self): return self.data.isdecimal() + def isdigit(self): return self.data.isdigit() + def isidentifier(self): return self.data.isidentifier() + def islower(self): return self.data.islower() + def isnumeric(self): return self.data.isnumeric() + def isspace(self): return self.data.isspace() + def istitle(self): return self.data.istitle() + def isupper(self): return self.data.isupper() + def join(self, seq): return self.data.join(seq) + def ljust(self, width, *args): + return self.__class__(self.data.ljust(width, *args)) + def lower(self): return self.__class__(self.data.lower()) + def lstrip(self, chars=None): return self.__class__(self.data.lstrip(chars)) + def partition(self, sep): + return self.data.partition(sep) + def replace(self, old, new, maxsplit=-1): + if isinstance(old, UserString): + old = old.data + if isinstance(new, UserString): + new = new.data + return self.__class__(self.data.replace(old, new, maxsplit)) + def rfind(self, sub, start=0, end=_sys.maxsize): + if isinstance(sub, UserString): + sub = sub.data + return self.data.rfind(sub, start, end) + def rindex(self, sub, start=0, end=_sys.maxsize): + return self.data.rindex(sub, start, end) + def rjust(self, width, *args): + return self.__class__(self.data.rjust(width, 
*args)) + def rpartition(self, sep): + return self.data.rpartition(sep) + def rstrip(self, chars=None): + return self.__class__(self.data.rstrip(chars)) + def split(self, sep=None, maxsplit=-1): + return self.data.split(sep, maxsplit) + def rsplit(self, sep=None, maxsplit=-1): + return self.data.rsplit(sep, maxsplit) + def splitlines(self, keepends=False): return self.data.splitlines(keepends) + def startswith(self, prefix, start=0, end=_sys.maxsize): + return self.data.startswith(prefix, start, end) + def strip(self, chars=None): return self.__class__(self.data.strip(chars)) + def swapcase(self): return self.__class__(self.data.swapcase()) + def title(self): return self.__class__(self.data.title()) + def translate(self, *args): + return self.__class__(self.data.translate(*args)) + def upper(self): return self.__class__(self.data.upper()) + def zfill(self, width): return self.__class__(self.data.zfill(width)) diff --git a/typing_extensions/test_data/python-3.4.6/collections/__main__.py b/typing_extensions/test_data/python-3.4.6/collections/__main__.py new file mode 100644 index 00000000..763e38e0 --- /dev/null +++ b/typing_extensions/test_data/python-3.4.6/collections/__main__.py @@ -0,0 +1,38 @@ +################################################################################ +### Simple tests +################################################################################ + +# verify that instances can be pickled +from collections import namedtuple +from pickle import loads, dumps +Point = namedtuple('Point', 'x, y', True) +p = Point(x=10, y=20) +assert p == loads(dumps(p)) + +# test and demonstrate ability to override methods +class Point(namedtuple('Point', 'x y')): + __slots__ = () + @property + def hypot(self): + return (self.x ** 2 + self.y ** 2) ** 0.5 + def __str__(self): + return 'Point: x=%6.3f y=%6.3f hypot=%6.3f' % (self.x, self.y, self.hypot) + +for p in Point(3, 4), Point(14, 5/7.): + print (p) + +class Point(namedtuple('Point', 'x y')): + 'Point class with optimized _make() and _replace() without error-checking' + __slots__ = () + _make = classmethod(tuple.__new__) + def _replace(self, _map=map, **kwds): + return self._make(_map(kwds.get, ('x', 'y'), self)) + +print(Point(11, 22)._replace(x=100)) + +Point3D = namedtuple('Point3D', Point._fields + ('z',)) +print(Point3D.__doc__) + +import doctest, collections +TestResults = namedtuple('TestResults', 'failed attempted') +print(TestResults(*doctest.testmod(collections))) diff --git a/typing_extensions/test_data/python-3.4.6/collections/abc.py b/typing_extensions/test_data/python-3.4.6/collections/abc.py new file mode 100644 index 00000000..891600d1 --- /dev/null +++ b/typing_extensions/test_data/python-3.4.6/collections/abc.py @@ -0,0 +1,2 @@ +from _collections_abc import * +from _collections_abc import __all__ diff --git a/typing_extensions/test_data/python-3.5.0/_collections_abc.py b/typing_extensions/test_data/python-3.5.0/_collections_abc.py new file mode 100644 index 00000000..f89bb6f0 --- /dev/null +++ b/typing_extensions/test_data/python-3.5.0/_collections_abc.py @@ -0,0 +1,939 @@ +# Copyright 2007 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Abstract Base Classes (ABCs) for collections, according to PEP 3119. + +Unit tests are in test_collections. 
+""" + +from abc import ABCMeta, abstractmethod +import sys + +__all__ = ["Awaitable", "Coroutine", "AsyncIterable", "AsyncIterator", + "Hashable", "Iterable", "Iterator", "Generator", + "Sized", "Container", "Callable", + "Set", "MutableSet", + "Mapping", "MutableMapping", + "MappingView", "KeysView", "ItemsView", "ValuesView", + "Sequence", "MutableSequence", + "ByteString", + ] + +# This module has been renamed from collections.abc to _collections_abc to +# speed up interpreter startup. Some of the types such as MutableMapping are +# required early but collections module imports a lot of other modules. +# See issue #19218 +__name__ = "collections.abc" + +# Private list of types that we want to register with the various ABCs +# so that they will pass tests like: +# it = iter(somebytearray) +# assert isinstance(it, Iterable) +# Note: in other implementations, these types many not be distinct +# and they make have their own implementation specific types that +# are not included on this list. +bytes_iterator = type(iter(b'')) +bytearray_iterator = type(iter(bytearray())) +#callable_iterator = ??? +dict_keyiterator = type(iter({}.keys())) +dict_valueiterator = type(iter({}.values())) +dict_itemiterator = type(iter({}.items())) +list_iterator = type(iter([])) +list_reverseiterator = type(iter(reversed([]))) +range_iterator = type(iter(range(0))) +set_iterator = type(iter(set())) +str_iterator = type(iter("")) +tuple_iterator = type(iter(())) +zip_iterator = type(iter(zip())) +## views ## +dict_keys = type({}.keys()) +dict_values = type({}.values()) +dict_items = type({}.items()) +## misc ## +mappingproxy = type(type.__dict__) +generator = type((lambda: (yield))()) +## coroutine ## +async def _coro(): pass +_coro = _coro() +coroutine = type(_coro) +_coro.close() # Prevent ResourceWarning +del _coro + + +### ONE-TRICK PONIES ### + +class Hashable(metaclass=ABCMeta): + + __slots__ = () + + @abstractmethod + def __hash__(self): + return 0 + + @classmethod + def __subclasshook__(cls, C): + if cls is Hashable: + for B in C.__mro__: + if "__hash__" in B.__dict__: + if B.__dict__["__hash__"]: + return True + break + return NotImplemented + + +class Awaitable(metaclass=ABCMeta): + + __slots__ = () + + @abstractmethod + def __await__(self): + yield + + @classmethod + def __subclasshook__(cls, C): + if cls is Awaitable: + for B in C.__mro__: + if "__await__" in B.__dict__: + if B.__dict__["__await__"]: + return True + break + return NotImplemented + + +class Coroutine(Awaitable): + + __slots__ = () + + @abstractmethod + def send(self, value): + """Send a value into the coroutine. + Return next yielded value or raise StopIteration. + """ + raise StopIteration + + @abstractmethod + def throw(self, typ, val=None, tb=None): + """Raise an exception in the coroutine. + Return next yielded value or raise StopIteration. + """ + if val is None: + if tb is None: + raise typ + val = typ() + if tb is not None: + val = val.with_traceback(tb) + raise val + + def close(self): + """Raise GeneratorExit inside coroutine. 
+ """ + try: + self.throw(GeneratorExit) + except (GeneratorExit, StopIteration): + pass + else: + raise RuntimeError("coroutine ignored GeneratorExit") + + @classmethod + def __subclasshook__(cls, C): + if cls is Coroutine: + mro = C.__mro__ + for method in ('__await__', 'send', 'throw', 'close'): + for base in mro: + if method in base.__dict__: + break + else: + return NotImplemented + return True + return NotImplemented + + +Coroutine.register(coroutine) + + +class AsyncIterable(metaclass=ABCMeta): + + __slots__ = () + + @abstractmethod + async def __aiter__(self): + return AsyncIterator() + + @classmethod + def __subclasshook__(cls, C): + if cls is AsyncIterable: + if any("__aiter__" in B.__dict__ for B in C.__mro__): + return True + return NotImplemented + + +class AsyncIterator(AsyncIterable): + + __slots__ = () + + @abstractmethod + async def __anext__(self): + """Return the next item or raise StopAsyncIteration when exhausted.""" + raise StopAsyncIteration + + async def __aiter__(self): + return self + + @classmethod + def __subclasshook__(cls, C): + if cls is AsyncIterator: + if (any("__anext__" in B.__dict__ for B in C.__mro__) and + any("__aiter__" in B.__dict__ for B in C.__mro__)): + return True + return NotImplemented + + +class Iterable(metaclass=ABCMeta): + + __slots__ = () + + @abstractmethod + def __iter__(self): + while False: + yield None + + @classmethod + def __subclasshook__(cls, C): + if cls is Iterable: + if any("__iter__" in B.__dict__ for B in C.__mro__): + return True + return NotImplemented + + +class Iterator(Iterable): + + __slots__ = () + + @abstractmethod + def __next__(self): + 'Return the next item from the iterator. When exhausted, raise StopIteration' + raise StopIteration + + def __iter__(self): + return self + + @classmethod + def __subclasshook__(cls, C): + if cls is Iterator: + if (any("__next__" in B.__dict__ for B in C.__mro__) and + any("__iter__" in B.__dict__ for B in C.__mro__)): + return True + return NotImplemented + +Iterator.register(bytes_iterator) +Iterator.register(bytearray_iterator) +#Iterator.register(callable_iterator) +Iterator.register(dict_keyiterator) +Iterator.register(dict_valueiterator) +Iterator.register(dict_itemiterator) +Iterator.register(list_iterator) +Iterator.register(list_reverseiterator) +Iterator.register(range_iterator) +Iterator.register(set_iterator) +Iterator.register(str_iterator) +Iterator.register(tuple_iterator) +Iterator.register(zip_iterator) + + +class Generator(Iterator): + + __slots__ = () + + def __next__(self): + """Return the next item from the generator. + When exhausted, raise StopIteration. + """ + return self.send(None) + + @abstractmethod + def send(self, value): + """Send a value into the generator. + Return next yielded value or raise StopIteration. + """ + raise StopIteration + + @abstractmethod + def throw(self, typ, val=None, tb=None): + """Raise an exception in the generator. + Return next yielded value or raise StopIteration. + """ + if val is None: + if tb is None: + raise typ + val = typ() + if tb is not None: + val = val.with_traceback(tb) + raise val + + def close(self): + """Raise GeneratorExit inside generator. 
+ """ + try: + self.throw(GeneratorExit) + except (GeneratorExit, StopIteration): + pass + else: + raise RuntimeError("generator ignored GeneratorExit") + + @classmethod + def __subclasshook__(cls, C): + if cls is Generator: + mro = C.__mro__ + for method in ('__iter__', '__next__', 'send', 'throw', 'close'): + for base in mro: + if method in base.__dict__: + break + else: + return NotImplemented + return True + return NotImplemented + + +Generator.register(generator) + + +class Sized(metaclass=ABCMeta): + + __slots__ = () + + @abstractmethod + def __len__(self): + return 0 + + @classmethod + def __subclasshook__(cls, C): + if cls is Sized: + if any("__len__" in B.__dict__ for B in C.__mro__): + return True + return NotImplemented + + +class Container(metaclass=ABCMeta): + + __slots__ = () + + @abstractmethod + def __contains__(self, x): + return False + + @classmethod + def __subclasshook__(cls, C): + if cls is Container: + if any("__contains__" in B.__dict__ for B in C.__mro__): + return True + return NotImplemented + + +class Callable(metaclass=ABCMeta): + + __slots__ = () + + @abstractmethod + def __call__(self, *args, **kwds): + return False + + @classmethod + def __subclasshook__(cls, C): + if cls is Callable: + if any("__call__" in B.__dict__ for B in C.__mro__): + return True + return NotImplemented + + +### SETS ### + + +class Set(Sized, Iterable, Container): + + """A set is a finite, iterable container. + + This class provides concrete generic implementations of all + methods except for __contains__, __iter__ and __len__. + + To override the comparisons (presumably for speed, as the + semantics are fixed), redefine __le__ and __ge__, + then the other operations will automatically follow suit. + """ + + __slots__ = () + + def __le__(self, other): + if not isinstance(other, Set): + return NotImplemented + if len(self) > len(other): + return False + for elem in self: + if elem not in other: + return False + return True + + def __lt__(self, other): + if not isinstance(other, Set): + return NotImplemented + return len(self) < len(other) and self.__le__(other) + + def __gt__(self, other): + if not isinstance(other, Set): + return NotImplemented + return len(self) > len(other) and self.__ge__(other) + + def __ge__(self, other): + if not isinstance(other, Set): + return NotImplemented + if len(self) < len(other): + return False + for elem in other: + if elem not in self: + return False + return True + + def __eq__(self, other): + if not isinstance(other, Set): + return NotImplemented + return len(self) == len(other) and self.__le__(other) + + @classmethod + def _from_iterable(cls, it): + '''Construct an instance of the class from any iterable input. + + Must override this method if the class constructor signature + does not accept an iterable for an input. + ''' + return cls(it) + + def __and__(self, other): + if not isinstance(other, Iterable): + return NotImplemented + return self._from_iterable(value for value in other if value in self) + + __rand__ = __and__ + + def isdisjoint(self, other): + 'Return True if two sets have a null intersection.' 
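+        # For example, {1, 2}.isdisjoint({3}) is True, while
+        # {1, 2}.isdisjoint({2, 3}) is False, since both contain 2.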
+ for value in other: + if value in self: + return False + return True + + def __or__(self, other): + if not isinstance(other, Iterable): + return NotImplemented + chain = (e for s in (self, other) for e in s) + return self._from_iterable(chain) + + __ror__ = __or__ + + def __sub__(self, other): + if not isinstance(other, Set): + if not isinstance(other, Iterable): + return NotImplemented + other = self._from_iterable(other) + return self._from_iterable(value for value in self + if value not in other) + + def __rsub__(self, other): + if not isinstance(other, Set): + if not isinstance(other, Iterable): + return NotImplemented + other = self._from_iterable(other) + return self._from_iterable(value for value in other + if value not in self) + + def __xor__(self, other): + if not isinstance(other, Set): + if not isinstance(other, Iterable): + return NotImplemented + other = self._from_iterable(other) + return (self - other) | (other - self) + + __rxor__ = __xor__ + + def _hash(self): + """Compute the hash value of a set. + + Note that we don't define __hash__: not all sets are hashable. + But if you define a hashable set type, its __hash__ should + call this function. + + This must be compatible __eq__. + + All sets ought to compare equal if they contain the same + elements, regardless of how they are implemented, and + regardless of the order of the elements; so there's not much + freedom for __eq__ or __hash__. We match the algorithm used + by the built-in frozenset type. + """ + MAX = sys.maxsize + MASK = 2 * MAX + 1 + n = len(self) + h = 1927868237 * (n + 1) + h &= MASK + for x in self: + hx = hash(x) + h ^= (hx ^ (hx << 16) ^ 89869747) * 3644798167 + h &= MASK + h = h * 69069 + 907133923 + h &= MASK + if h > MAX: + h -= MASK + 1 + if h == -1: + h = 590923713 + return h + +Set.register(frozenset) + + +class MutableSet(Set): + """A mutable set is a finite, iterable container. + + This class provides concrete generic implementations of all + methods except for __contains__, __iter__, __len__, + add(), and discard(). + + To override the comparisons (presumably for speed, as the + semantics are fixed), all you have to do is redefine __le__ and + then the other operations will automatically follow suit. + """ + + __slots__ = () + + @abstractmethod + def add(self, value): + """Add an element.""" + raise NotImplementedError + + @abstractmethod + def discard(self, value): + """Remove an element. Do not raise an exception if absent.""" + raise NotImplementedError + + def remove(self, value): + """Remove an element. If not a member, raise a KeyError.""" + if value not in self: + raise KeyError(value) + self.discard(value) + + def pop(self): + """Return the popped value. Raise KeyError if empty.""" + it = iter(self) + try: + value = next(it) + except StopIteration: + raise KeyError + self.discard(value) + return value + + def clear(self): + """This is slow (creates N new iterators!) 
but effective.""" + try: + while True: + self.pop() + except KeyError: + pass + + def __ior__(self, it): + for value in it: + self.add(value) + return self + + def __iand__(self, it): + for value in (self - it): + self.discard(value) + return self + + def __ixor__(self, it): + if it is self: + self.clear() + else: + if not isinstance(it, Set): + it = self._from_iterable(it) + for value in it: + if value in self: + self.discard(value) + else: + self.add(value) + return self + + def __isub__(self, it): + if it is self: + self.clear() + else: + for value in it: + self.discard(value) + return self + +MutableSet.register(set) + + +### MAPPINGS ### + + +class Mapping(Sized, Iterable, Container): + + __slots__ = () + + """A Mapping is a generic container for associating key/value + pairs. + + This class provides concrete generic implementations of all + methods except for __getitem__, __iter__, and __len__. + + """ + + @abstractmethod + def __getitem__(self, key): + raise KeyError + + def get(self, key, default=None): + 'D.get(k[,d]) -> D[k] if k in D, else d. d defaults to None.' + try: + return self[key] + except KeyError: + return default + + def __contains__(self, key): + try: + self[key] + except KeyError: + return False + else: + return True + + def keys(self): + "D.keys() -> a set-like object providing a view on D's keys" + return KeysView(self) + + def items(self): + "D.items() -> a set-like object providing a view on D's items" + return ItemsView(self) + + def values(self): + "D.values() -> an object providing a view on D's values" + return ValuesView(self) + + def __eq__(self, other): + if not isinstance(other, Mapping): + return NotImplemented + return dict(self.items()) == dict(other.items()) + +Mapping.register(mappingproxy) + + +class MappingView(Sized): + + __slots__ = '_mapping', + + def __init__(self, mapping): + self._mapping = mapping + + def __len__(self): + return len(self._mapping) + + def __repr__(self): + return '{0.__class__.__name__}({0._mapping!r})'.format(self) + + +class KeysView(MappingView, Set): + + __slots__ = () + + @classmethod + def _from_iterable(self, it): + return set(it) + + def __contains__(self, key): + return key in self._mapping + + def __iter__(self): + yield from self._mapping + +KeysView.register(dict_keys) + + +class ItemsView(MappingView, Set): + + __slots__ = () + + @classmethod + def _from_iterable(self, it): + return set(it) + + def __contains__(self, item): + key, value = item + try: + v = self._mapping[key] + except KeyError: + return False + else: + return v == value + + def __iter__(self): + for key in self._mapping: + yield (key, self._mapping[key]) + +ItemsView.register(dict_items) + + +class ValuesView(MappingView): + + __slots__ = () + + def __contains__(self, value): + for key in self._mapping: + if value == self._mapping[key]: + return True + return False + + def __iter__(self): + for key in self._mapping: + yield self._mapping[key] + +ValuesView.register(dict_values) + + +class MutableMapping(Mapping): + + __slots__ = () + + """A MutableMapping is a generic container for associating + key/value pairs. + + This class provides concrete generic implementations of all + methods except for __getitem__, __setitem__, __delitem__, + __iter__, and __len__. 
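+
+    A minimal concrete subclass (an illustrative sketch) need only supply
+    those five methods, e.g. by delegating to a plain dict:
+
+        class DictBacked(MutableMapping):
+            def __init__(self):          self._data = {}
+            def __getitem__(self, key):  return self._data[key]
+            def __setitem__(self, key, value): self._data[key] = value
+            def __delitem__(self, key):  del self._data[key]
+            def __iter__(self):          return iter(self._data)
+            def __len__(self):           return len(self._data)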
+ + """ + + @abstractmethod + def __setitem__(self, key, value): + raise KeyError + + @abstractmethod + def __delitem__(self, key): + raise KeyError + + __marker = object() + + def pop(self, key, default=__marker): + '''D.pop(k[,d]) -> v, remove specified key and return the corresponding value. + If key is not found, d is returned if given, otherwise KeyError is raised. + ''' + try: + value = self[key] + except KeyError: + if default is self.__marker: + raise + return default + else: + del self[key] + return value + + def popitem(self): + '''D.popitem() -> (k, v), remove and return some (key, value) pair + as a 2-tuple; but raise KeyError if D is empty. + ''' + try: + key = next(iter(self)) + except StopIteration: + raise KeyError + value = self[key] + del self[key] + return key, value + + def clear(self): + 'D.clear() -> None. Remove all items from D.' + try: + while True: + self.popitem() + except KeyError: + pass + + def update(*args, **kwds): + ''' D.update([E, ]**F) -> None. Update D from mapping/iterable E and F. + If E present and has a .keys() method, does: for k in E: D[k] = E[k] + If E present and lacks .keys() method, does: for (k, v) in E: D[k] = v + In either case, this is followed by: for k, v in F.items(): D[k] = v + ''' + if not args: + raise TypeError("descriptor 'update' of 'MutableMapping' object " + "needs an argument") + self, *args = args + if len(args) > 1: + raise TypeError('update expected at most 1 arguments, got %d' % + len(args)) + if args: + other = args[0] + if isinstance(other, Mapping): + for key in other: + self[key] = other[key] + elif hasattr(other, "keys"): + for key in other.keys(): + self[key] = other[key] + else: + for key, value in other: + self[key] = value + for key, value in kwds.items(): + self[key] = value + + def setdefault(self, key, default=None): + 'D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D' + try: + return self[key] + except KeyError: + self[key] = default + return default + +MutableMapping.register(dict) + + +### SEQUENCES ### + + +class Sequence(Sized, Iterable, Container): + + """All the operations on a read-only sequence. + + Concrete subclasses must override __new__ or __init__, + __getitem__, and __len__. + """ + + __slots__ = () + + @abstractmethod + def __getitem__(self, index): + raise IndexError + + def __iter__(self): + i = 0 + try: + while True: + v = self[i] + yield v + i += 1 + except IndexError: + return + + def __contains__(self, value): + for v in self: + if v == value: + return True + return False + + def __reversed__(self): + for i in reversed(range(len(self))): + yield self[i] + + def index(self, value, start=0, stop=None): + '''S.index(value, [start, [stop]]) -> integer -- return first index of value. + Raises ValueError if the value is not present. + ''' + if start is not None and start < 0: + start = max(len(self) + start, 0) + if stop is not None and stop < 0: + stop += len(self) + + i = start + while stop is None or i < stop: + try: + if self[i] == value: + return i + except IndexError: + break + i += 1 + raise ValueError + + def count(self, value): + 'S.count(value) -> integer -- return number of occurrences of value' + return sum(1 for v in self if v == value) + +Sequence.register(tuple) +Sequence.register(str) +Sequence.register(range) +Sequence.register(memoryview) + + +class ByteString(Sequence): + + """This unifies bytes and bytearray. + + XXX Should add all their methods. 
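+
+    For example, isinstance(b'ab', ByteString) and
+    isinstance(bytearray(b'ab'), ByteString) both hold, while
+    isinstance('ab', ByteString) does not.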
+ """ + + __slots__ = () + +ByteString.register(bytes) +ByteString.register(bytearray) + + +class MutableSequence(Sequence): + + __slots__ = () + + """All the operations on a read-write sequence. + + Concrete subclasses must provide __new__ or __init__, + __getitem__, __setitem__, __delitem__, __len__, and insert(). + + """ + + @abstractmethod + def __setitem__(self, index, value): + raise IndexError + + @abstractmethod + def __delitem__(self, index): + raise IndexError + + @abstractmethod + def insert(self, index, value): + 'S.insert(index, value) -- insert value before index' + raise IndexError + + def append(self, value): + 'S.append(value) -- append value to the end of the sequence' + self.insert(len(self), value) + + def clear(self): + 'S.clear() -> None -- remove all items from S' + try: + while True: + self.pop() + except IndexError: + pass + + def reverse(self): + 'S.reverse() -- reverse *IN PLACE*' + n = len(self) + for i in range(n//2): + self[i], self[n-i-1] = self[n-i-1], self[i] + + def extend(self, values): + 'S.extend(iterable) -- extend sequence by appending elements from the iterable' + for v in values: + self.append(v) + + def pop(self, index=-1): + '''S.pop([index]) -> item -- remove and return item at index (default last). + Raise IndexError if list is empty or index is out of range. + ''' + v = self[index] + del self[index] + return v + + def remove(self, value): + '''S.remove(value) -- remove first occurrence of value. + Raise ValueError if the value is not present. + ''' + del self[self.index(value)] + + def __iadd__(self, values): + self.extend(values) + return self + +MutableSequence.register(list) +MutableSequence.register(bytearray) # Multiply inheriting, see ByteString diff --git a/typing_extensions/test_data/python-3.5.0/abc.py b/typing_extensions/test_data/python-3.5.0/abc.py new file mode 100644 index 00000000..1cbf96a6 --- /dev/null +++ b/typing_extensions/test_data/python-3.5.0/abc.py @@ -0,0 +1,248 @@ +# Copyright 2007 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Abstract Base Classes (ABCs) according to PEP 3119.""" + +from _weakrefset import WeakSet + + +def abstractmethod(funcobj): + """A decorator indicating abstract methods. + + Requires that the metaclass is ABCMeta or derived from it. A + class that has a metaclass derived from ABCMeta cannot be + instantiated unless all of its abstract methods are overridden. + The abstract methods can be called using any of the normal + 'super' call mechanisms. + + Usage: + + class C(metaclass=ABCMeta): + @abstractmethod + def my_abstract_method(self, ...): + ... + """ + funcobj.__isabstractmethod__ = True + return funcobj + + +class abstractclassmethod(classmethod): + """ + A decorator indicating abstract classmethods. + + Similar to abstractmethod. + + Usage: + + class C(metaclass=ABCMeta): + @abstractclassmethod + def my_abstract_classmethod(cls, ...): + ... + + 'abstractclassmethod' is deprecated. Use 'classmethod' with + 'abstractmethod' instead. + """ + + __isabstractmethod__ = True + + def __init__(self, callable): + callable.__isabstractmethod__ = True + super().__init__(callable) + + +class abstractstaticmethod(staticmethod): + """ + A decorator indicating abstract staticmethods. + + Similar to abstractmethod. + + Usage: + + class C(metaclass=ABCMeta): + @abstractstaticmethod + def my_abstract_staticmethod(...): + ... + + 'abstractstaticmethod' is deprecated. Use 'staticmethod' with + 'abstractmethod' instead. 
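+
+    The preferred spelling stacks the two decorators:
+
+        class C(metaclass=ABCMeta):
+            @staticmethod
+            @abstractmethod
+            def my_abstract_staticmethod(...):
+                ...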
+ """ + + __isabstractmethod__ = True + + def __init__(self, callable): + callable.__isabstractmethod__ = True + super().__init__(callable) + + +class abstractproperty(property): + """ + A decorator indicating abstract properties. + + Requires that the metaclass is ABCMeta or derived from it. A + class that has a metaclass derived from ABCMeta cannot be + instantiated unless all of its abstract properties are overridden. + The abstract properties can be called using any of the normal + 'super' call mechanisms. + + Usage: + + class C(metaclass=ABCMeta): + @abstractproperty + def my_abstract_property(self): + ... + + This defines a read-only property; you can also define a read-write + abstract property using the 'long' form of property declaration: + + class C(metaclass=ABCMeta): + def getx(self): ... + def setx(self, value): ... + x = abstractproperty(getx, setx) + + 'abstractproperty' is deprecated. Use 'property' with 'abstractmethod' + instead. + """ + + __isabstractmethod__ = True + + +class ABCMeta(type): + + """Metaclass for defining Abstract Base Classes (ABCs). + + Use this metaclass to create an ABC. An ABC can be subclassed + directly, and then acts as a mix-in class. You can also register + unrelated concrete classes (even built-in classes) and unrelated + ABCs as 'virtual subclasses' -- these and their descendants will + be considered subclasses of the registering ABC by the built-in + issubclass() function, but the registering ABC won't show up in + their MRO (Method Resolution Order) nor will method + implementations defined by the registering ABC be callable (not + even via super()). + + """ + + # A global counter that is incremented each time a class is + # registered as a virtual subclass of anything. It forces the + # negative cache to be cleared before its next use. + # Note: this counter is private. Use `abc.get_cache_token()` for + # external code. + _abc_invalidation_counter = 0 + + def __new__(mcls, name, bases, namespace): + cls = super().__new__(mcls, name, bases, namespace) + # Compute set of abstract method names + abstracts = {name + for name, value in namespace.items() + if getattr(value, "__isabstractmethod__", False)} + for base in bases: + for name in getattr(base, "__abstractmethods__", set()): + value = getattr(cls, name, None) + if getattr(value, "__isabstractmethod__", False): + abstracts.add(name) + cls.__abstractmethods__ = frozenset(abstracts) + # Set up inheritance registry + cls._abc_registry = WeakSet() + cls._abc_cache = WeakSet() + cls._abc_negative_cache = WeakSet() + cls._abc_negative_cache_version = ABCMeta._abc_invalidation_counter + return cls + + def register(cls, subclass): + """Register a virtual subclass of an ABC. + + Returns the subclass, to allow usage as a class decorator. + """ + if not isinstance(subclass, type): + raise TypeError("Can only register classes") + if issubclass(subclass, cls): + return subclass # Already a subclass + # Subtle: test for cycles *after* testing for "already a subclass"; + # this means we allow X.register(X) and interpret it as a no-op. 
+ if issubclass(cls, subclass): + # This would create a cycle, which is bad for the algorithm below + raise RuntimeError("Refusing to create an inheritance cycle") + cls._abc_registry.add(subclass) + ABCMeta._abc_invalidation_counter += 1 # Invalidate negative cache + return subclass + + def _dump_registry(cls, file=None): + """Debug helper to print the ABC registry.""" + print("Class: %s.%s" % (cls.__module__, cls.__qualname__), file=file) + print("Inv.counter: %s" % ABCMeta._abc_invalidation_counter, file=file) + for name in sorted(cls.__dict__.keys()): + if name.startswith("_abc_"): + value = getattr(cls, name) + print("%s: %r" % (name, value), file=file) + + def __instancecheck__(cls, instance): + """Override for isinstance(instance, cls).""" + # Inline the cache checking + subclass = instance.__class__ + if subclass in cls._abc_cache: + return True + subtype = type(instance) + if subtype is subclass: + if (cls._abc_negative_cache_version == + ABCMeta._abc_invalidation_counter and + subclass in cls._abc_negative_cache): + return False + # Fall back to the subclass check. + return cls.__subclasscheck__(subclass) + return any(cls.__subclasscheck__(c) for c in {subclass, subtype}) + + def __subclasscheck__(cls, subclass): + """Override for issubclass(subclass, cls).""" + # Check cache + if subclass in cls._abc_cache: + return True + # Check negative cache; may have to invalidate + if cls._abc_negative_cache_version < ABCMeta._abc_invalidation_counter: + # Invalidate the negative cache + cls._abc_negative_cache = WeakSet() + cls._abc_negative_cache_version = ABCMeta._abc_invalidation_counter + elif subclass in cls._abc_negative_cache: + return False + # Check the subclass hook + ok = cls.__subclasshook__(subclass) + if ok is not NotImplemented: + assert isinstance(ok, bool) + if ok: + cls._abc_cache.add(subclass) + else: + cls._abc_negative_cache.add(subclass) + return ok + # Check if it's a direct subclass + if cls in getattr(subclass, '__mro__', ()): + cls._abc_cache.add(subclass) + return True + # Check if it's a subclass of a registered class (recursive) + for rcls in cls._abc_registry: + if issubclass(subclass, rcls): + cls._abc_cache.add(subclass) + return True + # Check if it's a subclass of a subclass (recursive) + for scls in cls.__subclasses__(): + if issubclass(subclass, scls): + cls._abc_cache.add(subclass) + return True + # No dice; update negative cache + cls._abc_negative_cache.add(subclass) + return False + + +class ABC(metaclass=ABCMeta): + """Helper class that provides a standard way to create an ABC using + inheritance. + """ + pass + + +def get_cache_token(): + """Returns the current ABC cache token. + + The token is an opaque object (supporting equality testing) identifying the + current version of the ABC cache for virtual subclasses. The token changes + with every call to ``register()`` on any ABC. + """ + return ABCMeta._abc_invalidation_counter diff --git a/typing_extensions/test_data/python-3.5.0/collections/__init__.py b/typing_extensions/test_data/python-3.5.0/collections/__init__.py new file mode 100644 index 00000000..80dc4f6d --- /dev/null +++ b/typing_extensions/test_data/python-3.5.0/collections/__init__.py @@ -0,0 +1,1217 @@ +__all__ = ['deque', 'defaultdict', 'namedtuple', 'UserDict', 'UserList', + 'UserString', 'Counter', 'OrderedDict', 'ChainMap'] + +# For backwards compatibility, continue to make the collections ABCs +# available through the collections module. 
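+# (So, for example, collections.Mapping is the same object as
+# collections.abc.Mapping.)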
+from _collections_abc import * +import _collections_abc +__all__ += _collections_abc.__all__ + +from operator import itemgetter as _itemgetter, eq as _eq +from keyword import iskeyword as _iskeyword +import sys as _sys +import heapq as _heapq +from _weakref import proxy as _proxy +from itertools import repeat as _repeat, chain as _chain, starmap as _starmap +from reprlib import recursive_repr as _recursive_repr + +try: + from _collections import deque +except ImportError: + pass +else: + MutableSequence.register(deque) + +try: + from _collections import defaultdict +except ImportError: + pass + + +################################################################################ +### OrderedDict +################################################################################ + +class _OrderedDictKeysView(KeysView): + + def __reversed__(self): + yield from reversed(self._mapping) + +class _OrderedDictItemsView(ItemsView): + + def __reversed__(self): + for key in reversed(self._mapping): + yield (key, self._mapping[key]) + +class _OrderedDictValuesView(ValuesView): + + def __reversed__(self): + for key in reversed(self._mapping): + yield self._mapping[key] + +class _Link(object): + __slots__ = 'prev', 'next', 'key', '__weakref__' + +class OrderedDict(dict): + 'Dictionary that remembers insertion order' + # An inherited dict maps keys to values. + # The inherited dict provides __getitem__, __len__, __contains__, and get. + # The remaining methods are order-aware. + # Big-O running times for all methods are the same as regular dictionaries. + + # The internal self.__map dict maps keys to links in a doubly linked list. + # The circular doubly linked list starts and ends with a sentinel element. + # The sentinel element never gets deleted (this simplifies the algorithm). + # The sentinel is in self.__hardroot with a weakref proxy in self.__root. + # The prev links are weakref proxies (to prevent circular references). + # Individual links are kept alive by the hard reference in self.__map. + # Those hard references disappear when a key is deleted from an OrderedDict. + + def __init__(*args, **kwds): + '''Initialize an ordered dictionary. The signature is the same as + regular dictionaries, but keyword arguments are not recommended because + their insertion order is arbitrary. + + ''' + if not args: + raise TypeError("descriptor '__init__' of 'OrderedDict' object " + "needs an argument") + self, *args = args + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + try: + self.__root + except AttributeError: + self.__hardroot = _Link() + self.__root = root = _proxy(self.__hardroot) + root.prev = root.next = root + self.__map = {} + self.__update(*args, **kwds) + + def __setitem__(self, key, value, + dict_setitem=dict.__setitem__, proxy=_proxy, Link=_Link): + 'od.__setitem__(i, y) <==> od[i]=y' + # Setting a new item creates a new link at the end of the linked list, + # and the inherited dictionary is updated with the new key/value pair. + if key not in self: + self.__map[key] = link = Link() + root = self.__root + last = root.prev + link.prev, link.next, link.key = last, root, key + last.next = link + root.prev = proxy(link) + dict_setitem(self, key, value) + + def __delitem__(self, key, dict_delitem=dict.__delitem__): + 'od.__delitem__(y) <==> del od[y]' + # Deleting an existing item uses self.__map to find the link which gets + # removed by updating the links in the predecessor and successor nodes. 
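+        # Both the dict entry and the link are dropped in constant time;
+        # only the deleted link's two neighbors are touched.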
+ dict_delitem(self, key) + link = self.__map.pop(key) + link_prev = link.prev + link_next = link.next + link_prev.next = link_next + link_next.prev = link_prev + link.prev = None + link.next = None + + def __iter__(self): + 'od.__iter__() <==> iter(od)' + # Traverse the linked list in order. + root = self.__root + curr = root.next + while curr is not root: + yield curr.key + curr = curr.next + + def __reversed__(self): + 'od.__reversed__() <==> reversed(od)' + # Traverse the linked list in reverse order. + root = self.__root + curr = root.prev + while curr is not root: + yield curr.key + curr = curr.prev + + def clear(self): + 'od.clear() -> None. Remove all items from od.' + root = self.__root + root.prev = root.next = root + self.__map.clear() + dict.clear(self) + + def popitem(self, last=True): + '''od.popitem() -> (k, v), return and remove a (key, value) pair. + Pairs are returned in LIFO order if last is true or FIFO order if false. + + ''' + if not self: + raise KeyError('dictionary is empty') + root = self.__root + if last: + link = root.prev + link_prev = link.prev + link_prev.next = root + root.prev = link_prev + else: + link = root.next + link_next = link.next + root.next = link_next + link_next.prev = root + key = link.key + del self.__map[key] + value = dict.pop(self, key) + return key, value + + def move_to_end(self, key, last=True): + '''Move an existing element to the end (or beginning if last==False). + + Raises KeyError if the element does not exist. + When last=True, acts like a fast version of self[key]=self.pop(key). + + ''' + link = self.__map[key] + link_prev = link.prev + link_next = link.next + link_prev.next = link_next + link_next.prev = link_prev + root = self.__root + if last: + last = root.prev + link.prev = last + link.next = root + last.next = root.prev = link + else: + first = root.next + link.prev = root + link.next = first + root.next = first.prev = link + + def __sizeof__(self): + sizeof = _sys.getsizeof + n = len(self) + 1 # number of links including root + size = sizeof(self.__dict__) # instance dictionary + size += sizeof(self.__map) * 2 # internal dict and inherited dict + size += sizeof(self.__hardroot) * n # link objects + size += sizeof(self.__root) * n # proxy objects + return size + + update = __update = MutableMapping.update + + def keys(self): + "D.keys() -> a set-like object providing a view on D's keys" + return _OrderedDictKeysView(self) + + def items(self): + "D.items() -> a set-like object providing a view on D's items" + return _OrderedDictItemsView(self) + + def values(self): + "D.values() -> an object providing a view on D's values" + return _OrderedDictValuesView(self) + + __ne__ = MutableMapping.__ne__ + + __marker = object() + + def pop(self, key, default=__marker): + '''od.pop(k[,d]) -> v, remove specified key and return the corresponding + value. If key is not found, d is returned if given, otherwise KeyError + is raised. 
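+
+        For example:
+
+        >>> od = OrderedDict([('a', 1)])
+        >>> od.pop('a')
+        1
+        >>> od.pop('missing', 42)
+        42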
+ + ''' + if key in self: + result = self[key] + del self[key] + return result + if default is self.__marker: + raise KeyError(key) + return default + + def setdefault(self, key, default=None): + 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' + if key in self: + return self[key] + self[key] = default + return default + + @_recursive_repr() + def __repr__(self): + 'od.__repr__() <==> repr(od)' + if not self: + return '%s()' % (self.__class__.__name__,) + return '%s(%r)' % (self.__class__.__name__, list(self.items())) + + def __reduce__(self): + 'Return state information for pickling' + inst_dict = vars(self).copy() + for k in vars(OrderedDict()): + inst_dict.pop(k, None) + return self.__class__, (), inst_dict or None, None, iter(self.items()) + + def copy(self): + 'od.copy() -> a shallow copy of od' + return self.__class__(self) + + @classmethod + def fromkeys(cls, iterable, value=None): + '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S. + If not specified, the value defaults to None. + + ''' + self = cls() + for key in iterable: + self[key] = value + return self + + def __eq__(self, other): + '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive + while comparison to a regular mapping is order-insensitive. + + ''' + if isinstance(other, OrderedDict): + return dict.__eq__(self, other) and all(map(_eq, self, other)) + return dict.__eq__(self, other) + + +try: + from _collections import OrderedDict +except ImportError: + # Leave the pure Python version in place. + pass + + +################################################################################ +### namedtuple +################################################################################ + +_class_template = """\ +from builtins import property as _property, tuple as _tuple +from operator import itemgetter as _itemgetter +from collections import OrderedDict + +class {typename}(tuple): + '{typename}({arg_list})' + + __slots__ = () + + _fields = {field_names!r} + + def __new__(_cls, {arg_list}): + 'Create new instance of {typename}({arg_list})' + return _tuple.__new__(_cls, ({arg_list})) + + @classmethod + def _make(cls, iterable, new=tuple.__new__, len=len): + 'Make a new {typename} object from a sequence or iterable' + result = new(cls, iterable) + if len(result) != {num_fields:d}: + raise TypeError('Expected {num_fields:d} arguments, got %d' % len(result)) + return result + + def _replace(_self, **kwds): + 'Return a new {typename} object replacing specified fields with new values' + result = _self._make(map(kwds.pop, {field_names!r}, _self)) + if kwds: + raise ValueError('Got unexpected field names: %r' % list(kwds)) + return result + + def __repr__(self): + 'Return a nicely formatted representation string' + return self.__class__.__name__ + '({repr_fmt})' % self + + @property + def __dict__(self): + 'A new OrderedDict mapping field names to their values' + return OrderedDict(zip(self._fields, self)) + + def _asdict(self): + 'Return a new OrderedDict which maps field names to their values.' + return self.__dict__ + + def __getnewargs__(self): + 'Return self as a plain tuple. Used by copy and pickle.' 
+        return tuple(self)
+
+    def __getstate__(self):
+        'Exclude the OrderedDict from pickling'
+        return None
+
+{field_defs}
+"""
+
+_repr_template = '{name}=%r'
+
+_field_template = '''\
+    {name} = _property(_itemgetter({index:d}), doc='Alias for field number {index:d}')
+'''
+
+def namedtuple(typename, field_names, verbose=False, rename=False):
+    """Returns a new subclass of tuple with named fields.
+
+    >>> Point = namedtuple('Point', ['x', 'y'])
+    >>> Point.__doc__                   # docstring for the new class
+    'Point(x, y)'
+    >>> p = Point(11, y=22)             # instantiate with positional args or keywords
+    >>> p[0] + p[1]                     # indexable like a plain tuple
+    33
+    >>> x, y = p                        # unpack like a regular tuple
+    >>> x, y
+    (11, 22)
+    >>> p.x + p.y                       # fields also accessible by name
+    33
+    >>> d = p._asdict()                 # convert to a dictionary
+    >>> d['x']
+    11
+    >>> Point(**d)                      # convert from a dictionary
+    Point(x=11, y=22)
+    >>> p._replace(x=100)               # _replace() is like str.replace() but targets named fields
+    Point(x=100, y=22)
+
+    """
+
+    # Validate the field names.  At the user's option, either generate an error
+    # message or automatically replace the field name with a valid name.
+    if isinstance(field_names, str):
+        field_names = field_names.replace(',', ' ').split()
+    field_names = list(map(str, field_names))
+    typename = str(typename)
+    if rename:
+        seen = set()
+        for index, name in enumerate(field_names):
+            if (not name.isidentifier()
+                or _iskeyword(name)
+                or name.startswith('_')
+                or name in seen):
+                field_names[index] = '_%d' % index
+            seen.add(name)
+    for name in [typename] + field_names:
+        if type(name) != str:
+            raise TypeError('Type names and field names must be strings')
+        if not name.isidentifier():
+            raise ValueError('Type names and field names must be valid '
+                             'identifiers: %r' % name)
+        if _iskeyword(name):
+            raise ValueError('Type names and field names cannot be a '
+                             'keyword: %r' % name)
+    seen = set()
+    for name in field_names:
+        if name.startswith('_') and not rename:
+            raise ValueError('Field names cannot start with an underscore: '
+                             '%r' % name)
+        if name in seen:
+            raise ValueError('Encountered duplicate field name: %r' % name)
+        seen.add(name)
+
+    # Fill-in the class template
+    class_definition = _class_template.format(
+        typename = typename,
+        field_names = tuple(field_names),
+        num_fields = len(field_names),
+        arg_list = repr(tuple(field_names)).replace("'", "")[1:-1],
+        repr_fmt = ', '.join(_repr_template.format(name=name)
+                             for name in field_names),
+        field_defs = '\n'.join(_field_template.format(index=index, name=name)
+                               for index, name in enumerate(field_names))
+    )
+
+    # Execute the template string in a temporary namespace and support
+    # tracing utilities by setting a value for frame.f_globals['__name__']
+    namespace = dict(__name__='namedtuple_%s' % typename)
+    exec(class_definition, namespace)
+    result = namespace[typename]
+    result._source = class_definition
+    if verbose:
+        print(result._source)
+
+    # For pickling to work, the __module__ variable needs to be set to the frame
+    # where the named tuple is created.  Bypass this step in environments where
+    # sys._getframe is not defined (Jython for example) or sys._getframe is not
+    # defined for arguments greater than 0 (IronPython).
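+    # (Pickle looks the generated class up by module and name, so a wrong
+    # __module__ would make its instances unpicklable.)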
+ try: + result.__module__ = _sys._getframe(1).f_globals.get('__name__', '__main__') + except (AttributeError, ValueError): + pass + + return result + + +######################################################################## +### Counter +######################################################################## + +def _count_elements(mapping, iterable): + 'Tally elements from the iterable.' + mapping_get = mapping.get + for elem in iterable: + mapping[elem] = mapping_get(elem, 0) + 1 + +try: # Load C helper function if available + from _collections import _count_elements +except ImportError: + pass + +class Counter(dict): + '''Dict subclass for counting hashable items. Sometimes called a bag + or multiset. Elements are stored as dictionary keys and their counts + are stored as dictionary values. + + >>> c = Counter('abcdeabcdabcaba') # count elements from a string + + >>> c.most_common(3) # three most common elements + [('a', 5), ('b', 4), ('c', 3)] + >>> sorted(c) # list all unique elements + ['a', 'b', 'c', 'd', 'e'] + >>> ''.join(sorted(c.elements())) # list elements with repetitions + 'aaaaabbbbcccdde' + >>> sum(c.values()) # total of all counts + 15 + + >>> c['a'] # count of letter 'a' + 5 + >>> for elem in 'shazam': # update counts from an iterable + ... c[elem] += 1 # by adding 1 to each element's count + >>> c['a'] # now there are seven 'a' + 7 + >>> del c['b'] # remove all 'b' + >>> c['b'] # now there are zero 'b' + 0 + + >>> d = Counter('simsalabim') # make another counter + >>> c.update(d) # add in the second counter + >>> c['a'] # now there are nine 'a' + 9 + + >>> c.clear() # empty the counter + >>> c + Counter() + + Note: If a count is set to zero or reduced to zero, it will remain + in the counter until the entry is deleted or the counter is cleared: + + >>> c = Counter('aaabbc') + >>> c['b'] -= 2 # reduce the count of 'b' by two + >>> c.most_common() # 'b' is still in, but its count is zero + [('a', 3), ('c', 1), ('b', 0)] + + ''' + # References: + # http://en.wikipedia.org/wiki/Multiset + # http://www.gnu.org/software/smalltalk/manual-base/html_node/Bag.html + # http://www.demo2s.com/Tutorial/Cpp/0380__set-multiset/Catalog0380__set-multiset.htm + # http://code.activestate.com/recipes/259174/ + # Knuth, TAOCP Vol. II section 4.6.3 + + def __init__(*args, **kwds): + '''Create a new, empty Counter object. And if given, count elements + from an input iterable. Or, initialize the count from another mapping + of elements to their counts. + + >>> c = Counter() # a new, empty counter + >>> c = Counter('gallahad') # a new counter from an iterable + >>> c = Counter({'a': 4, 'b': 2}) # a new counter from a mapping + >>> c = Counter(a=4, b=2) # a new counter from keyword args + + ''' + if not args: + raise TypeError("descriptor '__init__' of 'Counter' object " + "needs an argument") + self, *args = args + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + super(Counter, self).__init__() + self.update(*args, **kwds) + + def __missing__(self, key): + 'The count of elements not in the Counter is zero.' + # Needed so that self[missing_item] does not raise KeyError + return 0 + + def most_common(self, n=None): + '''List the n most common elements and their counts from the most + common to the least. If n is None, then list all element counts. 
+
+        >>> Counter('abcdeabcdabcaba').most_common(3)
+        [('a', 5), ('b', 4), ('c', 3)]
+
+        '''
+        # Emulate Bag.sortedByCount from Smalltalk
+        if n is None:
+            return sorted(self.items(), key=_itemgetter(1), reverse=True)
+        return _heapq.nlargest(n, self.items(), key=_itemgetter(1))
+
+    def elements(self):
+        '''Iterator over elements repeating each as many times as its count.
+
+        >>> c = Counter('ABCABC')
+        >>> sorted(c.elements())
+        ['A', 'A', 'B', 'B', 'C', 'C']
+
+        # Knuth's example for prime factors of 1836:  2**2 * 3**3 * 17**1
+        >>> prime_factors = Counter({2: 2, 3: 3, 17: 1})
+        >>> product = 1
+        >>> for factor in prime_factors.elements():     # loop over factors
+        ...     product *= factor                       # and multiply them
+        >>> product
+        1836
+
+        Note, if an element's count has been set to zero or is a negative
+        number, elements() will ignore it.
+
+        '''
+        # Emulate Bag.do from Smalltalk and Multiset.begin from C++.
+        return _chain.from_iterable(_starmap(_repeat, self.items()))
+
+    # Override dict methods where necessary
+
+    @classmethod
+    def fromkeys(cls, iterable, v=None):
+        # There is no equivalent method for counters because setting v=1
+        # means that no element can have a count greater than one.
+        raise NotImplementedError(
+            'Counter.fromkeys() is undefined.  Use Counter(iterable) instead.')
+
+    def update(*args, **kwds):
+        '''Like dict.update() but add counts instead of replacing them.
+
+        Source can be an iterable, a dictionary, or another Counter instance.
+
+        >>> c = Counter('which')
+        >>> c.update('witch')           # add elements from another iterable
+        >>> d = Counter('watch')
+        >>> c.update(d)                 # add elements from another counter
+        >>> c['h']                      # four 'h' in which, witch, and watch
+        4
+
+        '''
+        # The regular dict.update() operation makes no sense here because the
+        # replace behavior results in some of the original untouched counts
+        # being mixed in with all of the other counts for a mishmash that
+        # doesn't have a straightforward interpretation in most counting
+        # contexts.  Instead, we implement straight-addition.  Both the inputs
+        # and outputs are allowed to contain zero and negative counts.
+
+        if not args:
+            raise TypeError("descriptor 'update' of 'Counter' object "
+                            "needs an argument")
+        self, *args = args
+        if len(args) > 1:
+            raise TypeError('expected at most 1 arguments, got %d' % len(args))
+        iterable = args[0] if args else None
+        if iterable is not None:
+            if isinstance(iterable, Mapping):
+                if self:
+                    self_get = self.get
+                    for elem, count in iterable.items():
+                        self[elem] = count + self_get(elem, 0)
+                else:
+                    super(Counter, self).update(iterable) # fast path when counter is empty
+            else:
+                _count_elements(self, iterable)
+        if kwds:
+            self.update(kwds)
+
+    def subtract(*args, **kwds):
+        '''Like dict.update() but subtracts counts instead of replacing them.
+        Counts can be reduced below zero.  Both the inputs and outputs are
+        allowed to contain zero and negative counts.
+
+        Source can be an iterable, a dictionary, or another Counter instance.
+ + >>> c = Counter('which') + >>> c.subtract('witch') # subtract elements from another iterable + >>> c.subtract(Counter('watch')) # subtract elements from another counter + >>> c['h'] # 2 in which, minus 1 in witch, minus 1 in watch + 0 + >>> c['w'] # 1 in which, minus 1 in witch, minus 1 in watch + -1 + + ''' + if not args: + raise TypeError("descriptor 'subtract' of 'Counter' object " + "needs an argument") + self, *args = args + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + iterable = args[0] if args else None + if iterable is not None: + self_get = self.get + if isinstance(iterable, Mapping): + for elem, count in iterable.items(): + self[elem] = self_get(elem, 0) - count + else: + for elem in iterable: + self[elem] = self_get(elem, 0) - 1 + if kwds: + self.subtract(kwds) + + def copy(self): + 'Return a shallow copy.' + return self.__class__(self) + + def __reduce__(self): + return self.__class__, (dict(self),) + + def __delitem__(self, elem): + 'Like dict.__delitem__() but does not raise KeyError for missing values.' + if elem in self: + super().__delitem__(elem) + + def __repr__(self): + if not self: + return '%s()' % self.__class__.__name__ + try: + items = ', '.join(map('%r: %r'.__mod__, self.most_common())) + return '%s({%s})' % (self.__class__.__name__, items) + except TypeError: + # handle case where values are not orderable + return '{0}({1!r})'.format(self.__class__.__name__, dict(self)) + + # Multiset-style mathematical operations discussed in: + # Knuth TAOCP Volume II section 4.6.3 exercise 19 + # and at http://en.wikipedia.org/wiki/Multiset + # + # Outputs guaranteed to only include positive counts. + # + # To strip negative and zero counts, add-in an empty counter: + # c += Counter() + + def __add__(self, other): + '''Add counts from two counters. + + >>> Counter('abbb') + Counter('bcc') + Counter({'b': 4, 'c': 2, 'a': 1}) + + ''' + if not isinstance(other, Counter): + return NotImplemented + result = Counter() + for elem, count in self.items(): + newcount = count + other[elem] + if newcount > 0: + result[elem] = newcount + for elem, count in other.items(): + if elem not in self and count > 0: + result[elem] = count + return result + + def __sub__(self, other): + ''' Subtract count, but keep only results with positive counts. + + >>> Counter('abbbc') - Counter('bccd') + Counter({'b': 2, 'a': 1}) + + ''' + if not isinstance(other, Counter): + return NotImplemented + result = Counter() + for elem, count in self.items(): + newcount = count - other[elem] + if newcount > 0: + result[elem] = newcount + for elem, count in other.items(): + if elem not in self and count < 0: + result[elem] = 0 - count + return result + + def __or__(self, other): + '''Union is the maximum of value in either of the input counters. + + >>> Counter('abbb') | Counter('bcc') + Counter({'b': 3, 'c': 2, 'a': 1}) + + ''' + if not isinstance(other, Counter): + return NotImplemented + result = Counter() + for elem, count in self.items(): + other_count = other[elem] + newcount = other_count if count < other_count else count + if newcount > 0: + result[elem] = newcount + for elem, count in other.items(): + if elem not in self and count > 0: + result[elem] = count + return result + + def __and__(self, other): + ''' Intersection is the minimum of corresponding counts. 
+
+        >>> Counter('abbb') & Counter('bcc')
+        Counter({'b': 1})
+
+        '''
+        if not isinstance(other, Counter):
+            return NotImplemented
+        result = Counter()
+        for elem, count in self.items():
+            other_count = other[elem]
+            newcount = count if count < other_count else other_count
+            if newcount > 0:
+                result[elem] = newcount
+        return result
+
+    def __pos__(self):
+        'Adds an empty counter, effectively stripping negative and zero counts'
+        result = Counter()
+        for elem, count in self.items():
+            if count > 0:
+                result[elem] = count
+        return result
+
+    def __neg__(self):
+        '''Subtracts from an empty counter.  Strips positive and zero counts,
+        and flips the sign on negative counts.
+
+        '''
+        result = Counter()
+        for elem, count in self.items():
+            if count < 0:
+                result[elem] = 0 - count
+        return result
+
+    def _keep_positive(self):
+        '''Internal method to strip elements with a negative or zero count'''
+        nonpositive = [elem for elem, count in self.items() if not count > 0]
+        for elem in nonpositive:
+            del self[elem]
+        return self
+
+    def __iadd__(self, other):
+        '''Inplace add from another counter, keeping only positive counts.
+
+        >>> c = Counter('abbb')
+        >>> c += Counter('bcc')
+        >>> c
+        Counter({'b': 4, 'c': 2, 'a': 1})
+
+        '''
+        for elem, count in other.items():
+            self[elem] += count
+        return self._keep_positive()
+
+    def __isub__(self, other):
+        '''Inplace subtract counter, but keep only results with positive counts.
+
+        >>> c = Counter('abbbc')
+        >>> c -= Counter('bccd')
+        >>> c
+        Counter({'b': 2, 'a': 1})
+
+        '''
+        for elem, count in other.items():
+            self[elem] -= count
+        return self._keep_positive()
+
+    def __ior__(self, other):
+        '''Inplace union is the maximum of value from either counter.
+
+        >>> c = Counter('abbb')
+        >>> c |= Counter('bcc')
+        >>> c
+        Counter({'b': 3, 'c': 2, 'a': 1})
+
+        '''
+        for elem, other_count in other.items():
+            count = self[elem]
+            if other_count > count:
+                self[elem] = other_count
+        return self._keep_positive()
+
+    def __iand__(self, other):
+        '''Inplace intersection is the minimum of corresponding counts.
+
+        >>> c = Counter('abbb')
+        >>> c &= Counter('bcc')
+        >>> c
+        Counter({'b': 1})
+
+        '''
+        for elem, count in self.items():
+            other_count = other[elem]
+            if other_count < count:
+                self[elem] = other_count
+        return self._keep_positive()
+
+
+########################################################################
+###  ChainMap (helper for configparser and string.Template)
+########################################################################
+
+class ChainMap(MutableMapping):
+    ''' A ChainMap groups multiple dicts (or other mappings) together
+    to create a single, updateable view.
+
+    The underlying mappings are stored in a list.  That list is public and can
+    be accessed or updated using the *maps* attribute.  There is no other state.
+
+    Lookups search the underlying mappings successively until a key is found.
+    In contrast, writes, updates, and deletions only operate on the first
+    mapping.
+
+    '''
+
+    def __init__(self, *maps):
+        '''Initialize a ChainMap by setting *maps* to the given mappings.
+        If no mappings are provided, a single empty dictionary is used.
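+
+        A small illustrative example:
+
+            cm = ChainMap({'x': 1}, {'x': 2, 'y': 3})
+            cm['x']        # -> 1, found in the first mapping
+            cm['y']        # -> 3, found by falling through to the second
+            cm['z'] = 0    # writes always go to the first mapping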
+
+        '''
+        self.maps = list(maps) or [{}]          # always at least one map
+
+    def __missing__(self, key):
+        raise KeyError(key)
+
+    def __getitem__(self, key):
+        for mapping in self.maps:
+            try:
+                return mapping[key]             # can't use 'key in mapping' with defaultdict
+            except KeyError:
+                pass
+        return self.__missing__(key)            # support subclasses that define __missing__
+
+    def get(self, key, default=None):
+        return self[key] if key in self else default
+
+    def __len__(self):
+        return len(set().union(*self.maps))     # reuses stored hash values if possible
+
+    def __iter__(self):
+        return iter(set().union(*self.maps))
+
+    def __contains__(self, key):
+        return any(key in m for m in self.maps)
+
+    def __bool__(self):
+        return any(self.maps)
+
+    @_recursive_repr()
+    def __repr__(self):
+        return '{0.__class__.__name__}({1})'.format(
+            self, ', '.join(map(repr, self.maps)))
+
+    @classmethod
+    def fromkeys(cls, iterable, *args):
+        'Create a ChainMap with a single dict created from the iterable.'
+        return cls(dict.fromkeys(iterable, *args))
+
+    def copy(self):
+        'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]'
+        return self.__class__(self.maps[0].copy(), *self.maps[1:])
+
+    __copy__ = copy
+
+    def new_child(self, m=None):                # like Django's Context.push()
+        '''
+        New ChainMap with a new map followed by all previous maps. If no
+        map is provided, an empty dict is used.
+        '''
+        if m is None:
+            m = {}
+        return self.__class__(m, *self.maps)
+
+    @property
+    def parents(self):                          # like Django's Context.pop()
+        'New ChainMap from maps[1:].'
+        return self.__class__(*self.maps[1:])
+
+    def __setitem__(self, key, value):
+        self.maps[0][key] = value
+
+    def __delitem__(self, key):
+        try:
+            del self.maps[0][key]
+        except KeyError:
+            raise KeyError('Key not found in the first mapping: {!r}'.format(key))
+
+    def popitem(self):
+        'Remove and return an item pair from maps[0]. Raise KeyError if maps[0] is empty.'
+        try:
+            return self.maps[0].popitem()
+        except KeyError:
+            raise KeyError('No keys found in the first mapping.')
+
+    def pop(self, key, *args):
+        'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].'
+        try:
+            return self.maps[0].pop(key, *args)
+        except KeyError:
+            raise KeyError('Key not found in the first mapping: {!r}'.format(key))
+
+    def clear(self):
+        'Clear maps[0], leaving maps[1:] intact.'
+ self.maps[0].clear() + + +################################################################################ +### UserDict +################################################################################ + +class UserDict(MutableMapping): + + # Start by filling-out the abstract methods + def __init__(self, dict=None, **kwargs): + self.data = {} + if dict is not None: + self.update(dict) + if len(kwargs): + self.update(kwargs) + def __len__(self): return len(self.data) + def __getitem__(self, key): + if key in self.data: + return self.data[key] + if hasattr(self.__class__, "__missing__"): + return self.__class__.__missing__(self, key) + raise KeyError(key) + def __setitem__(self, key, item): self.data[key] = item + def __delitem__(self, key): del self.data[key] + def __iter__(self): + return iter(self.data) + + # Modify __contains__ to work correctly when __missing__ is present + def __contains__(self, key): + return key in self.data + + # Now, add the methods in dicts but not in MutableMapping + def __repr__(self): return repr(self.data) + def copy(self): + if self.__class__ is UserDict: + return UserDict(self.data.copy()) + import copy + data = self.data + try: + self.data = {} + c = copy.copy(self) + finally: + self.data = data + c.update(self) + return c + @classmethod + def fromkeys(cls, iterable, value=None): + d = cls() + for key in iterable: + d[key] = value + return d + + + +################################################################################ +### UserList +################################################################################ + +class UserList(MutableSequence): + """A more or less complete user-defined wrapper around list objects.""" + def __init__(self, initlist=None): + self.data = [] + if initlist is not None: + # XXX should this accept an arbitrary sequence? 
+ if type(initlist) == type(self.data): + self.data[:] = initlist + elif isinstance(initlist, UserList): + self.data[:] = initlist.data[:] + else: + self.data = list(initlist) + def __repr__(self): return repr(self.data) + def __lt__(self, other): return self.data < self.__cast(other) + def __le__(self, other): return self.data <= self.__cast(other) + def __eq__(self, other): return self.data == self.__cast(other) + def __gt__(self, other): return self.data > self.__cast(other) + def __ge__(self, other): return self.data >= self.__cast(other) + def __cast(self, other): + return other.data if isinstance(other, UserList) else other + def __contains__(self, item): return item in self.data + def __len__(self): return len(self.data) + def __getitem__(self, i): return self.data[i] + def __setitem__(self, i, item): self.data[i] = item + def __delitem__(self, i): del self.data[i] + def __add__(self, other): + if isinstance(other, UserList): + return self.__class__(self.data + other.data) + elif isinstance(other, type(self.data)): + return self.__class__(self.data + other) + return self.__class__(self.data + list(other)) + def __radd__(self, other): + if isinstance(other, UserList): + return self.__class__(other.data + self.data) + elif isinstance(other, type(self.data)): + return self.__class__(other + self.data) + return self.__class__(list(other) + self.data) + def __iadd__(self, other): + if isinstance(other, UserList): + self.data += other.data + elif isinstance(other, type(self.data)): + self.data += other + else: + self.data += list(other) + return self + def __mul__(self, n): + return self.__class__(self.data*n) + __rmul__ = __mul__ + def __imul__(self, n): + self.data *= n + return self + def append(self, item): self.data.append(item) + def insert(self, i, item): self.data.insert(i, item) + def pop(self, i=-1): return self.data.pop(i) + def remove(self, item): self.data.remove(item) + def clear(self): self.data.clear() + def copy(self): return self.__class__(self) + def count(self, item): return self.data.count(item) + def index(self, item, *args): return self.data.index(item, *args) + def reverse(self): self.data.reverse() + def sort(self, *args, **kwds): self.data.sort(*args, **kwds) + def extend(self, other): + if isinstance(other, UserList): + self.data.extend(other.data) + else: + self.data.extend(other) + + + +################################################################################ +### UserString +################################################################################ + +class UserString(Sequence): + def __init__(self, seq): + if isinstance(seq, str): + self.data = seq + elif isinstance(seq, UserString): + self.data = seq.data[:] + else: + self.data = str(seq) + def __str__(self): return str(self.data) + def __repr__(self): return repr(self.data) + def __int__(self): return int(self.data) + def __float__(self): return float(self.data) + def __complex__(self): return complex(self.data) + def __hash__(self): return hash(self.data) + def __getnewargs__(self): + return (self.data[:],) + + def __eq__(self, string): + if isinstance(string, UserString): + return self.data == string.data + return self.data == string + def __lt__(self, string): + if isinstance(string, UserString): + return self.data < string.data + return self.data < string + def __le__(self, string): + if isinstance(string, UserString): + return self.data <= string.data + return self.data <= string + def __gt__(self, string): + if isinstance(string, UserString): + return self.data > string.data + return 
self.data > string + def __ge__(self, string): + if isinstance(string, UserString): + return self.data >= string.data + return self.data >= string + + def __contains__(self, char): + if isinstance(char, UserString): + char = char.data + return char in self.data + + def __len__(self): return len(self.data) + def __getitem__(self, index): return self.__class__(self.data[index]) + def __add__(self, other): + if isinstance(other, UserString): + return self.__class__(self.data + other.data) + elif isinstance(other, str): + return self.__class__(self.data + other) + return self.__class__(self.data + str(other)) + def __radd__(self, other): + if isinstance(other, str): + return self.__class__(other + self.data) + return self.__class__(str(other) + self.data) + def __mul__(self, n): + return self.__class__(self.data*n) + __rmul__ = __mul__ + def __mod__(self, args): + return self.__class__(self.data % args) + def __rmod__(self, format): + return self.__class__(format % args) + + # the following methods are defined in alphabetical order: + def capitalize(self): return self.__class__(self.data.capitalize()) + def casefold(self): + return self.__class__(self.data.casefold()) + def center(self, width, *args): + return self.__class__(self.data.center(width, *args)) + def count(self, sub, start=0, end=_sys.maxsize): + if isinstance(sub, UserString): + sub = sub.data + return self.data.count(sub, start, end) + def encode(self, encoding=None, errors=None): # XXX improve this? + if encoding: + if errors: + return self.__class__(self.data.encode(encoding, errors)) + return self.__class__(self.data.encode(encoding)) + return self.__class__(self.data.encode()) + def endswith(self, suffix, start=0, end=_sys.maxsize): + return self.data.endswith(suffix, start, end) + def expandtabs(self, tabsize=8): + return self.__class__(self.data.expandtabs(tabsize)) + def find(self, sub, start=0, end=_sys.maxsize): + if isinstance(sub, UserString): + sub = sub.data + return self.data.find(sub, start, end) + def format(self, *args, **kwds): + return self.data.format(*args, **kwds) + def format_map(self, mapping): + return self.data.format_map(mapping) + def index(self, sub, start=0, end=_sys.maxsize): + return self.data.index(sub, start, end) + def isalpha(self): return self.data.isalpha() + def isalnum(self): return self.data.isalnum() + def isdecimal(self): return self.data.isdecimal() + def isdigit(self): return self.data.isdigit() + def isidentifier(self): return self.data.isidentifier() + def islower(self): return self.data.islower() + def isnumeric(self): return self.data.isnumeric() + def isprintable(self): return self.data.isprintable() + def isspace(self): return self.data.isspace() + def istitle(self): return self.data.istitle() + def isupper(self): return self.data.isupper() + def join(self, seq): return self.data.join(seq) + def ljust(self, width, *args): + return self.__class__(self.data.ljust(width, *args)) + def lower(self): return self.__class__(self.data.lower()) + def lstrip(self, chars=None): return self.__class__(self.data.lstrip(chars)) + maketrans = str.maketrans + def partition(self, sep): + return self.data.partition(sep) + def replace(self, old, new, maxsplit=-1): + if isinstance(old, UserString): + old = old.data + if isinstance(new, UserString): + new = new.data + return self.__class__(self.data.replace(old, new, maxsplit)) + def rfind(self, sub, start=0, end=_sys.maxsize): + if isinstance(sub, UserString): + sub = sub.data + return self.data.rfind(sub, start, end) + def rindex(self, sub, start=0, 
end=_sys.maxsize): + return self.data.rindex(sub, start, end) + def rjust(self, width, *args): + return self.__class__(self.data.rjust(width, *args)) + def rpartition(self, sep): + return self.data.rpartition(sep) + def rstrip(self, chars=None): + return self.__class__(self.data.rstrip(chars)) + def split(self, sep=None, maxsplit=-1): + return self.data.split(sep, maxsplit) + def rsplit(self, sep=None, maxsplit=-1): + return self.data.rsplit(sep, maxsplit) + def splitlines(self, keepends=False): return self.data.splitlines(keepends) + def startswith(self, prefix, start=0, end=_sys.maxsize): + return self.data.startswith(prefix, start, end) + def strip(self, chars=None): return self.__class__(self.data.strip(chars)) + def swapcase(self): return self.__class__(self.data.swapcase()) + def title(self): return self.__class__(self.data.title()) + def translate(self, *args): + return self.__class__(self.data.translate(*args)) + def upper(self): return self.__class__(self.data.upper()) + def zfill(self, width): return self.__class__(self.data.zfill(width)) diff --git a/typing_extensions/test_data/python-3.5.0/collections/__main__.py b/typing_extensions/test_data/python-3.5.0/collections/__main__.py new file mode 100644 index 00000000..763e38e0 --- /dev/null +++ b/typing_extensions/test_data/python-3.5.0/collections/__main__.py @@ -0,0 +1,38 @@ +################################################################################ +### Simple tests +################################################################################ + +# verify that instances can be pickled +from collections import namedtuple +from pickle import loads, dumps +Point = namedtuple('Point', 'x, y', True) +p = Point(x=10, y=20) +assert p == loads(dumps(p)) + +# test and demonstrate ability to override methods +class Point(namedtuple('Point', 'x y')): + __slots__ = () + @property + def hypot(self): + return (self.x ** 2 + self.y ** 2) ** 0.5 + def __str__(self): + return 'Point: x=%6.3f y=%6.3f hypot=%6.3f' % (self.x, self.y, self.hypot) + +for p in Point(3, 4), Point(14, 5/7.): + print (p) + +class Point(namedtuple('Point', 'x y')): + 'Point class with optimized _make() and _replace() without error-checking' + __slots__ = () + _make = classmethod(tuple.__new__) + def _replace(self, _map=map, **kwds): + return self._make(_map(kwds.get, ('x', 'y'), self)) + +print(Point(11, 22)._replace(x=100)) + +Point3D = namedtuple('Point3D', Point._fields + ('z',)) +print(Point3D.__doc__) + +import doctest, collections +TestResults = namedtuple('TestResults', 'failed attempted') +print(TestResults(*doctest.testmod(collections))) diff --git a/typing_extensions/test_data/python-3.5.0/collections/abc.py b/typing_extensions/test_data/python-3.5.0/collections/abc.py new file mode 100644 index 00000000..891600d1 --- /dev/null +++ b/typing_extensions/test_data/python-3.5.0/collections/abc.py @@ -0,0 +1,2 @@ +from _collections_abc import * +from _collections_abc import __all__ diff --git a/typing_extensions/test_data/python-3.5.0/typing.py b/typing_extensions/test_data/python-3.5.0/typing.py new file mode 100644 index 00000000..1a4982ea --- /dev/null +++ b/typing_extensions/test_data/python-3.5.0/typing.py @@ -0,0 +1,1648 @@ +# TODO nits: +# Get rid of asserts that are the caller's fault. +# Docstrings (e.g. ABCs). + +import abc +from abc import abstractmethod, abstractproperty +import collections +import functools +import re as stdlib_re # Avoid confusion with the re we export. 
+import sys +import types +try: + import collections.abc as collections_abc +except ImportError: + import collections as collections_abc # Fallback for PY3.2. + + +# Please keep __all__ alphabetized within each category. +__all__ = [ + # Super-special typing primitives. + 'Any', + 'Callable', + 'Generic', + 'Optional', + 'TypeVar', + 'Union', + 'Tuple', + + # ABCs (from collections.abc). + 'AbstractSet', # collections.abc.Set. + 'ByteString', + 'Container', + 'Hashable', + 'ItemsView', + 'Iterable', + 'Iterator', + 'KeysView', + 'Mapping', + 'MappingView', + 'MutableMapping', + 'MutableSequence', + 'MutableSet', + 'Sequence', + 'Sized', + 'ValuesView', + + # Structural checks, a.k.a. protocols. + 'Reversible', + 'SupportsAbs', + 'SupportsFloat', + 'SupportsInt', + 'SupportsRound', + + # Concrete collection types. + 'Dict', + 'List', + 'Set', + 'NamedTuple', # Not really a type. + 'Generator', + + # One-off things. + 'AnyStr', + 'cast', + 'get_type_hints', + 'no_type_check', + 'no_type_check_decorator', + 'overload', + + # Submodules. + 'io', + 're', +] + + +def _qualname(x): + if sys.version_info[:2] >= (3, 3): + return x.__qualname__ + else: + # Fall back to just name. + return x.__name__ + + +class TypingMeta(type): + """Metaclass for every type defined below. + + This overrides __new__() to require an extra keyword parameter + '_root', which serves as a guard against naive subclassing of the + typing classes. Any legitimate class defined using a metaclass + derived from TypingMeta (including internal subclasses created by + e.g. Union[X, Y]) must pass _root=True. + + This also defines a dummy constructor (all the work is done in + __new__) and a nicer repr(). + """ + + _is_protocol = False + + def __new__(cls, name, bases, namespace, *, _root=False): + if not _root: + raise TypeError("Cannot subclass %s" % + (', '.join(map(_type_repr, bases)) or '()')) + return super().__new__(cls, name, bases, namespace) + + def __init__(self, *args, **kwds): + pass + + def _eval_type(self, globalns, localns): + """Override this in subclasses to interpret forward references. + + For example, Union['C'] is internally stored as + Union[_ForwardRef('C')], which should evaluate to _Union[C], + where C is an object found in globalns or localns (searching + localns first, of course). 
+ """ + return self + + def _has_type_var(self): + return False + + def __repr__(self): + return '%s.%s' % (self.__module__, _qualname(self)) + + +class Final: + """Mix-in class to prevent instantiation.""" + + __slots__ = () + + def __new__(self, *args, **kwds): + raise TypeError("Cannot instantiate %r" % self.__class__) + + +class _ForwardRef(TypingMeta): + """Wrapper to hold a forward reference.""" + + def __new__(cls, arg): + if not isinstance(arg, str): + raise TypeError('ForwardRef must be a string -- got %r' % (arg,)) + try: + code = compile(arg, '', 'eval') + except SyntaxError: + raise SyntaxError('ForwardRef must be an expression -- got %r' % + (arg,)) + self = super().__new__(cls, arg, (), {}, _root=True) + self.__forward_arg__ = arg + self.__forward_code__ = code + self.__forward_evaluated__ = False + self.__forward_value__ = None + typing_globals = globals() + frame = sys._getframe(1) + while frame is not None and frame.f_globals is typing_globals: + frame = frame.f_back + assert frame is not None + self.__forward_frame__ = frame + return self + + def _eval_type(self, globalns, localns): + if not isinstance(localns, dict): + raise TypeError('ForwardRef localns must be a dict -- got %r' % + (localns,)) + if not isinstance(globalns, dict): + raise TypeError('ForwardRef globalns must be a dict -- got %r' % + (globalns,)) + if not self.__forward_evaluated__: + if globalns is None and localns is None: + globalns = localns = {} + elif globalns is None: + globalns = localns + elif localns is None: + localns = globalns + self.__forward_value__ = _type_check( + eval(self.__forward_code__, globalns, localns), + "Forward references must evaluate to types.") + self.__forward_evaluated__ = True + return self.__forward_value__ + + def __instancecheck__(self, obj): + raise TypeError("Forward references cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + if not self.__forward_evaluated__: + globalns = self.__forward_frame__.f_globals + localns = self.__forward_frame__.f_locals + try: + self._eval_type(globalns, localns) + except NameError: + return False # Too early. + return issubclass(cls, self.__forward_value__) + + def __repr__(self): + return '_ForwardRef(%r)' % (self.__forward_arg__,) + + +class _TypeAlias: + """Internal helper class for defining generic variants of concrete types. + + Note that this is not a type; let's call it a pseudo-type. It can + be used in instance and subclass checks, e.g. isinstance(m, Match) + or issubclass(type(m), Match). However, it cannot be itself the + target of an issubclass() call; e.g. issubclass(Match, C) (for + some arbitrary class C) raises TypeError rather than returning + False. + """ + + __slots__ = ('name', 'type_var', 'impl_type', 'type_checker') + + def __new__(cls, *args, **kwds): + """Constructor. + + This only exists to give a better error message in case + someone tries to subclass a type alias (not a good idea). + """ + if (len(args) == 3 and + isinstance(args[0], str) and + isinstance(args[1], tuple)): + # Close enough. + raise TypeError("A type alias cannot be subclassed") + return object.__new__(cls) + + def __init__(self, name, type_var, impl_type, type_checker): + """Initializer. + + Args: + name: The name, e.g. 'Pattern'. + type_var: The type parameter, e.g. AnyStr, or the + specific type, e.g. str. + impl_type: The implementation type. + type_checker: Function that takes an impl_type instance. + and returns a value that should be a type_var instance. 
+ """ + assert isinstance(name, str), repr(name) + assert isinstance(type_var, type), repr(type_var) + assert isinstance(impl_type, type), repr(impl_type) + assert not isinstance(impl_type, TypingMeta), repr(impl_type) + self.name = name + self.type_var = type_var + self.impl_type = impl_type + self.type_checker = type_checker + + def __repr__(self): + return "%s[%s]" % (self.name, _type_repr(self.type_var)) + + def __getitem__(self, parameter): + assert isinstance(parameter, type), repr(parameter) + if not isinstance(self.type_var, TypeVar): + raise TypeError("%s cannot be further parameterized." % self) + if self.type_var.__constraints__: + if not issubclass(parameter, Union[self.type_var.__constraints__]): + raise TypeError("%s is not a valid substitution for %s." % + (parameter, self.type_var)) + return self.__class__(self.name, parameter, + self.impl_type, self.type_checker) + + def __instancecheck__(self, obj): + raise TypeError("Type aliases cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + if cls is Any: + return True + if isinstance(cls, _TypeAlias): + # Covariance. For now, we compare by name. + return (cls.name == self.name and + issubclass(cls.type_var, self.type_var)) + else: + # Note that this is too lenient, because the + # implementation type doesn't carry information about + # whether it is about bytes or str (for example). + return issubclass(cls, self.impl_type) + + +def _has_type_var(t): + return t is not None and isinstance(t, TypingMeta) and t._has_type_var() + + +def _eval_type(t, globalns, localns): + if isinstance(t, TypingMeta): + return t._eval_type(globalns, localns) + else: + return t + + +def _type_check(arg, msg): + """Check that the argument is a type, and return it. + + As a special case, accept None and return type(None) instead. + Also, _TypeAlias instances (e.g. Match, Pattern) are acceptable. + + The msg argument is a human-readable error message, e.g. + + "Union[arg, ...]: arg should be a type." + + We append the repr() of the actual value (truncated to 100 chars). + """ + if arg is None: + return type(None) + if isinstance(arg, str): + arg = _ForwardRef(arg) + if not isinstance(arg, (type, _TypeAlias)): + raise TypeError(msg + " Got %.100r." % (arg,)) + return arg + + +def _type_repr(obj): + """Return the repr() of an object, special-casing types. + + If obj is a type, we return a shorter version than the default + type.__repr__, based on the module and qualified name, which is + typically enough to uniquely identify a type. For everything + else, we fall back on repr(obj). + """ + if isinstance(obj, type) and not isinstance(obj, TypingMeta): + if obj.__module__ == 'builtins': + return _qualname(obj) + else: + return '%s.%s' % (obj.__module__, _qualname(obj)) + else: + return repr(obj) + + +class AnyMeta(TypingMeta): + """Metaclass for Any.""" + + def __new__(cls, name, bases, namespace, _root=False): + self = super().__new__(cls, name, bases, namespace, _root=_root) + return self + + def __instancecheck__(self, obj): + raise TypeError("Any cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + if not isinstance(cls, type): + return super().__subclasscheck__(cls) # To TypeError. + return True + + +class Any(Final, metaclass=AnyMeta, _root=True): + """Special type indicating an unconstrained type. + + - Any object is an instance of Any. + - Any class is a subclass of Any. + - As a special case, Any and object are subclasses of each other. 
+ """ + + __slots__ = () + + +class TypeVar(TypingMeta, metaclass=TypingMeta, _root=True): + """Type variable. + + Usage:: + + T = TypeVar('T') # Can be anything + A = TypeVar('A', str, bytes) # Must be str or bytes + + Type variables exist primarily for the benefit of static type + checkers. They serve as the parameters for generic types as well + as for generic function definitions. See class Generic for more + information on generic types. Generic functions work as follows: + + def repeat(x: T, n: int) -> Sequence[T]: + '''Return a list containing n references to x.''' + return [x]*n + + def longest(x: A, y: A) -> A: + '''Return the longest of two strings.''' + return x if len(x) >= len(y) else y + + The latter example's signature is essentially the overloading + of (str, str) -> str and (bytes, bytes) -> bytes. Also note + that if the arguments are instances of some subclass of str, + the return type is still plain str. + + At runtime, isinstance(x, T) will raise TypeError. However, + issubclass(C, T) is true for any class C, and issubclass(str, A) + and issubclass(bytes, A) are true, and issubclass(int, A) is + false. + + Type variables may be marked covariant or contravariant by passing + covariant=True or contravariant=True. See PEP 484 for more + details. By default type variables are invariant. + + Type variables can be introspected. e.g.: + + T.__name__ == 'T' + T.__constraints__ == () + T.__covariant__ == False + T.__contravariant__ = False + A.__constraints__ == (str, bytes) + """ + + def __new__(cls, name, *constraints, bound=None, + covariant=False, contravariant=False): + self = super().__new__(cls, name, (Final,), {}, _root=True) + if covariant and contravariant: + raise ValueError("Bivariant type variables are not supported.") + self.__covariant__ = bool(covariant) + self.__contravariant__ = bool(contravariant) + if constraints and bound is not None: + raise TypeError("Constraints cannot be combined with bound=...") + if constraints and len(constraints) == 1: + raise TypeError("A single constraint is not allowed") + msg = "TypeVar(name, constraint, ...): constraints must be types." + self.__constraints__ = tuple(_type_check(t, msg) for t in constraints) + if bound: + self.__bound__ = _type_check(bound, "Bound must be a type.") + else: + self.__bound__ = None + return self + + def _has_type_var(self): + return True + + def __repr__(self): + if self.__covariant__: + prefix = '+' + elif self.__contravariant__: + prefix = '-' + else: + prefix = '~' + return prefix + self.__name__ + + def __instancecheck__(self, instance): + raise TypeError("Type variables cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + # TODO: Make this raise TypeError too? + if cls is self: + return True + if cls is Any: + return True + if self.__bound__ is not None: + return issubclass(cls, self.__bound__) + if self.__constraints__: + return any(issubclass(cls, c) for c in self.__constraints__) + return True + + +# Some unconstrained type variables. These are used by the container types. +T = TypeVar('T') # Any type. +KT = TypeVar('KT') # Key type. +VT = TypeVar('VT') # Value type. +T_co = TypeVar('T_co', covariant=True) # Any type covariant containers. +V_co = TypeVar('V_co', covariant=True) # Any type covariant containers. +VT_co = TypeVar('VT_co', covariant=True) # Value type covariant containers. +T_contra = TypeVar('T_contra', contravariant=True) # Ditto contravariant. + +# A useful type variable with constraints. This represents string types. 
+# TODO: What about bytearray, memoryview?
+AnyStr = TypeVar('AnyStr', bytes, str)
+
+
+class UnionMeta(TypingMeta):
+    """Metaclass for Union."""
+
+    def __new__(cls, name, bases, namespace, parameters=None, _root=False):
+        if parameters is None:
+            return super().__new__(cls, name, bases, namespace, _root=_root)
+        if not isinstance(parameters, tuple):
+            raise TypeError("Expected parameters=<tuple>")
+        # Flatten out Union[Union[...], ...] and type-check non-Union args.
+        params = []
+        msg = "Union[arg, ...]: each arg must be a type."
+        for p in parameters:
+            if isinstance(p, UnionMeta):
+                params.extend(p.__union_params__)
+            else:
+                params.append(_type_check(p, msg))
+        # Weed out strict duplicates, preserving the first of each occurrence.
+        all_params = set(params)
+        if len(all_params) < len(params):
+            new_params = []
+            for t in params:
+                if t in all_params:
+                    new_params.append(t)
+                    all_params.remove(t)
+            params = new_params
+            assert not all_params, all_params
+        # Weed out subclasses.
+        # E.g. Union[int, Employee, Manager] == Union[int, Employee].
+        # If Any or object is present it will be the sole survivor.
+        # If both Any and object are present, Any wins.
+        # Never discard type variables, except against Any.
+        # (In particular, Union[str, AnyStr] != AnyStr.)
+        all_params = set(params)
+        for t1 in params:
+            if t1 is Any:
+                return Any
+            if isinstance(t1, TypeVar):
+                continue
+            if any(issubclass(t1, t2)
+                   for t2 in all_params - {t1} if not isinstance(t2, TypeVar)):
+                all_params.remove(t1)
+        # It's not a union if there's only one type left.
+        if len(all_params) == 1:
+            return all_params.pop()
+        # Create a new class with these params.
+        self = super().__new__(cls, name, bases, {}, _root=True)
+        self.__union_params__ = tuple(t for t in params if t in all_params)
+        self.__union_set_params__ = frozenset(self.__union_params__)
+        return self
+
+    def _eval_type(self, globalns, localns):
+        p = tuple(_eval_type(t, globalns, localns)
+                  for t in self.__union_params__)
+        if p == self.__union_params__:
+            return self
+        else:
+            return self.__class__(self.__name__, self.__bases__, {},
+                                  p, _root=True)
+
+    def _has_type_var(self):
+        if self.__union_params__:
+            for t in self.__union_params__:
+                if _has_type_var(t):
+                    return True
+        return False
+
+    def __repr__(self):
+        r = super().__repr__()
+        if self.__union_params__:
+            r += '[%s]' % (', '.join(_type_repr(t)
+                                     for t in self.__union_params__))
+        return r
+
+    def __getitem__(self, parameters):
+        if self.__union_params__ is not None:
+            raise TypeError(
+                "Cannot subscript an existing Union.
Use Union[u, t] instead.") + if parameters == (): + raise TypeError("Cannot take a Union of no types.") + if not isinstance(parameters, tuple): + parameters = (parameters,) + return self.__class__(self.__name__, self.__bases__, + dict(self.__dict__), parameters, _root=True) + + def __eq__(self, other): + if not isinstance(other, UnionMeta): + return NotImplemented + return self.__union_set_params__ == other.__union_set_params__ + + def __hash__(self): + return hash(self.__union_set_params__) + + def __instancecheck__(self, obj): + raise TypeError("Unions cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + if cls is Any: + return True + if self.__union_params__ is None: + return isinstance(cls, UnionMeta) + elif isinstance(cls, UnionMeta): + if cls.__union_params__ is None: + return False + return all(issubclass(c, self) for c in (cls.__union_params__)) + elif isinstance(cls, TypeVar): + if cls in self.__union_params__: + return True + if cls.__constraints__: + return issubclass(Union[cls.__constraints__], self) + return False + else: + return any(issubclass(cls, t) for t in self.__union_params__) + + +class Union(Final, metaclass=UnionMeta, _root=True): + """Union type; Union[X, Y] means either X or Y. + + To define a union, use e.g. Union[int, str]. Details: + + - The arguments must be types and there must be at least one. + + - None as an argument is a special case and is replaced by + type(None). + + - Unions of unions are flattened, e.g.:: + + Union[Union[int, str], float] == Union[int, str, float] + + - Unions of a single argument vanish, e.g.:: + + Union[int] == int # The constructor actually returns int + + - Redundant arguments are skipped, e.g.:: + + Union[int, str, int] == Union[int, str] + + - When comparing unions, the argument order is ignored, e.g.:: + + Union[int, str] == Union[str, int] + + - When two arguments have a subclass relationship, the least + derived argument is kept, e.g.:: + + class Employee: pass + class Manager(Employee): pass + Union[int, Employee, Manager] == Union[int, Employee] + Union[Manager, int, Employee] == Union[int, Employee] + Union[Employee, Manager] == Employee + + - Corollary: if Any is present it is the sole survivor, e.g.:: + + Union[int, Any] == Any + + - Similar for object:: + + Union[int, object] == object + + - To cut a tie: Union[object, Any] == Union[Any, object] == Any. + + - You cannot subclass or instantiate a union. + + - You cannot write Union[X][Y] (what would it mean?). + + - You can use Optional[X] as a shorthand for Union[X, None]. + """ + + # Unsubscripted Union type has params set to None. + __union_params__ = None + __union_set_params__ = None + + +class OptionalMeta(TypingMeta): + """Metaclass for Optional.""" + + def __new__(cls, name, bases, namespace, _root=False): + return super().__new__(cls, name, bases, namespace, _root=_root) + + def __getitem__(self, arg): + arg = _type_check(arg, "Optional[t] requires a single type.") + return Union[arg, type(None)] + + +class Optional(Final, metaclass=OptionalMeta, _root=True): + """Optional type. + + Optional[X] is equivalent to Union[X, type(None)]. 
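+
+    For example::
+
+        Optional[str] == Union[str, None]  # True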
+ """ + + __slots__ = () + + +class TupleMeta(TypingMeta): + """Metaclass for Tuple.""" + + def __new__(cls, name, bases, namespace, parameters=None, + use_ellipsis=False, _root=False): + self = super().__new__(cls, name, bases, namespace, _root=_root) + self.__tuple_params__ = parameters + self.__tuple_use_ellipsis__ = use_ellipsis + return self + + def _has_type_var(self): + if self.__tuple_params__: + for t in self.__tuple_params__: + if _has_type_var(t): + return True + return False + + def _eval_type(self, globalns, localns): + tp = self.__tuple_params__ + if tp is None: + return self + p = tuple(_eval_type(t, globalns, localns) for t in tp) + if p == self.__tuple_params__: + return self + else: + return self.__class__(self.__name__, self.__bases__, {}, + p, _root=True) + + def __repr__(self): + r = super().__repr__() + if self.__tuple_params__ is not None: + params = [_type_repr(p) for p in self.__tuple_params__] + if self.__tuple_use_ellipsis__: + params.append('...') + r += '[%s]' % ( + ', '.join(params)) + return r + + def __getitem__(self, parameters): + if self.__tuple_params__ is not None: + raise TypeError("Cannot re-parameterize %r" % (self,)) + if not isinstance(parameters, tuple): + parameters = (parameters,) + if len(parameters) == 2 and parameters[1] == Ellipsis: + parameters = parameters[:1] + use_ellipsis = True + msg = "Tuple[t, ...]: t must be a type." + else: + use_ellipsis = False + msg = "Tuple[t0, t1, ...]: each t must be a type." + parameters = tuple(_type_check(p, msg) for p in parameters) + return self.__class__(self.__name__, self.__bases__, + dict(self.__dict__), parameters, + use_ellipsis=use_ellipsis, _root=True) + + def __eq__(self, other): + if not isinstance(other, TupleMeta): + return NotImplemented + return self.__tuple_params__ == other.__tuple_params__ + + def __hash__(self): + return hash(self.__tuple_params__) + + def __instancecheck__(self, obj): + raise TypeError("Tuples cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + if cls is Any: + return True + if not isinstance(cls, type): + return super().__subclasscheck__(cls) # To TypeError. + if issubclass(cls, tuple): + return True # Special case. + if not isinstance(cls, TupleMeta): + return super().__subclasscheck__(cls) # False. + if self.__tuple_params__ is None: + return True + if cls.__tuple_params__ is None: + return False # ??? + if cls.__tuple_use_ellipsis__ != self.__tuple_use_ellipsis__: + return False + # Covariance. + return (len(self.__tuple_params__) == len(cls.__tuple_params__) and + all(issubclass(x, p) + for x, p in zip(cls.__tuple_params__, + self.__tuple_params__))) + + +class Tuple(Final, metaclass=TupleMeta, _root=True): + """Tuple type; Tuple[X, Y] is the cross-product type of X and Y. + + Example: Tuple[T1, T2] is a tuple of two elements corresponding + to type variables T1 and T2. Tuple[int, float, str] is a tuple + of an int, a float and a string. + + To specify a variable-length tuple of homogeneous type, use Sequence[T]. + """ + + __slots__ = () + + +class CallableMeta(TypingMeta): + """Metaclass for Callable.""" + + def __new__(cls, name, bases, namespace, _root=False, + args=None, result=None): + if args is None and result is None: + pass # Must be 'class Callable'. + else: + if args is not Ellipsis: + if not isinstance(args, list): + raise TypeError("Callable[args, result]: " + "args must be a list." + " Got %.100r." % (args,)) + msg = "Callable[[arg, ...], result]: each arg must be a type." 
+                args = tuple(_type_check(arg, msg) for arg in args)
+            msg = "Callable[args, result]: result must be a type."
+            result = _type_check(result, msg)
+        self = super().__new__(cls, name, bases, namespace, _root=_root)
+        self.__args__ = args
+        self.__result__ = result
+        return self
+
+    def _has_type_var(self):
+        if self.__args__:
+            for t in self.__args__:
+                if _has_type_var(t):
+                    return True
+        return _has_type_var(self.__result__)
+
+    def _eval_type(self, globalns, localns):
+        if self.__args__ is None and self.__result__ is None:
+            return self
+        if self.__args__ is Ellipsis:
+            args = self.__args__
+        else:
+            args = [_eval_type(t, globalns, localns) for t in self.__args__]
+        result = _eval_type(self.__result__, globalns, localns)
+        if args == self.__args__ and result == self.__result__:
+            return self
+        else:
+            return self.__class__(self.__name__, self.__bases__, {},
+                                  args=args, result=result, _root=True)
+
+    def __repr__(self):
+        r = super().__repr__()
+        if self.__args__ is not None or self.__result__ is not None:
+            if self.__args__ is Ellipsis:
+                args_r = '...'
+            else:
+                args_r = '[%s]' % ', '.join(_type_repr(t)
+                                            for t in self.__args__)
+            r += '[%s, %s]' % (args_r, _type_repr(self.__result__))
+        return r
+
+    def __getitem__(self, parameters):
+        if self.__args__ is not None or self.__result__ is not None:
+            raise TypeError("This Callable type is already parameterized.")
+        if not isinstance(parameters, tuple) or len(parameters) != 2:
+            raise TypeError(
+                "Callable must be used as Callable[[arg, ...], result].")
+        args, result = parameters
+        return self.__class__(self.__name__, self.__bases__,
+                              dict(self.__dict__), _root=True,
+                              args=args, result=result)
+
+    def __eq__(self, other):
+        if not isinstance(other, CallableMeta):
+            return NotImplemented
+        return (self.__args__ == other.__args__ and
+                self.__result__ == other.__result__)
+
+    def __hash__(self):
+        return hash(self.__args__) ^ hash(self.__result__)
+
+    def __instancecheck__(self, obj):
+        # For unparametrized Callable we allow this, because
+        # typing.Callable should be equivalent to
+        # collections.abc.Callable.
+        if self.__args__ is None and self.__result__ is None:
+            return isinstance(obj, collections_abc.Callable)
+        else:
+            raise TypeError("Callable[] cannot be used with isinstance().")
+
+    def __subclasscheck__(self, cls):
+        if cls is Any:
+            return True
+        if not isinstance(cls, CallableMeta):
+            return super().__subclasscheck__(cls)
+        if self.__args__ is None and self.__result__ is None:
+            return True
+        # We're not doing covariance or contravariance -- this is *invariance*.
+        return self == cls
+
+
+class Callable(Final, metaclass=CallableMeta, _root=True):
+    """Callable type; Callable[[int], str] is a function of (int) -> str.
+
+    The subscription syntax must always be used with exactly two
+    values: the argument list and the return type.  The argument list
+    must be a list of types; the return type must be a single type.
+
+    There is no syntax to indicate optional or keyword arguments;
+    such function types are rarely used as callback types.
+    """
+
+    __slots__ = ()
+
+
+def _gorg(a):
+    """Return the farthest origin of a generic class."""
+    assert isinstance(a, GenericMeta)
+    while a.__origin__ is not None:
+        a = a.__origin__
+    return a
+
+
+def _geqv(a, b):
+    """Return whether two generic classes are equivalent.
+
+    The intention is to consider generic class X and any of its
+    parameterized forms (X[T], X[int], etc.) as equivalent.
+
+    However, X is not equivalent to a subclass of X.
+ + The relation is reflexive, symmetric and transitive. + """ + assert isinstance(a, GenericMeta) and isinstance(b, GenericMeta) + # Reduce each to its origin. + return _gorg(a) is _gorg(b) + + +class GenericMeta(TypingMeta, abc.ABCMeta): + """Metaclass for generic types.""" + + # TODO: Constrain more how Generic is used; only a few + # standard patterns should be allowed. + + # TODO: Use a more precise rule than matching __name__ to decide + # whether two classes are the same. Also, save the formal + # parameters. (These things are related! A solution lies in + # using origin.) + + __extra__ = None + + def __new__(cls, name, bases, namespace, + parameters=None, origin=None, extra=None): + if parameters is None: + # Extract parameters from direct base classes. Only + # direct bases are considered and only those that are + # themselves generic, and parameterized with type + # variables. Don't use bases like Any, Union, Tuple, + # Callable or type variables. + params = None + for base in bases: + if isinstance(base, TypingMeta): + if not isinstance(base, GenericMeta): + raise TypeError( + "You cannot inherit from magic class %s" % + repr(base)) + if base.__parameters__ is None: + continue # The base is unparameterized. + for bp in base.__parameters__: + if _has_type_var(bp) and not isinstance(bp, TypeVar): + raise TypeError( + "Cannot inherit from a generic class " + "parameterized with " + "non-type-variable %s" % bp) + if params is None: + params = [] + if bp not in params: + params.append(bp) + if params is not None: + parameters = tuple(params) + self = super().__new__(cls, name, bases, namespace, _root=True) + self.__parameters__ = parameters + if extra is not None: + self.__extra__ = extra + # Else __extra__ is inherited, eventually from the + # (meta-)class default above. + self.__origin__ = origin + return self + + def _has_type_var(self): + if self.__parameters__: + for t in self.__parameters__: + if _has_type_var(t): + return True + return False + + def __repr__(self): + r = super().__repr__() + if self.__parameters__ is not None: + r += '[%s]' % ( + ', '.join(_type_repr(p) for p in self.__parameters__)) + return r + + def __eq__(self, other): + if not isinstance(other, GenericMeta): + return NotImplemented + return (_geqv(self, other) and + self.__parameters__ == other.__parameters__) + + def __hash__(self): + return hash((self.__name__, self.__parameters__)) + + def __getitem__(self, params): + if not isinstance(params, tuple): + params = (params,) + if not params: + raise TypeError("Cannot have empty parameter list") + msg = "Parameters to generic types must be types." + params = tuple(_type_check(p, msg) for p in params) + if self.__parameters__ is None: + for p in params: + if not isinstance(p, TypeVar): + raise TypeError("Initial parameters must be " + "type variables; got %s" % p) + if len(set(params)) != len(params): + raise TypeError( + "All type variables in Generic[...] must be distinct.") + else: + if len(params) != len(self.__parameters__): + raise TypeError("Cannot change parameter count from %d to %d" % + (len(self.__parameters__), len(params))) + for new, old in zip(params, self.__parameters__): + if isinstance(old, TypeVar): + if not old.__constraints__: + # Substituting for an unconstrained TypeVar is OK. + continue + if issubclass(new, Union[old.__constraints__]): + # Specializing a constrained type variable is OK. 
+ continue + if not issubclass(new, old): + raise TypeError( + "Cannot substitute %s for %s in %s" % + (_type_repr(new), _type_repr(old), self)) + + return self.__class__(self.__name__, self.__bases__, + dict(self.__dict__), + parameters=params, + origin=self, + extra=self.__extra__) + + def __instancecheck__(self, instance): + # Since we extend ABC.__subclasscheck__ and + # ABC.__instancecheck__ inlines the cache checking done by the + # latter, we must extend __instancecheck__ too. For simplicity + # we just skip the cache check -- instance checks for generic + # classes are supposed to be rare anyways. + return self.__subclasscheck__(instance.__class__) + + def __subclasscheck__(self, cls): + if cls is Any: + return True + if isinstance(cls, GenericMeta): + # For a class C(Generic[T]) where T is co-variant, + # C[X] is a subclass of C[Y] iff X is a subclass of Y. + origin = self.__origin__ + if origin is not None and origin is cls.__origin__: + assert len(self.__parameters__) == len(origin.__parameters__) + assert len(cls.__parameters__) == len(origin.__parameters__) + for p_self, p_cls, p_origin in zip(self.__parameters__, + cls.__parameters__, + origin.__parameters__): + if isinstance(p_origin, TypeVar): + if p_origin.__covariant__: + # Covariant -- p_cls must be a subclass of p_self. + if not issubclass(p_cls, p_self): + break + elif p_origin.__contravariant__: + # Contravariant. I think it's the opposite. :-) + if not issubclass(p_self, p_cls): + break + else: + # Invariant -- p_cls and p_self must equal. + if p_self != p_cls: + break + else: + # If the origin's parameter is not a typevar, + # insist on invariance. + if p_self != p_cls: + break + else: + return True + # If we break out of the loop, the superclass gets a chance. + if super().__subclasscheck__(cls): + return True + if self.__extra__ is None or isinstance(cls, GenericMeta): + return False + return issubclass(cls, self.__extra__) + + +class Generic(metaclass=GenericMeta): + """Abstract base class for generic types. + + A generic type is typically declared by inheriting from an + instantiation of this class with one or more type variables. + For example, a generic mapping type might be defined as:: + + class Mapping(Generic[KT, VT]): + def __getitem__(self, key: KT) -> VT: + ... + # Etc. + + This class can then be used as follows:: + + def lookup_name(mapping: Mapping, key: KT, default: VT) -> VT: + try: + return mapping[key] + except KeyError: + return default + + For clarity the type variables may be redefined, e.g.:: + + X = TypeVar('X') + Y = TypeVar('Y') + def lookup_name(mapping: Mapping[X, Y], key: X, default: Y) -> Y: + # Same body as above. + """ + + __slots__ = () + + def __new__(cls, *args, **kwds): + next_in_mro = object + # Look for the last occurrence of Generic or Generic[...]. + for i, c in enumerate(cls.__mro__[:-1]): + if isinstance(c, GenericMeta) and _gorg(c) is Generic: + next_in_mro = cls.__mro__[i+1] + return next_in_mro.__new__(_gorg(cls)) + + +def cast(typ, val): + """Cast a value to a type. + + This returns the value unchanged. To the type checker this + signals that the return value has the designated type, but at + runtime we intentionally don't check anything (we want this + to be as fast as possible). 
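+
+    For example (load() here just stands in for any expression whose
+    type the checker cannot infer)::
+
+        rows = cast(List[int], load())  # rows is load()'s value, unchanged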
+ """ + return val + + +def _get_defaults(func): + """Internal helper to extract the default arguments, by name.""" + code = func.__code__ + pos_count = code.co_argcount + kw_count = code.co_kwonlyargcount + arg_names = code.co_varnames + kwarg_names = arg_names[pos_count:pos_count + kw_count] + arg_names = arg_names[:pos_count] + defaults = func.__defaults__ or () + kwdefaults = func.__kwdefaults__ + res = dict(kwdefaults) if kwdefaults else {} + pos_offset = pos_count - len(defaults) + for name, value in zip(arg_names[pos_offset:], defaults): + assert name not in res + res[name] = value + return res + + +def get_type_hints(obj, globalns=None, localns=None): + """Return type hints for a function or method object. + + This is often the same as obj.__annotations__, but it handles + forward references encoded as string literals, and if necessary + adds Optional[t] if a default value equal to None is set. + + BEWARE -- the behavior of globalns and localns is counterintuitive + (unless you are familiar with how eval() and exec() work). The + search order is locals first, then globals. + + - If no dict arguments are passed, an attempt is made to use the + globals from obj, and these are also used as the locals. If the + object does not appear to have globals, an exception is raised. + + - If one dict argument is passed, it is used for both globals and + locals. + + - If two dict arguments are passed, they specify globals and + locals, respectively. + """ + if getattr(obj, '__no_type_check__', None): + return {} + if globalns is None: + globalns = getattr(obj, '__globals__', {}) + if localns is None: + localns = globalns + elif localns is None: + localns = globalns + defaults = _get_defaults(obj) + hints = dict(obj.__annotations__) + for name, value in hints.items(): + if isinstance(value, str): + value = _ForwardRef(value) + value = _eval_type(value, globalns, localns) + if name in defaults and defaults[name] is None: + value = Optional[value] + hints[name] = value + return hints + + +# TODO: Also support this as a class decorator. +def no_type_check(arg): + """Decorator to indicate that annotations are not type hints. + + The argument must be a class or function; if it is a class, it + applies recursively to all methods defined in that class (but not + to methods defined in its superclasses or subclasses). + + This mutates the function(s) in place. + """ + if isinstance(arg, type): + for obj in arg.__dict__.values(): + if isinstance(obj, types.FunctionType): + obj.__no_type_check__ = True + else: + arg.__no_type_check__ = True + return arg + + +def no_type_check_decorator(decorator): + """Decorator to give another decorator the @no_type_check effect. + + This wraps the decorator with something that wraps the decorated + function in @no_type_check. + """ + + @functools.wraps(decorator) + def wrapped_decorator(*args, **kwds): + func = decorator(*args, **kwds) + func = no_type_check(func) + return func + + return wrapped_decorator + + +def overload(func): + raise RuntimeError("Overloading is only supported in library stubs") + + +class _ProtocolMeta(GenericMeta): + """Internal metaclass for _Protocol. + + This exists so _Protocol classes can be generic without deriving + from Generic. + """ + + def __instancecheck__(self, obj): + raise TypeError("Protocols cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + if not self._is_protocol: + # No structural checks since this isn't a protocol. 
+ return NotImplemented + + if self is _Protocol: + # Every class is a subclass of the empty protocol. + return True + + # Find all attributes defined in the protocol. + attrs = self._get_protocol_attrs() + + for attr in attrs: + if not any(attr in d.__dict__ for d in cls.__mro__): + return False + return True + + def _get_protocol_attrs(self): + # Get all Protocol base classes. + protocol_bases = [] + for c in self.__mro__: + if getattr(c, '_is_protocol', False) and c.__name__ != '_Protocol': + protocol_bases.append(c) + + # Get attributes included in protocol. + attrs = set() + for base in protocol_bases: + for attr in base.__dict__.keys(): + # Include attributes not defined in any non-protocol bases. + for c in self.__mro__: + if (c is not base and attr in c.__dict__ and + not getattr(c, '_is_protocol', False)): + break + else: + if (not attr.startswith('_abc_') and + attr != '__abstractmethods__' and + attr != '_is_protocol' and + attr != '__dict__' and + attr != '__slots__' and + attr != '_get_protocol_attrs' and + attr != '__parameters__' and + attr != '__origin__' and + attr != '__module__'): + attrs.add(attr) + + return attrs + + +class _Protocol(metaclass=_ProtocolMeta): + """Internal base class for protocol classes. + + This implements a simple-minded structural isinstance check + (similar but more general than the one-offs in collections.abc + such as Hashable). + """ + + __slots__ = () + + _is_protocol = True + + +# Various ABCs mimicking those in collections.abc. +# A few are simply re-exported for completeness. + +Hashable = collections_abc.Hashable # Not generic. + + +class Iterable(Generic[T_co], extra=collections_abc.Iterable): + __slots__ = () + + +class Iterator(Iterable[T_co], extra=collections_abc.Iterator): + __slots__ = () + + +class SupportsInt(_Protocol): + __slots__ = () + + @abstractmethod + def __int__(self) -> int: + pass + + +class SupportsFloat(_Protocol): + __slots__ = () + + @abstractmethod + def __float__(self) -> float: + pass + + +class SupportsComplex(_Protocol): + __slots__ = () + + @abstractmethod + def __complex__(self) -> complex: + pass + + +class SupportsBytes(_Protocol): + __slots__ = () + + @abstractmethod + def __bytes__(self) -> bytes: + pass + + +class SupportsAbs(_Protocol[T_co]): + __slots__ = () + + @abstractmethod + def __abs__(self) -> T_co: + pass + + +class SupportsRound(_Protocol[T_co]): + __slots__ = () + + @abstractmethod + def __round__(self, ndigits: int = 0) -> T_co: + pass + + +class Reversible(_Protocol[T_co]): + __slots__ = () + + @abstractmethod + def __reversed__(self) -> 'Iterator[T_co]': + pass + + +Sized = collections_abc.Sized # Not generic. + + +class Container(Generic[T_co], extra=collections_abc.Container): + __slots__ = () + + +# Callable was defined earlier. + + +class AbstractSet(Sized, Iterable[T_co], Container[T_co], + extra=collections_abc.Set): + pass + + +class MutableSet(AbstractSet[T], extra=collections_abc.MutableSet): + pass + + +# NOTE: Only the value type is covariant. 
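+# E.g. with the Employee/Manager example classes from the Union docstring
+# above, Mapping[str, Manager] is a subclass of Mapping[str, Employee],
+# while Mapping[Manager, int] is not a subclass of Mapping[Employee, int].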
+class Mapping(Sized, Iterable[KT], Container[KT], Generic[VT_co],
+              extra=collections_abc.Mapping):
+    pass
+
+
+class MutableMapping(Mapping[KT, VT], extra=collections_abc.MutableMapping):
+    pass
+
+
+class Sequence(Sized, Iterable[T_co], Container[T_co],
+               extra=collections_abc.Sequence):
+    pass
+
+
+class MutableSequence(Sequence[T], extra=collections_abc.MutableSequence):
+    pass
+
+
+class ByteString(Sequence[int], extra=collections_abc.ByteString):
+    pass
+
+
+ByteString.register(type(memoryview(b'')))
+
+
+class List(list, MutableSequence[T]):
+
+    def __new__(cls, *args, **kwds):
+        if _geqv(cls, List):
+            raise TypeError("Type List cannot be instantiated; "
+                            "use list() instead")
+        return list.__new__(cls, *args, **kwds)
+
+
+class Set(set, MutableSet[T]):
+
+    def __new__(cls, *args, **kwds):
+        if _geqv(cls, Set):
+            raise TypeError("Type Set cannot be instantiated; "
+                            "use set() instead")
+        return set.__new__(cls, *args, **kwds)
+
+
+class _FrozenSetMeta(GenericMeta):
+    """This metaclass ensures set is not a subclass of FrozenSet.
+
+    Without this metaclass, set would be considered a subclass of
+    FrozenSet, because FrozenSet.__extra__ is collections.abc.Set, and
+    set is a subclass of that.
+    """
+
+    def __subclasscheck__(self, cls):
+        if issubclass(cls, Set):
+            return False
+        return super().__subclasscheck__(cls)
+
+
+class FrozenSet(frozenset, AbstractSet[T_co], metaclass=_FrozenSetMeta):
+    __slots__ = ()
+
+    def __new__(cls, *args, **kwds):
+        if _geqv(cls, FrozenSet):
+            raise TypeError("Type FrozenSet cannot be instantiated; "
+                            "use frozenset() instead")
+        return frozenset.__new__(cls, *args, **kwds)
+
+
+class MappingView(Sized, Iterable[T_co], extra=collections_abc.MappingView):
+    pass
+
+
+class KeysView(MappingView[KT], AbstractSet[KT],
+               extra=collections_abc.KeysView):
+    pass
+
+
+# TODO: Enable Set[Tuple[KT, VT_co]] instead of Generic[KT, VT_co].
+class ItemsView(MappingView, Generic[KT, VT_co],
+                extra=collections_abc.ItemsView):
+    pass
+
+
+class ValuesView(MappingView[VT_co], extra=collections_abc.ValuesView):
+    pass
+
+
+class Dict(dict, MutableMapping[KT, VT]):
+
+    def __new__(cls, *args, **kwds):
+        if _geqv(cls, Dict):
+            raise TypeError("Type Dict cannot be instantiated; "
+                            "use dict() instead")
+        return dict.__new__(cls, *args, **kwds)
+
+
+# Determine what base class to use for Generator.
+if hasattr(collections_abc, 'Generator'):
+    # Sufficiently recent versions of 3.5 have a Generator ABC.
+    _G_base = collections_abc.Generator
+else:
+    # Fall back on the exact type.
+    _G_base = types.GeneratorType
+
+
+class Generator(Iterator[T_co], Generic[T_co, T_contra, V_co],
+                extra=_G_base):
+    __slots__ = ()
+
+    def __new__(cls, *args, **kwds):
+        if _geqv(cls, Generator):
+            raise TypeError("Type Generator cannot be instantiated; "
+                            "create a subclass instead")
+        return super().__new__(cls, *args, **kwds)
+
+
+def NamedTuple(typename, fields):
+    """Typed version of namedtuple.
+
+    Usage::
+
+        Employee = typing.NamedTuple('Employee', [('name', str), ('id', int)])
+
+    This is equivalent to::
+
+        Employee = collections.namedtuple('Employee', ['name', 'id'])
+
+    The resulting class has one extra attribute: _field_types,
+    giving a dict mapping field names to types.  (The field names
+    are in the _fields attribute, which is part of the namedtuple
+    API.)
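+
+    For the Employee example above::
+
+        Employee._fields == ('name', 'id')
+        Employee._field_types == {'name': str, 'id': int}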
+ """ + fields = [(n, t) for n, t in fields] + cls = collections.namedtuple(typename, [n for n, t in fields]) + cls._field_types = dict(fields) + return cls + + +class IO(Generic[AnyStr]): + """Generic base class for TextIO and BinaryIO. + + This is an abstract, generic version of the return of open(). + + NOTE: This does not distinguish between the different possible + classes (text vs. binary, read vs. write vs. read/write, + append-only, unbuffered). The TextIO and BinaryIO subclasses + below capture the distinctions between text vs. binary, which is + pervasive in the interface; however we currently do not offer a + way to track the other distinctions in the type system. + """ + + __slots__ = () + + @abstractproperty + def mode(self) -> str: + pass + + @abstractproperty + def name(self) -> str: + pass + + @abstractmethod + def close(self) -> None: + pass + + @abstractmethod + def closed(self) -> bool: + pass + + @abstractmethod + def fileno(self) -> int: + pass + + @abstractmethod + def flush(self) -> None: + pass + + @abstractmethod + def isatty(self) -> bool: + pass + + @abstractmethod + def read(self, n: int = -1) -> AnyStr: + pass + + @abstractmethod + def readable(self) -> bool: + pass + + @abstractmethod + def readline(self, limit: int = -1) -> AnyStr: + pass + + @abstractmethod + def readlines(self, hint: int = -1) -> List[AnyStr]: + pass + + @abstractmethod + def seek(self, offset: int, whence: int = 0) -> int: + pass + + @abstractmethod + def seekable(self) -> bool: + pass + + @abstractmethod + def tell(self) -> int: + pass + + @abstractmethod + def truncate(self, size: int = None) -> int: + pass + + @abstractmethod + def writable(self) -> bool: + pass + + @abstractmethod + def write(self, s: AnyStr) -> int: + pass + + @abstractmethod + def writelines(self, lines: List[AnyStr]) -> None: + pass + + @abstractmethod + def __enter__(self) -> 'IO[AnyStr]': + pass + + @abstractmethod + def __exit__(self, type, value, traceback) -> None: + pass + + +class BinaryIO(IO[bytes]): + """Typed version of the return of open() in binary mode.""" + + __slots__ = () + + @abstractmethod + def write(self, s: Union[bytes, bytearray]) -> int: + pass + + @abstractmethod + def __enter__(self) -> 'BinaryIO': + pass + + +class TextIO(IO[str]): + """Typed version of the return of open() in text mode.""" + + __slots__ = () + + @abstractproperty + def buffer(self) -> BinaryIO: + pass + + @abstractproperty + def encoding(self) -> str: + pass + + @abstractproperty + def errors(self) -> str: + pass + + @abstractproperty + def line_buffering(self) -> bool: + pass + + @abstractproperty + def newlines(self) -> Any: + pass + + @abstractmethod + def __enter__(self) -> 'TextIO': + pass + + +class io: + """Wrapper namespace for IO generic classes.""" + + __all__ = ['IO', 'TextIO', 'BinaryIO'] + IO = IO + TextIO = TextIO + BinaryIO = BinaryIO + +io.__name__ = __name__ + '.io' +sys.modules[io.__name__] = io + + +Pattern = _TypeAlias('Pattern', AnyStr, type(stdlib_re.compile('')), + lambda p: p.pattern) +Match = _TypeAlias('Match', AnyStr, type(stdlib_re.match('', '')), + lambda m: m.re.pattern) + + +class re: + """Wrapper namespace for re type aliases.""" + + __all__ = ['Pattern', 'Match'] + Pattern = Pattern + Match = Match + +re.__name__ = __name__ + '.re' +sys.modules[re.__name__] = re diff --git a/typing_extensions/test_data/python-3.5.1/_collections_abc.py b/typing_extensions/test_data/python-3.5.1/_collections_abc.py new file mode 100644 index 00000000..f89bb6f0 --- /dev/null +++ 
b/typing_extensions/test_data/python-3.5.1/_collections_abc.py
@@ -0,0 +1,939 @@
+# Copyright 2007 Google, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Abstract Base Classes (ABCs) for collections, according to PEP 3119.
+
+Unit tests are in test_collections.
+"""
+
+from abc import ABCMeta, abstractmethod
+import sys
+
+__all__ = ["Awaitable", "Coroutine", "AsyncIterable", "AsyncIterator",
+           "Hashable", "Iterable", "Iterator", "Generator",
+           "Sized", "Container", "Callable",
+           "Set", "MutableSet",
+           "Mapping", "MutableMapping",
+           "MappingView", "KeysView", "ItemsView", "ValuesView",
+           "Sequence", "MutableSequence",
+           "ByteString",
+           ]
+
+# This module has been renamed from collections.abc to _collections_abc to
+# speed up interpreter startup. Some of the types such as MutableMapping are
+# required early but collections module imports a lot of other modules.
+# See issue #19218
+__name__ = "collections.abc"
+
+# Private list of types that we want to register with the various ABCs
+# so that they will pass tests like:
+#     it = iter(somebytearray)
+#     assert isinstance(it, Iterable)
+# Note: in other implementations, these types may not be distinct
+# and they may have their own implementation specific types that
+# are not included on this list.
+bytes_iterator = type(iter(b''))
+bytearray_iterator = type(iter(bytearray()))
+#callable_iterator = ???
+dict_keyiterator = type(iter({}.keys()))
+dict_valueiterator = type(iter({}.values()))
+dict_itemiterator = type(iter({}.items()))
+list_iterator = type(iter([]))
+list_reverseiterator = type(iter(reversed([])))
+range_iterator = type(iter(range(0)))
+set_iterator = type(iter(set()))
+str_iterator = type(iter(""))
+tuple_iterator = type(iter(()))
+zip_iterator = type(iter(zip()))
+## views ##
+dict_keys = type({}.keys())
+dict_values = type({}.values())
+dict_items = type({}.items())
+## misc ##
+mappingproxy = type(type.__dict__)
+generator = type((lambda: (yield))())
+## coroutine ##
+async def _coro(): pass
+_coro = _coro()
+coroutine = type(_coro)
+_coro.close()  # Prevent ResourceWarning
+del _coro
+
+
+### ONE-TRICK PONIES ###
+
+class Hashable(metaclass=ABCMeta):
+
+    __slots__ = ()
+
+    @abstractmethod
+    def __hash__(self):
+        return 0
+
+    @classmethod
+    def __subclasshook__(cls, C):
+        if cls is Hashable:
+            for B in C.__mro__:
+                if "__hash__" in B.__dict__:
+                    if B.__dict__["__hash__"]:
+                        return True
+                    break
+        return NotImplemented
+
+
+class Awaitable(metaclass=ABCMeta):
+
+    __slots__ = ()
+
+    @abstractmethod
+    def __await__(self):
+        yield
+
+    @classmethod
+    def __subclasshook__(cls, C):
+        if cls is Awaitable:
+            for B in C.__mro__:
+                if "__await__" in B.__dict__:
+                    if B.__dict__["__await__"]:
+                        return True
+                    break
+        return NotImplemented
+
+
+class Coroutine(Awaitable):
+
+    __slots__ = ()
+
+    @abstractmethod
+    def send(self, value):
+        """Send a value into the coroutine.
+        Return next yielded value or raise StopIteration.
+        """
+        raise StopIteration
+
+    @abstractmethod
+    def throw(self, typ, val=None, tb=None):
+        """Raise an exception in the coroutine.
+        Return next yielded value or raise StopIteration.
+        """
+        if val is None:
+            if tb is None:
+                raise typ
+            val = typ()
+        if tb is not None:
+            val = val.with_traceback(tb)
+        raise val
+
+    def close(self):
+        """Raise GeneratorExit inside coroutine.
+ """ + try: + self.throw(GeneratorExit) + except (GeneratorExit, StopIteration): + pass + else: + raise RuntimeError("coroutine ignored GeneratorExit") + + @classmethod + def __subclasshook__(cls, C): + if cls is Coroutine: + mro = C.__mro__ + for method in ('__await__', 'send', 'throw', 'close'): + for base in mro: + if method in base.__dict__: + break + else: + return NotImplemented + return True + return NotImplemented + + +Coroutine.register(coroutine) + + +class AsyncIterable(metaclass=ABCMeta): + + __slots__ = () + + @abstractmethod + async def __aiter__(self): + return AsyncIterator() + + @classmethod + def __subclasshook__(cls, C): + if cls is AsyncIterable: + if any("__aiter__" in B.__dict__ for B in C.__mro__): + return True + return NotImplemented + + +class AsyncIterator(AsyncIterable): + + __slots__ = () + + @abstractmethod + async def __anext__(self): + """Return the next item or raise StopAsyncIteration when exhausted.""" + raise StopAsyncIteration + + async def __aiter__(self): + return self + + @classmethod + def __subclasshook__(cls, C): + if cls is AsyncIterator: + if (any("__anext__" in B.__dict__ for B in C.__mro__) and + any("__aiter__" in B.__dict__ for B in C.__mro__)): + return True + return NotImplemented + + +class Iterable(metaclass=ABCMeta): + + __slots__ = () + + @abstractmethod + def __iter__(self): + while False: + yield None + + @classmethod + def __subclasshook__(cls, C): + if cls is Iterable: + if any("__iter__" in B.__dict__ for B in C.__mro__): + return True + return NotImplemented + + +class Iterator(Iterable): + + __slots__ = () + + @abstractmethod + def __next__(self): + 'Return the next item from the iterator. When exhausted, raise StopIteration' + raise StopIteration + + def __iter__(self): + return self + + @classmethod + def __subclasshook__(cls, C): + if cls is Iterator: + if (any("__next__" in B.__dict__ for B in C.__mro__) and + any("__iter__" in B.__dict__ for B in C.__mro__)): + return True + return NotImplemented + +Iterator.register(bytes_iterator) +Iterator.register(bytearray_iterator) +#Iterator.register(callable_iterator) +Iterator.register(dict_keyiterator) +Iterator.register(dict_valueiterator) +Iterator.register(dict_itemiterator) +Iterator.register(list_iterator) +Iterator.register(list_reverseiterator) +Iterator.register(range_iterator) +Iterator.register(set_iterator) +Iterator.register(str_iterator) +Iterator.register(tuple_iterator) +Iterator.register(zip_iterator) + + +class Generator(Iterator): + + __slots__ = () + + def __next__(self): + """Return the next item from the generator. + When exhausted, raise StopIteration. + """ + return self.send(None) + + @abstractmethod + def send(self, value): + """Send a value into the generator. + Return next yielded value or raise StopIteration. + """ + raise StopIteration + + @abstractmethod + def throw(self, typ, val=None, tb=None): + """Raise an exception in the generator. + Return next yielded value or raise StopIteration. + """ + if val is None: + if tb is None: + raise typ + val = typ() + if tb is not None: + val = val.with_traceback(tb) + raise val + + def close(self): + """Raise GeneratorExit inside generator. 
+ """ + try: + self.throw(GeneratorExit) + except (GeneratorExit, StopIteration): + pass + else: + raise RuntimeError("generator ignored GeneratorExit") + + @classmethod + def __subclasshook__(cls, C): + if cls is Generator: + mro = C.__mro__ + for method in ('__iter__', '__next__', 'send', 'throw', 'close'): + for base in mro: + if method in base.__dict__: + break + else: + return NotImplemented + return True + return NotImplemented + + +Generator.register(generator) + + +class Sized(metaclass=ABCMeta): + + __slots__ = () + + @abstractmethod + def __len__(self): + return 0 + + @classmethod + def __subclasshook__(cls, C): + if cls is Sized: + if any("__len__" in B.__dict__ for B in C.__mro__): + return True + return NotImplemented + + +class Container(metaclass=ABCMeta): + + __slots__ = () + + @abstractmethod + def __contains__(self, x): + return False + + @classmethod + def __subclasshook__(cls, C): + if cls is Container: + if any("__contains__" in B.__dict__ for B in C.__mro__): + return True + return NotImplemented + + +class Callable(metaclass=ABCMeta): + + __slots__ = () + + @abstractmethod + def __call__(self, *args, **kwds): + return False + + @classmethod + def __subclasshook__(cls, C): + if cls is Callable: + if any("__call__" in B.__dict__ for B in C.__mro__): + return True + return NotImplemented + + +### SETS ### + + +class Set(Sized, Iterable, Container): + + """A set is a finite, iterable container. + + This class provides concrete generic implementations of all + methods except for __contains__, __iter__ and __len__. + + To override the comparisons (presumably for speed, as the + semantics are fixed), redefine __le__ and __ge__, + then the other operations will automatically follow suit. + """ + + __slots__ = () + + def __le__(self, other): + if not isinstance(other, Set): + return NotImplemented + if len(self) > len(other): + return False + for elem in self: + if elem not in other: + return False + return True + + def __lt__(self, other): + if not isinstance(other, Set): + return NotImplemented + return len(self) < len(other) and self.__le__(other) + + def __gt__(self, other): + if not isinstance(other, Set): + return NotImplemented + return len(self) > len(other) and self.__ge__(other) + + def __ge__(self, other): + if not isinstance(other, Set): + return NotImplemented + if len(self) < len(other): + return False + for elem in other: + if elem not in self: + return False + return True + + def __eq__(self, other): + if not isinstance(other, Set): + return NotImplemented + return len(self) == len(other) and self.__le__(other) + + @classmethod + def _from_iterable(cls, it): + '''Construct an instance of the class from any iterable input. + + Must override this method if the class constructor signature + does not accept an iterable for an input. + ''' + return cls(it) + + def __and__(self, other): + if not isinstance(other, Iterable): + return NotImplemented + return self._from_iterable(value for value in other if value in self) + + __rand__ = __and__ + + def isdisjoint(self, other): + 'Return True if two sets have a null intersection.' 
+ for value in other: + if value in self: + return False + return True + + def __or__(self, other): + if not isinstance(other, Iterable): + return NotImplemented + chain = (e for s in (self, other) for e in s) + return self._from_iterable(chain) + + __ror__ = __or__ + + def __sub__(self, other): + if not isinstance(other, Set): + if not isinstance(other, Iterable): + return NotImplemented + other = self._from_iterable(other) + return self._from_iterable(value for value in self + if value not in other) + + def __rsub__(self, other): + if not isinstance(other, Set): + if not isinstance(other, Iterable): + return NotImplemented + other = self._from_iterable(other) + return self._from_iterable(value for value in other + if value not in self) + + def __xor__(self, other): + if not isinstance(other, Set): + if not isinstance(other, Iterable): + return NotImplemented + other = self._from_iterable(other) + return (self - other) | (other - self) + + __rxor__ = __xor__ + + def _hash(self): + """Compute the hash value of a set. + + Note that we don't define __hash__: not all sets are hashable. + But if you define a hashable set type, its __hash__ should + call this function. + + This must be compatible __eq__. + + All sets ought to compare equal if they contain the same + elements, regardless of how they are implemented, and + regardless of the order of the elements; so there's not much + freedom for __eq__ or __hash__. We match the algorithm used + by the built-in frozenset type. + """ + MAX = sys.maxsize + MASK = 2 * MAX + 1 + n = len(self) + h = 1927868237 * (n + 1) + h &= MASK + for x in self: + hx = hash(x) + h ^= (hx ^ (hx << 16) ^ 89869747) * 3644798167 + h &= MASK + h = h * 69069 + 907133923 + h &= MASK + if h > MAX: + h -= MASK + 1 + if h == -1: + h = 590923713 + return h + +Set.register(frozenset) + + +class MutableSet(Set): + """A mutable set is a finite, iterable container. + + This class provides concrete generic implementations of all + methods except for __contains__, __iter__, __len__, + add(), and discard(). + + To override the comparisons (presumably for speed, as the + semantics are fixed), all you have to do is redefine __le__ and + then the other operations will automatically follow suit. + """ + + __slots__ = () + + @abstractmethod + def add(self, value): + """Add an element.""" + raise NotImplementedError + + @abstractmethod + def discard(self, value): + """Remove an element. Do not raise an exception if absent.""" + raise NotImplementedError + + def remove(self, value): + """Remove an element. If not a member, raise a KeyError.""" + if value not in self: + raise KeyError(value) + self.discard(value) + + def pop(self): + """Return the popped value. Raise KeyError if empty.""" + it = iter(self) + try: + value = next(it) + except StopIteration: + raise KeyError + self.discard(value) + return value + + def clear(self): + """This is slow (creates N new iterators!) 
but effective.""" + try: + while True: + self.pop() + except KeyError: + pass + + def __ior__(self, it): + for value in it: + self.add(value) + return self + + def __iand__(self, it): + for value in (self - it): + self.discard(value) + return self + + def __ixor__(self, it): + if it is self: + self.clear() + else: + if not isinstance(it, Set): + it = self._from_iterable(it) + for value in it: + if value in self: + self.discard(value) + else: + self.add(value) + return self + + def __isub__(self, it): + if it is self: + self.clear() + else: + for value in it: + self.discard(value) + return self + +MutableSet.register(set) + + +### MAPPINGS ### + + +class Mapping(Sized, Iterable, Container): + + __slots__ = () + + """A Mapping is a generic container for associating key/value + pairs. + + This class provides concrete generic implementations of all + methods except for __getitem__, __iter__, and __len__. + + """ + + @abstractmethod + def __getitem__(self, key): + raise KeyError + + def get(self, key, default=None): + 'D.get(k[,d]) -> D[k] if k in D, else d. d defaults to None.' + try: + return self[key] + except KeyError: + return default + + def __contains__(self, key): + try: + self[key] + except KeyError: + return False + else: + return True + + def keys(self): + "D.keys() -> a set-like object providing a view on D's keys" + return KeysView(self) + + def items(self): + "D.items() -> a set-like object providing a view on D's items" + return ItemsView(self) + + def values(self): + "D.values() -> an object providing a view on D's values" + return ValuesView(self) + + def __eq__(self, other): + if not isinstance(other, Mapping): + return NotImplemented + return dict(self.items()) == dict(other.items()) + +Mapping.register(mappingproxy) + + +class MappingView(Sized): + + __slots__ = '_mapping', + + def __init__(self, mapping): + self._mapping = mapping + + def __len__(self): + return len(self._mapping) + + def __repr__(self): + return '{0.__class__.__name__}({0._mapping!r})'.format(self) + + +class KeysView(MappingView, Set): + + __slots__ = () + + @classmethod + def _from_iterable(self, it): + return set(it) + + def __contains__(self, key): + return key in self._mapping + + def __iter__(self): + yield from self._mapping + +KeysView.register(dict_keys) + + +class ItemsView(MappingView, Set): + + __slots__ = () + + @classmethod + def _from_iterable(self, it): + return set(it) + + def __contains__(self, item): + key, value = item + try: + v = self._mapping[key] + except KeyError: + return False + else: + return v == value + + def __iter__(self): + for key in self._mapping: + yield (key, self._mapping[key]) + +ItemsView.register(dict_items) + + +class ValuesView(MappingView): + + __slots__ = () + + def __contains__(self, value): + for key in self._mapping: + if value == self._mapping[key]: + return True + return False + + def __iter__(self): + for key in self._mapping: + yield self._mapping[key] + +ValuesView.register(dict_values) + + +class MutableMapping(Mapping): + + __slots__ = () + + """A MutableMapping is a generic container for associating + key/value pairs. + + This class provides concrete generic implementations of all + methods except for __getitem__, __setitem__, __delitem__, + __iter__, and __len__. 
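+
+    Editorial sketch (not part of the original file; class name is
+    hypothetical): a minimal concrete subclass needs only the five
+    abstract methods, and get(), pop(), setdefault(), update() and the
+    rest are inherited:
+
+        class SimpleDict(MutableMapping):
+            def __init__(self):
+                self._d = {}
+            def __getitem__(self, key):
+                return self._d[key]
+            def __setitem__(self, key, value):
+                self._d[key] = value
+            def __delitem__(self, key):
+                del self._d[key]
+            def __iter__(self):
+                return iter(self._d)
+            def __len__(self):
+                return len(self._d)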
+ + """ + + @abstractmethod + def __setitem__(self, key, value): + raise KeyError + + @abstractmethod + def __delitem__(self, key): + raise KeyError + + __marker = object() + + def pop(self, key, default=__marker): + '''D.pop(k[,d]) -> v, remove specified key and return the corresponding value. + If key is not found, d is returned if given, otherwise KeyError is raised. + ''' + try: + value = self[key] + except KeyError: + if default is self.__marker: + raise + return default + else: + del self[key] + return value + + def popitem(self): + '''D.popitem() -> (k, v), remove and return some (key, value) pair + as a 2-tuple; but raise KeyError if D is empty. + ''' + try: + key = next(iter(self)) + except StopIteration: + raise KeyError + value = self[key] + del self[key] + return key, value + + def clear(self): + 'D.clear() -> None. Remove all items from D.' + try: + while True: + self.popitem() + except KeyError: + pass + + def update(*args, **kwds): + ''' D.update([E, ]**F) -> None. Update D from mapping/iterable E and F. + If E present and has a .keys() method, does: for k in E: D[k] = E[k] + If E present and lacks .keys() method, does: for (k, v) in E: D[k] = v + In either case, this is followed by: for k, v in F.items(): D[k] = v + ''' + if not args: + raise TypeError("descriptor 'update' of 'MutableMapping' object " + "needs an argument") + self, *args = args + if len(args) > 1: + raise TypeError('update expected at most 1 arguments, got %d' % + len(args)) + if args: + other = args[0] + if isinstance(other, Mapping): + for key in other: + self[key] = other[key] + elif hasattr(other, "keys"): + for key in other.keys(): + self[key] = other[key] + else: + for key, value in other: + self[key] = value + for key, value in kwds.items(): + self[key] = value + + def setdefault(self, key, default=None): + 'D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D' + try: + return self[key] + except KeyError: + self[key] = default + return default + +MutableMapping.register(dict) + + +### SEQUENCES ### + + +class Sequence(Sized, Iterable, Container): + + """All the operations on a read-only sequence. + + Concrete subclasses must override __new__ or __init__, + __getitem__, and __len__. + """ + + __slots__ = () + + @abstractmethod + def __getitem__(self, index): + raise IndexError + + def __iter__(self): + i = 0 + try: + while True: + v = self[i] + yield v + i += 1 + except IndexError: + return + + def __contains__(self, value): + for v in self: + if v == value: + return True + return False + + def __reversed__(self): + for i in reversed(range(len(self))): + yield self[i] + + def index(self, value, start=0, stop=None): + '''S.index(value, [start, [stop]]) -> integer -- return first index of value. + Raises ValueError if the value is not present. + ''' + if start is not None and start < 0: + start = max(len(self) + start, 0) + if stop is not None and stop < 0: + stop += len(self) + + i = start + while stop is None or i < stop: + try: + if self[i] == value: + return i + except IndexError: + break + i += 1 + raise ValueError + + def count(self, value): + 'S.count(value) -> integer -- return number of occurrences of value' + return sum(1 for v in self if v == value) + +Sequence.register(tuple) +Sequence.register(str) +Sequence.register(range) +Sequence.register(memoryview) + + +class ByteString(Sequence): + + """This unifies bytes and bytearray. + + XXX Should add all their methods. 
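+
+    Editorial note (not part of the original file): via the register()
+    calls below, isinstance(b'', ByteString) and isinstance(bytearray(),
+    ByteString) are both True, while isinstance('', ByteString) is False.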
+ """ + + __slots__ = () + +ByteString.register(bytes) +ByteString.register(bytearray) + + +class MutableSequence(Sequence): + + __slots__ = () + + """All the operations on a read-write sequence. + + Concrete subclasses must provide __new__ or __init__, + __getitem__, __setitem__, __delitem__, __len__, and insert(). + + """ + + @abstractmethod + def __setitem__(self, index, value): + raise IndexError + + @abstractmethod + def __delitem__(self, index): + raise IndexError + + @abstractmethod + def insert(self, index, value): + 'S.insert(index, value) -- insert value before index' + raise IndexError + + def append(self, value): + 'S.append(value) -- append value to the end of the sequence' + self.insert(len(self), value) + + def clear(self): + 'S.clear() -> None -- remove all items from S' + try: + while True: + self.pop() + except IndexError: + pass + + def reverse(self): + 'S.reverse() -- reverse *IN PLACE*' + n = len(self) + for i in range(n//2): + self[i], self[n-i-1] = self[n-i-1], self[i] + + def extend(self, values): + 'S.extend(iterable) -- extend sequence by appending elements from the iterable' + for v in values: + self.append(v) + + def pop(self, index=-1): + '''S.pop([index]) -> item -- remove and return item at index (default last). + Raise IndexError if list is empty or index is out of range. + ''' + v = self[index] + del self[index] + return v + + def remove(self, value): + '''S.remove(value) -- remove first occurrence of value. + Raise ValueError if the value is not present. + ''' + del self[self.index(value)] + + def __iadd__(self, values): + self.extend(values) + return self + +MutableSequence.register(list) +MutableSequence.register(bytearray) # Multiply inheriting, see ByteString diff --git a/typing_extensions/test_data/python-3.5.1/abc.py b/typing_extensions/test_data/python-3.5.1/abc.py new file mode 100644 index 00000000..1cbf96a6 --- /dev/null +++ b/typing_extensions/test_data/python-3.5.1/abc.py @@ -0,0 +1,248 @@ +# Copyright 2007 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Abstract Base Classes (ABCs) according to PEP 3119.""" + +from _weakrefset import WeakSet + + +def abstractmethod(funcobj): + """A decorator indicating abstract methods. + + Requires that the metaclass is ABCMeta or derived from it. A + class that has a metaclass derived from ABCMeta cannot be + instantiated unless all of its abstract methods are overridden. + The abstract methods can be called using any of the normal + 'super' call mechanisms. + + Usage: + + class C(metaclass=ABCMeta): + @abstractmethod + def my_abstract_method(self, ...): + ... + """ + funcobj.__isabstractmethod__ = True + return funcobj + + +class abstractclassmethod(classmethod): + """ + A decorator indicating abstract classmethods. + + Similar to abstractmethod. + + Usage: + + class C(metaclass=ABCMeta): + @abstractclassmethod + def my_abstract_classmethod(cls, ...): + ... + + 'abstractclassmethod' is deprecated. Use 'classmethod' with + 'abstractmethod' instead. + """ + + __isabstractmethod__ = True + + def __init__(self, callable): + callable.__isabstractmethod__ = True + super().__init__(callable) + + +class abstractstaticmethod(staticmethod): + """ + A decorator indicating abstract staticmethods. + + Similar to abstractmethod. + + Usage: + + class C(metaclass=ABCMeta): + @abstractstaticmethod + def my_abstract_staticmethod(...): + ... + + 'abstractstaticmethod' is deprecated. Use 'staticmethod' with + 'abstractmethod' instead. 
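+
+    Editorial sketch (not part of the original file) of the recommended
+    replacement:
+
+        class C(metaclass=ABCMeta):
+            @staticmethod
+            @abstractmethod
+            def my_abstract_staticmethod(...):
+                ...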
+ """ + + __isabstractmethod__ = True + + def __init__(self, callable): + callable.__isabstractmethod__ = True + super().__init__(callable) + + +class abstractproperty(property): + """ + A decorator indicating abstract properties. + + Requires that the metaclass is ABCMeta or derived from it. A + class that has a metaclass derived from ABCMeta cannot be + instantiated unless all of its abstract properties are overridden. + The abstract properties can be called using any of the normal + 'super' call mechanisms. + + Usage: + + class C(metaclass=ABCMeta): + @abstractproperty + def my_abstract_property(self): + ... + + This defines a read-only property; you can also define a read-write + abstract property using the 'long' form of property declaration: + + class C(metaclass=ABCMeta): + def getx(self): ... + def setx(self, value): ... + x = abstractproperty(getx, setx) + + 'abstractproperty' is deprecated. Use 'property' with 'abstractmethod' + instead. + """ + + __isabstractmethod__ = True + + +class ABCMeta(type): + + """Metaclass for defining Abstract Base Classes (ABCs). + + Use this metaclass to create an ABC. An ABC can be subclassed + directly, and then acts as a mix-in class. You can also register + unrelated concrete classes (even built-in classes) and unrelated + ABCs as 'virtual subclasses' -- these and their descendants will + be considered subclasses of the registering ABC by the built-in + issubclass() function, but the registering ABC won't show up in + their MRO (Method Resolution Order) nor will method + implementations defined by the registering ABC be callable (not + even via super()). + + """ + + # A global counter that is incremented each time a class is + # registered as a virtual subclass of anything. It forces the + # negative cache to be cleared before its next use. + # Note: this counter is private. Use `abc.get_cache_token()` for + # external code. + _abc_invalidation_counter = 0 + + def __new__(mcls, name, bases, namespace): + cls = super().__new__(mcls, name, bases, namespace) + # Compute set of abstract method names + abstracts = {name + for name, value in namespace.items() + if getattr(value, "__isabstractmethod__", False)} + for base in bases: + for name in getattr(base, "__abstractmethods__", set()): + value = getattr(cls, name, None) + if getattr(value, "__isabstractmethod__", False): + abstracts.add(name) + cls.__abstractmethods__ = frozenset(abstracts) + # Set up inheritance registry + cls._abc_registry = WeakSet() + cls._abc_cache = WeakSet() + cls._abc_negative_cache = WeakSet() + cls._abc_negative_cache_version = ABCMeta._abc_invalidation_counter + return cls + + def register(cls, subclass): + """Register a virtual subclass of an ABC. + + Returns the subclass, to allow usage as a class decorator. + """ + if not isinstance(subclass, type): + raise TypeError("Can only register classes") + if issubclass(subclass, cls): + return subclass # Already a subclass + # Subtle: test for cycles *after* testing for "already a subclass"; + # this means we allow X.register(X) and interpret it as a no-op. 
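+        # Editorial note (not part of the original file): because the class
+        # is returned, register() also works as a class decorator:
+        #     @SomeABC.register
+        #     class Impl: ...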
+ if issubclass(cls, subclass): + # This would create a cycle, which is bad for the algorithm below + raise RuntimeError("Refusing to create an inheritance cycle") + cls._abc_registry.add(subclass) + ABCMeta._abc_invalidation_counter += 1 # Invalidate negative cache + return subclass + + def _dump_registry(cls, file=None): + """Debug helper to print the ABC registry.""" + print("Class: %s.%s" % (cls.__module__, cls.__qualname__), file=file) + print("Inv.counter: %s" % ABCMeta._abc_invalidation_counter, file=file) + for name in sorted(cls.__dict__.keys()): + if name.startswith("_abc_"): + value = getattr(cls, name) + print("%s: %r" % (name, value), file=file) + + def __instancecheck__(cls, instance): + """Override for isinstance(instance, cls).""" + # Inline the cache checking + subclass = instance.__class__ + if subclass in cls._abc_cache: + return True + subtype = type(instance) + if subtype is subclass: + if (cls._abc_negative_cache_version == + ABCMeta._abc_invalidation_counter and + subclass in cls._abc_negative_cache): + return False + # Fall back to the subclass check. + return cls.__subclasscheck__(subclass) + return any(cls.__subclasscheck__(c) for c in {subclass, subtype}) + + def __subclasscheck__(cls, subclass): + """Override for issubclass(subclass, cls).""" + # Check cache + if subclass in cls._abc_cache: + return True + # Check negative cache; may have to invalidate + if cls._abc_negative_cache_version < ABCMeta._abc_invalidation_counter: + # Invalidate the negative cache + cls._abc_negative_cache = WeakSet() + cls._abc_negative_cache_version = ABCMeta._abc_invalidation_counter + elif subclass in cls._abc_negative_cache: + return False + # Check the subclass hook + ok = cls.__subclasshook__(subclass) + if ok is not NotImplemented: + assert isinstance(ok, bool) + if ok: + cls._abc_cache.add(subclass) + else: + cls._abc_negative_cache.add(subclass) + return ok + # Check if it's a direct subclass + if cls in getattr(subclass, '__mro__', ()): + cls._abc_cache.add(subclass) + return True + # Check if it's a subclass of a registered class (recursive) + for rcls in cls._abc_registry: + if issubclass(subclass, rcls): + cls._abc_cache.add(subclass) + return True + # Check if it's a subclass of a subclass (recursive) + for scls in cls.__subclasses__(): + if issubclass(subclass, scls): + cls._abc_cache.add(subclass) + return True + # No dice; update negative cache + cls._abc_negative_cache.add(subclass) + return False + + +class ABC(metaclass=ABCMeta): + """Helper class that provides a standard way to create an ABC using + inheritance. + """ + pass + + +def get_cache_token(): + """Returns the current ABC cache token. + + The token is an opaque object (supporting equality testing) identifying the + current version of the ABC cache for virtual subclasses. The token changes + with every call to ``register()`` on any ABC. + """ + return ABCMeta._abc_invalidation_counter diff --git a/typing_extensions/test_data/python-3.5.1/collections/__init__.py b/typing_extensions/test_data/python-3.5.1/collections/__init__.py new file mode 100644 index 00000000..e8312a90 --- /dev/null +++ b/typing_extensions/test_data/python-3.5.1/collections/__init__.py @@ -0,0 +1,1222 @@ +__all__ = ['deque', 'defaultdict', 'namedtuple', 'UserDict', 'UserList', + 'UserString', 'Counter', 'OrderedDict', 'ChainMap'] + +# For backwards compatibility, continue to make the collections ABCs +# available through the collections module. 
+from _collections_abc import * +import _collections_abc +__all__ += _collections_abc.__all__ + +from operator import itemgetter as _itemgetter, eq as _eq +from keyword import iskeyword as _iskeyword +import sys as _sys +import heapq as _heapq +from _weakref import proxy as _proxy +from itertools import repeat as _repeat, chain as _chain, starmap as _starmap +from reprlib import recursive_repr as _recursive_repr + +try: + from _collections import deque +except ImportError: + pass +else: + MutableSequence.register(deque) + +try: + from _collections import defaultdict +except ImportError: + pass + + +################################################################################ +### OrderedDict +################################################################################ + +class _OrderedDictKeysView(KeysView): + + def __reversed__(self): + yield from reversed(self._mapping) + +class _OrderedDictItemsView(ItemsView): + + def __reversed__(self): + for key in reversed(self._mapping): + yield (key, self._mapping[key]) + +class _OrderedDictValuesView(ValuesView): + + def __reversed__(self): + for key in reversed(self._mapping): + yield self._mapping[key] + +class _Link(object): + __slots__ = 'prev', 'next', 'key', '__weakref__' + +class OrderedDict(dict): + 'Dictionary that remembers insertion order' + # An inherited dict maps keys to values. + # The inherited dict provides __getitem__, __len__, __contains__, and get. + # The remaining methods are order-aware. + # Big-O running times for all methods are the same as regular dictionaries. + + # The internal self.__map dict maps keys to links in a doubly linked list. + # The circular doubly linked list starts and ends with a sentinel element. + # The sentinel element never gets deleted (this simplifies the algorithm). + # The sentinel is in self.__hardroot with a weakref proxy in self.__root. + # The prev links are weakref proxies (to prevent circular references). + # Individual links are kept alive by the hard reference in self.__map. + # Those hard references disappear when a key is deleted from an OrderedDict. + + def __init__(*args, **kwds): + '''Initialize an ordered dictionary. The signature is the same as + regular dictionaries, but keyword arguments are not recommended because + their insertion order is arbitrary. + + ''' + if not args: + raise TypeError("descriptor '__init__' of 'OrderedDict' object " + "needs an argument") + self, *args = args + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + try: + self.__root + except AttributeError: + self.__hardroot = _Link() + self.__root = root = _proxy(self.__hardroot) + root.prev = root.next = root + self.__map = {} + self.__update(*args, **kwds) + + def __setitem__(self, key, value, + dict_setitem=dict.__setitem__, proxy=_proxy, Link=_Link): + 'od.__setitem__(i, y) <==> od[i]=y' + # Setting a new item creates a new link at the end of the linked list, + # and the inherited dictionary is updated with the new key/value pair. + if key not in self: + self.__map[key] = link = Link() + root = self.__root + last = root.prev + link.prev, link.next, link.key = last, root, key + last.next = link + root.prev = proxy(link) + dict_setitem(self, key, value) + + def __delitem__(self, key, dict_delitem=dict.__delitem__): + 'od.__delitem__(y) <==> del od[y]' + # Deleting an existing item uses self.__map to find the link which gets + # removed by updating the links in the predecessor and successor nodes. 
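+        # Editorial note (not part of the original file): the statements
+        # below perform the classic O(1) doubly-linked-list unlink,
+        #     before:  prev <-> link <-> next,   after:  prev <-> next
+        # and then drop link.prev/link.next so the removed link is not
+        # kept alive by stale references.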
+ dict_delitem(self, key) + link = self.__map.pop(key) + link_prev = link.prev + link_next = link.next + link_prev.next = link_next + link_next.prev = link_prev + link.prev = None + link.next = None + + def __iter__(self): + 'od.__iter__() <==> iter(od)' + # Traverse the linked list in order. + root = self.__root + curr = root.next + while curr is not root: + yield curr.key + curr = curr.next + + def __reversed__(self): + 'od.__reversed__() <==> reversed(od)' + # Traverse the linked list in reverse order. + root = self.__root + curr = root.prev + while curr is not root: + yield curr.key + curr = curr.prev + + def clear(self): + 'od.clear() -> None. Remove all items from od.' + root = self.__root + root.prev = root.next = root + self.__map.clear() + dict.clear(self) + + def popitem(self, last=True): + '''od.popitem() -> (k, v), return and remove a (key, value) pair. + Pairs are returned in LIFO order if last is true or FIFO order if false. + + ''' + if not self: + raise KeyError('dictionary is empty') + root = self.__root + if last: + link = root.prev + link_prev = link.prev + link_prev.next = root + root.prev = link_prev + else: + link = root.next + link_next = link.next + root.next = link_next + link_next.prev = root + key = link.key + del self.__map[key] + value = dict.pop(self, key) + return key, value + + def move_to_end(self, key, last=True): + '''Move an existing element to the end (or beginning if last==False). + + Raises KeyError if the element does not exist. + When last=True, acts like a fast version of self[key]=self.pop(key). + + ''' + link = self.__map[key] + link_prev = link.prev + link_next = link.next + link_prev.next = link_next + link_next.prev = link_prev + root = self.__root + if last: + last = root.prev + link.prev = last + link.next = root + last.next = root.prev = link + else: + first = root.next + link.prev = root + link.next = first + root.next = first.prev = link + + def __sizeof__(self): + sizeof = _sys.getsizeof + n = len(self) + 1 # number of links including root + size = sizeof(self.__dict__) # instance dictionary + size += sizeof(self.__map) * 2 # internal dict and inherited dict + size += sizeof(self.__hardroot) * n # link objects + size += sizeof(self.__root) * n # proxy objects + return size + + update = __update = MutableMapping.update + + def keys(self): + "D.keys() -> a set-like object providing a view on D's keys" + return _OrderedDictKeysView(self) + + def items(self): + "D.items() -> a set-like object providing a view on D's items" + return _OrderedDictItemsView(self) + + def values(self): + "D.values() -> an object providing a view on D's values" + return _OrderedDictValuesView(self) + + __ne__ = MutableMapping.__ne__ + + __marker = object() + + def pop(self, key, default=__marker): + '''od.pop(k[,d]) -> v, remove specified key and return the corresponding + value. If key is not found, d is returned if given, otherwise KeyError + is raised. 
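+
+        Editorial doctest sketch (not part of the original file):
+
+        >>> od = OrderedDict([('a', 1), ('b', 2)])
+        >>> od.pop('a')
+        1
+        >>> od.pop('z', 99)
+        99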
+ + ''' + if key in self: + result = self[key] + del self[key] + return result + if default is self.__marker: + raise KeyError(key) + return default + + def setdefault(self, key, default=None): + 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' + if key in self: + return self[key] + self[key] = default + return default + + @_recursive_repr() + def __repr__(self): + 'od.__repr__() <==> repr(od)' + if not self: + return '%s()' % (self.__class__.__name__,) + return '%s(%r)' % (self.__class__.__name__, list(self.items())) + + def __reduce__(self): + 'Return state information for pickling' + inst_dict = vars(self).copy() + for k in vars(OrderedDict()): + inst_dict.pop(k, None) + return self.__class__, (), inst_dict or None, None, iter(self.items()) + + def copy(self): + 'od.copy() -> a shallow copy of od' + return self.__class__(self) + + @classmethod + def fromkeys(cls, iterable, value=None): + '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S. + If not specified, the value defaults to None. + + ''' + self = cls() + for key in iterable: + self[key] = value + return self + + def __eq__(self, other): + '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive + while comparison to a regular mapping is order-insensitive. + + ''' + if isinstance(other, OrderedDict): + return dict.__eq__(self, other) and all(map(_eq, self, other)) + return dict.__eq__(self, other) + + +try: + from _collections import OrderedDict +except ImportError: + # Leave the pure Python version in place. + pass + + +################################################################################ +### namedtuple +################################################################################ + +_class_template = """\ +from builtins import property as _property, tuple as _tuple +from operator import itemgetter as _itemgetter +from collections import OrderedDict + +class {typename}(tuple): + '{typename}({arg_list})' + + __slots__ = () + + _fields = {field_names!r} + + def __new__(_cls, {arg_list}): + 'Create new instance of {typename}({arg_list})' + return _tuple.__new__(_cls, ({arg_list})) + + @classmethod + def _make(cls, iterable, new=tuple.__new__, len=len): + 'Make a new {typename} object from a sequence or iterable' + result = new(cls, iterable) + if len(result) != {num_fields:d}: + raise TypeError('Expected {num_fields:d} arguments, got %d' % len(result)) + return result + + def _replace(_self, **kwds): + 'Return a new {typename} object replacing specified fields with new values' + result = _self._make(map(kwds.pop, {field_names!r}, _self)) + if kwds: + raise ValueError('Got unexpected field names: %r' % list(kwds)) + return result + + def __repr__(self): + 'Return a nicely formatted representation string' + return self.__class__.__name__ + '({repr_fmt})' % self + + def _asdict(self): + 'Return a new OrderedDict which maps field names to their values.' + return OrderedDict(zip(self._fields, self)) + + def __getnewargs__(self): + 'Return self as a plain tuple. Used by copy and pickle.' + return tuple(self) + +{field_defs} +""" + +_repr_template = '{name}=%r' + +_field_template = '''\ + {name} = _property(_itemgetter({index:d}), doc='Alias for field number {index:d}') +''' + +def namedtuple(typename, field_names, verbose=False, rename=False): + """Returns a new subclass of tuple with named fields. 
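+
+    Editorial note (not part of the original file): *field_names* may be a
+    sequence of strings or a single string with the names separated by
+    whitespace and/or commas, e.g. 'x y' or 'x, y'.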
+ + >>> Point = namedtuple('Point', ['x', 'y']) + >>> Point.__doc__ # docstring for the new class + 'Point(x, y)' + >>> p = Point(11, y=22) # instantiate with positional args or keywords + >>> p[0] + p[1] # indexable like a plain tuple + 33 + >>> x, y = p # unpack like a regular tuple + >>> x, y + (11, 22) + >>> p.x + p.y # fields also accessable by name + 33 + >>> d = p._asdict() # convert to a dictionary + >>> d['x'] + 11 + >>> Point(**d) # convert from a dictionary + Point(x=11, y=22) + >>> p._replace(x=100) # _replace() is like str.replace() but targets named fields + Point(x=100, y=22) + + """ + + # Validate the field names. At the user's option, either generate an error + # message or automatically replace the field name with a valid name. + if isinstance(field_names, str): + field_names = field_names.replace(',', ' ').split() + field_names = list(map(str, field_names)) + typename = str(typename) + if rename: + seen = set() + for index, name in enumerate(field_names): + if (not name.isidentifier() + or _iskeyword(name) + or name.startswith('_') + or name in seen): + field_names[index] = '_%d' % index + seen.add(name) + for name in [typename] + field_names: + if type(name) != str: + raise TypeError('Type names and field names must be strings') + if not name.isidentifier(): + raise ValueError('Type names and field names must be valid ' + 'identifiers: %r' % name) + if _iskeyword(name): + raise ValueError('Type names and field names cannot be a ' + 'keyword: %r' % name) + seen = set() + for name in field_names: + if name.startswith('_') and not rename: + raise ValueError('Field names cannot start with an underscore: ' + '%r' % name) + if name in seen: + raise ValueError('Encountered duplicate field name: %r' % name) + seen.add(name) + + # Fill-in the class template + class_definition = _class_template.format( + typename = typename, + field_names = tuple(field_names), + num_fields = len(field_names), + arg_list = repr(tuple(field_names)).replace("'", "")[1:-1], + repr_fmt = ', '.join(_repr_template.format(name=name) + for name in field_names), + field_defs = '\n'.join(_field_template.format(index=index, name=name) + for index, name in enumerate(field_names)) + ) + + # Execute the template string in a temporary namespace and support + # tracing utilities by setting a value for frame.f_globals['__name__'] + namespace = dict(__name__='namedtuple_%s' % typename) + exec(class_definition, namespace) + result = namespace[typename] + result._source = class_definition + if verbose: + print(result._source) + + # For pickling to work, the __module__ variable needs to be set to the frame + # where the named tuple is created. Bypass this step in environments where + # sys._getframe is not defined (Jython for example) or sys._getframe is not + # defined for arguments greater than 0 (IronPython). + try: + result.__module__ = _sys._getframe(1).f_globals.get('__name__', '__main__') + except (AttributeError, ValueError): + pass + + return result + + +######################################################################## +### Counter +######################################################################## + +def _count_elements(mapping, iterable): + 'Tally elements from the iterable.' + mapping_get = mapping.get + for elem in iterable: + mapping[elem] = mapping_get(elem, 0) + 1 + +try: # Load C helper function if available + from _collections import _count_elements +except ImportError: + pass + +class Counter(dict): + '''Dict subclass for counting hashable items. Sometimes called a bag + or multiset. 
Elements are stored as dictionary keys and their counts + are stored as dictionary values. + + >>> c = Counter('abcdeabcdabcaba') # count elements from a string + + >>> c.most_common(3) # three most common elements + [('a', 5), ('b', 4), ('c', 3)] + >>> sorted(c) # list all unique elements + ['a', 'b', 'c', 'd', 'e'] + >>> ''.join(sorted(c.elements())) # list elements with repetitions + 'aaaaabbbbcccdde' + >>> sum(c.values()) # total of all counts + 15 + + >>> c['a'] # count of letter 'a' + 5 + >>> for elem in 'shazam': # update counts from an iterable + ... c[elem] += 1 # by adding 1 to each element's count + >>> c['a'] # now there are seven 'a' + 7 + >>> del c['b'] # remove all 'b' + >>> c['b'] # now there are zero 'b' + 0 + + >>> d = Counter('simsalabim') # make another counter + >>> c.update(d) # add in the second counter + >>> c['a'] # now there are nine 'a' + 9 + + >>> c.clear() # empty the counter + >>> c + Counter() + + Note: If a count is set to zero or reduced to zero, it will remain + in the counter until the entry is deleted or the counter is cleared: + + >>> c = Counter('aaabbc') + >>> c['b'] -= 2 # reduce the count of 'b' by two + >>> c.most_common() # 'b' is still in, but its count is zero + [('a', 3), ('c', 1), ('b', 0)] + + ''' + # References: + # http://en.wikipedia.org/wiki/Multiset + # http://www.gnu.org/software/smalltalk/manual-base/html_node/Bag.html + # http://www.demo2s.com/Tutorial/Cpp/0380__set-multiset/Catalog0380__set-multiset.htm + # http://code.activestate.com/recipes/259174/ + # Knuth, TAOCP Vol. II section 4.6.3 + + def __init__(*args, **kwds): + '''Create a new, empty Counter object. And if given, count elements + from an input iterable. Or, initialize the count from another mapping + of elements to their counts. + + >>> c = Counter() # a new, empty counter + >>> c = Counter('gallahad') # a new counter from an iterable + >>> c = Counter({'a': 4, 'b': 2}) # a new counter from a mapping + >>> c = Counter(a=4, b=2) # a new counter from keyword args + + ''' + if not args: + raise TypeError("descriptor '__init__' of 'Counter' object " + "needs an argument") + self, *args = args + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + super(Counter, self).__init__() + self.update(*args, **kwds) + + def __missing__(self, key): + 'The count of elements not in the Counter is zero.' + # Needed so that self[missing_item] does not raise KeyError + return 0 + + def most_common(self, n=None): + '''List the n most common elements and their counts from the most + common to the least. If n is None, then list all element counts. + + >>> Counter('abcdeabcdabcaba').most_common(3) + [('a', 5), ('b', 4), ('c', 3)] + + ''' + # Emulate Bag.sortedByCount from Smalltalk + if n is None: + return sorted(self.items(), key=_itemgetter(1), reverse=True) + return _heapq.nlargest(n, self.items(), key=_itemgetter(1)) + + def elements(self): + '''Iterator over elements repeating each as many times as its count. + + >>> c = Counter('ABCABC') + >>> sorted(c.elements()) + ['A', 'A', 'B', 'B', 'C', 'C'] + + # Knuth's example for prime factors of 1836: 2**2 * 3**3 * 17**1 + >>> prime_factors = Counter({2: 2, 3: 3, 17: 1}) + >>> product = 1 + >>> for factor in prime_factors.elements(): # loop over factors + ... product *= factor # and multiply them + >>> product + 1836 + + Note, if an element's count has been set to zero or is a negative + number, elements() will ignore it. + + ''' + # Emulate Bag.do from Smalltalk and Multiset.begin from C++. 
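+        # Editorial note (not part of the original file): _starmap(_repeat,
+        # self.items()) turns each (elem, count) pair into repeat(elem,
+        # count), and chain.from_iterable flattens those runs into a single
+        # iterator.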
+ return _chain.from_iterable(_starmap(_repeat, self.items())) + + # Override dict methods where necessary + + @classmethod + def fromkeys(cls, iterable, v=None): + # There is no equivalent method for counters because setting v=1 + # means that no element can have a count greater than one. + raise NotImplementedError( + 'Counter.fromkeys() is undefined. Use Counter(iterable) instead.') + + def update(*args, **kwds): + '''Like dict.update() but add counts instead of replacing them. + + Source can be an iterable, a dictionary, or another Counter instance. + + >>> c = Counter('which') + >>> c.update('witch') # add elements from another iterable + >>> d = Counter('watch') + >>> c.update(d) # add elements from another counter + >>> c['h'] # four 'h' in which, witch, and watch + 4 + + ''' + # The regular dict.update() operation makes no sense here because the + # replace behavior results in the some of original untouched counts + # being mixed-in with all of the other counts for a mismash that + # doesn't have a straight-forward interpretation in most counting + # contexts. Instead, we implement straight-addition. Both the inputs + # and outputs are allowed to contain zero and negative counts. + + if not args: + raise TypeError("descriptor 'update' of 'Counter' object " + "needs an argument") + self, *args = args + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + iterable = args[0] if args else None + if iterable is not None: + if isinstance(iterable, Mapping): + if self: + self_get = self.get + for elem, count in iterable.items(): + self[elem] = count + self_get(elem, 0) + else: + super(Counter, self).update(iterable) # fast path when counter is empty + else: + _count_elements(self, iterable) + if kwds: + self.update(kwds) + + def subtract(*args, **kwds): + '''Like dict.update() but subtracts counts instead of replacing them. + Counts can be reduced below zero. Both the inputs and outputs are + allowed to contain zero and negative counts. + + Source can be an iterable, a dictionary, or another Counter instance. + + >>> c = Counter('which') + >>> c.subtract('witch') # subtract elements from another iterable + >>> c.subtract(Counter('watch')) # subtract elements from another counter + >>> c['h'] # 2 in which, minus 1 in witch, minus 1 in watch + 0 + >>> c['w'] # 1 in which, minus 1 in witch, minus 1 in watch + -1 + + ''' + if not args: + raise TypeError("descriptor 'subtract' of 'Counter' object " + "needs an argument") + self, *args = args + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + iterable = args[0] if args else None + if iterable is not None: + self_get = self.get + if isinstance(iterable, Mapping): + for elem, count in iterable.items(): + self[elem] = self_get(elem, 0) - count + else: + for elem in iterable: + self[elem] = self_get(elem, 0) - 1 + if kwds: + self.subtract(kwds) + + def copy(self): + 'Return a shallow copy.' + return self.__class__(self) + + def __reduce__(self): + return self.__class__, (dict(self),) + + def __delitem__(self, elem): + 'Like dict.__delitem__() but does not raise KeyError for missing values.' 
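+        # Editorial note (not part of the original file): this makes
+        # `del c[missing]` a silent no-op, consistent with __missing__()
+        # reporting a count of zero for absent elements.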
+ if elem in self: + super().__delitem__(elem) + + def __repr__(self): + if not self: + return '%s()' % self.__class__.__name__ + try: + items = ', '.join(map('%r: %r'.__mod__, self.most_common())) + return '%s({%s})' % (self.__class__.__name__, items) + except TypeError: + # handle case where values are not orderable + return '{0}({1!r})'.format(self.__class__.__name__, dict(self)) + + # Multiset-style mathematical operations discussed in: + # Knuth TAOCP Volume II section 4.6.3 exercise 19 + # and at http://en.wikipedia.org/wiki/Multiset + # + # Outputs guaranteed to only include positive counts. + # + # To strip negative and zero counts, add-in an empty counter: + # c += Counter() + + def __add__(self, other): + '''Add counts from two counters. + + >>> Counter('abbb') + Counter('bcc') + Counter({'b': 4, 'c': 2, 'a': 1}) + + ''' + if not isinstance(other, Counter): + return NotImplemented + result = Counter() + for elem, count in self.items(): + newcount = count + other[elem] + if newcount > 0: + result[elem] = newcount + for elem, count in other.items(): + if elem not in self and count > 0: + result[elem] = count + return result + + def __sub__(self, other): + ''' Subtract count, but keep only results with positive counts. + + >>> Counter('abbbc') - Counter('bccd') + Counter({'b': 2, 'a': 1}) + + ''' + if not isinstance(other, Counter): + return NotImplemented + result = Counter() + for elem, count in self.items(): + newcount = count - other[elem] + if newcount > 0: + result[elem] = newcount + for elem, count in other.items(): + if elem not in self and count < 0: + result[elem] = 0 - count + return result + + def __or__(self, other): + '''Union is the maximum of value in either of the input counters. + + >>> Counter('abbb') | Counter('bcc') + Counter({'b': 3, 'c': 2, 'a': 1}) + + ''' + if not isinstance(other, Counter): + return NotImplemented + result = Counter() + for elem, count in self.items(): + other_count = other[elem] + newcount = other_count if count < other_count else count + if newcount > 0: + result[elem] = newcount + for elem, count in other.items(): + if elem not in self and count > 0: + result[elem] = count + return result + + def __and__(self, other): + ''' Intersection is the minimum of corresponding counts. + + >>> Counter('abbb') & Counter('bcc') + Counter({'b': 1}) + + ''' + if not isinstance(other, Counter): + return NotImplemented + result = Counter() + for elem, count in self.items(): + other_count = other[elem] + newcount = count if count < other_count else other_count + if newcount > 0: + result[elem] = newcount + return result + + def __pos__(self): + 'Adds an empty counter, effectively stripping negative and zero counts' + result = Counter() + for elem, count in self.items(): + if count > 0: + result[elem] = count + return result + + def __neg__(self): + '''Subtracts from an empty counter. Strips positive and zero counts, + and flips the sign on negative counts. + + ''' + result = Counter() + for elem, count in self.items(): + if count < 0: + result[elem] = 0 - count + return result + + def _keep_positive(self): + '''Internal method to strip elements with a negative or zero count''' + nonpositive = [elem for elem, count in self.items() if not count > 0] + for elem in nonpositive: + del self[elem] + return self + + def __iadd__(self, other): + '''Inplace add from another counter, keeping only positive counts. 
+ + >>> c = Counter('abbb') + >>> c += Counter('bcc') + >>> c + Counter({'b': 4, 'c': 2, 'a': 1}) + + ''' + for elem, count in other.items(): + self[elem] += count + return self._keep_positive() + + def __isub__(self, other): + '''Inplace subtract counter, but keep only results with positive counts. + + >>> c = Counter('abbbc') + >>> c -= Counter('bccd') + >>> c + Counter({'b': 2, 'a': 1}) + + ''' + for elem, count in other.items(): + self[elem] -= count + return self._keep_positive() + + def __ior__(self, other): + '''Inplace union is the maximum of value from either counter. + + >>> c = Counter('abbb') + >>> c |= Counter('bcc') + >>> c + Counter({'b': 3, 'c': 2, 'a': 1}) + + ''' + for elem, other_count in other.items(): + count = self[elem] + if other_count > count: + self[elem] = other_count + return self._keep_positive() + + def __iand__(self, other): + '''Inplace intersection is the minimum of corresponding counts. + + >>> c = Counter('abbb') + >>> c &= Counter('bcc') + >>> c + Counter({'b': 1}) + + ''' + for elem, count in self.items(): + other_count = other[elem] + if other_count < count: + self[elem] = other_count + return self._keep_positive() + + +######################################################################## +### ChainMap (helper for configparser and string.Template) +######################################################################## + +class ChainMap(MutableMapping): + ''' A ChainMap groups multiple dicts (or other mappings) together + to create a single, updateable view. + + The underlying mappings are stored in a list. That list is public and can + accessed or updated using the *maps* attribute. There is no other state. + + Lookups search the underlying mappings successively until a key is found. + In contrast, writes, updates, and deletions only operate on the first + mapping. + + ''' + + def __init__(self, *maps): + '''Initialize a ChainMap by setting *maps* to the given mappings. + If no mappings are provided, a single empty dictionary is used. + + ''' + self.maps = list(maps) or [{}] # always at least one map + + def __missing__(self, key): + raise KeyError(key) + + def __getitem__(self, key): + for mapping in self.maps: + try: + return mapping[key] # can't use 'key in mapping' with defaultdict + except KeyError: + pass + return self.__missing__(key) # support subclasses that define __missing__ + + def get(self, key, default=None): + return self[key] if key in self else default + + def __len__(self): + return len(set().union(*self.maps)) # reuses stored hash values if possible + + def __iter__(self): + return iter(set().union(*self.maps)) + + def __contains__(self, key): + return any(key in m for m in self.maps) + + def __bool__(self): + return any(self.maps) + + @_recursive_repr() + def __repr__(self): + return '{0.__class__.__name__}({1})'.format( + self, ', '.join(map(repr, self.maps))) + + @classmethod + def fromkeys(cls, iterable, *args): + 'Create a ChainMap with a single dict created from the iterable.' + return cls(dict.fromkeys(iterable, *args)) + + def copy(self): + 'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]' + return self.__class__(self.maps[0].copy(), *self.maps[1:]) + + __copy__ = copy + + def new_child(self, m=None): # like Django's Context.push() + '''New ChainMap with a new map followed by all previous maps. + If no map is provided, an empty dict is used. 
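+
+        Editorial doctest sketch (not part of the original file):
+
+        >>> base = ChainMap({'a': 1})
+        >>> child = base.new_child({'b': 2})
+        >>> (child['a'], child['b'])
+        (1, 2)
+        >>> child.maps[1] is base.maps[0]
+        True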
+
+        '''
+        if m is None:
+            m = {}
+        return self.__class__(m, *self.maps)
+
+    @property
+    def parents(self):                          # like Django's Context.pop()
+        'New ChainMap from maps[1:].'
+        return self.__class__(*self.maps[1:])
+
+    def __setitem__(self, key, value):
+        self.maps[0][key] = value
+
+    def __delitem__(self, key):
+        try:
+            del self.maps[0][key]
+        except KeyError:
+            raise KeyError('Key not found in the first mapping: {!r}'.format(key))
+
+    def popitem(self):
+        'Remove and return an item pair from maps[0]. Raise KeyError if maps[0] is empty.'
+        try:
+            return self.maps[0].popitem()
+        except KeyError:
+            raise KeyError('No keys found in the first mapping.')
+
+    def pop(self, key, *args):
+        'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].'
+        try:
+            return self.maps[0].pop(key, *args)
+        except KeyError:
+            raise KeyError('Key not found in the first mapping: {!r}'.format(key))
+
+    def clear(self):
+        'Clear maps[0], leaving maps[1:] intact.'
+        self.maps[0].clear()
+
+
+################################################################################
+### UserDict
+################################################################################
+
+class UserDict(MutableMapping):
+
+    # Start by filling-out the abstract methods
+    def __init__(*args, **kwargs):
+        if not args:
+            raise TypeError("descriptor '__init__' of 'UserDict' object "
+                            "needs an argument")
+        self, *args = args
+        if len(args) > 1:
+            raise TypeError('expected at most 1 arguments, got %d' % len(args))
+        if args:
+            dict = args[0]
+        elif 'dict' in kwargs:
+            dict = kwargs.pop('dict')
+            import warnings
+            warnings.warn("Passing 'dict' as keyword argument is deprecated",
+                          PendingDeprecationWarning, stacklevel=2)
+        else:
+            dict = None
+        self.data = {}
+        if dict is not None:
+            self.update(dict)
+        if len(kwargs):
+            self.update(kwargs)
+    def __len__(self): return len(self.data)
+    def __getitem__(self, key):
+        if key in self.data:
+            return self.data[key]
+        if hasattr(self.__class__, "__missing__"):
+            return self.__class__.__missing__(self, key)
+        raise KeyError(key)
+    def __setitem__(self, key, item): self.data[key] = item
+    def __delitem__(self, key): del self.data[key]
+    def __iter__(self):
+        return iter(self.data)
+
+    # Modify __contains__ to work correctly when __missing__ is present
+    def __contains__(self, key):
+        return key in self.data
+
+    # Now, add the methods in dicts but not in MutableMapping
+    def __repr__(self): return repr(self.data)
+    def copy(self):
+        if self.__class__ is UserDict:
+            return UserDict(self.data.copy())
+        import copy
+        data = self.data
+        try:
+            self.data = {}
+            c = copy.copy(self)
+        finally:
+            self.data = data
+        c.update(self)
+        return c
+    @classmethod
+    def fromkeys(cls, iterable, value=None):
+        d = cls()
+        for key in iterable:
+            d[key] = value
+        return d
+
+
+
+################################################################################
+### UserList
+################################################################################
+
+class UserList(MutableSequence):
+    """A more or less complete user-defined wrapper around list objects."""
+    def __init__(self, initlist=None):
+        self.data = []
+        if initlist is not None:
+            # XXX should this accept an arbitrary sequence?
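+            # Editorial note (not part of the original file): the branches
+            # below copy from a plain list, from another UserList, or from
+            # any other iterable, always leaving self.data a real list.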
+ if type(initlist) == type(self.data): + self.data[:] = initlist + elif isinstance(initlist, UserList): + self.data[:] = initlist.data[:] + else: + self.data = list(initlist) + def __repr__(self): return repr(self.data) + def __lt__(self, other): return self.data < self.__cast(other) + def __le__(self, other): return self.data <= self.__cast(other) + def __eq__(self, other): return self.data == self.__cast(other) + def __gt__(self, other): return self.data > self.__cast(other) + def __ge__(self, other): return self.data >= self.__cast(other) + def __cast(self, other): + return other.data if isinstance(other, UserList) else other + def __contains__(self, item): return item in self.data + def __len__(self): return len(self.data) + def __getitem__(self, i): return self.data[i] + def __setitem__(self, i, item): self.data[i] = item + def __delitem__(self, i): del self.data[i] + def __add__(self, other): + if isinstance(other, UserList): + return self.__class__(self.data + other.data) + elif isinstance(other, type(self.data)): + return self.__class__(self.data + other) + return self.__class__(self.data + list(other)) + def __radd__(self, other): + if isinstance(other, UserList): + return self.__class__(other.data + self.data) + elif isinstance(other, type(self.data)): + return self.__class__(other + self.data) + return self.__class__(list(other) + self.data) + def __iadd__(self, other): + if isinstance(other, UserList): + self.data += other.data + elif isinstance(other, type(self.data)): + self.data += other + else: + self.data += list(other) + return self + def __mul__(self, n): + return self.__class__(self.data*n) + __rmul__ = __mul__ + def __imul__(self, n): + self.data *= n + return self + def append(self, item): self.data.append(item) + def insert(self, i, item): self.data.insert(i, item) + def pop(self, i=-1): return self.data.pop(i) + def remove(self, item): self.data.remove(item) + def clear(self): self.data.clear() + def copy(self): return self.__class__(self) + def count(self, item): return self.data.count(item) + def index(self, item, *args): return self.data.index(item, *args) + def reverse(self): self.data.reverse() + def sort(self, *args, **kwds): self.data.sort(*args, **kwds) + def extend(self, other): + if isinstance(other, UserList): + self.data.extend(other.data) + else: + self.data.extend(other) + + + +################################################################################ +### UserString +################################################################################ + +class UserString(Sequence): + def __init__(self, seq): + if isinstance(seq, str): + self.data = seq + elif isinstance(seq, UserString): + self.data = seq.data[:] + else: + self.data = str(seq) + def __str__(self): return str(self.data) + def __repr__(self): return repr(self.data) + def __int__(self): return int(self.data) + def __float__(self): return float(self.data) + def __complex__(self): return complex(self.data) + def __hash__(self): return hash(self.data) + def __getnewargs__(self): + return (self.data[:],) + + def __eq__(self, string): + if isinstance(string, UserString): + return self.data == string.data + return self.data == string + def __lt__(self, string): + if isinstance(string, UserString): + return self.data < string.data + return self.data < string + def __le__(self, string): + if isinstance(string, UserString): + return self.data <= string.data + return self.data <= string + def __gt__(self, string): + if isinstance(string, UserString): + return self.data > string.data + return 
self.data > string
+    def __ge__(self, string):
+        if isinstance(string, UserString):
+            return self.data >= string.data
+        return self.data >= string
+
+    def __contains__(self, char):
+        if isinstance(char, UserString):
+            char = char.data
+        return char in self.data
+
+    def __len__(self): return len(self.data)
+    def __getitem__(self, index): return self.__class__(self.data[index])
+    def __add__(self, other):
+        if isinstance(other, UserString):
+            return self.__class__(self.data + other.data)
+        elif isinstance(other, str):
+            return self.__class__(self.data + other)
+        return self.__class__(self.data + str(other))
+    def __radd__(self, other):
+        if isinstance(other, str):
+            return self.__class__(other + self.data)
+        return self.__class__(str(other) + self.data)
+    def __mul__(self, n):
+        return self.__class__(self.data*n)
+    __rmul__ = __mul__
+    def __mod__(self, args):
+        return self.__class__(self.data % args)
+    def __rmod__(self, format):
+        # Editorial fix: the original line read `format % args`, but `args`
+        # is undefined in this scope (a NameError at call time); formatting
+        # against self.data is the intended behavior.
+        return self.__class__(format % self.data)
+
+    # the following methods are defined in alphabetical order:
+    def capitalize(self): return self.__class__(self.data.capitalize())
+    def casefold(self):
+        return self.__class__(self.data.casefold())
+    def center(self, width, *args):
+        return self.__class__(self.data.center(width, *args))
+    def count(self, sub, start=0, end=_sys.maxsize):
+        if isinstance(sub, UserString):
+            sub = sub.data
+        return self.data.count(sub, start, end)
+    def encode(self, encoding=None, errors=None): # XXX improve this?
+        if encoding:
+            if errors:
+                return self.__class__(self.data.encode(encoding, errors))
+            return self.__class__(self.data.encode(encoding))
+        return self.__class__(self.data.encode())
+    def endswith(self, suffix, start=0, end=_sys.maxsize):
+        return self.data.endswith(suffix, start, end)
+    def expandtabs(self, tabsize=8):
+        return self.__class__(self.data.expandtabs(tabsize))
+    def find(self, sub, start=0, end=_sys.maxsize):
+        if isinstance(sub, UserString):
+            sub = sub.data
+        return self.data.find(sub, start, end)
+    def format(self, *args, **kwds):
+        return self.data.format(*args, **kwds)
+    def format_map(self, mapping):
+        return self.data.format_map(mapping)
+    def index(self, sub, start=0, end=_sys.maxsize):
+        return self.data.index(sub, start, end)
+    def isalpha(self): return self.data.isalpha()
+    def isalnum(self): return self.data.isalnum()
+    def isdecimal(self): return self.data.isdecimal()
+    def isdigit(self): return self.data.isdigit()
+    def isidentifier(self): return self.data.isidentifier()
+    def islower(self): return self.data.islower()
+    def isnumeric(self): return self.data.isnumeric()
+    def isprintable(self): return self.data.isprintable()
+    def isspace(self): return self.data.isspace()
+    def istitle(self): return self.data.istitle()
+    def isupper(self): return self.data.isupper()
+    def join(self, seq): return self.data.join(seq)
+    def ljust(self, width, *args):
+        return self.__class__(self.data.ljust(width, *args))
+    def lower(self): return self.__class__(self.data.lower())
+    def lstrip(self, chars=None): return self.__class__(self.data.lstrip(chars))
+    maketrans = str.maketrans
+    def partition(self, sep):
+        return self.data.partition(sep)
+    def replace(self, old, new, maxsplit=-1):
+        if isinstance(old, UserString):
+            old = old.data
+        if isinstance(new, UserString):
+            new = new.data
+        return self.__class__(self.data.replace(old, new, maxsplit))
+    def rfind(self, sub, start=0, end=_sys.maxsize):
+        if isinstance(sub, UserString):
+            sub = sub.data
+        return self.data.rfind(sub, start, end)
+    def rindex(self, sub, start=0,
end=_sys.maxsize): + return self.data.rindex(sub, start, end) + def rjust(self, width, *args): + return self.__class__(self.data.rjust(width, *args)) + def rpartition(self, sep): + return self.data.rpartition(sep) + def rstrip(self, chars=None): + return self.__class__(self.data.rstrip(chars)) + def split(self, sep=None, maxsplit=-1): + return self.data.split(sep, maxsplit) + def rsplit(self, sep=None, maxsplit=-1): + return self.data.rsplit(sep, maxsplit) + def splitlines(self, keepends=False): return self.data.splitlines(keepends) + def startswith(self, prefix, start=0, end=_sys.maxsize): + return self.data.startswith(prefix, start, end) + def strip(self, chars=None): return self.__class__(self.data.strip(chars)) + def swapcase(self): return self.__class__(self.data.swapcase()) + def title(self): return self.__class__(self.data.title()) + def translate(self, *args): + return self.__class__(self.data.translate(*args)) + def upper(self): return self.__class__(self.data.upper()) + def zfill(self, width): return self.__class__(self.data.zfill(width)) diff --git a/typing_extensions/test_data/python-3.5.1/collections/__main__.py b/typing_extensions/test_data/python-3.5.1/collections/__main__.py new file mode 100644 index 00000000..763e38e0 --- /dev/null +++ b/typing_extensions/test_data/python-3.5.1/collections/__main__.py @@ -0,0 +1,38 @@ +################################################################################ +### Simple tests +################################################################################ + +# verify that instances can be pickled +from collections import namedtuple +from pickle import loads, dumps +Point = namedtuple('Point', 'x, y', True) +p = Point(x=10, y=20) +assert p == loads(dumps(p)) + +# test and demonstrate ability to override methods +class Point(namedtuple('Point', 'x y')): + __slots__ = () + @property + def hypot(self): + return (self.x ** 2 + self.y ** 2) ** 0.5 + def __str__(self): + return 'Point: x=%6.3f y=%6.3f hypot=%6.3f' % (self.x, self.y, self.hypot) + +for p in Point(3, 4), Point(14, 5/7.): + print (p) + +class Point(namedtuple('Point', 'x y')): + 'Point class with optimized _make() and _replace() without error-checking' + __slots__ = () + _make = classmethod(tuple.__new__) + def _replace(self, _map=map, **kwds): + return self._make(_map(kwds.get, ('x', 'y'), self)) + +print(Point(11, 22)._replace(x=100)) + +Point3D = namedtuple('Point3D', Point._fields + ('z',)) +print(Point3D.__doc__) + +import doctest, collections +TestResults = namedtuple('TestResults', 'failed attempted') +print(TestResults(*doctest.testmod(collections))) diff --git a/typing_extensions/test_data/python-3.5.1/collections/abc.py b/typing_extensions/test_data/python-3.5.1/collections/abc.py new file mode 100644 index 00000000..891600d1 --- /dev/null +++ b/typing_extensions/test_data/python-3.5.1/collections/abc.py @@ -0,0 +1,2 @@ +from _collections_abc import * +from _collections_abc import __all__ diff --git a/typing_extensions/test_data/python-3.5.1/typing.py b/typing_extensions/test_data/python-3.5.1/typing.py new file mode 100644 index 00000000..1757f138 --- /dev/null +++ b/typing_extensions/test_data/python-3.5.1/typing.py @@ -0,0 +1,1656 @@ +# TODO nits: +# Get rid of asserts that are the caller's fault. +# Docstrings (e.g. ABCs). + +import abc +from abc import abstractmethod, abstractproperty +import collections +import functools +import re as stdlib_re # Avoid confusion with the re we export. 
+import sys +import types +try: + import collections.abc as collections_abc +except ImportError: + import collections as collections_abc # Fallback for PY3.2. + + +# Please keep __all__ alphabetized within each category. +__all__ = [ + # Super-special typing primitives. + 'Any', + 'Callable', + 'Generic', + 'Optional', + 'TypeVar', + 'Union', + 'Tuple', + + # ABCs (from collections.abc). + 'AbstractSet', # collections.abc.Set. + 'ByteString', + 'Container', + 'Hashable', + 'ItemsView', + 'Iterable', + 'Iterator', + 'KeysView', + 'Mapping', + 'MappingView', + 'MutableMapping', + 'MutableSequence', + 'MutableSet', + 'Sequence', + 'Sized', + 'ValuesView', + + # Structural checks, a.k.a. protocols. + 'Reversible', + 'SupportsAbs', + 'SupportsFloat', + 'SupportsInt', + 'SupportsRound', + + # Concrete collection types. + 'Dict', + 'List', + 'Set', + 'NamedTuple', # Not really a type. + 'Generator', + + # One-off things. + 'AnyStr', + 'cast', + 'get_type_hints', + 'no_type_check', + 'no_type_check_decorator', + 'overload', + + # Submodules. + 'io', + 're', +] + + +def _qualname(x): + if sys.version_info[:2] >= (3, 3): + return x.__qualname__ + else: + # Fall back to just name. + return x.__name__ + + +class TypingMeta(type): + """Metaclass for every type defined below. + + This overrides __new__() to require an extra keyword parameter + '_root', which serves as a guard against naive subclassing of the + typing classes. Any legitimate class defined using a metaclass + derived from TypingMeta (including internal subclasses created by + e.g. Union[X, Y]) must pass _root=True. + + This also defines a dummy constructor (all the work is done in + __new__) and a nicer repr(). + """ + + _is_protocol = False + + def __new__(cls, name, bases, namespace, *, _root=False): + if not _root: + raise TypeError("Cannot subclass %s" % + (', '.join(map(_type_repr, bases)) or '()')) + return super().__new__(cls, name, bases, namespace) + + def __init__(self, *args, **kwds): + pass + + def _eval_type(self, globalns, localns): + """Override this in subclasses to interpret forward references. + + For example, Union['C'] is internally stored as + Union[_ForwardRef('C')], which should evaluate to _Union[C], + where C is an object found in globalns or localns (searching + localns first, of course). 
+ """ + return self + + def _has_type_var(self): + return False + + def __repr__(self): + return '%s.%s' % (self.__module__, _qualname(self)) + + +class Final: + """Mix-in class to prevent instantiation.""" + + __slots__ = () + + def __new__(self, *args, **kwds): + raise TypeError("Cannot instantiate %r" % self.__class__) + + +class _ForwardRef(TypingMeta): + """Wrapper to hold a forward reference.""" + + def __new__(cls, arg): + if not isinstance(arg, str): + raise TypeError('ForwardRef must be a string -- got %r' % (arg,)) + try: + code = compile(arg, '', 'eval') + except SyntaxError: + raise SyntaxError('ForwardRef must be an expression -- got %r' % + (arg,)) + self = super().__new__(cls, arg, (), {}, _root=True) + self.__forward_arg__ = arg + self.__forward_code__ = code + self.__forward_evaluated__ = False + self.__forward_value__ = None + typing_globals = globals() + frame = sys._getframe(1) + while frame is not None and frame.f_globals is typing_globals: + frame = frame.f_back + assert frame is not None + self.__forward_frame__ = frame + return self + + def _eval_type(self, globalns, localns): + if not isinstance(localns, dict): + raise TypeError('ForwardRef localns must be a dict -- got %r' % + (localns,)) + if not isinstance(globalns, dict): + raise TypeError('ForwardRef globalns must be a dict -- got %r' % + (globalns,)) + if not self.__forward_evaluated__: + if globalns is None and localns is None: + globalns = localns = {} + elif globalns is None: + globalns = localns + elif localns is None: + localns = globalns + self.__forward_value__ = _type_check( + eval(self.__forward_code__, globalns, localns), + "Forward references must evaluate to types.") + self.__forward_evaluated__ = True + return self.__forward_value__ + + def __instancecheck__(self, obj): + raise TypeError("Forward references cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + if not self.__forward_evaluated__: + globalns = self.__forward_frame__.f_globals + localns = self.__forward_frame__.f_locals + try: + self._eval_type(globalns, localns) + except NameError: + return False # Too early. + return issubclass(cls, self.__forward_value__) + + def __repr__(self): + return '_ForwardRef(%r)' % (self.__forward_arg__,) + + +class _TypeAlias: + """Internal helper class for defining generic variants of concrete types. + + Note that this is not a type; let's call it a pseudo-type. It can + be used in instance and subclass checks, e.g. isinstance(m, Match) + or issubclass(type(m), Match). However, it cannot be itself the + target of an issubclass() call; e.g. issubclass(Match, C) (for + some arbitrary class C) raises TypeError rather than returning + False. + """ + + __slots__ = ('name', 'type_var', 'impl_type', 'type_checker') + + def __new__(cls, *args, **kwds): + """Constructor. + + This only exists to give a better error message in case + someone tries to subclass a type alias (not a good idea). + """ + if (len(args) == 3 and + isinstance(args[0], str) and + isinstance(args[1], tuple)): + # Close enough. + raise TypeError("A type alias cannot be subclassed") + return object.__new__(cls) + + def __init__(self, name, type_var, impl_type, type_checker): + """Initializer. + + Args: + name: The name, e.g. 'Pattern'. + type_var: The type parameter, e.g. AnyStr, or the + specific type, e.g. str. + impl_type: The implementation type. + type_checker: Function that takes an impl_type instance. + and returns a value that should be a type_var instance. 
+ """ + assert isinstance(name, str), repr(name) + assert isinstance(type_var, type), repr(type_var) + assert isinstance(impl_type, type), repr(impl_type) + assert not isinstance(impl_type, TypingMeta), repr(impl_type) + self.name = name + self.type_var = type_var + self.impl_type = impl_type + self.type_checker = type_checker + + def __repr__(self): + return "%s[%s]" % (self.name, _type_repr(self.type_var)) + + def __getitem__(self, parameter): + assert isinstance(parameter, type), repr(parameter) + if not isinstance(self.type_var, TypeVar): + raise TypeError("%s cannot be further parameterized." % self) + if self.type_var.__constraints__: + if not issubclass(parameter, Union[self.type_var.__constraints__]): + raise TypeError("%s is not a valid substitution for %s." % + (parameter, self.type_var)) + return self.__class__(self.name, parameter, + self.impl_type, self.type_checker) + + def __instancecheck__(self, obj): + raise TypeError("Type aliases cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + if cls is Any: + return True + if isinstance(cls, _TypeAlias): + # Covariance. For now, we compare by name. + return (cls.name == self.name and + issubclass(cls.type_var, self.type_var)) + else: + # Note that this is too lenient, because the + # implementation type doesn't carry information about + # whether it is about bytes or str (for example). + return issubclass(cls, self.impl_type) + + +def _has_type_var(t): + return t is not None and isinstance(t, TypingMeta) and t._has_type_var() + + +def _eval_type(t, globalns, localns): + if isinstance(t, TypingMeta): + return t._eval_type(globalns, localns) + else: + return t + + +def _type_check(arg, msg): + """Check that the argument is a type, and return it. + + As a special case, accept None and return type(None) instead. + Also, _TypeAlias instances (e.g. Match, Pattern) are acceptable. + + The msg argument is a human-readable error message, e.g. + + "Union[arg, ...]: arg should be a type." + + We append the repr() of the actual value (truncated to 100 chars). + """ + if arg is None: + return type(None) + if isinstance(arg, str): + arg = _ForwardRef(arg) + if not isinstance(arg, (type, _TypeAlias)): + raise TypeError(msg + " Got %.100r." % (arg,)) + return arg + + +def _type_repr(obj): + """Return the repr() of an object, special-casing types. + + If obj is a type, we return a shorter version than the default + type.__repr__, based on the module and qualified name, which is + typically enough to uniquely identify a type. For everything + else, we fall back on repr(obj). + """ + if isinstance(obj, type) and not isinstance(obj, TypingMeta): + if obj.__module__ == 'builtins': + return _qualname(obj) + else: + return '%s.%s' % (obj.__module__, _qualname(obj)) + else: + return repr(obj) + + +class AnyMeta(TypingMeta): + """Metaclass for Any.""" + + def __new__(cls, name, bases, namespace, _root=False): + self = super().__new__(cls, name, bases, namespace, _root=_root) + return self + + def __instancecheck__(self, obj): + raise TypeError("Any cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + if not isinstance(cls, type): + return super().__subclasscheck__(cls) # To TypeError. + return True + + +class Any(Final, metaclass=AnyMeta, _root=True): + """Special type indicating an unconstrained type. + + - Any object is an instance of Any. + - Any class is a subclass of Any. + - As a special case, Any and object are subclasses of each other. 
+ """ + + __slots__ = () + + +class TypeVar(TypingMeta, metaclass=TypingMeta, _root=True): + """Type variable. + + Usage:: + + T = TypeVar('T') # Can be anything + A = TypeVar('A', str, bytes) # Must be str or bytes + + Type variables exist primarily for the benefit of static type + checkers. They serve as the parameters for generic types as well + as for generic function definitions. See class Generic for more + information on generic types. Generic functions work as follows: + + def repeat(x: T, n: int) -> Sequence[T]: + '''Return a list containing n references to x.''' + return [x]*n + + def longest(x: A, y: A) -> A: + '''Return the longest of two strings.''' + return x if len(x) >= len(y) else y + + The latter example's signature is essentially the overloading + of (str, str) -> str and (bytes, bytes) -> bytes. Also note + that if the arguments are instances of some subclass of str, + the return type is still plain str. + + At runtime, isinstance(x, T) will raise TypeError. However, + issubclass(C, T) is true for any class C, and issubclass(str, A) + and issubclass(bytes, A) are true, and issubclass(int, A) is + false. + + Type variables may be marked covariant or contravariant by passing + covariant=True or contravariant=True. See PEP 484 for more + details. By default type variables are invariant. + + Type variables can be introspected. e.g.: + + T.__name__ == 'T' + T.__constraints__ == () + T.__covariant__ == False + T.__contravariant__ = False + A.__constraints__ == (str, bytes) + """ + + def __new__(cls, name, *constraints, bound=None, + covariant=False, contravariant=False): + self = super().__new__(cls, name, (Final,), {}, _root=True) + if covariant and contravariant: + raise ValueError("Bivariant type variables are not supported.") + self.__covariant__ = bool(covariant) + self.__contravariant__ = bool(contravariant) + if constraints and bound is not None: + raise TypeError("Constraints cannot be combined with bound=...") + if constraints and len(constraints) == 1: + raise TypeError("A single constraint is not allowed") + msg = "TypeVar(name, constraint, ...): constraints must be types." + self.__constraints__ = tuple(_type_check(t, msg) for t in constraints) + if bound: + self.__bound__ = _type_check(bound, "Bound must be a type.") + else: + self.__bound__ = None + return self + + def _has_type_var(self): + return True + + def __repr__(self): + if self.__covariant__: + prefix = '+' + elif self.__contravariant__: + prefix = '-' + else: + prefix = '~' + return prefix + self.__name__ + + def __instancecheck__(self, instance): + raise TypeError("Type variables cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + # TODO: Make this raise TypeError too? + if cls is self: + return True + if cls is Any: + return True + if self.__bound__ is not None: + return issubclass(cls, self.__bound__) + if self.__constraints__: + return any(issubclass(cls, c) for c in self.__constraints__) + return True + + +# Some unconstrained type variables. These are used by the container types. +T = TypeVar('T') # Any type. +KT = TypeVar('KT') # Key type. +VT = TypeVar('VT') # Value type. +T_co = TypeVar('T_co', covariant=True) # Any type covariant containers. +V_co = TypeVar('V_co', covariant=True) # Any type covariant containers. +VT_co = TypeVar('VT_co', covariant=True) # Value type covariant containers. +T_contra = TypeVar('T_contra', contravariant=True) # Ditto contravariant. + +# A useful type variable with constraints. This represents string types. 
+# TODO: What about bytearray, memoryview? +AnyStr = TypeVar('AnyStr', bytes, str) + + +class UnionMeta(TypingMeta): + """Metaclass for Union.""" + + def __new__(cls, name, bases, namespace, parameters=None, _root=False): + if parameters is None: + return super().__new__(cls, name, bases, namespace, _root=_root) + if not isinstance(parameters, tuple): + raise TypeError("Expected parameters=<tuple>") + # Flatten out Union[Union[...], ...] and type-check non-Union args. + params = [] + msg = "Union[arg, ...]: each arg must be a type." + for p in parameters: + if isinstance(p, UnionMeta): + params.extend(p.__union_params__) + else: + params.append(_type_check(p, msg)) + # Weed out strict duplicates, preserving the first of each occurrence. + all_params = set(params) + if len(all_params) < len(params): + new_params = [] + for t in params: + if t in all_params: + new_params.append(t) + all_params.remove(t) + params = new_params + assert not all_params, all_params + # Weed out subclasses. + # E.g. Union[int, Employee, Manager] == Union[int, Employee]. + # If Any or object is present it will be the sole survivor. + # If both Any and object are present, Any wins. + # Never discard type variables, except against Any. + # (In particular, Union[str, AnyStr] != AnyStr.) + all_params = set(params) + for t1 in params: + if t1 is Any: + return Any + if isinstance(t1, TypeVar): + continue + if isinstance(t1, _TypeAlias): + # _TypeAlias is not a real class. + continue + if any(issubclass(t1, t2) + for t2 in all_params - {t1} if not isinstance(t2, TypeVar)): + all_params.remove(t1) + # It's not a union if there's only one type left. + if len(all_params) == 1: + return all_params.pop() + # Create a new class with these params. + self = super().__new__(cls, name, bases, {}, _root=True) + self.__union_params__ = tuple(t for t in params if t in all_params) + self.__union_set_params__ = frozenset(self.__union_params__) + return self + + def _eval_type(self, globalns, localns): + p = tuple(_eval_type(t, globalns, localns) + for t in self.__union_params__) + if p == self.__union_params__: + return self + else: + return self.__class__(self.__name__, self.__bases__, {}, + p, _root=True) + + def _has_type_var(self): + if self.__union_params__: + for t in self.__union_params__: + if _has_type_var(t): + return True + return False + + def __repr__(self): + r = super().__repr__() + if self.__union_params__: + r += '[%s]' % (', '.join(_type_repr(t) + for t in self.__union_params__)) + return r + + def __getitem__(self, parameters): + if self.__union_params__ is not None: + raise TypeError( + "Cannot subscript an existing Union.
Use Union[u, t] instead.") + if parameters == (): + raise TypeError("Cannot take a Union of no types.") + if not isinstance(parameters, tuple): + parameters = (parameters,) + return self.__class__(self.__name__, self.__bases__, + dict(self.__dict__), parameters, _root=True) + + def __eq__(self, other): + if not isinstance(other, UnionMeta): + return NotImplemented + return self.__union_set_params__ == other.__union_set_params__ + + def __hash__(self): + return hash(self.__union_set_params__) + + def __instancecheck__(self, obj): + raise TypeError("Unions cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + if cls is Any: + return True + if self.__union_params__ is None: + return isinstance(cls, UnionMeta) + elif isinstance(cls, UnionMeta): + if cls.__union_params__ is None: + return False + return all(issubclass(c, self) for c in (cls.__union_params__)) + elif isinstance(cls, TypeVar): + if cls in self.__union_params__: + return True + if cls.__constraints__: + return issubclass(Union[cls.__constraints__], self) + return False + else: + return any(issubclass(cls, t) for t in self.__union_params__) + + +class Union(Final, metaclass=UnionMeta, _root=True): + """Union type; Union[X, Y] means either X or Y. + + To define a union, use e.g. Union[int, str]. Details: + + - The arguments must be types and there must be at least one. + + - None as an argument is a special case and is replaced by + type(None). + + - Unions of unions are flattened, e.g.:: + + Union[Union[int, str], float] == Union[int, str, float] + + - Unions of a single argument vanish, e.g.:: + + Union[int] == int # The constructor actually returns int + + - Redundant arguments are skipped, e.g.:: + + Union[int, str, int] == Union[int, str] + + - When comparing unions, the argument order is ignored, e.g.:: + + Union[int, str] == Union[str, int] + + - When two arguments have a subclass relationship, the least + derived argument is kept, e.g.:: + + class Employee: pass + class Manager(Employee): pass + Union[int, Employee, Manager] == Union[int, Employee] + Union[Manager, int, Employee] == Union[int, Employee] + Union[Employee, Manager] == Employee + + - Corollary: if Any is present it is the sole survivor, e.g.:: + + Union[int, Any] == Any + + - Similar for object:: + + Union[int, object] == object + + - To cut a tie: Union[object, Any] == Union[Any, object] == Any. + + - You cannot subclass or instantiate a union. + + - You cannot write Union[X][Y] (what would it mean?). + + - You can use Optional[X] as a shorthand for Union[X, None]. + """ + + # Unsubscripted Union type has params set to None. + __union_params__ = None + __union_set_params__ = None + + +class OptionalMeta(TypingMeta): + """Metaclass for Optional.""" + + def __new__(cls, name, bases, namespace, _root=False): + return super().__new__(cls, name, bases, namespace, _root=_root) + + def __getitem__(self, arg): + arg = _type_check(arg, "Optional[t] requires a single type.") + return Union[arg, type(None)] + + +class Optional(Final, metaclass=OptionalMeta, _root=True): + """Optional type. + + Optional[X] is equivalent to Union[X, type(None)]. 
+ """ + + __slots__ = () + + +class TupleMeta(TypingMeta): + """Metaclass for Tuple.""" + + def __new__(cls, name, bases, namespace, parameters=None, + use_ellipsis=False, _root=False): + self = super().__new__(cls, name, bases, namespace, _root=_root) + self.__tuple_params__ = parameters + self.__tuple_use_ellipsis__ = use_ellipsis + return self + + def _has_type_var(self): + if self.__tuple_params__: + for t in self.__tuple_params__: + if _has_type_var(t): + return True + return False + + def _eval_type(self, globalns, localns): + tp = self.__tuple_params__ + if tp is None: + return self + p = tuple(_eval_type(t, globalns, localns) for t in tp) + if p == self.__tuple_params__: + return self + else: + return self.__class__(self.__name__, self.__bases__, {}, + p, _root=True) + + def __repr__(self): + r = super().__repr__() + if self.__tuple_params__ is not None: + params = [_type_repr(p) for p in self.__tuple_params__] + if self.__tuple_use_ellipsis__: + params.append('...') + r += '[%s]' % ( + ', '.join(params)) + return r + + def __getitem__(self, parameters): + if self.__tuple_params__ is not None: + raise TypeError("Cannot re-parameterize %r" % (self,)) + if not isinstance(parameters, tuple): + parameters = (parameters,) + if len(parameters) == 2 and parameters[1] == Ellipsis: + parameters = parameters[:1] + use_ellipsis = True + msg = "Tuple[t, ...]: t must be a type." + else: + use_ellipsis = False + msg = "Tuple[t0, t1, ...]: each t must be a type." + parameters = tuple(_type_check(p, msg) for p in parameters) + return self.__class__(self.__name__, self.__bases__, + dict(self.__dict__), parameters, + use_ellipsis=use_ellipsis, _root=True) + + def __eq__(self, other): + if not isinstance(other, TupleMeta): + return NotImplemented + return self.__tuple_params__ == other.__tuple_params__ + + def __hash__(self): + return hash(self.__tuple_params__) + + def __instancecheck__(self, obj): + raise TypeError("Tuples cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + if cls is Any: + return True + if not isinstance(cls, type): + return super().__subclasscheck__(cls) # To TypeError. + if issubclass(cls, tuple): + return True # Special case. + if not isinstance(cls, TupleMeta): + return super().__subclasscheck__(cls) # False. + if self.__tuple_params__ is None: + return True + if cls.__tuple_params__ is None: + return False # ??? + if cls.__tuple_use_ellipsis__ != self.__tuple_use_ellipsis__: + return False + # Covariance. + return (len(self.__tuple_params__) == len(cls.__tuple_params__) and + all(issubclass(x, p) + for x, p in zip(cls.__tuple_params__, + self.__tuple_params__))) + + +class Tuple(Final, metaclass=TupleMeta, _root=True): + """Tuple type; Tuple[X, Y] is the cross-product type of X and Y. + + Example: Tuple[T1, T2] is a tuple of two elements corresponding + to type variables T1 and T2. Tuple[int, float, str] is a tuple + of an int, a float and a string. + + To specify a variable-length tuple of homogeneous type, use Sequence[T]. + """ + + __slots__ = () + + +class CallableMeta(TypingMeta): + """Metaclass for Callable.""" + + def __new__(cls, name, bases, namespace, _root=False, + args=None, result=None): + if args is None and result is None: + pass # Must be 'class Callable'. + else: + if args is not Ellipsis: + if not isinstance(args, list): + raise TypeError("Callable[args, result]: " + "args must be a list." + " Got %.100r." % (args,)) + msg = "Callable[[arg, ...], result]: each arg must be a type." 
+ args = tuple(_type_check(arg, msg) for arg in args) + msg = "Callable[args, result]: result must be a type." + result = _type_check(result, msg) + self = super().__new__(cls, name, bases, namespace, _root=_root) + self.__args__ = args + self.__result__ = result + return self + + def _has_type_var(self): + if self.__args__: + for t in self.__args__: + if _has_type_var(t): + return True + return _has_type_var(self.__result__) + + def _eval_type(self, globalns, localns): + if self.__args__ is None and self.__result__ is None: + return self + if self.__args__ is Ellipsis: + args = self.__args__ + else: + args = [_eval_type(t, globalns, localns) for t in self.__args__] + result = _eval_type(self.__result__, globalns, localns) + if args == self.__args__ and result == self.__result__: + return self + else: + return self.__class__(self.__name__, self.__bases__, {}, + args=args, result=result, _root=True) + + def __repr__(self): + r = super().__repr__() + if self.__args__ is not None or self.__result__ is not None: + if self.__args__ is Ellipsis: + args_r = '...' + else: + args_r = '[%s]' % ', '.join(_type_repr(t) + for t in self.__args__) + r += '[%s, %s]' % (args_r, _type_repr(self.__result__)) + return r + + def __getitem__(self, parameters): + if self.__args__ is not None or self.__result__ is not None: + raise TypeError("This Callable type is already parameterized.") + if not isinstance(parameters, tuple) or len(parameters) != 2: + raise TypeError( + "Callable must be used as Callable[[arg, ...], result].") + args, result = parameters + return self.__class__(self.__name__, self.__bases__, + dict(self.__dict__), _root=True, + args=args, result=result) + + def __eq__(self, other): + if not isinstance(other, CallableMeta): + return NotImplemented + return (self.__args__ == other.__args__ and + self.__result__ == other.__result__) + + def __hash__(self): + return hash(self.__args__) ^ hash(self.__result__) + + def __instancecheck__(self, obj): + # For unparametrized Callable we allow this, because + # typing.Callable should be equivalent to + # collections.abc.Callable. + if self.__args__ is None and self.__result__ is None: + return isinstance(obj, collections_abc.Callable) + else: + raise TypeError("Callable[] cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + if cls is Any: + return True + if not isinstance(cls, CallableMeta): + return super().__subclasscheck__(cls) + if self.__args__ is None and self.__result__ is None: + return True + # We're not doing covariance or contravariance -- this is *invariance*. + return self == cls + + +class Callable(Final, metaclass=CallableMeta, _root=True): + """Callable type; Callable[[int], str] is a function of (int) -> str. + + The subscription syntax must always be used with exactly two + values: the argument list and the return type. The argument list + must be a list of types; the return type must be a single type. + + There is no syntax to indicate optional or keyword arguments; + such function types are rarely used as callback types. + """ + + __slots__ = () + + +def _gorg(a): + """Return the farthest origin of a generic class.""" + assert isinstance(a, GenericMeta) + while a.__origin__ is not None: + a = a.__origin__ + return a + + +def _geqv(a, b): + """Return whether two generic classes are equivalent. + + The intention is to consider generic class X and any of its + parameterized forms (X[T], X[int], etc.) as equivalent. + + However, X is not equivalent to a subclass of X.
+ + The relation is reflexive, symmetric and transitive. + """ + assert isinstance(a, GenericMeta) and isinstance(b, GenericMeta) + # Reduce each to its origin. + return _gorg(a) is _gorg(b) + + +class GenericMeta(TypingMeta, abc.ABCMeta): + """Metaclass for generic types.""" + + # TODO: Constrain more how Generic is used; only a few + # standard patterns should be allowed. + + # TODO: Use a more precise rule than matching __name__ to decide + # whether two classes are the same. Also, save the formal + # parameters. (These things are related! A solution lies in + # using origin.) + + __extra__ = None + + def __new__(cls, name, bases, namespace, + parameters=None, origin=None, extra=None): + if parameters is None: + # Extract parameters from direct base classes. Only + # direct bases are considered and only those that are + # themselves generic, and parameterized with type + # variables. Don't use bases like Any, Union, Tuple, + # Callable or type variables. + params = None + for base in bases: + if isinstance(base, TypingMeta): + if not isinstance(base, GenericMeta): + raise TypeError( + "You cannot inherit from magic class %s" % + repr(base)) + if base.__parameters__ is None: + continue # The base is unparameterized. + for bp in base.__parameters__: + if _has_type_var(bp) and not isinstance(bp, TypeVar): + raise TypeError( + "Cannot inherit from a generic class " + "parameterized with " + "non-type-variable %s" % bp) + if params is None: + params = [] + if bp not in params: + params.append(bp) + if params is not None: + parameters = tuple(params) + self = super().__new__(cls, name, bases, namespace, _root=True) + self.__parameters__ = parameters + if extra is not None: + self.__extra__ = extra + # Else __extra__ is inherited, eventually from the + # (meta-)class default above. + self.__origin__ = origin + return self + + def _has_type_var(self): + if self.__parameters__: + for t in self.__parameters__: + if _has_type_var(t): + return True + return False + + def __repr__(self): + r = super().__repr__() + if self.__parameters__ is not None: + r += '[%s]' % ( + ', '.join(_type_repr(p) for p in self.__parameters__)) + return r + + def __eq__(self, other): + if not isinstance(other, GenericMeta): + return NotImplemented + return (_geqv(self, other) and + self.__parameters__ == other.__parameters__) + + def __hash__(self): + return hash((self.__name__, self.__parameters__)) + + def __getitem__(self, params): + if not isinstance(params, tuple): + params = (params,) + if not params: + raise TypeError("Cannot have empty parameter list") + msg = "Parameters to generic types must be types." + params = tuple(_type_check(p, msg) for p in params) + if self.__parameters__ is None: + for p in params: + if not isinstance(p, TypeVar): + raise TypeError("Initial parameters must be " + "type variables; got %s" % p) + if len(set(params)) != len(params): + raise TypeError( + "All type variables in Generic[...] must be distinct.") + else: + if len(params) != len(self.__parameters__): + raise TypeError("Cannot change parameter count from %d to %d" % + (len(self.__parameters__), len(params))) + for new, old in zip(params, self.__parameters__): + if isinstance(old, TypeVar): + if not old.__constraints__: + # Substituting for an unconstrained TypeVar is OK. + continue + if issubclass(new, Union[old.__constraints__]): + # Specializing a constrained type variable is OK. 
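+                        # (e.g. substituting str for AnyStr, whose
+                        # constraints are (bytes, str))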
+ continue + if not issubclass(new, old): + raise TypeError( + "Cannot substitute %s for %s in %s" % + (_type_repr(new), _type_repr(old), self)) + + return self.__class__(self.__name__, (self,) + self.__bases__, + dict(self.__dict__), + parameters=params, + origin=self, + extra=self.__extra__) + + def __instancecheck__(self, instance): + # Since we extend ABC.__subclasscheck__ and + # ABC.__instancecheck__ inlines the cache checking done by the + # latter, we must extend __instancecheck__ too. For simplicity + # we just skip the cache check -- instance checks for generic + # classes are supposed to be rare anyways. + return self.__subclasscheck__(instance.__class__) + + def __subclasscheck__(self, cls): + if cls is Any: + return True + if isinstance(cls, GenericMeta): + # For a class C(Generic[T]) where T is co-variant, + # C[X] is a subclass of C[Y] iff X is a subclass of Y. + origin = self.__origin__ + if origin is not None and origin is cls.__origin__: + assert len(self.__parameters__) == len(origin.__parameters__) + assert len(cls.__parameters__) == len(origin.__parameters__) + for p_self, p_cls, p_origin in zip(self.__parameters__, + cls.__parameters__, + origin.__parameters__): + if isinstance(p_origin, TypeVar): + if p_origin.__covariant__: + # Covariant -- p_cls must be a subclass of p_self. + if not issubclass(p_cls, p_self): + break + elif p_origin.__contravariant__: + # Contravariant. I think it's the opposite. :-) + if not issubclass(p_self, p_cls): + break + else: + # Invariant -- p_cls and p_self must equal. + if p_self != p_cls: + break + else: + # If the origin's parameter is not a typevar, + # insist on invariance. + if p_self != p_cls: + break + else: + return True + # If we break out of the loop, the superclass gets a chance. + if super().__subclasscheck__(cls): + return True + if self.__extra__ is None or isinstance(cls, GenericMeta): + return False + return issubclass(cls, self.__extra__) + + +class Generic(metaclass=GenericMeta): + """Abstract base class for generic types. + + A generic type is typically declared by inheriting from an + instantiation of this class with one or more type variables. + For example, a generic mapping type might be defined as:: + + class Mapping(Generic[KT, VT]): + def __getitem__(self, key: KT) -> VT: + ... + # Etc. + + This class can then be used as follows:: + + def lookup_name(mapping: Mapping, key: KT, default: VT) -> VT: + try: + return mapping[key] + except KeyError: + return default + + For clarity the type variables may be redefined, e.g.:: + + X = TypeVar('X') + Y = TypeVar('Y') + def lookup_name(mapping: Mapping[X, Y], key: X, default: Y) -> Y: + # Same body as above. + """ + + __slots__ = () + + def __new__(cls, *args, **kwds): + next_in_mro = object + # Look for the last occurrence of Generic or Generic[...]. + for i, c in enumerate(cls.__mro__[:-1]): + if isinstance(c, GenericMeta) and _gorg(c) is Generic: + next_in_mro = cls.__mro__[i+1] + return next_in_mro.__new__(_gorg(cls)) + + +def cast(typ, val): + """Cast a value to a type. + + This returns the value unchanged. To the type checker this + signals that the return value has the designated type, but at + runtime we intentionally don't check anything (we want this + to be as fast as possible). 
+ """ + return val + + +def _get_defaults(func): + """Internal helper to extract the default arguments, by name.""" + code = func.__code__ + pos_count = code.co_argcount + kw_count = code.co_kwonlyargcount + arg_names = code.co_varnames + kwarg_names = arg_names[pos_count:pos_count + kw_count] + arg_names = arg_names[:pos_count] + defaults = func.__defaults__ or () + kwdefaults = func.__kwdefaults__ + res = dict(kwdefaults) if kwdefaults else {} + pos_offset = pos_count - len(defaults) + for name, value in zip(arg_names[pos_offset:], defaults): + assert name not in res + res[name] = value + return res + + +def get_type_hints(obj, globalns=None, localns=None): + """Return type hints for a function or method object. + + This is often the same as obj.__annotations__, but it handles + forward references encoded as string literals, and if necessary + adds Optional[t] if a default value equal to None is set. + + BEWARE -- the behavior of globalns and localns is counterintuitive + (unless you are familiar with how eval() and exec() work). The + search order is locals first, then globals. + + - If no dict arguments are passed, an attempt is made to use the + globals from obj, and these are also used as the locals. If the + object does not appear to have globals, an exception is raised. + + - If one dict argument is passed, it is used for both globals and + locals. + + - If two dict arguments are passed, they specify globals and + locals, respectively. + """ + if getattr(obj, '__no_type_check__', None): + return {} + if globalns is None: + globalns = getattr(obj, '__globals__', {}) + if localns is None: + localns = globalns + elif localns is None: + localns = globalns + defaults = _get_defaults(obj) + hints = dict(obj.__annotations__) + for name, value in hints.items(): + if isinstance(value, str): + value = _ForwardRef(value) + value = _eval_type(value, globalns, localns) + if name in defaults and defaults[name] is None: + value = Optional[value] + hints[name] = value + return hints + + +# TODO: Also support this as a class decorator. +def no_type_check(arg): + """Decorator to indicate that annotations are not type hints. + + The argument must be a class or function; if it is a class, it + applies recursively to all methods defined in that class (but not + to methods defined in its superclasses or subclasses). + + This mutates the function(s) in place. + """ + if isinstance(arg, type): + for obj in arg.__dict__.values(): + if isinstance(obj, types.FunctionType): + obj.__no_type_check__ = True + else: + arg.__no_type_check__ = True + return arg + + +def no_type_check_decorator(decorator): + """Decorator to give another decorator the @no_type_check effect. + + This wraps the decorator with something that wraps the decorated + function in @no_type_check. + """ + + @functools.wraps(decorator) + def wrapped_decorator(*args, **kwds): + func = decorator(*args, **kwds) + func = no_type_check(func) + return func + + return wrapped_decorator + + +def overload(func): + raise RuntimeError("Overloading is only supported in library stubs") + + +class _ProtocolMeta(GenericMeta): + """Internal metaclass for _Protocol. + + This exists so _Protocol classes can be generic without deriving + from Generic. + """ + + def __instancecheck__(self, obj): + raise TypeError("Protocols cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + if not self._is_protocol: + # No structural checks since this isn't a protocol. 
+ return NotImplemented + + if self is _Protocol: + # Every class is a subclass of the empty protocol. + return True + + # Find all attributes defined in the protocol. + attrs = self._get_protocol_attrs() + + for attr in attrs: + if not any(attr in d.__dict__ for d in cls.__mro__): + return False + return True + + def _get_protocol_attrs(self): + # Get all Protocol base classes. + protocol_bases = [] + for c in self.__mro__: + if getattr(c, '_is_protocol', False) and c.__name__ != '_Protocol': + protocol_bases.append(c) + + # Get attributes included in protocol. + attrs = set() + for base in protocol_bases: + for attr in base.__dict__.keys(): + # Include attributes not defined in any non-protocol bases. + for c in self.__mro__: + if (c is not base and attr in c.__dict__ and + not getattr(c, '_is_protocol', False)): + break + else: + if (not attr.startswith('_abc_') and + attr != '__abstractmethods__' and + attr != '_is_protocol' and + attr != '__dict__' and + attr != '__slots__' and + attr != '_get_protocol_attrs' and + attr != '__parameters__' and + attr != '__origin__' and + attr != '__module__'): + attrs.add(attr) + + return attrs + + +class _Protocol(metaclass=_ProtocolMeta): + """Internal base class for protocol classes. + + This implements a simple-minded structural isinstance check + (similar but more general than the one-offs in collections.abc + such as Hashable). + """ + + __slots__ = () + + _is_protocol = True + + +# Various ABCs mimicking those in collections.abc. +# A few are simply re-exported for completeness. + +Hashable = collections_abc.Hashable # Not generic. + + +class Iterable(Generic[T_co], extra=collections_abc.Iterable): + __slots__ = () + + +class Iterator(Iterable[T_co], extra=collections_abc.Iterator): + __slots__ = () + + +class SupportsInt(_Protocol): + __slots__ = () + + @abstractmethod + def __int__(self) -> int: + pass + + +class SupportsFloat(_Protocol): + __slots__ = () + + @abstractmethod + def __float__(self) -> float: + pass + + +class SupportsComplex(_Protocol): + __slots__ = () + + @abstractmethod + def __complex__(self) -> complex: + pass + + +class SupportsBytes(_Protocol): + __slots__ = () + + @abstractmethod + def __bytes__(self) -> bytes: + pass + + +class SupportsAbs(_Protocol[T_co]): + __slots__ = () + + @abstractmethod + def __abs__(self) -> T_co: + pass + + +class SupportsRound(_Protocol[T_co]): + __slots__ = () + + @abstractmethod + def __round__(self, ndigits: int = 0) -> T_co: + pass + + +class Reversible(_Protocol[T_co]): + __slots__ = () + + @abstractmethod + def __reversed__(self) -> 'Iterator[T_co]': + pass + + +Sized = collections_abc.Sized # Not generic. + + +class Container(Generic[T_co], extra=collections_abc.Container): + __slots__ = () + + +# Callable was defined earlier. + + +class AbstractSet(Sized, Iterable[T_co], Container[T_co], + extra=collections_abc.Set): + pass + + +class MutableSet(AbstractSet[T], extra=collections_abc.MutableSet): + pass + + +# NOTE: Only the value type is covariant. 
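+# For example, Mapping[str, Manager] is treated as a subclass of
+# Mapping[str, Employee] (reusing the Employee/Manager example from the
+# Union docstring above), while Mapping[Manager, int] is not a subclass
+# of Mapping[Employee, int] -- the key type stays invariant.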
+class Mapping(Sized, Iterable[KT], Container[KT], Generic[VT_co], + extra=collections_abc.Mapping): + pass + + +class MutableMapping(Mapping[KT, VT], extra=collections_abc.MutableMapping): + pass + + +class Sequence(Sized, Iterable[T_co], Container[T_co], + extra=collections_abc.Sequence): + pass + + +class MutableSequence(Sequence[T], extra=collections_abc.MutableSequence): + pass + + +class ByteString(Sequence[int], extra=collections_abc.ByteString): + pass + + +ByteString.register(type(memoryview(b''))) + + +class List(list, MutableSequence[T]): + + def __new__(cls, *args, **kwds): + if _geqv(cls, List): + raise TypeError("Type List cannot be instantiated; " + "use list() instead") + return list.__new__(cls, *args, **kwds) + + +class Set(set, MutableSet[T]): + + def __new__(cls, *args, **kwds): + if _geqv(cls, Set): + raise TypeError("Type Set cannot be instantiated; " + "use set() instead") + return set.__new__(cls, *args, **kwds) + + +class _FrozenSetMeta(GenericMeta): + """This metaclass ensures set is not a subclass of FrozenSet. + + Without this metaclass, set would be considered a subclass of + FrozenSet, because FrozenSet.__extra__ is collections.abc.Set, and + set is a subclass of that. + """ + + def __subclasscheck__(self, cls): + if issubclass(cls, Set): + return False + return super().__subclasscheck__(cls) + + +class FrozenSet(frozenset, AbstractSet[T_co], metaclass=_FrozenSetMeta): + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, FrozenSet): + raise TypeError("Type FrozenSet cannot be instantiated; " + "use frozenset() instead") + return frozenset.__new__(cls, *args, **kwds) + + +class MappingView(Sized, Iterable[T_co], extra=collections_abc.MappingView): + pass + + +class KeysView(MappingView[KT], AbstractSet[KT], + extra=collections_abc.KeysView): + pass + + +# TODO: Enable Set[Tuple[KT, VT_co]] instead of Generic[KT, VT_co]. +class ItemsView(MappingView, Generic[KT, VT_co], + extra=collections_abc.ItemsView): + pass + + +class ValuesView(MappingView[VT_co], extra=collections_abc.ValuesView): + pass + + +class Dict(dict, MutableMapping[KT, VT]): + + def __new__(cls, *args, **kwds): + if _geqv(cls, Dict): + raise TypeError("Type Dict cannot be instantiated; " + "use dict() instead") + return dict.__new__(cls, *args, **kwds) + + +# Determine what base class to use for Generator. +if hasattr(collections_abc, 'Generator'): + # Sufficiently recent versions of 3.5 have a Generator ABC. + _G_base = collections_abc.Generator +else: + # Fall back on the exact type. + _G_base = types.GeneratorType + + +class Generator(Iterator[T_co], Generic[T_co, T_contra, V_co], + extra=_G_base): + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, Generator): + raise TypeError("Type Generator cannot be instantiated; " + "create a subclass instead") + return super().__new__(cls, *args, **kwds) + + +def NamedTuple(typename, fields): + """Typed version of namedtuple. + + Usage:: + + Employee = typing.NamedTuple('Employee', [('name', str), ('id', int)]) + + This is equivalent to:: + + Employee = collections.namedtuple('Employee', ['name', 'id']) + + The resulting class has one extra attribute: _field_types, + giving a dict mapping field names to types. (The field names + are in the _fields attribute, which is part of the namedtuple + API.) + """ + fields = [(n, t) for n, t in fields] + cls = collections.namedtuple(typename, [n for n, t in fields]) + cls._field_types = dict(fields) + # Set the module to the caller's module (otherwise it'd be 'typing').
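+    # sys._getframe may be unavailable (non-CPython) or the stack too
+    # shallow, hence the guard below.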
+ try: + cls.__module__ = sys._getframe(1).f_globals.get('__name__', '__main__') + except (AttributeError, ValueError): + pass + return cls + + +class IO(Generic[AnyStr]): + """Generic base class for TextIO and BinaryIO. + + This is an abstract, generic version of the return of open(). + + NOTE: This does not distinguish between the different possible + classes (text vs. binary, read vs. write vs. read/write, + append-only, unbuffered). The TextIO and BinaryIO subclasses + below capture the distinctions between text vs. binary, which is + pervasive in the interface; however we currently do not offer a + way to track the other distinctions in the type system. + """ + + __slots__ = () + + @abstractproperty + def mode(self) -> str: + pass + + @abstractproperty + def name(self) -> str: + pass + + @abstractmethod + def close(self) -> None: + pass + + @abstractmethod + def closed(self) -> bool: + pass + + @abstractmethod + def fileno(self) -> int: + pass + + @abstractmethod + def flush(self) -> None: + pass + + @abstractmethod + def isatty(self) -> bool: + pass + + @abstractmethod + def read(self, n: int = -1) -> AnyStr: + pass + + @abstractmethod + def readable(self) -> bool: + pass + + @abstractmethod + def readline(self, limit: int = -1) -> AnyStr: + pass + + @abstractmethod + def readlines(self, hint: int = -1) -> List[AnyStr]: + pass + + @abstractmethod + def seek(self, offset: int, whence: int = 0) -> int: + pass + + @abstractmethod + def seekable(self) -> bool: + pass + + @abstractmethod + def tell(self) -> int: + pass + + @abstractmethod + def truncate(self, size: int = None) -> int: + pass + + @abstractmethod + def writable(self) -> bool: + pass + + @abstractmethod + def write(self, s: AnyStr) -> int: + pass + + @abstractmethod + def writelines(self, lines: List[AnyStr]) -> None: + pass + + @abstractmethod + def __enter__(self) -> 'IO[AnyStr]': + pass + + @abstractmethod + def __exit__(self, type, value, traceback) -> None: + pass + + +class BinaryIO(IO[bytes]): + """Typed version of the return of open() in binary mode.""" + + __slots__ = () + + @abstractmethod + def write(self, s: Union[bytes, bytearray]) -> int: + pass + + @abstractmethod + def __enter__(self) -> 'BinaryIO': + pass + + +class TextIO(IO[str]): + """Typed version of the return of open() in text mode.""" + + __slots__ = () + + @abstractproperty + def buffer(self) -> BinaryIO: + pass + + @abstractproperty + def encoding(self) -> str: + pass + + @abstractproperty + def errors(self) -> str: + pass + + @abstractproperty + def line_buffering(self) -> bool: + pass + + @abstractproperty + def newlines(self) -> Any: + pass + + @abstractmethod + def __enter__(self) -> 'TextIO': + pass + + +class io: + """Wrapper namespace for IO generic classes.""" + + __all__ = ['IO', 'TextIO', 'BinaryIO'] + IO = IO + TextIO = TextIO + BinaryIO = BinaryIO + +io.__name__ = __name__ + '.io' +sys.modules[io.__name__] = io + + +Pattern = _TypeAlias('Pattern', AnyStr, type(stdlib_re.compile('')), + lambda p: p.pattern) +Match = _TypeAlias('Match', AnyStr, type(stdlib_re.match('', '')), + lambda m: m.re.pattern) + + +class re: + """Wrapper namespace for re type aliases.""" + + __all__ = ['Pattern', 'Match'] + Pattern = Pattern + Match = Match + +re.__name__ = __name__ + '.re' +sys.modules[re.__name__] = re diff --git a/typing_extensions/test_data/python-3.5.2/_collections_abc.py b/typing_extensions/test_data/python-3.5.2/_collections_abc.py new file mode 100644 index 00000000..fc9c9f1c --- /dev/null +++ 
b/typing_extensions/test_data/python-3.5.2/_collections_abc.py @@ -0,0 +1,939 @@ +# Copyright 2007 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Abstract Base Classes (ABCs) for collections, according to PEP 3119. + +Unit tests are in test_collections. +""" + +from abc import ABCMeta, abstractmethod +import sys + +__all__ = ["Awaitable", "Coroutine", "AsyncIterable", "AsyncIterator", + "Hashable", "Iterable", "Iterator", "Generator", + "Sized", "Container", "Callable", + "Set", "MutableSet", + "Mapping", "MutableMapping", + "MappingView", "KeysView", "ItemsView", "ValuesView", + "Sequence", "MutableSequence", + "ByteString", + ] + +# This module has been renamed from collections.abc to _collections_abc to +# speed up interpreter startup. Some of the types such as MutableMapping are +# required early but collections module imports a lot of other modules. +# See issue #19218 +__name__ = "collections.abc" + +# Private list of types that we want to register with the various ABCs +# so that they will pass tests like: +# it = iter(somebytearray) +# assert isinstance(it, Iterable) +# Note: in other implementations, these types may not be distinct +# and they may have their own implementation specific types that +# are not included on this list. +bytes_iterator = type(iter(b'')) +bytearray_iterator = type(iter(bytearray())) +#callable_iterator = ??? +dict_keyiterator = type(iter({}.keys())) +dict_valueiterator = type(iter({}.values())) +dict_itemiterator = type(iter({}.items())) +list_iterator = type(iter([])) +list_reverseiterator = type(iter(reversed([]))) +range_iterator = type(iter(range(0))) +set_iterator = type(iter(set())) +str_iterator = type(iter("")) +tuple_iterator = type(iter(())) +zip_iterator = type(iter(zip())) +## views ## +dict_keys = type({}.keys()) +dict_values = type({}.values()) +dict_items = type({}.items()) +## misc ## +mappingproxy = type(type.__dict__) +generator = type((lambda: (yield))()) +## coroutine ## +async def _coro(): pass +_coro = _coro() +coroutine = type(_coro) +_coro.close() # Prevent ResourceWarning +del _coro + + +### ONE-TRICK PONIES ### + +class Hashable(metaclass=ABCMeta): + + __slots__ = () + + @abstractmethod + def __hash__(self): + return 0 + + @classmethod + def __subclasshook__(cls, C): + if cls is Hashable: + for B in C.__mro__: + if "__hash__" in B.__dict__: + if B.__dict__["__hash__"]: + return True + break + return NotImplemented + + +class Awaitable(metaclass=ABCMeta): + + __slots__ = () + + @abstractmethod + def __await__(self): + yield + + @classmethod + def __subclasshook__(cls, C): + if cls is Awaitable: + for B in C.__mro__: + if "__await__" in B.__dict__: + if B.__dict__["__await__"]: + return True + break + return NotImplemented + + +class Coroutine(Awaitable): + + __slots__ = () + + @abstractmethod + def send(self, value): + """Send a value into the coroutine. + Return next yielded value or raise StopIteration. + """ + raise StopIteration + + @abstractmethod + def throw(self, typ, val=None, tb=None): + """Raise an exception in the coroutine. + Return next yielded value or raise StopIteration. + """ + if val is None: + if tb is None: + raise typ + val = typ() + if tb is not None: + val = val.with_traceback(tb) + raise val + + def close(self): + """Raise GeneratorExit inside coroutine.
+ """ + try: + self.throw(GeneratorExit) + except (GeneratorExit, StopIteration): + pass + else: + raise RuntimeError("coroutine ignored GeneratorExit") + + @classmethod + def __subclasshook__(cls, C): + if cls is Coroutine: + mro = C.__mro__ + for method in ('__await__', 'send', 'throw', 'close'): + for base in mro: + if method in base.__dict__: + break + else: + return NotImplemented + return True + return NotImplemented + + +Coroutine.register(coroutine) + + +class AsyncIterable(metaclass=ABCMeta): + + __slots__ = () + + @abstractmethod + def __aiter__(self): + return AsyncIterator() + + @classmethod + def __subclasshook__(cls, C): + if cls is AsyncIterable: + if any("__aiter__" in B.__dict__ for B in C.__mro__): + return True + return NotImplemented + + +class AsyncIterator(AsyncIterable): + + __slots__ = () + + @abstractmethod + async def __anext__(self): + """Return the next item or raise StopAsyncIteration when exhausted.""" + raise StopAsyncIteration + + def __aiter__(self): + return self + + @classmethod + def __subclasshook__(cls, C): + if cls is AsyncIterator: + if (any("__anext__" in B.__dict__ for B in C.__mro__) and + any("__aiter__" in B.__dict__ for B in C.__mro__)): + return True + return NotImplemented + + +class Iterable(metaclass=ABCMeta): + + __slots__ = () + + @abstractmethod + def __iter__(self): + while False: + yield None + + @classmethod + def __subclasshook__(cls, C): + if cls is Iterable: + if any("__iter__" in B.__dict__ for B in C.__mro__): + return True + return NotImplemented + + +class Iterator(Iterable): + + __slots__ = () + + @abstractmethod + def __next__(self): + 'Return the next item from the iterator. When exhausted, raise StopIteration' + raise StopIteration + + def __iter__(self): + return self + + @classmethod + def __subclasshook__(cls, C): + if cls is Iterator: + if (any("__next__" in B.__dict__ for B in C.__mro__) and + any("__iter__" in B.__dict__ for B in C.__mro__)): + return True + return NotImplemented + +Iterator.register(bytes_iterator) +Iterator.register(bytearray_iterator) +#Iterator.register(callable_iterator) +Iterator.register(dict_keyiterator) +Iterator.register(dict_valueiterator) +Iterator.register(dict_itemiterator) +Iterator.register(list_iterator) +Iterator.register(list_reverseiterator) +Iterator.register(range_iterator) +Iterator.register(set_iterator) +Iterator.register(str_iterator) +Iterator.register(tuple_iterator) +Iterator.register(zip_iterator) + + +class Generator(Iterator): + + __slots__ = () + + def __next__(self): + """Return the next item from the generator. + When exhausted, raise StopIteration. + """ + return self.send(None) + + @abstractmethod + def send(self, value): + """Send a value into the generator. + Return next yielded value or raise StopIteration. + """ + raise StopIteration + + @abstractmethod + def throw(self, typ, val=None, tb=None): + """Raise an exception in the generator. + Return next yielded value or raise StopIteration. + """ + if val is None: + if tb is None: + raise typ + val = typ() + if tb is not None: + val = val.with_traceback(tb) + raise val + + def close(self): + """Raise GeneratorExit inside generator. 
+ """ + try: + self.throw(GeneratorExit) + except (GeneratorExit, StopIteration): + pass + else: + raise RuntimeError("generator ignored GeneratorExit") + + @classmethod + def __subclasshook__(cls, C): + if cls is Generator: + mro = C.__mro__ + for method in ('__iter__', '__next__', 'send', 'throw', 'close'): + for base in mro: + if method in base.__dict__: + break + else: + return NotImplemented + return True + return NotImplemented + + +Generator.register(generator) + + +class Sized(metaclass=ABCMeta): + + __slots__ = () + + @abstractmethod + def __len__(self): + return 0 + + @classmethod + def __subclasshook__(cls, C): + if cls is Sized: + if any("__len__" in B.__dict__ for B in C.__mro__): + return True + return NotImplemented + + +class Container(metaclass=ABCMeta): + + __slots__ = () + + @abstractmethod + def __contains__(self, x): + return False + + @classmethod + def __subclasshook__(cls, C): + if cls is Container: + if any("__contains__" in B.__dict__ for B in C.__mro__): + return True + return NotImplemented + + +class Callable(metaclass=ABCMeta): + + __slots__ = () + + @abstractmethod + def __call__(self, *args, **kwds): + return False + + @classmethod + def __subclasshook__(cls, C): + if cls is Callable: + if any("__call__" in B.__dict__ for B in C.__mro__): + return True + return NotImplemented + + +### SETS ### + + +class Set(Sized, Iterable, Container): + + """A set is a finite, iterable container. + + This class provides concrete generic implementations of all + methods except for __contains__, __iter__ and __len__. + + To override the comparisons (presumably for speed, as the + semantics are fixed), redefine __le__ and __ge__, + then the other operations will automatically follow suit. + """ + + __slots__ = () + + def __le__(self, other): + if not isinstance(other, Set): + return NotImplemented + if len(self) > len(other): + return False + for elem in self: + if elem not in other: + return False + return True + + def __lt__(self, other): + if not isinstance(other, Set): + return NotImplemented + return len(self) < len(other) and self.__le__(other) + + def __gt__(self, other): + if not isinstance(other, Set): + return NotImplemented + return len(self) > len(other) and self.__ge__(other) + + def __ge__(self, other): + if not isinstance(other, Set): + return NotImplemented + if len(self) < len(other): + return False + for elem in other: + if elem not in self: + return False + return True + + def __eq__(self, other): + if not isinstance(other, Set): + return NotImplemented + return len(self) == len(other) and self.__le__(other) + + @classmethod + def _from_iterable(cls, it): + '''Construct an instance of the class from any iterable input. + + Must override this method if the class constructor signature + does not accept an iterable for an input. + ''' + return cls(it) + + def __and__(self, other): + if not isinstance(other, Iterable): + return NotImplemented + return self._from_iterable(value for value in other if value in self) + + __rand__ = __and__ + + def isdisjoint(self, other): + 'Return True if two sets have a null intersection.' 
+ for value in other: + if value in self: + return False + return True + + def __or__(self, other): + if not isinstance(other, Iterable): + return NotImplemented + chain = (e for s in (self, other) for e in s) + return self._from_iterable(chain) + + __ror__ = __or__ + + def __sub__(self, other): + if not isinstance(other, Set): + if not isinstance(other, Iterable): + return NotImplemented + other = self._from_iterable(other) + return self._from_iterable(value for value in self + if value not in other) + + def __rsub__(self, other): + if not isinstance(other, Set): + if not isinstance(other, Iterable): + return NotImplemented + other = self._from_iterable(other) + return self._from_iterable(value for value in other + if value not in self) + + def __xor__(self, other): + if not isinstance(other, Set): + if not isinstance(other, Iterable): + return NotImplemented + other = self._from_iterable(other) + return (self - other) | (other - self) + + __rxor__ = __xor__ + + def _hash(self): + """Compute the hash value of a set. + + Note that we don't define __hash__: not all sets are hashable. + But if you define a hashable set type, its __hash__ should + call this function. + + This must be compatible with __eq__. + + All sets ought to compare equal if they contain the same + elements, regardless of how they are implemented, and + regardless of the order of the elements; so there's not much + freedom for __eq__ or __hash__. We match the algorithm used + by the built-in frozenset type. + """ + MAX = sys.maxsize + MASK = 2 * MAX + 1 + n = len(self) + h = 1927868237 * (n + 1) + h &= MASK + for x in self: + hx = hash(x) + h ^= (hx ^ (hx << 16) ^ 89869747) * 3644798167 + h &= MASK + h = h * 69069 + 907133923 + h &= MASK + if h > MAX: + h -= MASK + 1 + if h == -1: + h = 590923713 + return h + +Set.register(frozenset) + + +class MutableSet(Set): + """A mutable set is a finite, iterable container. + + This class provides concrete generic implementations of all + methods except for __contains__, __iter__, __len__, + add(), and discard(). + + To override the comparisons (presumably for speed, as the + semantics are fixed), all you have to do is redefine __le__ and + then the other operations will automatically follow suit. + """ + + __slots__ = () + + @abstractmethod + def add(self, value): + """Add an element.""" + raise NotImplementedError + + @abstractmethod + def discard(self, value): + """Remove an element.  Do not raise an exception if absent.""" + raise NotImplementedError + + def remove(self, value): + """Remove an element. If not a member, raise a KeyError.""" + if value not in self: + raise KeyError(value) + self.discard(value) + + def pop(self): + """Return the popped value.  Raise KeyError if empty.""" + it = iter(self) + try: + value = next(it) + except StopIteration: + raise KeyError + self.discard(value) + return value + + def clear(self): + """This is slow (creates N new iterators!)
but effective.""" + try: + while True: + self.pop() + except KeyError: + pass + + def __ior__(self, it): + for value in it: + self.add(value) + return self + + def __iand__(self, it): + for value in (self - it): + self.discard(value) + return self + + def __ixor__(self, it): + if it is self: + self.clear() + else: + if not isinstance(it, Set): + it = self._from_iterable(it) + for value in it: + if value in self: + self.discard(value) + else: + self.add(value) + return self + + def __isub__(self, it): + if it is self: + self.clear() + else: + for value in it: + self.discard(value) + return self + +MutableSet.register(set) + + +### MAPPINGS ### + + +class Mapping(Sized, Iterable, Container): + + __slots__ = () + + """A Mapping is a generic container for associating key/value + pairs. + + This class provides concrete generic implementations of all + methods except for __getitem__, __iter__, and __len__. + + """ + + @abstractmethod + def __getitem__(self, key): + raise KeyError + + def get(self, key, default=None): + 'D.get(k[,d]) -> D[k] if k in D, else d. d defaults to None.' + try: + return self[key] + except KeyError: + return default + + def __contains__(self, key): + try: + self[key] + except KeyError: + return False + else: + return True + + def keys(self): + "D.keys() -> a set-like object providing a view on D's keys" + return KeysView(self) + + def items(self): + "D.items() -> a set-like object providing a view on D's items" + return ItemsView(self) + + def values(self): + "D.values() -> an object providing a view on D's values" + return ValuesView(self) + + def __eq__(self, other): + if not isinstance(other, Mapping): + return NotImplemented + return dict(self.items()) == dict(other.items()) + +Mapping.register(mappingproxy) + + +class MappingView(Sized): + + __slots__ = '_mapping', + + def __init__(self, mapping): + self._mapping = mapping + + def __len__(self): + return len(self._mapping) + + def __repr__(self): + return '{0.__class__.__name__}({0._mapping!r})'.format(self) + + +class KeysView(MappingView, Set): + + __slots__ = () + + @classmethod + def _from_iterable(self, it): + return set(it) + + def __contains__(self, key): + return key in self._mapping + + def __iter__(self): + yield from self._mapping + +KeysView.register(dict_keys) + + +class ItemsView(MappingView, Set): + + __slots__ = () + + @classmethod + def _from_iterable(self, it): + return set(it) + + def __contains__(self, item): + key, value = item + try: + v = self._mapping[key] + except KeyError: + return False + else: + return v == value + + def __iter__(self): + for key in self._mapping: + yield (key, self._mapping[key]) + +ItemsView.register(dict_items) + + +class ValuesView(MappingView): + + __slots__ = () + + def __contains__(self, value): + for key in self._mapping: + if value == self._mapping[key]: + return True + return False + + def __iter__(self): + for key in self._mapping: + yield self._mapping[key] + +ValuesView.register(dict_values) + + +class MutableMapping(Mapping): + + __slots__ = () + + """A MutableMapping is a generic container for associating + key/value pairs. + + This class provides concrete generic implementations of all + methods except for __getitem__, __setitem__, __delitem__, + __iter__, and __len__. 
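+
+    A minimal concrete subclass (illustrative only; 'DictBacked' is not
+    part of this module) need only supply the abstract methods, for
+    example by delegating to a plain dict:
+
+        class DictBacked(MutableMapping):
+            def __init__(self): self._data = {}
+            def __getitem__(self, key): return self._data[key]
+            def __setitem__(self, key, value): self._data[key] = value
+            def __delitem__(self, key): del self._data[key]
+            def __iter__(self): return iter(self._data)
+            def __len__(self): return len(self._data)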
+ + """ + + @abstractmethod + def __setitem__(self, key, value): + raise KeyError + + @abstractmethod + def __delitem__(self, key): + raise KeyError + + __marker = object() + + def pop(self, key, default=__marker): + '''D.pop(k[,d]) -> v, remove specified key and return the corresponding value. + If key is not found, d is returned if given, otherwise KeyError is raised. + ''' + try: + value = self[key] + except KeyError: + if default is self.__marker: + raise + return default + else: + del self[key] + return value + + def popitem(self): + '''D.popitem() -> (k, v), remove and return some (key, value) pair + as a 2-tuple; but raise KeyError if D is empty. + ''' + try: + key = next(iter(self)) + except StopIteration: + raise KeyError + value = self[key] + del self[key] + return key, value + + def clear(self): + 'D.clear() -> None. Remove all items from D.' + try: + while True: + self.popitem() + except KeyError: + pass + + def update(*args, **kwds): + ''' D.update([E, ]**F) -> None. Update D from mapping/iterable E and F. + If E present and has a .keys() method, does: for k in E: D[k] = E[k] + If E present and lacks .keys() method, does: for (k, v) in E: D[k] = v + In either case, this is followed by: for k, v in F.items(): D[k] = v + ''' + if not args: + raise TypeError("descriptor 'update' of 'MutableMapping' object " + "needs an argument") + self, *args = args + if len(args) > 1: + raise TypeError('update expected at most 1 arguments, got %d' % + len(args)) + if args: + other = args[0] + if isinstance(other, Mapping): + for key in other: + self[key] = other[key] + elif hasattr(other, "keys"): + for key in other.keys(): + self[key] = other[key] + else: + for key, value in other: + self[key] = value + for key, value in kwds.items(): + self[key] = value + + def setdefault(self, key, default=None): + 'D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D' + try: + return self[key] + except KeyError: + self[key] = default + return default + +MutableMapping.register(dict) + + +### SEQUENCES ### + + +class Sequence(Sized, Iterable, Container): + + """All the operations on a read-only sequence. + + Concrete subclasses must override __new__ or __init__, + __getitem__, and __len__. + """ + + __slots__ = () + + @abstractmethod + def __getitem__(self, index): + raise IndexError + + def __iter__(self): + i = 0 + try: + while True: + v = self[i] + yield v + i += 1 + except IndexError: + return + + def __contains__(self, value): + for v in self: + if v == value: + return True + return False + + def __reversed__(self): + for i in reversed(range(len(self))): + yield self[i] + + def index(self, value, start=0, stop=None): + '''S.index(value, [start, [stop]]) -> integer -- return first index of value. + Raises ValueError if the value is not present. + ''' + if start is not None and start < 0: + start = max(len(self) + start, 0) + if stop is not None and stop < 0: + stop += len(self) + + i = start + while stop is None or i < stop: + try: + if self[i] == value: + return i + except IndexError: + break + i += 1 + raise ValueError + + def count(self, value): + 'S.count(value) -> integer -- return number of occurrences of value' + return sum(1 for v in self if v == value) + +Sequence.register(tuple) +Sequence.register(str) +Sequence.register(range) +Sequence.register(memoryview) + + +class ByteString(Sequence): + + """This unifies bytes and bytearray. + + XXX Should add all their methods. 
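+
+    Both built-in byte types are registered below, so for example
+    isinstance(b'ab', ByteString) and isinstance(bytearray(b'ab'),
+    ByteString) are both true.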
+ """ + + __slots__ = () + +ByteString.register(bytes) +ByteString.register(bytearray) + + +class MutableSequence(Sequence): + + __slots__ = () + + """All the operations on a read-write sequence. + + Concrete subclasses must provide __new__ or __init__, + __getitem__, __setitem__, __delitem__, __len__, and insert(). + + """ + + @abstractmethod + def __setitem__(self, index, value): + raise IndexError + + @abstractmethod + def __delitem__(self, index): + raise IndexError + + @abstractmethod + def insert(self, index, value): + 'S.insert(index, value) -- insert value before index' + raise IndexError + + def append(self, value): + 'S.append(value) -- append value to the end of the sequence' + self.insert(len(self), value) + + def clear(self): + 'S.clear() -> None -- remove all items from S' + try: + while True: + self.pop() + except IndexError: + pass + + def reverse(self): + 'S.reverse() -- reverse *IN PLACE*' + n = len(self) + for i in range(n//2): + self[i], self[n-i-1] = self[n-i-1], self[i] + + def extend(self, values): + 'S.extend(iterable) -- extend sequence by appending elements from the iterable' + for v in values: + self.append(v) + + def pop(self, index=-1): + '''S.pop([index]) -> item -- remove and return item at index (default last). + Raise IndexError if list is empty or index is out of range. + ''' + v = self[index] + del self[index] + return v + + def remove(self, value): + '''S.remove(value) -- remove first occurrence of value. + Raise ValueError if the value is not present. + ''' + del self[self.index(value)] + + def __iadd__(self, values): + self.extend(values) + return self + +MutableSequence.register(list) +MutableSequence.register(bytearray) # Multiply inheriting, see ByteString diff --git a/typing_extensions/test_data/python-3.5.2/abc.py b/typing_extensions/test_data/python-3.5.2/abc.py new file mode 100644 index 00000000..1cbf96a6 --- /dev/null +++ b/typing_extensions/test_data/python-3.5.2/abc.py @@ -0,0 +1,248 @@ +# Copyright 2007 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Abstract Base Classes (ABCs) according to PEP 3119.""" + +from _weakrefset import WeakSet + + +def abstractmethod(funcobj): + """A decorator indicating abstract methods. + + Requires that the metaclass is ABCMeta or derived from it. A + class that has a metaclass derived from ABCMeta cannot be + instantiated unless all of its abstract methods are overridden. + The abstract methods can be called using any of the normal + 'super' call mechanisms. + + Usage: + + class C(metaclass=ABCMeta): + @abstractmethod + def my_abstract_method(self, ...): + ... + """ + funcobj.__isabstractmethod__ = True + return funcobj + + +class abstractclassmethod(classmethod): + """ + A decorator indicating abstract classmethods. + + Similar to abstractmethod. + + Usage: + + class C(metaclass=ABCMeta): + @abstractclassmethod + def my_abstract_classmethod(cls, ...): + ... + + 'abstractclassmethod' is deprecated. Use 'classmethod' with + 'abstractmethod' instead. + """ + + __isabstractmethod__ = True + + def __init__(self, callable): + callable.__isabstractmethod__ = True + super().__init__(callable) + + +class abstractstaticmethod(staticmethod): + """ + A decorator indicating abstract staticmethods. + + Similar to abstractmethod. + + Usage: + + class C(metaclass=ABCMeta): + @abstractstaticmethod + def my_abstract_staticmethod(...): + ... + + 'abstractstaticmethod' is deprecated. Use 'staticmethod' with + 'abstractmethod' instead. 
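+
+    The recommended replacement looks like:
+
+        class C(metaclass=ABCMeta):
+            @staticmethod
+            @abstractmethod
+            def my_abstract_staticmethod(...):
+                ...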
+ """ + + __isabstractmethod__ = True + + def __init__(self, callable): + callable.__isabstractmethod__ = True + super().__init__(callable) + + +class abstractproperty(property): + """ + A decorator indicating abstract properties. + + Requires that the metaclass is ABCMeta or derived from it. A + class that has a metaclass derived from ABCMeta cannot be + instantiated unless all of its abstract properties are overridden. + The abstract properties can be called using any of the normal + 'super' call mechanisms. + + Usage: + + class C(metaclass=ABCMeta): + @abstractproperty + def my_abstract_property(self): + ... + + This defines a read-only property; you can also define a read-write + abstract property using the 'long' form of property declaration: + + class C(metaclass=ABCMeta): + def getx(self): ... + def setx(self, value): ... + x = abstractproperty(getx, setx) + + 'abstractproperty' is deprecated. Use 'property' with 'abstractmethod' + instead. + """ + + __isabstractmethod__ = True + + +class ABCMeta(type): + + """Metaclass for defining Abstract Base Classes (ABCs). + + Use this metaclass to create an ABC. An ABC can be subclassed + directly, and then acts as a mix-in class. You can also register + unrelated concrete classes (even built-in classes) and unrelated + ABCs as 'virtual subclasses' -- these and their descendants will + be considered subclasses of the registering ABC by the built-in + issubclass() function, but the registering ABC won't show up in + their MRO (Method Resolution Order) nor will method + implementations defined by the registering ABC be callable (not + even via super()). + + """ + + # A global counter that is incremented each time a class is + # registered as a virtual subclass of anything. It forces the + # negative cache to be cleared before its next use. + # Note: this counter is private. Use `abc.get_cache_token()` for + # external code. + _abc_invalidation_counter = 0 + + def __new__(mcls, name, bases, namespace): + cls = super().__new__(mcls, name, bases, namespace) + # Compute set of abstract method names + abstracts = {name + for name, value in namespace.items() + if getattr(value, "__isabstractmethod__", False)} + for base in bases: + for name in getattr(base, "__abstractmethods__", set()): + value = getattr(cls, name, None) + if getattr(value, "__isabstractmethod__", False): + abstracts.add(name) + cls.__abstractmethods__ = frozenset(abstracts) + # Set up inheritance registry + cls._abc_registry = WeakSet() + cls._abc_cache = WeakSet() + cls._abc_negative_cache = WeakSet() + cls._abc_negative_cache_version = ABCMeta._abc_invalidation_counter + return cls + + def register(cls, subclass): + """Register a virtual subclass of an ABC. + + Returns the subclass, to allow usage as a class decorator. + """ + if not isinstance(subclass, type): + raise TypeError("Can only register classes") + if issubclass(subclass, cls): + return subclass # Already a subclass + # Subtle: test for cycles *after* testing for "already a subclass"; + # this means we allow X.register(X) and interpret it as a no-op. 
+ if issubclass(cls, subclass): + # This would create a cycle, which is bad for the algorithm below + raise RuntimeError("Refusing to create an inheritance cycle") + cls._abc_registry.add(subclass) + ABCMeta._abc_invalidation_counter += 1 # Invalidate negative cache + return subclass + + def _dump_registry(cls, file=None): + """Debug helper to print the ABC registry.""" + print("Class: %s.%s" % (cls.__module__, cls.__qualname__), file=file) + print("Inv.counter: %s" % ABCMeta._abc_invalidation_counter, file=file) + for name in sorted(cls.__dict__.keys()): + if name.startswith("_abc_"): + value = getattr(cls, name) + print("%s: %r" % (name, value), file=file) + + def __instancecheck__(cls, instance): + """Override for isinstance(instance, cls).""" + # Inline the cache checking + subclass = instance.__class__ + if subclass in cls._abc_cache: + return True + subtype = type(instance) + if subtype is subclass: + if (cls._abc_negative_cache_version == + ABCMeta._abc_invalidation_counter and + subclass in cls._abc_negative_cache): + return False + # Fall back to the subclass check. + return cls.__subclasscheck__(subclass) + return any(cls.__subclasscheck__(c) for c in {subclass, subtype}) + + def __subclasscheck__(cls, subclass): + """Override for issubclass(subclass, cls).""" + # Check cache + if subclass in cls._abc_cache: + return True + # Check negative cache; may have to invalidate + if cls._abc_negative_cache_version < ABCMeta._abc_invalidation_counter: + # Invalidate the negative cache + cls._abc_negative_cache = WeakSet() + cls._abc_negative_cache_version = ABCMeta._abc_invalidation_counter + elif subclass in cls._abc_negative_cache: + return False + # Check the subclass hook + ok = cls.__subclasshook__(subclass) + if ok is not NotImplemented: + assert isinstance(ok, bool) + if ok: + cls._abc_cache.add(subclass) + else: + cls._abc_negative_cache.add(subclass) + return ok + # Check if it's a direct subclass + if cls in getattr(subclass, '__mro__', ()): + cls._abc_cache.add(subclass) + return True + # Check if it's a subclass of a registered class (recursive) + for rcls in cls._abc_registry: + if issubclass(subclass, rcls): + cls._abc_cache.add(subclass) + return True + # Check if it's a subclass of a subclass (recursive) + for scls in cls.__subclasses__(): + if issubclass(subclass, scls): + cls._abc_cache.add(subclass) + return True + # No dice; update negative cache + cls._abc_negative_cache.add(subclass) + return False + + +class ABC(metaclass=ABCMeta): + """Helper class that provides a standard way to create an ABC using + inheritance. + """ + pass + + +def get_cache_token(): + """Returns the current ABC cache token. + + The token is an opaque object (supporting equality testing) identifying the + current version of the ABC cache for virtual subclasses. The token changes + with every call to ``register()`` on any ABC. + """ + return ABCMeta._abc_invalidation_counter diff --git a/typing_extensions/test_data/python-3.5.2/collections/__init__.py b/typing_extensions/test_data/python-3.5.2/collections/__init__.py new file mode 100644 index 00000000..ebe8ee7a --- /dev/null +++ b/typing_extensions/test_data/python-3.5.2/collections/__init__.py @@ -0,0 +1,1239 @@ +'''This module implements specialized container datatypes providing +alternatives to Python's general purpose built-in containers, dict, +list, set, and tuple. 
+ +* namedtuple factory function for creating tuple subclasses with named fields +* deque list-like container with fast appends and pops on either end +* ChainMap dict-like class for creating a single view of multiple mappings +* Counter dict subclass for counting hashable objects +* OrderedDict dict subclass that remembers the order entries were added +* defaultdict dict subclass that calls a factory function to supply missing values +* UserDict wrapper around dictionary objects for easier dict subclassing +* UserList wrapper around list objects for easier list subclassing +* UserString wrapper around string objects for easier string subclassing + +''' + +__all__ = ['deque', 'defaultdict', 'namedtuple', 'UserDict', 'UserList', + 'UserString', 'Counter', 'OrderedDict', 'ChainMap'] + +# For backwards compatibility, continue to make the collections ABCs +# available through the collections module. +from _collections_abc import * +import _collections_abc +__all__ += _collections_abc.__all__ + +from operator import itemgetter as _itemgetter, eq as _eq +from keyword import iskeyword as _iskeyword +import sys as _sys +import heapq as _heapq +from _weakref import proxy as _proxy +from itertools import repeat as _repeat, chain as _chain, starmap as _starmap +from reprlib import recursive_repr as _recursive_repr + +try: + from _collections import deque +except ImportError: + pass +else: + MutableSequence.register(deque) + +try: + from _collections import defaultdict +except ImportError: + pass + + +################################################################################ +### OrderedDict +################################################################################ + +class _OrderedDictKeysView(KeysView): + + def __reversed__(self): + yield from reversed(self._mapping) + +class _OrderedDictItemsView(ItemsView): + + def __reversed__(self): + for key in reversed(self._mapping): + yield (key, self._mapping[key]) + +class _OrderedDictValuesView(ValuesView): + + def __reversed__(self): + for key in reversed(self._mapping): + yield self._mapping[key] + +class _Link(object): + __slots__ = 'prev', 'next', 'key', '__weakref__' + +class OrderedDict(dict): + 'Dictionary that remembers insertion order' + # An inherited dict maps keys to values. + # The inherited dict provides __getitem__, __len__, __contains__, and get. + # The remaining methods are order-aware. + # Big-O running times for all methods are the same as regular dictionaries. + + # The internal self.__map dict maps keys to links in a doubly linked list. + # The circular doubly linked list starts and ends with a sentinel element. + # The sentinel element never gets deleted (this simplifies the algorithm). + # The sentinel is in self.__hardroot with a weakref proxy in self.__root. + # The prev links are weakref proxies (to prevent circular references). + # Individual links are kept alive by the hard reference in self.__map. + # Those hard references disappear when a key is deleted from an OrderedDict. + + def __init__(*args, **kwds): + '''Initialize an ordered dictionary. The signature is the same as + regular dictionaries, but keyword arguments are not recommended because + their insertion order is arbitrary. 
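+
+        For example:
+
+        >>> d = OrderedDict([('a', 1), ('b', 2), ('c', 3)])
+        >>> list(d.keys())
+        ['a', 'b', 'c']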
+ + ''' + if not args: + raise TypeError("descriptor '__init__' of 'OrderedDict' object " + "needs an argument") + self, *args = args + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + try: + self.__root + except AttributeError: + self.__hardroot = _Link() + self.__root = root = _proxy(self.__hardroot) + root.prev = root.next = root + self.__map = {} + self.__update(*args, **kwds) + + def __setitem__(self, key, value, + dict_setitem=dict.__setitem__, proxy=_proxy, Link=_Link): + 'od.__setitem__(i, y) <==> od[i]=y' + # Setting a new item creates a new link at the end of the linked list, + # and the inherited dictionary is updated with the new key/value pair. + if key not in self: + self.__map[key] = link = Link() + root = self.__root + last = root.prev + link.prev, link.next, link.key = last, root, key + last.next = link + root.prev = proxy(link) + dict_setitem(self, key, value) + + def __delitem__(self, key, dict_delitem=dict.__delitem__): + 'od.__delitem__(y) <==> del od[y]' + # Deleting an existing item uses self.__map to find the link which gets + # removed by updating the links in the predecessor and successor nodes. + dict_delitem(self, key) + link = self.__map.pop(key) + link_prev = link.prev + link_next = link.next + link_prev.next = link_next + link_next.prev = link_prev + link.prev = None + link.next = None + + def __iter__(self): + 'od.__iter__() <==> iter(od)' + # Traverse the linked list in order. + root = self.__root + curr = root.next + while curr is not root: + yield curr.key + curr = curr.next + + def __reversed__(self): + 'od.__reversed__() <==> reversed(od)' + # Traverse the linked list in reverse order. + root = self.__root + curr = root.prev + while curr is not root: + yield curr.key + curr = curr.prev + + def clear(self): + 'od.clear() -> None. Remove all items from od.' + root = self.__root + root.prev = root.next = root + self.__map.clear() + dict.clear(self) + + def popitem(self, last=True): + '''od.popitem() -> (k, v), return and remove a (key, value) pair. + Pairs are returned in LIFO order if last is true or FIFO order if false. + + ''' + if not self: + raise KeyError('dictionary is empty') + root = self.__root + if last: + link = root.prev + link_prev = link.prev + link_prev.next = root + root.prev = link_prev + else: + link = root.next + link_next = link.next + root.next = link_next + link_next.prev = root + key = link.key + del self.__map[key] + value = dict.pop(self, key) + return key, value + + def move_to_end(self, key, last=True): + '''Move an existing element to the end (or beginning if last==False). + + Raises KeyError if the element does not exist. + When last=True, acts like a fast version of self[key]=self.pop(key). 
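+
+        For example:
+
+        >>> d = OrderedDict.fromkeys('abcde')
+        >>> d.move_to_end('b')
+        >>> ''.join(d.keys())
+        'acdeb'
+        >>> d.move_to_end('b', last=False)
+        >>> ''.join(d.keys())
+        'bacde'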
+ + ''' + link = self.__map[key] + link_prev = link.prev + link_next = link.next + link_prev.next = link_next + link_next.prev = link_prev + root = self.__root + if last: + last = root.prev + link.prev = last + link.next = root + last.next = root.prev = link + else: + first = root.next + link.prev = root + link.next = first + root.next = first.prev = link + + def __sizeof__(self): + sizeof = _sys.getsizeof + n = len(self) + 1 # number of links including root + size = sizeof(self.__dict__) # instance dictionary + size += sizeof(self.__map) * 2 # internal dict and inherited dict + size += sizeof(self.__hardroot) * n # link objects + size += sizeof(self.__root) * n # proxy objects + return size + + update = __update = MutableMapping.update + + def keys(self): + "D.keys() -> a set-like object providing a view on D's keys" + return _OrderedDictKeysView(self) + + def items(self): + "D.items() -> a set-like object providing a view on D's items" + return _OrderedDictItemsView(self) + + def values(self): + "D.values() -> an object providing a view on D's values" + return _OrderedDictValuesView(self) + + __ne__ = MutableMapping.__ne__ + + __marker = object() + + def pop(self, key, default=__marker): + '''od.pop(k[,d]) -> v, remove specified key and return the corresponding + value. If key is not found, d is returned if given, otherwise KeyError + is raised. + + ''' + if key in self: + result = self[key] + del self[key] + return result + if default is self.__marker: + raise KeyError(key) + return default + + def setdefault(self, key, default=None): + 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' + if key in self: + return self[key] + self[key] = default + return default + + @_recursive_repr() + def __repr__(self): + 'od.__repr__() <==> repr(od)' + if not self: + return '%s()' % (self.__class__.__name__,) + return '%s(%r)' % (self.__class__.__name__, list(self.items())) + + def __reduce__(self): + 'Return state information for pickling' + inst_dict = vars(self).copy() + for k in vars(OrderedDict()): + inst_dict.pop(k, None) + return self.__class__, (), inst_dict or None, None, iter(self.items()) + + def copy(self): + 'od.copy() -> a shallow copy of od' + return self.__class__(self) + + @classmethod + def fromkeys(cls, iterable, value=None): + '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S. + If not specified, the value defaults to None. + + ''' + self = cls() + for key in iterable: + self[key] = value + return self + + def __eq__(self, other): + '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive + while comparison to a regular mapping is order-insensitive. + + ''' + if isinstance(other, OrderedDict): + return dict.__eq__(self, other) and all(map(_eq, self, other)) + return dict.__eq__(self, other) + + +try: + from _collections import OrderedDict +except ImportError: + # Leave the pure Python version in place. 
+ pass + + +################################################################################ +### namedtuple +################################################################################ + +_class_template = """\ +from builtins import property as _property, tuple as _tuple +from operator import itemgetter as _itemgetter +from collections import OrderedDict + +class {typename}(tuple): + '{typename}({arg_list})' + + __slots__ = () + + _fields = {field_names!r} + + def __new__(_cls, {arg_list}): + 'Create new instance of {typename}({arg_list})' + return _tuple.__new__(_cls, ({arg_list})) + + @classmethod + def _make(cls, iterable, new=tuple.__new__, len=len): + 'Make a new {typename} object from a sequence or iterable' + result = new(cls, iterable) + if len(result) != {num_fields:d}: + raise TypeError('Expected {num_fields:d} arguments, got %d' % len(result)) + return result + + def _replace(_self, **kwds): + 'Return a new {typename} object replacing specified fields with new values' + result = _self._make(map(kwds.pop, {field_names!r}, _self)) + if kwds: + raise ValueError('Got unexpected field names: %r' % list(kwds)) + return result + + def __repr__(self): + 'Return a nicely formatted representation string' + return self.__class__.__name__ + '({repr_fmt})' % self + + def _asdict(self): + 'Return a new OrderedDict which maps field names to their values.' + return OrderedDict(zip(self._fields, self)) + + def __getnewargs__(self): + 'Return self as a plain tuple. Used by copy and pickle.' + return tuple(self) + +{field_defs} +""" + +_repr_template = '{name}=%r' + +_field_template = '''\ + {name} = _property(_itemgetter({index:d}), doc='Alias for field number {index:d}') +''' + +def namedtuple(typename, field_names, verbose=False, rename=False): + """Returns a new subclass of tuple with named fields. + + >>> Point = namedtuple('Point', ['x', 'y']) + >>> Point.__doc__ # docstring for the new class + 'Point(x, y)' + >>> p = Point(11, y=22) # instantiate with positional args or keywords + >>> p[0] + p[1] # indexable like a plain tuple + 33 + >>> x, y = p # unpack like a regular tuple + >>> x, y + (11, 22) + >>> p.x + p.y # fields also accessible by name + 33 + >>> d = p._asdict() # convert to a dictionary + >>> d['x'] + 11 + >>> Point(**d) # convert from a dictionary + Point(x=11, y=22) + >>> p._replace(x=100) # _replace() is like str.replace() but targets named fields + Point(x=100, y=22) + + """ + + # Validate the field names. At the user's option, either generate an error + # message or automatically replace the field name with a valid name. 
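+    # For example, with rename=True, namedtuple('P', ['abc', 'def', 'abc'])
+    # yields the fields ('abc', '_1', '_2'): 'def' is a keyword and the
+    # second 'abc' is a duplicate, so each is replaced by its index.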
+ if isinstance(field_names, str): + field_names = field_names.replace(',', ' ').split() + field_names = list(map(str, field_names)) + typename = str(typename) + if rename: + seen = set() + for index, name in enumerate(field_names): + if (not name.isidentifier() + or _iskeyword(name) + or name.startswith('_') + or name in seen): + field_names[index] = '_%d' % index + seen.add(name) + for name in [typename] + field_names: + if type(name) != str: + raise TypeError('Type names and field names must be strings') + if not name.isidentifier(): + raise ValueError('Type names and field names must be valid ' + 'identifiers: %r' % name) + if _iskeyword(name): + raise ValueError('Type names and field names cannot be a ' + 'keyword: %r' % name) + seen = set() + for name in field_names: + if name.startswith('_') and not rename: + raise ValueError('Field names cannot start with an underscore: ' + '%r' % name) + if name in seen: + raise ValueError('Encountered duplicate field name: %r' % name) + seen.add(name) + + # Fill-in the class template + class_definition = _class_template.format( + typename = typename, + field_names = tuple(field_names), + num_fields = len(field_names), + arg_list = repr(tuple(field_names)).replace("'", "")[1:-1], + repr_fmt = ', '.join(_repr_template.format(name=name) + for name in field_names), + field_defs = '\n'.join(_field_template.format(index=index, name=name) + for index, name in enumerate(field_names)) + ) + + # Execute the template string in a temporary namespace and support + # tracing utilities by setting a value for frame.f_globals['__name__'] + namespace = dict(__name__='namedtuple_%s' % typename) + exec(class_definition, namespace) + result = namespace[typename] + result._source = class_definition + if verbose: + print(result._source) + + # For pickling to work, the __module__ variable needs to be set to the frame + # where the named tuple is created. Bypass this step in environments where + # sys._getframe is not defined (Jython for example) or sys._getframe is not + # defined for arguments greater than 0 (IronPython). + try: + result.__module__ = _sys._getframe(1).f_globals.get('__name__', '__main__') + except (AttributeError, ValueError): + pass + + return result + + +######################################################################## +### Counter +######################################################################## + +def _count_elements(mapping, iterable): + 'Tally elements from the iterable.' + mapping_get = mapping.get + for elem in iterable: + mapping[elem] = mapping_get(elem, 0) + 1 + +try: # Load C helper function if available + from _collections import _count_elements +except ImportError: + pass + +class Counter(dict): + '''Dict subclass for counting hashable items. Sometimes called a bag + or multiset. Elements are stored as dictionary keys and their counts + are stored as dictionary values. + + >>> c = Counter('abcdeabcdabcaba') # count elements from a string + + >>> c.most_common(3) # three most common elements + [('a', 5), ('b', 4), ('c', 3)] + >>> sorted(c) # list all unique elements + ['a', 'b', 'c', 'd', 'e'] + >>> ''.join(sorted(c.elements())) # list elements with repetitions + 'aaaaabbbbcccdde' + >>> sum(c.values()) # total of all counts + 15 + + >>> c['a'] # count of letter 'a' + 5 + >>> for elem in 'shazam': # update counts from an iterable + ... 
c[elem] += 1 # by adding 1 to each element's count + >>> c['a'] # now there are seven 'a' + 7 + >>> del c['b'] # remove all 'b' + >>> c['b'] # now there are zero 'b' + 0 + + >>> d = Counter('simsalabim') # make another counter + >>> c.update(d) # add in the second counter + >>> c['a'] # now there are nine 'a' + 9 + + >>> c.clear() # empty the counter + >>> c + Counter() + + Note: If a count is set to zero or reduced to zero, it will remain + in the counter until the entry is deleted or the counter is cleared: + + >>> c = Counter('aaabbc') + >>> c['b'] -= 2 # reduce the count of 'b' by two + >>> c.most_common() # 'b' is still in, but its count is zero + [('a', 3), ('c', 1), ('b', 0)] + + ''' + # References: + # http://en.wikipedia.org/wiki/Multiset + # http://www.gnu.org/software/smalltalk/manual-base/html_node/Bag.html + # http://www.demo2s.com/Tutorial/Cpp/0380__set-multiset/Catalog0380__set-multiset.htm + # http://code.activestate.com/recipes/259174/ + # Knuth, TAOCP Vol. II section 4.6.3 + + def __init__(*args, **kwds): + '''Create a new, empty Counter object. And if given, count elements + from an input iterable. Or, initialize the count from another mapping + of elements to their counts. + + >>> c = Counter() # a new, empty counter + >>> c = Counter('gallahad') # a new counter from an iterable + >>> c = Counter({'a': 4, 'b': 2}) # a new counter from a mapping + >>> c = Counter(a=4, b=2) # a new counter from keyword args + + ''' + if not args: + raise TypeError("descriptor '__init__' of 'Counter' object " + "needs an argument") + self, *args = args + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + super(Counter, self).__init__() + self.update(*args, **kwds) + + def __missing__(self, key): + 'The count of elements not in the Counter is zero.' + # Needed so that self[missing_item] does not raise KeyError + return 0 + + def most_common(self, n=None): + '''List the n most common elements and their counts from the most + common to the least. If n is None, then list all element counts. + + >>> Counter('abcdeabcdabcaba').most_common(3) + [('a', 5), ('b', 4), ('c', 3)] + + ''' + # Emulate Bag.sortedByCount from Smalltalk + if n is None: + return sorted(self.items(), key=_itemgetter(1), reverse=True) + return _heapq.nlargest(n, self.items(), key=_itemgetter(1)) + + def elements(self): + '''Iterator over elements repeating each as many times as its count. + + >>> c = Counter('ABCABC') + >>> sorted(c.elements()) + ['A', 'A', 'B', 'B', 'C', 'C'] + + # Knuth's example for prime factors of 1836: 2**2 * 3**3 * 17**1 + >>> prime_factors = Counter({2: 2, 3: 3, 17: 1}) + >>> product = 1 + >>> for factor in prime_factors.elements(): # loop over factors + ... product *= factor # and multiply them + >>> product + 1836 + + Note, if an element's count has been set to zero or is a negative + number, elements() will ignore it. + + ''' + # Emulate Bag.do from Smalltalk and Multiset.begin from C++. + return _chain.from_iterable(_starmap(_repeat, self.items())) + + # Override dict methods where necessary + + @classmethod + def fromkeys(cls, iterable, v=None): + # There is no equivalent method for counters because setting v=1 + # means that no element can have a count greater than one. + raise NotImplementedError( + 'Counter.fromkeys() is undefined. Use Counter(iterable) instead.') + + def update(*args, **kwds): + '''Like dict.update() but add counts instead of replacing them. + + Source can be an iterable, a dictionary, or another Counter instance. 
+
+        >>> c = Counter('which')
+        >>> c.update('witch')           # add elements from another iterable
+        >>> d = Counter('watch')
+        >>> c.update(d)                 # add elements from another counter
+        >>> c['h']                      # four 'h' in which, witch, and watch
+        4
+
+        '''
+        # The regular dict.update() operation makes no sense here because the
+        # replace behavior results in some of the original untouched counts
+        # being mixed in with all of the other counts for a mishmash that
+        # doesn't have a straightforward interpretation in most counting
+        # contexts. Instead, we implement straight-addition. Both the inputs
+        # and outputs are allowed to contain zero and negative counts.
+
+        if not args:
+            raise TypeError("descriptor 'update' of 'Counter' object "
+                            "needs an argument")
+        self, *args = args
+        if len(args) > 1:
+            raise TypeError('expected at most 1 arguments, got %d' % len(args))
+        iterable = args[0] if args else None
+        if iterable is not None:
+            if isinstance(iterable, Mapping):
+                if self:
+                    self_get = self.get
+                    for elem, count in iterable.items():
+                        self[elem] = count + self_get(elem, 0)
+                else:
+                    super(Counter, self).update(iterable) # fast path when counter is empty
+            else:
+                _count_elements(self, iterable)
+        if kwds:
+            self.update(kwds)
+
+    def subtract(*args, **kwds):
+        '''Like dict.update() but subtracts counts instead of replacing them.
+        Counts can be reduced below zero.  Both the inputs and outputs are
+        allowed to contain zero and negative counts.
+
+        Source can be an iterable, a dictionary, or another Counter instance.
+
+        >>> c = Counter('which')
+        >>> c.subtract('witch')             # subtract elements from another iterable
+        >>> c.subtract(Counter('watch'))    # subtract elements from another counter
+        >>> c['h']                          # 2 in which, minus 1 in witch, minus 1 in watch
+        0
+        >>> c['w']                          # 1 in which, minus 1 in witch, minus 1 in watch
+        -1
+
+        '''
+        if not args:
+            raise TypeError("descriptor 'subtract' of 'Counter' object "
+                            "needs an argument")
+        self, *args = args
+        if len(args) > 1:
+            raise TypeError('expected at most 1 arguments, got %d' % len(args))
+        iterable = args[0] if args else None
+        if iterable is not None:
+            self_get = self.get
+            if isinstance(iterable, Mapping):
+                for elem, count in iterable.items():
+                    self[elem] = self_get(elem, 0) - count
+            else:
+                for elem in iterable:
+                    self[elem] = self_get(elem, 0) - 1
+        if kwds:
+            self.subtract(kwds)
+
+    def copy(self):
+        'Return a shallow copy.'
+        return self.__class__(self)
+
+    def __reduce__(self):
+        return self.__class__, (dict(self),)
+
+    def __delitem__(self, elem):
+        'Like dict.__delitem__() but does not raise KeyError for missing values.'
+        if elem in self:
+            super().__delitem__(elem)
+
+    def __repr__(self):
+        if not self:
+            return '%s()' % self.__class__.__name__
+        try:
+            items = ', '.join(map('%r: %r'.__mod__, self.most_common()))
+            return '%s({%s})' % (self.__class__.__name__, items)
+        except TypeError:
+            # handle case where values are not orderable
+            return '{0}({1!r})'.format(self.__class__.__name__, dict(self))
+
+    # Multiset-style mathematical operations discussed in:
+    #       Knuth TAOCP Volume II section 4.6.3 exercise 19
+    #       and at http://en.wikipedia.org/wiki/Multiset
+    #
+    # Outputs guaranteed to only include positive counts.
+    #
+    # To strip negative and zero counts, add-in an empty counter:
+    #       c += Counter()
+
+    def __add__(self, other):
+        '''Add counts from two counters.
+ + >>> Counter('abbb') + Counter('bcc') + Counter({'b': 4, 'c': 2, 'a': 1}) + + ''' + if not isinstance(other, Counter): + return NotImplemented + result = Counter() + for elem, count in self.items(): + newcount = count + other[elem] + if newcount > 0: + result[elem] = newcount + for elem, count in other.items(): + if elem not in self and count > 0: + result[elem] = count + return result + + def __sub__(self, other): + ''' Subtract count, but keep only results with positive counts. + + >>> Counter('abbbc') - Counter('bccd') + Counter({'b': 2, 'a': 1}) + + ''' + if not isinstance(other, Counter): + return NotImplemented + result = Counter() + for elem, count in self.items(): + newcount = count - other[elem] + if newcount > 0: + result[elem] = newcount + for elem, count in other.items(): + if elem not in self and count < 0: + result[elem] = 0 - count + return result + + def __or__(self, other): + '''Union is the maximum of value in either of the input counters. + + >>> Counter('abbb') | Counter('bcc') + Counter({'b': 3, 'c': 2, 'a': 1}) + + ''' + if not isinstance(other, Counter): + return NotImplemented + result = Counter() + for elem, count in self.items(): + other_count = other[elem] + newcount = other_count if count < other_count else count + if newcount > 0: + result[elem] = newcount + for elem, count in other.items(): + if elem not in self and count > 0: + result[elem] = count + return result + + def __and__(self, other): + ''' Intersection is the minimum of corresponding counts. + + >>> Counter('abbb') & Counter('bcc') + Counter({'b': 1}) + + ''' + if not isinstance(other, Counter): + return NotImplemented + result = Counter() + for elem, count in self.items(): + other_count = other[elem] + newcount = count if count < other_count else other_count + if newcount > 0: + result[elem] = newcount + return result + + def __pos__(self): + 'Adds an empty counter, effectively stripping negative and zero counts' + result = Counter() + for elem, count in self.items(): + if count > 0: + result[elem] = count + return result + + def __neg__(self): + '''Subtracts from an empty counter. Strips positive and zero counts, + and flips the sign on negative counts. + + ''' + result = Counter() + for elem, count in self.items(): + if count < 0: + result[elem] = 0 - count + return result + + def _keep_positive(self): + '''Internal method to strip elements with a negative or zero count''' + nonpositive = [elem for elem, count in self.items() if not count > 0] + for elem in nonpositive: + del self[elem] + return self + + def __iadd__(self, other): + '''Inplace add from another counter, keeping only positive counts. + + >>> c = Counter('abbb') + >>> c += Counter('bcc') + >>> c + Counter({'b': 4, 'c': 2, 'a': 1}) + + ''' + for elem, count in other.items(): + self[elem] += count + return self._keep_positive() + + def __isub__(self, other): + '''Inplace subtract counter, but keep only results with positive counts. + + >>> c = Counter('abbbc') + >>> c -= Counter('bccd') + >>> c + Counter({'b': 2, 'a': 1}) + + ''' + for elem, count in other.items(): + self[elem] -= count + return self._keep_positive() + + def __ior__(self, other): + '''Inplace union is the maximum of value from either counter. 
+
+        >>> c = Counter('abbb')
+        >>> c |= Counter('bcc')
+        >>> c
+        Counter({'b': 3, 'c': 2, 'a': 1})
+
+        '''
+        for elem, other_count in other.items():
+            count = self[elem]
+            if other_count > count:
+                self[elem] = other_count
+        return self._keep_positive()
+
+    def __iand__(self, other):
+        '''Inplace intersection is the minimum of corresponding counts.
+
+        >>> c = Counter('abbb')
+        >>> c &= Counter('bcc')
+        >>> c
+        Counter({'b': 1})
+
+        '''
+        for elem, count in self.items():
+            other_count = other[elem]
+            if other_count < count:
+                self[elem] = other_count
+        return self._keep_positive()
+
+
+########################################################################
+### ChainMap (helper for configparser and string.Template)
+########################################################################
+
+class ChainMap(MutableMapping):
+    ''' A ChainMap groups multiple dicts (or other mappings) together
+    to create a single, updateable view.
+
+    The underlying mappings are stored in a list.  That list is public and can
+    be accessed or updated using the *maps* attribute.  There is no other
+    state.
+
+    Lookups search the underlying mappings successively until a key is found.
+    In contrast, writes, updates, and deletions only operate on the first
+    mapping.
+
+    '''
+
+    def __init__(self, *maps):
+        '''Initialize a ChainMap by setting *maps* to the given mappings.
+        If no mappings are provided, a single empty dictionary is used.
+
+        '''
+        self.maps = list(maps) or [{}]          # always at least one map
+
+    def __missing__(self, key):
+        raise KeyError(key)
+
+    def __getitem__(self, key):
+        for mapping in self.maps:
+            try:
+                return mapping[key]             # can't use 'key in mapping' with defaultdict
+            except KeyError:
+                pass
+        return self.__missing__(key)            # support subclasses that define __missing__
+
+    def get(self, key, default=None):
+        return self[key] if key in self else default
+
+    def __len__(self):
+        return len(set().union(*self.maps))     # reuses stored hash values if possible
+
+    def __iter__(self):
+        return iter(set().union(*self.maps))
+
+    def __contains__(self, key):
+        return any(key in m for m in self.maps)
+
+    def __bool__(self):
+        return any(self.maps)
+
+    @_recursive_repr()
+    def __repr__(self):
+        return '{0.__class__.__name__}({1})'.format(
+            self, ', '.join(map(repr, self.maps)))
+
+    @classmethod
+    def fromkeys(cls, iterable, *args):
+        'Create a ChainMap with a single dict created from the iterable.'
+        return cls(dict.fromkeys(iterable, *args))
+
+    def copy(self):
+        'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]'
+        return self.__class__(self.maps[0].copy(), *self.maps[1:])
+
+    __copy__ = copy
+
+    def new_child(self, m=None):                # like Django's Context.push()
+        '''New ChainMap with a new map followed by all previous maps.
+        If no map is provided, an empty dict is used.
+        '''
+        if m is None:
+            m = {}
+        return self.__class__(m, *self.maps)
+
+    @property
+    def parents(self):                          # like Django's Context.pop()
+        'New ChainMap from maps[1:].'
+        return self.__class__(*self.maps[1:])
+
+    def __setitem__(self, key, value):
+        self.maps[0][key] = value
+
+    def __delitem__(self, key):
+        try:
+            del self.maps[0][key]
+        except KeyError:
+            raise KeyError('Key not found in the first mapping: {!r}'.format(key))
+
+    def popitem(self):
+        'Remove and return an item pair from maps[0]. Raise KeyError if maps[0] is empty.'
+        try:
+            return self.maps[0].popitem()
+        except KeyError:
+            raise KeyError('No keys found in the first mapping.')
+
+    def pop(self, key, *args):
+        'Remove *key* from maps[0] and return its value. 
Raise KeyError if *key* not in maps[0].' + try: + return self.maps[0].pop(key, *args) + except KeyError: + raise KeyError('Key not found in the first mapping: {!r}'.format(key)) + + def clear(self): + 'Clear maps[0], leaving maps[1:] intact.' + self.maps[0].clear() + + +################################################################################ +### UserDict +################################################################################ + +class UserDict(MutableMapping): + + # Start by filling-out the abstract methods + def __init__(*args, **kwargs): + if not args: + raise TypeError("descriptor '__init__' of 'UserDict' object " + "needs an argument") + self, *args = args + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + if args: + dict = args[0] + elif 'dict' in kwargs: + dict = kwargs.pop('dict') + import warnings + warnings.warn("Passing 'dict' as keyword argument is deprecated", + PendingDeprecationWarning, stacklevel=2) + else: + dict = None + self.data = {} + if dict is not None: + self.update(dict) + if len(kwargs): + self.update(kwargs) + def __len__(self): return len(self.data) + def __getitem__(self, key): + if key in self.data: + return self.data[key] + if hasattr(self.__class__, "__missing__"): + return self.__class__.__missing__(self, key) + raise KeyError(key) + def __setitem__(self, key, item): self.data[key] = item + def __delitem__(self, key): del self.data[key] + def __iter__(self): + return iter(self.data) + + # Modify __contains__ to work correctly when __missing__ is present + def __contains__(self, key): + return key in self.data + + # Now, add the methods in dicts but not in MutableMapping + def __repr__(self): return repr(self.data) + def copy(self): + if self.__class__ is UserDict: + return UserDict(self.data.copy()) + import copy + data = self.data + try: + self.data = {} + c = copy.copy(self) + finally: + self.data = data + c.update(self) + return c + @classmethod + def fromkeys(cls, iterable, value=None): + d = cls() + for key in iterable: + d[key] = value + return d + + + +################################################################################ +### UserList +################################################################################ + +class UserList(MutableSequence): + """A more or less complete user-defined wrapper around list objects.""" + def __init__(self, initlist=None): + self.data = [] + if initlist is not None: + # XXX should this accept an arbitrary sequence? 
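+            # (As written it does: any other iterable falls through to
+            # the list(initlist) branch below.)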
+ if type(initlist) == type(self.data): + self.data[:] = initlist + elif isinstance(initlist, UserList): + self.data[:] = initlist.data[:] + else: + self.data = list(initlist) + def __repr__(self): return repr(self.data) + def __lt__(self, other): return self.data < self.__cast(other) + def __le__(self, other): return self.data <= self.__cast(other) + def __eq__(self, other): return self.data == self.__cast(other) + def __gt__(self, other): return self.data > self.__cast(other) + def __ge__(self, other): return self.data >= self.__cast(other) + def __cast(self, other): + return other.data if isinstance(other, UserList) else other + def __contains__(self, item): return item in self.data + def __len__(self): return len(self.data) + def __getitem__(self, i): return self.data[i] + def __setitem__(self, i, item): self.data[i] = item + def __delitem__(self, i): del self.data[i] + def __add__(self, other): + if isinstance(other, UserList): + return self.__class__(self.data + other.data) + elif isinstance(other, type(self.data)): + return self.__class__(self.data + other) + return self.__class__(self.data + list(other)) + def __radd__(self, other): + if isinstance(other, UserList): + return self.__class__(other.data + self.data) + elif isinstance(other, type(self.data)): + return self.__class__(other + self.data) + return self.__class__(list(other) + self.data) + def __iadd__(self, other): + if isinstance(other, UserList): + self.data += other.data + elif isinstance(other, type(self.data)): + self.data += other + else: + self.data += list(other) + return self + def __mul__(self, n): + return self.__class__(self.data*n) + __rmul__ = __mul__ + def __imul__(self, n): + self.data *= n + return self + def append(self, item): self.data.append(item) + def insert(self, i, item): self.data.insert(i, item) + def pop(self, i=-1): return self.data.pop(i) + def remove(self, item): self.data.remove(item) + def clear(self): self.data.clear() + def copy(self): return self.__class__(self) + def count(self, item): return self.data.count(item) + def index(self, item, *args): return self.data.index(item, *args) + def reverse(self): self.data.reverse() + def sort(self, *args, **kwds): self.data.sort(*args, **kwds) + def extend(self, other): + if isinstance(other, UserList): + self.data.extend(other.data) + else: + self.data.extend(other) + + + +################################################################################ +### UserString +################################################################################ + +class UserString(Sequence): + def __init__(self, seq): + if isinstance(seq, str): + self.data = seq + elif isinstance(seq, UserString): + self.data = seq.data[:] + else: + self.data = str(seq) + def __str__(self): return str(self.data) + def __repr__(self): return repr(self.data) + def __int__(self): return int(self.data) + def __float__(self): return float(self.data) + def __complex__(self): return complex(self.data) + def __hash__(self): return hash(self.data) + def __getnewargs__(self): + return (self.data[:],) + + def __eq__(self, string): + if isinstance(string, UserString): + return self.data == string.data + return self.data == string + def __lt__(self, string): + if isinstance(string, UserString): + return self.data < string.data + return self.data < string + def __le__(self, string): + if isinstance(string, UserString): + return self.data <= string.data + return self.data <= string + def __gt__(self, string): + if isinstance(string, UserString): + return self.data > string.data + return 
self.data > string
+    def __ge__(self, string):
+        if isinstance(string, UserString):
+            return self.data >= string.data
+        return self.data >= string
+
+    def __contains__(self, char):
+        if isinstance(char, UserString):
+            char = char.data
+        return char in self.data
+
+    def __len__(self): return len(self.data)
+    def __getitem__(self, index): return self.__class__(self.data[index])
+    def __add__(self, other):
+        if isinstance(other, UserString):
+            return self.__class__(self.data + other.data)
+        elif isinstance(other, str):
+            return self.__class__(self.data + other)
+        return self.__class__(self.data + str(other))
+    def __radd__(self, other):
+        if isinstance(other, str):
+            return self.__class__(other + self.data)
+        return self.__class__(str(other) + self.data)
+    def __mul__(self, n):
+        return self.__class__(self.data*n)
+    __rmul__ = __mul__
+    def __mod__(self, args):
+        return self.__class__(self.data % args)
+    def __rmod__(self, format):
+        # format is the left operand of %; apply it to the wrapped string
+        return self.__class__(format % self.data)
+
+    # the following methods are defined in alphabetical order:
+    def capitalize(self): return self.__class__(self.data.capitalize())
+    def casefold(self):
+        return self.__class__(self.data.casefold())
+    def center(self, width, *args):
+        return self.__class__(self.data.center(width, *args))
+    def count(self, sub, start=0, end=_sys.maxsize):
+        if isinstance(sub, UserString):
+            sub = sub.data
+        return self.data.count(sub, start, end)
+    def encode(self, encoding=None, errors=None): # XXX improve this?
+        if encoding:
+            if errors:
+                return self.__class__(self.data.encode(encoding, errors))
+            return self.__class__(self.data.encode(encoding))
+        return self.__class__(self.data.encode())
+    def endswith(self, suffix, start=0, end=_sys.maxsize):
+        return self.data.endswith(suffix, start, end)
+    def expandtabs(self, tabsize=8):
+        return self.__class__(self.data.expandtabs(tabsize))
+    def find(self, sub, start=0, end=_sys.maxsize):
+        if isinstance(sub, UserString):
+            sub = sub.data
+        return self.data.find(sub, start, end)
+    def format(self, *args, **kwds):
+        return self.data.format(*args, **kwds)
+    def format_map(self, mapping):
+        return self.data.format_map(mapping)
+    def index(self, sub, start=0, end=_sys.maxsize):
+        return self.data.index(sub, start, end)
+    def isalpha(self): return self.data.isalpha()
+    def isalnum(self): return self.data.isalnum()
+    def isdecimal(self): return self.data.isdecimal()
+    def isdigit(self): return self.data.isdigit()
+    def isidentifier(self): return self.data.isidentifier()
+    def islower(self): return self.data.islower()
+    def isnumeric(self): return self.data.isnumeric()
+    def isprintable(self): return self.data.isprintable()
+    def isspace(self): return self.data.isspace()
+    def istitle(self): return self.data.istitle()
+    def isupper(self): return self.data.isupper()
+    def join(self, seq): return self.data.join(seq)
+    def ljust(self, width, *args):
+        return self.__class__(self.data.ljust(width, *args))
+    def lower(self): return self.__class__(self.data.lower())
+    def lstrip(self, chars=None): return self.__class__(self.data.lstrip(chars))
+    maketrans = str.maketrans
+    def partition(self, sep):
+        return self.data.partition(sep)
+    def replace(self, old, new, maxsplit=-1):
+        if isinstance(old, UserString):
+            old = old.data
+        if isinstance(new, UserString):
+            new = new.data
+        return self.__class__(self.data.replace(old, new, maxsplit))
+    def rfind(self, sub, start=0, end=_sys.maxsize):
+        if isinstance(sub, UserString):
+            sub = sub.data
+        return self.data.rfind(sub, start, end)
+    def rindex(self, sub, start=0, 
end=_sys.maxsize): + return self.data.rindex(sub, start, end) + def rjust(self, width, *args): + return self.__class__(self.data.rjust(width, *args)) + def rpartition(self, sep): + return self.data.rpartition(sep) + def rstrip(self, chars=None): + return self.__class__(self.data.rstrip(chars)) + def split(self, sep=None, maxsplit=-1): + return self.data.split(sep, maxsplit) + def rsplit(self, sep=None, maxsplit=-1): + return self.data.rsplit(sep, maxsplit) + def splitlines(self, keepends=False): return self.data.splitlines(keepends) + def startswith(self, prefix, start=0, end=_sys.maxsize): + return self.data.startswith(prefix, start, end) + def strip(self, chars=None): return self.__class__(self.data.strip(chars)) + def swapcase(self): return self.__class__(self.data.swapcase()) + def title(self): return self.__class__(self.data.title()) + def translate(self, *args): + return self.__class__(self.data.translate(*args)) + def upper(self): return self.__class__(self.data.upper()) + def zfill(self, width): return self.__class__(self.data.zfill(width)) diff --git a/typing_extensions/test_data/python-3.5.2/collections/__main__.py b/typing_extensions/test_data/python-3.5.2/collections/__main__.py new file mode 100644 index 00000000..763e38e0 --- /dev/null +++ b/typing_extensions/test_data/python-3.5.2/collections/__main__.py @@ -0,0 +1,38 @@ +################################################################################ +### Simple tests +################################################################################ + +# verify that instances can be pickled +from collections import namedtuple +from pickle import loads, dumps +Point = namedtuple('Point', 'x, y', True) +p = Point(x=10, y=20) +assert p == loads(dumps(p)) + +# test and demonstrate ability to override methods +class Point(namedtuple('Point', 'x y')): + __slots__ = () + @property + def hypot(self): + return (self.x ** 2 + self.y ** 2) ** 0.5 + def __str__(self): + return 'Point: x=%6.3f y=%6.3f hypot=%6.3f' % (self.x, self.y, self.hypot) + +for p in Point(3, 4), Point(14, 5/7.): + print (p) + +class Point(namedtuple('Point', 'x y')): + 'Point class with optimized _make() and _replace() without error-checking' + __slots__ = () + _make = classmethod(tuple.__new__) + def _replace(self, _map=map, **kwds): + return self._make(_map(kwds.get, ('x', 'y'), self)) + +print(Point(11, 22)._replace(x=100)) + +Point3D = namedtuple('Point3D', Point._fields + ('z',)) +print(Point3D.__doc__) + +import doctest, collections +TestResults = namedtuple('TestResults', 'failed attempted') +print(TestResults(*doctest.testmod(collections))) diff --git a/typing_extensions/test_data/python-3.5.2/collections/abc.py b/typing_extensions/test_data/python-3.5.2/collections/abc.py new file mode 100644 index 00000000..891600d1 --- /dev/null +++ b/typing_extensions/test_data/python-3.5.2/collections/abc.py @@ -0,0 +1,2 @@ +from _collections_abc import * +from _collections_abc import __all__ diff --git a/typing_extensions/test_data/python-3.5.2/typing.py b/typing_extensions/test_data/python-3.5.2/typing.py new file mode 100644 index 00000000..4cac66cd --- /dev/null +++ b/typing_extensions/test_data/python-3.5.2/typing.py @@ -0,0 +1,1843 @@ +import abc +from abc import abstractmethod, abstractproperty +import collections +import contextlib +import functools +import re as stdlib_re # Avoid confusion with the re we export. 
+import sys +import types +try: + import collections.abc as collections_abc +except ImportError: + import collections as collections_abc # Fallback for PY3.2. + + +# Please keep __all__ alphabetized within each category. +__all__ = [ + # Super-special typing primitives. + 'Any', + 'Callable', + 'Generic', + 'Optional', + 'Tuple', + 'Type', + 'TypeVar', + 'Union', + + # ABCs (from collections.abc). + 'AbstractSet', # collections.abc.Set. + 'Awaitable', + 'AsyncIterator', + 'AsyncIterable', + 'ByteString', + 'Container', + 'Hashable', + 'ItemsView', + 'Iterable', + 'Iterator', + 'KeysView', + 'Mapping', + 'MappingView', + 'MutableMapping', + 'MutableSequence', + 'MutableSet', + 'Sequence', + 'Sized', + 'ValuesView', + + # Structural checks, a.k.a. protocols. + 'Reversible', + 'SupportsAbs', + 'SupportsFloat', + 'SupportsInt', + 'SupportsRound', + + # Concrete collection types. + 'Dict', + 'DefaultDict', + 'List', + 'Set', + 'NamedTuple', # Not really a type. + 'Generator', + + # One-off things. + 'AnyStr', + 'cast', + 'get_type_hints', + 'NewType', + 'no_type_check', + 'no_type_check_decorator', + 'overload', + 'Text', + 'TYPE_CHECKING', +] + +# The pseudo-submodules 're' and 'io' are part of the public +# namespace, but excluded from __all__ because they might stomp on +# legitimate imports of those modules. + + +def _qualname(x): + if sys.version_info[:2] >= (3, 3): + return x.__qualname__ + else: + # Fall back to just name. + return x.__name__ + + +class TypingMeta(type): + """Metaclass for every type defined below. + + This overrides __new__() to require an extra keyword parameter + '_root', which serves as a guard against naive subclassing of the + typing classes. Any legitimate class defined using a metaclass + derived from TypingMeta (including internal subclasses created by + e.g. Union[X, Y]) must pass _root=True. + + This also defines a dummy constructor (all the work is done in + __new__) and a nicer repr(). + """ + + _is_protocol = False + + def __new__(cls, name, bases, namespace, *, _root=False): + if not _root: + raise TypeError("Cannot subclass %s" % + (', '.join(map(_type_repr, bases)) or '()')) + return super().__new__(cls, name, bases, namespace) + + def __init__(self, *args, **kwds): + pass + + def _eval_type(self, globalns, localns): + """Override this in subclasses to interpret forward references. + + For example, Union['C'] is internally stored as + Union[_ForwardRef('C')], which should evaluate to _Union[C], + where C is an object found in globalns or localns (searching + localns first, of course). 
+ """ + return self + + def _get_type_vars(self, tvars): + pass + + def __repr__(self): + return '%s.%s' % (self.__module__, _qualname(self)) + + +class Final: + """Mix-in class to prevent instantiation.""" + + __slots__ = () + + def __new__(self, *args, **kwds): + raise TypeError("Cannot instantiate %r" % self.__class__) + + +class _ForwardRef(TypingMeta): + """Wrapper to hold a forward reference.""" + + def __new__(cls, arg): + if not isinstance(arg, str): + raise TypeError('ForwardRef must be a string -- got %r' % (arg,)) + try: + code = compile(arg, '', 'eval') + except SyntaxError: + raise SyntaxError('ForwardRef must be an expression -- got %r' % + (arg,)) + self = super().__new__(cls, arg, (), {}, _root=True) + self.__forward_arg__ = arg + self.__forward_code__ = code + self.__forward_evaluated__ = False + self.__forward_value__ = None + typing_globals = globals() + frame = sys._getframe(1) + while frame is not None and frame.f_globals is typing_globals: + frame = frame.f_back + assert frame is not None + self.__forward_frame__ = frame + return self + + def _eval_type(self, globalns, localns): + if not isinstance(localns, dict): + raise TypeError('ForwardRef localns must be a dict -- got %r' % + (localns,)) + if not isinstance(globalns, dict): + raise TypeError('ForwardRef globalns must be a dict -- got %r' % + (globalns,)) + if not self.__forward_evaluated__: + if globalns is None and localns is None: + globalns = localns = {} + elif globalns is None: + globalns = localns + elif localns is None: + localns = globalns + self.__forward_value__ = _type_check( + eval(self.__forward_code__, globalns, localns), + "Forward references must evaluate to types.") + self.__forward_evaluated__ = True + return self.__forward_value__ + + def __instancecheck__(self, obj): + raise TypeError("Forward references cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + if not self.__forward_evaluated__: + globalns = self.__forward_frame__.f_globals + localns = self.__forward_frame__.f_locals + try: + self._eval_type(globalns, localns) + except NameError: + return False # Too early. + return issubclass(cls, self.__forward_value__) + + def __repr__(self): + return '_ForwardRef(%r)' % (self.__forward_arg__,) + + +class _TypeAlias: + """Internal helper class for defining generic variants of concrete types. + + Note that this is not a type; let's call it a pseudo-type. It can + be used in instance and subclass checks, e.g. isinstance(m, Match) + or issubclass(type(m), Match). However, it cannot be itself the + target of an issubclass() call; e.g. issubclass(Match, C) (for + some arbitrary class C) raises TypeError rather than returning + False. + """ + + __slots__ = ('name', 'type_var', 'impl_type', 'type_checker') + + def __new__(cls, *args, **kwds): + """Constructor. + + This only exists to give a better error message in case + someone tries to subclass a type alias (not a good idea). + """ + if (len(args) == 3 and + isinstance(args[0], str) and + isinstance(args[1], tuple)): + # Close enough. + raise TypeError("A type alias cannot be subclassed") + return object.__new__(cls) + + def __init__(self, name, type_var, impl_type, type_checker): + """Initializer. + + Args: + name: The name, e.g. 'Pattern'. + type_var: The type parameter, e.g. AnyStr, or the + specific type, e.g. str. + impl_type: The implementation type. + type_checker: Function that takes an impl_type instance. + and returns a value that should be a type_var instance. 
+ """ + assert isinstance(name, str), repr(name) + assert isinstance(type_var, type), repr(type_var) + assert isinstance(impl_type, type), repr(impl_type) + assert not isinstance(impl_type, TypingMeta), repr(impl_type) + self.name = name + self.type_var = type_var + self.impl_type = impl_type + self.type_checker = type_checker + + def __repr__(self): + return "%s[%s]" % (self.name, _type_repr(self.type_var)) + + def __getitem__(self, parameter): + assert isinstance(parameter, type), repr(parameter) + if not isinstance(self.type_var, TypeVar): + raise TypeError("%s cannot be further parameterized." % self) + if self.type_var.__constraints__: + if not issubclass(parameter, Union[self.type_var.__constraints__]): + raise TypeError("%s is not a valid substitution for %s." % + (parameter, self.type_var)) + return self.__class__(self.name, parameter, + self.impl_type, self.type_checker) + + def __instancecheck__(self, obj): + raise TypeError("Type aliases cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + if cls is Any: + return True + if isinstance(cls, _TypeAlias): + # Covariance. For now, we compare by name. + return (cls.name == self.name and + issubclass(cls.type_var, self.type_var)) + else: + # Note that this is too lenient, because the + # implementation type doesn't carry information about + # whether it is about bytes or str (for example). + return issubclass(cls, self.impl_type) + + +def _get_type_vars(types, tvars): + for t in types: + if isinstance(t, TypingMeta): + t._get_type_vars(tvars) + + +def _type_vars(types): + tvars = [] + _get_type_vars(types, tvars) + return tuple(tvars) + + +def _eval_type(t, globalns, localns): + if isinstance(t, TypingMeta): + return t._eval_type(globalns, localns) + else: + return t + + +def _type_check(arg, msg): + """Check that the argument is a type, and return it. + + As a special case, accept None and return type(None) instead. + Also, _TypeAlias instances (e.g. Match, Pattern) are acceptable. + + The msg argument is a human-readable error message, e.g. + + "Union[arg, ...]: arg should be a type." + + We append the repr() of the actual value (truncated to 100 chars). + """ + if arg is None: + return type(None) + if isinstance(arg, str): + arg = _ForwardRef(arg) + if not isinstance(arg, (type, _TypeAlias)) and not callable(arg): + raise TypeError(msg + " Got %.100r." % (arg,)) + return arg + + +def _type_repr(obj): + """Return the repr() of an object, special-casing types. + + If obj is a type, we return a shorter version than the default + type.__repr__, based on the module and qualified name, which is + typically enough to uniquely identify a type. For everything + else, we fall back on repr(obj). + """ + if isinstance(obj, type) and not isinstance(obj, TypingMeta): + if obj.__module__ == 'builtins': + return _qualname(obj) + else: + return '%s.%s' % (obj.__module__, _qualname(obj)) + else: + return repr(obj) + + +class AnyMeta(TypingMeta): + """Metaclass for Any.""" + + def __new__(cls, name, bases, namespace, _root=False): + self = super().__new__(cls, name, bases, namespace, _root=_root) + return self + + def __instancecheck__(self, obj): + raise TypeError("Any cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + if not isinstance(cls, type): + return super().__subclasscheck__(cls) # To TypeError. + return True + + +class Any(Final, metaclass=AnyMeta, _root=True): + """Special type indicating an unconstrained type. + + - Any object is an instance of Any. + - Any class is a subclass of Any. 
+ - As a special case, Any and object are subclasses of each other. + """ + + __slots__ = () + + +class TypeVar(TypingMeta, metaclass=TypingMeta, _root=True): + """Type variable. + + Usage:: + + T = TypeVar('T') # Can be anything + A = TypeVar('A', str, bytes) # Must be str or bytes + + Type variables exist primarily for the benefit of static type + checkers. They serve as the parameters for generic types as well + as for generic function definitions. See class Generic for more + information on generic types. Generic functions work as follows: + + def repeat(x: T, n: int) -> Sequence[T]: + '''Return a list containing n references to x.''' + return [x]*n + + def longest(x: A, y: A) -> A: + '''Return the longest of two strings.''' + return x if len(x) >= len(y) else y + + The latter example's signature is essentially the overloading + of (str, str) -> str and (bytes, bytes) -> bytes. Also note + that if the arguments are instances of some subclass of str, + the return type is still plain str. + + At runtime, isinstance(x, T) will raise TypeError. However, + issubclass(C, T) is true for any class C, and issubclass(str, A) + and issubclass(bytes, A) are true, and issubclass(int, A) is + false. (TODO: Why is this needed? This may change. See #136.) + + Type variables may be marked covariant or contravariant by passing + covariant=True or contravariant=True. See PEP 484 for more + details. By default type variables are invariant. + + Type variables can be introspected. e.g.: + + T.__name__ == 'T' + T.__constraints__ == () + T.__covariant__ == False + T.__contravariant__ = False + A.__constraints__ == (str, bytes) + """ + + def __new__(cls, name, *constraints, bound=None, + covariant=False, contravariant=False): + self = super().__new__(cls, name, (Final,), {}, _root=True) + if covariant and contravariant: + raise ValueError("Bivariant type variables are not supported.") + self.__covariant__ = bool(covariant) + self.__contravariant__ = bool(contravariant) + if constraints and bound is not None: + raise TypeError("Constraints cannot be combined with bound=...") + if constraints and len(constraints) == 1: + raise TypeError("A single constraint is not allowed") + msg = "TypeVar(name, constraint, ...): constraints must be types." + self.__constraints__ = tuple(_type_check(t, msg) for t in constraints) + if bound: + self.__bound__ = _type_check(bound, "Bound must be a type.") + else: + self.__bound__ = None + return self + + def _get_type_vars(self, tvars): + if self not in tvars: + tvars.append(self) + + def __repr__(self): + if self.__covariant__: + prefix = '+' + elif self.__contravariant__: + prefix = '-' + else: + prefix = '~' + return prefix + self.__name__ + + def __instancecheck__(self, instance): + raise TypeError("Type variables cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + # TODO: Make this raise TypeError too? + if cls is self: + return True + if cls is Any: + return True + if self.__bound__ is not None: + return issubclass(cls, self.__bound__) + if self.__constraints__: + return any(issubclass(cls, c) for c in self.__constraints__) + return True + + +# Some unconstrained type variables. These are used by the container types. +# (These are not for export.) +T = TypeVar('T') # Any type. +KT = TypeVar('KT') # Key type. +VT = TypeVar('VT') # Value type. +T_co = TypeVar('T_co', covariant=True) # Any type covariant containers. +V_co = TypeVar('V_co', covariant=True) # Any type covariant containers. 
+VT_co = TypeVar('VT_co', covariant=True) # Value type covariant containers. +T_contra = TypeVar('T_contra', contravariant=True) # Ditto contravariant. + +# A useful type variable with constraints. This represents string types. +# (This one *is* for export!) +AnyStr = TypeVar('AnyStr', bytes, str) + + +class UnionMeta(TypingMeta): + """Metaclass for Union.""" + + def __new__(cls, name, bases, namespace, parameters=None, _root=False): + if parameters is None: + return super().__new__(cls, name, bases, namespace, _root=_root) + if not isinstance(parameters, tuple): + raise TypeError("Expected parameters=") + # Flatten out Union[Union[...], ...] and type-check non-Union args. + params = [] + msg = "Union[arg, ...]: each arg must be a type." + for p in parameters: + if isinstance(p, UnionMeta): + params.extend(p.__union_params__) + else: + params.append(_type_check(p, msg)) + # Weed out strict duplicates, preserving the first of each occurrence. + all_params = set(params) + if len(all_params) < len(params): + new_params = [] + for t in params: + if t in all_params: + new_params.append(t) + all_params.remove(t) + params = new_params + assert not all_params, all_params + # Weed out subclasses. + # E.g. Union[int, Employee, Manager] == Union[int, Employee]. + # If Any or object is present it will be the sole survivor. + # If both Any and object are present, Any wins. + # Never discard type variables, except against Any. + # (In particular, Union[str, AnyStr] != AnyStr.) + all_params = set(params) + for t1 in params: + if t1 is Any: + return Any + if isinstance(t1, TypeVar): + continue + if isinstance(t1, _TypeAlias): + # _TypeAlias is not a real class. + continue + if not isinstance(t1, type): + assert callable(t1) # A callable might sneak through. + continue + if any(isinstance(t2, type) and issubclass(t1, t2) + for t2 in all_params - {t1} if not isinstance(t2, TypeVar)): + all_params.remove(t1) + # It's not a union if there's only one type left. + if len(all_params) == 1: + return all_params.pop() + # Create a new class with these params. + self = super().__new__(cls, name, bases, {}, _root=True) + self.__union_params__ = tuple(t for t in params if t in all_params) + self.__union_set_params__ = frozenset(self.__union_params__) + return self + + def _eval_type(self, globalns, localns): + p = tuple(_eval_type(t, globalns, localns) + for t in self.__union_params__) + if p == self.__union_params__: + return self + else: + return self.__class__(self.__name__, self.__bases__, {}, + p, _root=True) + + def _get_type_vars(self, tvars): + if self.__union_params__: + _get_type_vars(self.__union_params__, tvars) + + def __repr__(self): + r = super().__repr__() + if self.__union_params__: + r += '[%s]' % (', '.join(_type_repr(t) + for t in self.__union_params__)) + return r + + def __getitem__(self, parameters): + if self.__union_params__ is not None: + raise TypeError( + "Cannot subscript an existing Union. 
Use Union[u, t] instead.") + if parameters == (): + raise TypeError("Cannot take a Union of no types.") + if not isinstance(parameters, tuple): + parameters = (parameters,) + return self.__class__(self.__name__, self.__bases__, + dict(self.__dict__), parameters, _root=True) + + def __eq__(self, other): + if not isinstance(other, UnionMeta): + return NotImplemented + return self.__union_set_params__ == other.__union_set_params__ + + def __hash__(self): + return hash(self.__union_set_params__) + + def __instancecheck__(self, obj): + raise TypeError("Unions cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + if cls is Any: + return True + if self.__union_params__ is None: + return isinstance(cls, UnionMeta) + elif isinstance(cls, UnionMeta): + if cls.__union_params__ is None: + return False + return all(issubclass(c, self) for c in (cls.__union_params__)) + elif isinstance(cls, TypeVar): + if cls in self.__union_params__: + return True + if cls.__constraints__: + return issubclass(Union[cls.__constraints__], self) + return False + else: + return any(issubclass(cls, t) for t in self.__union_params__) + + +class Union(Final, metaclass=UnionMeta, _root=True): + """Union type; Union[X, Y] means either X or Y. + + To define a union, use e.g. Union[int, str]. Details: + + - The arguments must be types and there must be at least one. + + - None as an argument is a special case and is replaced by + type(None). + + - Unions of unions are flattened, e.g.:: + + Union[Union[int, str], float] == Union[int, str, float] + + - Unions of a single argument vanish, e.g.:: + + Union[int] == int # The constructor actually returns int + + - Redundant arguments are skipped, e.g.:: + + Union[int, str, int] == Union[int, str] + + - When comparing unions, the argument order is ignored, e.g.:: + + Union[int, str] == Union[str, int] + + - When two arguments have a subclass relationship, the least + derived argument is kept, e.g.:: + + class Employee: pass + class Manager(Employee): pass + Union[int, Employee, Manager] == Union[int, Employee] + Union[Manager, int, Employee] == Union[int, Employee] + Union[Employee, Manager] == Employee + + - Corollary: if Any is present it is the sole survivor, e.g.:: + + Union[int, Any] == Any + + - Similar for object:: + + Union[int, object] == object + + - To cut a tie: Union[object, Any] == Union[Any, object] == Any. + + - You cannot subclass or instantiate a union. + + - You cannot write Union[X][Y] (what would it mean?). + + - You can use Optional[X] as a shorthand for Union[X, None]. + """ + + # Unsubscripted Union type has params set to None. + __union_params__ = None + __union_set_params__ = None + + +class OptionalMeta(TypingMeta): + """Metaclass for Optional.""" + + def __new__(cls, name, bases, namespace, _root=False): + return super().__new__(cls, name, bases, namespace, _root=_root) + + def __getitem__(self, arg): + arg = _type_check(arg, "Optional[t] requires a single type.") + return Union[arg, type(None)] + + +class Optional(Final, metaclass=OptionalMeta, _root=True): + """Optional type. + + Optional[X] is equivalent to Union[X, type(None)]. 
+ """ + + __slots__ = () + + +class TupleMeta(TypingMeta): + """Metaclass for Tuple.""" + + def __new__(cls, name, bases, namespace, parameters=None, + use_ellipsis=False, _root=False): + self = super().__new__(cls, name, bases, namespace, _root=_root) + self.__tuple_params__ = parameters + self.__tuple_use_ellipsis__ = use_ellipsis + return self + + def _get_type_vars(self, tvars): + if self.__tuple_params__: + _get_type_vars(self.__tuple_params__, tvars) + + def _eval_type(self, globalns, localns): + tp = self.__tuple_params__ + if tp is None: + return self + p = tuple(_eval_type(t, globalns, localns) for t in tp) + if p == self.__tuple_params__: + return self + else: + return self.__class__(self.__name__, self.__bases__, {}, + p, _root=True) + + def __repr__(self): + r = super().__repr__() + if self.__tuple_params__ is not None: + params = [_type_repr(p) for p in self.__tuple_params__] + if self.__tuple_use_ellipsis__: + params.append('...') + if not params: + params.append('()') + r += '[%s]' % ( + ', '.join(params)) + return r + + def __getitem__(self, parameters): + if self.__tuple_params__ is not None: + raise TypeError("Cannot re-parameterize %r" % (self,)) + if not isinstance(parameters, tuple): + parameters = (parameters,) + if len(parameters) == 2 and parameters[1] == Ellipsis: + parameters = parameters[:1] + use_ellipsis = True + msg = "Tuple[t, ...]: t must be a type." + else: + use_ellipsis = False + msg = "Tuple[t0, t1, ...]: each t must be a type." + parameters = tuple(_type_check(p, msg) for p in parameters) + return self.__class__(self.__name__, self.__bases__, + dict(self.__dict__), parameters, + use_ellipsis=use_ellipsis, _root=True) + + def __eq__(self, other): + if not isinstance(other, TupleMeta): + return NotImplemented + return (self.__tuple_params__ == other.__tuple_params__ and + self.__tuple_use_ellipsis__ == other.__tuple_use_ellipsis__) + + def __hash__(self): + return hash(self.__tuple_params__) + + def __instancecheck__(self, obj): + raise TypeError("Tuples cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + if cls is Any: + return True + if not isinstance(cls, type): + return super().__subclasscheck__(cls) # To TypeError. + if issubclass(cls, tuple): + return True # Special case. + if not isinstance(cls, TupleMeta): + return super().__subclasscheck__(cls) # False. + if self.__tuple_params__ is None: + return True + if cls.__tuple_params__ is None: + return False # ??? + if cls.__tuple_use_ellipsis__ != self.__tuple_use_ellipsis__: + return False + # Covariance. + return (len(self.__tuple_params__) == len(cls.__tuple_params__) and + all(issubclass(x, p) + for x, p in zip(cls.__tuple_params__, + self.__tuple_params__))) + + +class Tuple(Final, metaclass=TupleMeta, _root=True): + """Tuple type; Tuple[X, Y] is the cross-product type of X and Y. + + Example: Tuple[T1, T2] is a tuple of two elements corresponding + to type variables T1 and T2. Tuple[int, float, str] is a tuple + of an int, a float and a string. + + To specify a variable-length tuple of homogeneous type, use Sequence[T]. + """ + + __slots__ = () + + +class CallableMeta(TypingMeta): + """Metaclass for Callable.""" + + def __new__(cls, name, bases, namespace, _root=False, + args=None, result=None): + if args is None and result is None: + pass # Must be 'class Callable'. + else: + if args is not Ellipsis: + if not isinstance(args, list): + raise TypeError("Callable[args, result]: " + "args must be a list." + " Got %.100r." 
% (args,)) + msg = "Callable[[arg, ...], result]: each arg must be a type." + args = tuple(_type_check(arg, msg) for arg in args) + msg = "Callable[args, result]: result must be a type." + result = _type_check(result, msg) + self = super().__new__(cls, name, bases, namespace, _root=_root) + self.__args__ = args + self.__result__ = result + return self + + def _get_type_vars(self, tvars): + if self.__args__: + _get_type_vars(self.__args__, tvars) + + def _eval_type(self, globalns, localns): + if self.__args__ is None and self.__result__ is None: + return self + if self.__args__ is Ellipsis: + args = self.__args__ + else: + args = [_eval_type(t, globalns, localns) for t in self.__args__] + result = _eval_type(self.__result__, globalns, localns) + if args == self.__args__ and result == self.__result__: + return self + else: + return self.__class__(self.__name__, self.__bases__, {}, + args=args, result=result, _root=True) + + def __repr__(self): + r = super().__repr__() + if self.__args__ is not None or self.__result__ is not None: + if self.__args__ is Ellipsis: + args_r = '...' + else: + args_r = '[%s]' % ', '.join(_type_repr(t) + for t in self.__args__) + r += '[%s, %s]' % (args_r, _type_repr(self.__result__)) + return r + + def __getitem__(self, parameters): + if self.__args__ is not None or self.__result__ is not None: + raise TypeError("This Callable type is already parameterized.") + if not isinstance(parameters, tuple) or len(parameters) != 2: + raise TypeError( + "Callable must be used as Callable[[arg, ...], result].") + args, result = parameters + return self.__class__(self.__name__, self.__bases__, + dict(self.__dict__), _root=True, + args=args, result=result) + + def __eq__(self, other): + if not isinstance(other, CallableMeta): + return NotImplemented + return (self.__args__ == other.__args__ and + self.__result__ == other.__result__) + + def __hash__(self): + return hash(self.__args__) ^ hash(self.__result__) + + def __instancecheck__(self, obj): + # For unparametrized Callable we allow this, because + # typing.Callable should be equivalent to + # collections.abc.Callable. + if self.__args__ is None and self.__result__ is None: + return isinstance(obj, collections_abc.Callable) + else: + raise TypeError("Callable[] cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + if cls is Any: + return True + if not isinstance(cls, CallableMeta): + return super().__subclasscheck__(cls) + if self.__args__ is None and self.__result__ is None: + return True + # We're not doing covariance or contravariance -- this is *invariance*. + return self == cls + + +class Callable(Final, metaclass=CallableMeta, _root=True): + """Callable type; Callable[[int], str] is a function of (int) -> str. + + The subscription syntax must always be used with exactly two + values: the argument list and the return type. The argument list + must be a list of types; the return type must be a single type. + + There is no syntax to indicate optional or keyword arguments, + such function types are rarely used as callback types. + """ + + __slots__ = () + + +def _gorg(a): + """Return the farthest origin of a generic class.""" + assert isinstance(a, GenericMeta) + while a.__origin__ is not None: + a = a.__origin__ + return a + + +def _geqv(a, b): + """Return whether two generic classes are equivalent. + + The intention is to consider generic class X and any of its + parameterized forms (X[T], X[int], etc.) as equivalent. + + However, X is not equivalent to a subclass of X. 
+ + The relation is reflexive, symmetric and transitive. + """ + assert isinstance(a, GenericMeta) and isinstance(b, GenericMeta) + # Reduce each to its origin. + return _gorg(a) is _gorg(b) + + +def _next_in_mro(cls): + """Helper for Generic.__new__. + + Returns the class after the last occurrence of Generic or + Generic[...] in cls.__mro__. + """ + next_in_mro = object + # Look for the last occurrence of Generic or Generic[...]. + for i, c in enumerate(cls.__mro__[:-1]): + if isinstance(c, GenericMeta) and _gorg(c) is Generic: + next_in_mro = cls.__mro__[i+1] + return next_in_mro + + +class GenericMeta(TypingMeta, abc.ABCMeta): + """Metaclass for generic types.""" + + def __new__(cls, name, bases, namespace, + tvars=None, args=None, origin=None, extra=None): + self = super().__new__(cls, name, bases, namespace, _root=True) + + if tvars is not None: + # Called from __getitem__() below. + assert origin is not None + assert all(isinstance(t, TypeVar) for t in tvars), tvars + else: + # Called from class statement. + assert tvars is None, tvars + assert args is None, args + assert origin is None, origin + + # Get the full set of tvars from the bases. + tvars = _type_vars(bases) + # Look for Generic[T1, ..., Tn]. + # If found, tvars must be a subset of it. + # If not found, tvars is it. + # Also check for and reject plain Generic, + # and reject multiple Generic[...]. + gvars = None + for base in bases: + if base is Generic: + raise TypeError("Cannot inherit from plain Generic") + if (isinstance(base, GenericMeta) and + base.__origin__ is Generic): + if gvars is not None: + raise TypeError( + "Cannot inherit from Generic[...] multiple types.") + gvars = base.__parameters__ + if gvars is None: + gvars = tvars + else: + tvarset = set(tvars) + gvarset = set(gvars) + if not tvarset <= gvarset: + raise TypeError( + "Some type variables (%s) " + "are not listed in Generic[%s]" % + (", ".join(str(t) for t in tvars if t not in gvarset), + ", ".join(str(g) for g in gvars))) + tvars = gvars + + self.__parameters__ = tvars + self.__args__ = args + self.__origin__ = origin + self.__extra__ = extra + # Speed hack (https://github.com/python/typing/issues/196). + self.__next_in_mro__ = _next_in_mro(self) + return self + + def _get_type_vars(self, tvars): + if self.__origin__ and self.__parameters__: + _get_type_vars(self.__parameters__, tvars) + + def __repr__(self): + if self.__origin__ is not None: + r = repr(self.__origin__) + else: + r = super().__repr__() + if self.__args__: + r += '[%s]' % ( + ', '.join(_type_repr(p) for p in self.__args__)) + if self.__parameters__: + r += '<%s>' % ( + ', '.join(_type_repr(p) for p in self.__parameters__)) + return r + + def __eq__(self, other): + if not isinstance(other, GenericMeta): + return NotImplemented + if self.__origin__ is not None: + return (self.__origin__ is other.__origin__ and + self.__args__ == other.__args__ and + self.__parameters__ == other.__parameters__) + else: + return self is other + + def __hash__(self): + return hash((self.__name__, self.__parameters__)) + + def __getitem__(self, params): + if not isinstance(params, tuple): + params = (params,) + if not params: + raise TypeError( + "Parameter list to %s[...] cannot be empty" % _qualname(self)) + msg = "Parameters to generic types must be types." + params = tuple(_type_check(p, msg) for p in params) + if self is Generic: + # Generic can only be subscripted with unique type variables. + if not all(isinstance(p, TypeVar) for p in params): + raise TypeError( + "Parameters to Generic[...] 
must all be type variables") + if len(set(params)) != len(params): + raise TypeError( + "Parameters to Generic[...] must all be unique") + tvars = params + args = None + elif self is _Protocol: + # _Protocol is internal, don't check anything. + tvars = params + args = None + elif self.__origin__ in (Generic, _Protocol): + # Can't subscript Generic[...] or _Protocol[...]. + raise TypeError("Cannot subscript already-subscripted %s" % + repr(self)) + else: + # Subscripting a regular Generic subclass. + if not self.__parameters__: + raise TypeError("%s is not a generic class" % repr(self)) + alen = len(params) + elen = len(self.__parameters__) + if alen != elen: + raise TypeError( + "Too %s parameters for %s; actual %s, expected %s" % + ("many" if alen > elen else "few", repr(self), alen, elen)) + tvars = _type_vars(params) + args = params + return self.__class__(self.__name__, + (self,) + self.__bases__, + dict(self.__dict__), + tvars=tvars, + args=args, + origin=self, + extra=self.__extra__) + + def __instancecheck__(self, instance): + # Since we extend ABC.__subclasscheck__ and + # ABC.__instancecheck__ inlines the cache checking done by the + # latter, we must extend __instancecheck__ too. For simplicity + # we just skip the cache check -- instance checks for generic + # classes are supposed to be rare anyways. + return self.__subclasscheck__(instance.__class__) + + def __subclasscheck__(self, cls): + if cls is Any: + return True + if isinstance(cls, GenericMeta): + # For a class C(Generic[T]) where T is co-variant, + # C[X] is a subclass of C[Y] iff X is a subclass of Y. + origin = self.__origin__ + if origin is not None and origin is cls.__origin__: + assert len(self.__args__) == len(origin.__parameters__) + assert len(cls.__args__) == len(origin.__parameters__) + for p_self, p_cls, p_origin in zip(self.__args__, + cls.__args__, + origin.__parameters__): + if isinstance(p_origin, TypeVar): + if p_origin.__covariant__: + # Covariant -- p_cls must be a subclass of p_self. + if not issubclass(p_cls, p_self): + break + elif p_origin.__contravariant__: + # Contravariant. I think it's the opposite. :-) + if not issubclass(p_self, p_cls): + break + else: + # Invariant -- p_cls and p_self must equal. + if p_self != p_cls: + break + else: + # If the origin's parameter is not a typevar, + # insist on invariance. + if p_self != p_cls: + break + else: + return True + # If we break out of the loop, the superclass gets a chance. + if super().__subclasscheck__(cls): + return True + if self.__extra__ is None or isinstance(cls, GenericMeta): + return False + return issubclass(cls, self.__extra__) + + +# Prevent checks for Generic to crash when defining Generic. +Generic = None + + +class Generic(metaclass=GenericMeta): + """Abstract base class for generic types. + + A generic type is typically declared by inheriting from an + instantiation of this class with one or more type variables. + For example, a generic mapping type might be defined as:: + + class Mapping(Generic[KT, VT]): + def __getitem__(self, key: KT) -> VT: + ... + # Etc. 
+ + This class can then be used as follows:: + + def lookup_name(mapping: Mapping[KT, VT], key: KT, default: VT) -> VT: + try: + return mapping[key] + except KeyError: + return default + """ + + __slots__ = () + + def __new__(cls, *args, **kwds): + if cls.__origin__ is None: + return cls.__next_in_mro__.__new__(cls) + else: + origin = _gorg(cls) + obj = cls.__next_in_mro__.__new__(origin) + obj.__init__(*args, **kwds) + return obj + + +def cast(typ, val): + """Cast a value to a type. + + This returns the value unchanged. To the type checker this + signals that the return value has the designated type, but at + runtime we intentionally don't check anything (we want this + to be as fast as possible). + """ + return val + + +def _get_defaults(func): + """Internal helper to extract the default arguments, by name.""" + code = func.__code__ + pos_count = code.co_argcount + arg_names = code.co_varnames + arg_names = arg_names[:pos_count] + defaults = func.__defaults__ or () + kwdefaults = func.__kwdefaults__ + res = dict(kwdefaults) if kwdefaults else {} + pos_offset = pos_count - len(defaults) + for name, value in zip(arg_names[pos_offset:], defaults): + assert name not in res + res[name] = value + return res + + +def get_type_hints(obj, globalns=None, localns=None): + """Return type hints for a function or method object. + + This is often the same as obj.__annotations__, but it handles + forward references encoded as string literals, and if necessary + adds Optional[t] if a default value equal to None is set. + + BEWARE -- the behavior of globalns and localns is counterintuitive + (unless you are familiar with how eval() and exec() work). The + search order is locals first, then globals. + + - If no dict arguments are passed, an attempt is made to use the + globals from obj, and these are also used as the locals. If the + object does not appear to have globals, an exception is raised. + + - If one dict argument is passed, it is used for both globals and + locals. + + - If two dict arguments are passed, they specify globals and + locals, respectively. + """ + if getattr(obj, '__no_type_check__', None): + return {} + if globalns is None: + globalns = getattr(obj, '__globals__', {}) + if localns is None: + localns = globalns + elif localns is None: + localns = globalns + defaults = _get_defaults(obj) + hints = dict(obj.__annotations__) + for name, value in hints.items(): + if isinstance(value, str): + value = _ForwardRef(value) + value = _eval_type(value, globalns, localns) + if name in defaults and defaults[name] is None: + value = Optional[value] + hints[name] = value + return hints + + +def no_type_check(arg): + """Decorator to indicate that annotations are not type hints. + + The argument must be a class or function; if it is a class, it + applies recursively to all methods defined in that class (but not + to methods defined in its superclasses or subclasses). + + This mutates the function(s) in place. + """ + if isinstance(arg, type): + for obj in arg.__dict__.values(): + if isinstance(obj, types.FunctionType): + obj.__no_type_check__ = True + else: + arg.__no_type_check__ = True + return arg + + +def no_type_check_decorator(decorator): + """Decorator to give another decorator the @no_type_check effect. + + This wraps the decorator with something that wraps the decorated + function in @no_type_check. 
+ """ + + @functools.wraps(decorator) + def wrapped_decorator(*args, **kwds): + func = decorator(*args, **kwds) + func = no_type_check(func) + return func + + return wrapped_decorator + + +def _overload_dummy(*args, **kwds): + """Helper for @overload to raise when called.""" + raise NotImplementedError( + "You should not call an overloaded function. " + "A series of @overload-decorated functions " + "outside a stub module should always be followed " + "by an implementation that is not @overload-ed.") + + +def overload(func): + """Decorator for overloaded functions/methods. + + In a stub file, place two or more stub definitions for the same + function in a row, each decorated with @overload. For example: + + @overload + def utf8(value: None) -> None: ... + @overload + def utf8(value: bytes) -> bytes: ... + @overload + def utf8(value: str) -> bytes: ... + + In a non-stub file (i.e. a regular .py file), do the same but + follow it with an implementation. The implementation should *not* + be decorated with @overload. For example: + + @overload + def utf8(value: None) -> None: ... + @overload + def utf8(value: bytes) -> bytes: ... + @overload + def utf8(value: str) -> bytes: ... + def utf8(value): + # implementation goes here + """ + return _overload_dummy + + +class _ProtocolMeta(GenericMeta): + """Internal metaclass for _Protocol. + + This exists so _Protocol classes can be generic without deriving + from Generic. + """ + + def __instancecheck__(self, obj): + raise TypeError("Protocols cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + if not self._is_protocol: + # No structural checks since this isn't a protocol. + return NotImplemented + + if self is _Protocol: + # Every class is a subclass of the empty protocol. + return True + + # Find all attributes defined in the protocol. + attrs = self._get_protocol_attrs() + + for attr in attrs: + if not any(attr in d.__dict__ for d in cls.__mro__): + return False + return True + + def _get_protocol_attrs(self): + # Get all Protocol base classes. + protocol_bases = [] + for c in self.__mro__: + if getattr(c, '_is_protocol', False) and c.__name__ != '_Protocol': + protocol_bases.append(c) + + # Get attributes included in protocol. + attrs = set() + for base in protocol_bases: + for attr in base.__dict__.keys(): + # Include attributes not defined in any non-protocol bases. + for c in self.__mro__: + if (c is not base and attr in c.__dict__ and + not getattr(c, '_is_protocol', False)): + break + else: + if (not attr.startswith('_abc_') and + attr != '__abstractmethods__' and + attr != '_is_protocol' and + attr != '__dict__' and + attr != '__args__' and + attr != '__slots__' and + attr != '_get_protocol_attrs' and + attr != '__next_in_mro__' and + attr != '__parameters__' and + attr != '__origin__' and + attr != '__extra__' and + attr != '__module__'): + attrs.add(attr) + + return attrs + + +class _Protocol(metaclass=_ProtocolMeta): + """Internal base class for protocol classes. + + This implements a simple-minded structural isinstance check + (similar but more general than the one-offs in collections.abc + such as Hashable). + """ + + __slots__ = () + + _is_protocol = True + + +# Various ABCs mimicking those in collections.abc. +# A few are simply re-exported for completeness. + +Hashable = collections_abc.Hashable # Not generic. 
+ + +if hasattr(collections_abc, 'Awaitable'): + class Awaitable(Generic[T_co], extra=collections_abc.Awaitable): + __slots__ = () +else: + Awaitable = None + + +if hasattr(collections_abc, 'AsyncIterable'): + + class AsyncIterable(Generic[T_co], extra=collections_abc.AsyncIterable): + __slots__ = () + + class AsyncIterator(AsyncIterable[T_co], + extra=collections_abc.AsyncIterator): + __slots__ = () + +else: + AsyncIterable = None + AsyncIterator = None + + +class Iterable(Generic[T_co], extra=collections_abc.Iterable): + __slots__ = () + + +class Iterator(Iterable[T_co], extra=collections_abc.Iterator): + __slots__ = () + + +class SupportsInt(_Protocol): + __slots__ = () + + @abstractmethod + def __int__(self) -> int: + pass + + +class SupportsFloat(_Protocol): + __slots__ = () + + @abstractmethod + def __float__(self) -> float: + pass + + +class SupportsComplex(_Protocol): + __slots__ = () + + @abstractmethod + def __complex__(self) -> complex: + pass + + +class SupportsBytes(_Protocol): + __slots__ = () + + @abstractmethod + def __bytes__(self) -> bytes: + pass + + +class SupportsAbs(_Protocol[T_co]): + __slots__ = () + + @abstractmethod + def __abs__(self) -> T_co: + pass + + +class SupportsRound(_Protocol[T_co]): + __slots__ = () + + @abstractmethod + def __round__(self, ndigits: int = 0) -> T_co: + pass + + +if hasattr(collections_abc, 'Reversible'): + class Reversible(Iterable[T_co], extra=collections_abc.Reversible): + __slots__ = () +else: + class Reversible(_Protocol[T_co]): + __slots__ = () + + @abstractmethod + def __reversed__(self) -> 'Iterator[T_co]': + pass + + +Sized = collections_abc.Sized # Not generic. + + +class Container(Generic[T_co], extra=collections_abc.Container): + __slots__ = () + + +# Callable was defined earlier. + + +class AbstractSet(Sized, Iterable[T_co], Container[T_co], + extra=collections_abc.Set): + pass + + +class MutableSet(AbstractSet[T], extra=collections_abc.MutableSet): + pass + + +# NOTE: Only the value type is covariant. +class Mapping(Sized, Iterable[KT], Container[KT], Generic[KT, VT_co], + extra=collections_abc.Mapping): + pass + + +class MutableMapping(Mapping[KT, VT], extra=collections_abc.MutableMapping): + pass + +if hasattr(collections_abc, 'Reversible'): + class Sequence(Sized, Reversible[T_co], Container[T_co], + extra=collections_abc.Sequence): + pass +else: + class Sequence(Sized, Iterable[T_co], Container[T_co], + extra=collections_abc.Sequence): + pass + + +class MutableSequence(Sequence[T], extra=collections_abc.MutableSequence): + pass + + +class ByteString(Sequence[int], extra=collections_abc.ByteString): + pass + + +ByteString.register(type(memoryview(b''))) + + +class List(list, MutableSequence[T], extra=list): + + def __new__(cls, *args, **kwds): + if _geqv(cls, List): + raise TypeError("Type List cannot be instantiated; " + "use list() instead") + return list.__new__(cls, *args, **kwds) + + +class Set(set, MutableSet[T], extra=set): + + def __new__(cls, *args, **kwds): + if _geqv(cls, Set): + raise TypeError("Type Set cannot be instantiated; " + "use set() instead") + return set.__new__(cls, *args, **kwds) + + +class _FrozenSetMeta(GenericMeta): + """This metaclass ensures set is not a subclass of FrozenSet. + + Without this metaclass, set would be considered a subclass of + FrozenSet, because FrozenSet.__extra__ is collections.abc.Set, and + set is a subclass of that. 
+ """ + + def __subclasscheck__(self, cls): + if issubclass(cls, Set): + return False + return super().__subclasscheck__(cls) + + +class FrozenSet(frozenset, AbstractSet[T_co], metaclass=_FrozenSetMeta, + extra=frozenset): + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, FrozenSet): + raise TypeError("Type FrozenSet cannot be instantiated; " + "use frozenset() instead") + return frozenset.__new__(cls, *args, **kwds) + + +class MappingView(Sized, Iterable[T_co], extra=collections_abc.MappingView): + pass + + +class KeysView(MappingView[KT], AbstractSet[KT], + extra=collections_abc.KeysView): + pass + + +class ItemsView(MappingView[Tuple[KT, VT_co]], + AbstractSet[Tuple[KT, VT_co]], + Generic[KT, VT_co], + extra=collections_abc.ItemsView): + pass + + +class ValuesView(MappingView[VT_co], extra=collections_abc.ValuesView): + pass + + +if hasattr(contextlib, 'AbstractContextManager'): + class ContextManager(Generic[T_co], extra=contextlib.AbstractContextManager): + __slots__ = () + __all__.append('ContextManager') + + +class Dict(dict, MutableMapping[KT, VT], extra=dict): + + def __new__(cls, *args, **kwds): + if _geqv(cls, Dict): + raise TypeError("Type Dict cannot be instantiated; " + "use dict() instead") + return dict.__new__(cls, *args, **kwds) + +class DefaultDict(collections.defaultdict, MutableMapping[KT, VT], + extra=collections.defaultdict): + + def __new__(cls, *args, **kwds): + if _geqv(cls, DefaultDict): + raise TypeError("Type DefaultDict cannot be instantiated; " + "use collections.defaultdict() instead") + return collections.defaultdict.__new__(cls, *args, **kwds) + +# Determine what base class to use for Generator. +if hasattr(collections_abc, 'Generator'): + # Sufficiently recent versions of 3.5 have a Generator ABC. + _G_base = collections_abc.Generator +else: + # Fall back on the exact type. + _G_base = types.GeneratorType + + +class Generator(Iterator[T_co], Generic[T_co, T_contra, V_co], + extra=_G_base): + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, Generator): + raise TypeError("Type Generator cannot be instantiated; " + "create a subclass instead") + return super().__new__(cls, *args, **kwds) + + +# Internal type variable used for Type[]. +CT = TypeVar('CT', covariant=True, bound=type) + + +# This is not a real generic class. Don't use outside annotations. +class Type(type, Generic[CT], extra=type): + """A special construct usable to annotate class objects. + + For example, suppose we have the following classes:: + + class User: ... # Abstract base for User classes + class BasicUser(User): ... + class ProUser(User): ... + class TeamUser(User): ... + + And a function that takes a class argument that's a subclass of + User and returns an instance of the corresponding class:: + + U = TypeVar('U', bound=User) + def new_user(user_class: Type[U]) -> U: + user = user_class() + # (Here we could write the user object to a database) + return user + + joe = new_user(BasicUser) + + At this point the type checker knows that joe has type BasicUser. + """ + + +def NamedTuple(typename, fields): + """Typed version of namedtuple. + + Usage:: + + Employee = typing.NamedTuple('Employee', [('name', str), 'id', int)]) + + This is equivalent to:: + + Employee = collections.namedtuple('Employee', ['name', 'id']) + + The resulting class has one extra attribute: _field_types, + giving a dict mapping field names to types. (The field names + are in the _fields attribute, which is part of the namedtuple + API.) 
+ """ + fields = [(n, t) for n, t in fields] + cls = collections.namedtuple(typename, [n for n, t in fields]) + cls._field_types = dict(fields) + # Set the module to the caller's module (otherwise it'd be 'typing'). + try: + cls.__module__ = sys._getframe(1).f_globals.get('__name__', '__main__') + except (AttributeError, ValueError): + pass + return cls + + +def NewType(name, tp): + """NewType creates simple unique types with almost zero + runtime overhead. NewType(name, tp) is considered a subtype of tp + by static type checkers. At runtime, NewType(name, tp) returns + a dummy function that simply returns its argument. Usage:: + + UserId = NewType('UserId', int) + + def name_by_id(user_id: UserId) -> str: + ... + + UserId('user') # Fails type check + + name_by_id(42) # Fails type check + name_by_id(UserId(42)) # OK + + num = UserId(5) + 1 # type: int + """ + + def new_type(x): + return x + + new_type.__name__ = name + new_type.__supertype__ = tp + return new_type + + +# Python-version-specific alias (Python 2: unicode; Python 3: str) +Text = str + + +# Constant that's True when type checking, but False here. +TYPE_CHECKING = False + + +class IO(Generic[AnyStr]): + """Generic base class for TextIO and BinaryIO. + + This is an abstract, generic version of the return of open(). + + NOTE: This does not distinguish between the different possible + classes (text vs. binary, read vs. write vs. read/write, + append-only, unbuffered). The TextIO and BinaryIO subclasses + below capture the distinctions between text vs. binary, which is + pervasive in the interface; however we currently do not offer a + way to track the other distinctions in the type system. + """ + + __slots__ = () + + @abstractproperty + def mode(self) -> str: + pass + + @abstractproperty + def name(self) -> str: + pass + + @abstractmethod + def close(self) -> None: + pass + + @abstractmethod + def closed(self) -> bool: + pass + + @abstractmethod + def fileno(self) -> int: + pass + + @abstractmethod + def flush(self) -> None: + pass + + @abstractmethod + def isatty(self) -> bool: + pass + + @abstractmethod + def read(self, n: int = -1) -> AnyStr: + pass + + @abstractmethod + def readable(self) -> bool: + pass + + @abstractmethod + def readline(self, limit: int = -1) -> AnyStr: + pass + + @abstractmethod + def readlines(self, hint: int = -1) -> List[AnyStr]: + pass + + @abstractmethod + def seek(self, offset: int, whence: int = 0) -> int: + pass + + @abstractmethod + def seekable(self) -> bool: + pass + + @abstractmethod + def tell(self) -> int: + pass + + @abstractmethod + def truncate(self, size: int = None) -> int: + pass + + @abstractmethod + def writable(self) -> bool: + pass + + @abstractmethod + def write(self, s: AnyStr) -> int: + pass + + @abstractmethod + def writelines(self, lines: List[AnyStr]) -> None: + pass + + @abstractmethod + def __enter__(self) -> 'IO[AnyStr]': + pass + + @abstractmethod + def __exit__(self, type, value, traceback) -> None: + pass + + +class BinaryIO(IO[bytes]): + """Typed version of the return of open() in binary mode.""" + + __slots__ = () + + @abstractmethod + def write(self, s: Union[bytes, bytearray]) -> int: + pass + + @abstractmethod + def __enter__(self) -> 'BinaryIO': + pass + + +class TextIO(IO[str]): + """Typed version of the return of open() in text mode.""" + + __slots__ = () + + @abstractproperty + def buffer(self) -> BinaryIO: + pass + + @abstractproperty + def encoding(self) -> str: + pass + + @abstractproperty + def errors(self) -> str: + pass + + @abstractproperty + def 
line_buffering(self) -> bool: + pass + + @abstractproperty + def newlines(self) -> Any: + pass + + @abstractmethod + def __enter__(self) -> 'TextIO': + pass + + +class io: + """Wrapper namespace for IO generic classes.""" + + __all__ = ['IO', 'TextIO', 'BinaryIO'] + IO = IO + TextIO = TextIO + BinaryIO = BinaryIO + +io.__name__ = __name__ + '.io' +sys.modules[io.__name__] = io + + +Pattern = _TypeAlias('Pattern', AnyStr, type(stdlib_re.compile('')), + lambda p: p.pattern) +Match = _TypeAlias('Match', AnyStr, type(stdlib_re.match('', '')), + lambda m: m.re.pattern) + + +class re: + """Wrapper namespace for re type aliases.""" + + __all__ = ['Pattern', 'Match'] + Pattern = Pattern + Match = Match + +re.__name__ = __name__ + '.re' +sys.modules[re.__name__] = re diff --git a/typing_extensions/test_data/python-3.5.3/_collections_abc.py b/typing_extensions/test_data/python-3.5.3/_collections_abc.py new file mode 100644 index 00000000..8bebd69b --- /dev/null +++ b/typing_extensions/test_data/python-3.5.3/_collections_abc.py @@ -0,0 +1,941 @@ +# Copyright 2007 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Abstract Base Classes (ABCs) for collections, according to PEP 3119. + +Unit tests are in test_collections. +""" + +from abc import ABCMeta, abstractmethod +import sys + +__all__ = ["Awaitable", "Coroutine", "AsyncIterable", "AsyncIterator", + "Hashable", "Iterable", "Iterator", "Generator", + "Sized", "Container", "Callable", + "Set", "MutableSet", + "Mapping", "MutableMapping", + "MappingView", "KeysView", "ItemsView", "ValuesView", + "Sequence", "MutableSequence", + "ByteString", + ] + +# This module has been renamed from collections.abc to _collections_abc to +# speed up interpreter startup. Some of the types such as MutableMapping are +# required early but collections module imports a lot of other modules. +# See issue #19218 +__name__ = "collections.abc" + +# Private list of types that we want to register with the various ABCs +# so that they will pass tests like: +# it = iter(somebytearray) +# assert isinstance(it, Iterable) +# Note: in other implementations, these types might not be distinct +# and they may have their own implementation specific types that +# are not included on this list. +bytes_iterator = type(iter(b'')) +bytearray_iterator = type(iter(bytearray())) +#callable_iterator = ??? 
+dict_keyiterator = type(iter({}.keys())) +dict_valueiterator = type(iter({}.values())) +dict_itemiterator = type(iter({}.items())) +list_iterator = type(iter([])) +list_reverseiterator = type(iter(reversed([]))) +range_iterator = type(iter(range(0))) +longrange_iterator = type(iter(range(1 << 1000))) +set_iterator = type(iter(set())) +str_iterator = type(iter("")) +tuple_iterator = type(iter(())) +zip_iterator = type(iter(zip())) +## views ## +dict_keys = type({}.keys()) +dict_values = type({}.values()) +dict_items = type({}.items()) +## misc ## +mappingproxy = type(type.__dict__) +generator = type((lambda: (yield))()) +## coroutine ## +async def _coro(): pass +_coro = _coro() +coroutine = type(_coro) +_coro.close() # Prevent ResourceWarning +del _coro + + +### ONE-TRICK PONIES ### + +class Hashable(metaclass=ABCMeta): + + __slots__ = () + + @abstractmethod + def __hash__(self): + return 0 + + @classmethod + def __subclasshook__(cls, C): + if cls is Hashable: + for B in C.__mro__: + if "__hash__" in B.__dict__: + if B.__dict__["__hash__"]: + return True + break + return NotImplemented + + +class Awaitable(metaclass=ABCMeta): + + __slots__ = () + + @abstractmethod + def __await__(self): + yield + + @classmethod + def __subclasshook__(cls, C): + if cls is Awaitable: + for B in C.__mro__: + if "__await__" in B.__dict__: + if B.__dict__["__await__"]: + return True + break + return NotImplemented + + +class Coroutine(Awaitable): + + __slots__ = () + + @abstractmethod + def send(self, value): + """Send a value into the coroutine. + Return next yielded value or raise StopIteration. + """ + raise StopIteration + + @abstractmethod + def throw(self, typ, val=None, tb=None): + """Raise an exception in the coroutine. + Return next yielded value or raise StopIteration. + """ + if val is None: + if tb is None: + raise typ + val = typ() + if tb is not None: + val = val.with_traceback(tb) + raise val + + def close(self): + """Raise GeneratorExit inside coroutine. 
+ """ + try: + self.throw(GeneratorExit) + except (GeneratorExit, StopIteration): + pass + else: + raise RuntimeError("coroutine ignored GeneratorExit") + + @classmethod + def __subclasshook__(cls, C): + if cls is Coroutine: + mro = C.__mro__ + for method in ('__await__', 'send', 'throw', 'close'): + for base in mro: + if method in base.__dict__: + break + else: + return NotImplemented + return True + return NotImplemented + + +Coroutine.register(coroutine) + + +class AsyncIterable(metaclass=ABCMeta): + + __slots__ = () + + @abstractmethod + def __aiter__(self): + return AsyncIterator() + + @classmethod + def __subclasshook__(cls, C): + if cls is AsyncIterable: + if any("__aiter__" in B.__dict__ for B in C.__mro__): + return True + return NotImplemented + + +class AsyncIterator(AsyncIterable): + + __slots__ = () + + @abstractmethod + async def __anext__(self): + """Return the next item or raise StopAsyncIteration when exhausted.""" + raise StopAsyncIteration + + def __aiter__(self): + return self + + @classmethod + def __subclasshook__(cls, C): + if cls is AsyncIterator: + if (any("__anext__" in B.__dict__ for B in C.__mro__) and + any("__aiter__" in B.__dict__ for B in C.__mro__)): + return True + return NotImplemented + + +class Iterable(metaclass=ABCMeta): + + __slots__ = () + + @abstractmethod + def __iter__(self): + while False: + yield None + + @classmethod + def __subclasshook__(cls, C): + if cls is Iterable: + if any("__iter__" in B.__dict__ for B in C.__mro__): + return True + return NotImplemented + + +class Iterator(Iterable): + + __slots__ = () + + @abstractmethod + def __next__(self): + 'Return the next item from the iterator. When exhausted, raise StopIteration' + raise StopIteration + + def __iter__(self): + return self + + @classmethod + def __subclasshook__(cls, C): + if cls is Iterator: + if (any("__next__" in B.__dict__ for B in C.__mro__) and + any("__iter__" in B.__dict__ for B in C.__mro__)): + return True + return NotImplemented + +Iterator.register(bytes_iterator) +Iterator.register(bytearray_iterator) +#Iterator.register(callable_iterator) +Iterator.register(dict_keyiterator) +Iterator.register(dict_valueiterator) +Iterator.register(dict_itemiterator) +Iterator.register(list_iterator) +Iterator.register(list_reverseiterator) +Iterator.register(range_iterator) +Iterator.register(longrange_iterator) +Iterator.register(set_iterator) +Iterator.register(str_iterator) +Iterator.register(tuple_iterator) +Iterator.register(zip_iterator) + + +class Generator(Iterator): + + __slots__ = () + + def __next__(self): + """Return the next item from the generator. + When exhausted, raise StopIteration. + """ + return self.send(None) + + @abstractmethod + def send(self, value): + """Send a value into the generator. + Return next yielded value or raise StopIteration. + """ + raise StopIteration + + @abstractmethod + def throw(self, typ, val=None, tb=None): + """Raise an exception in the generator. + Return next yielded value or raise StopIteration. + """ + if val is None: + if tb is None: + raise typ + val = typ() + if tb is not None: + val = val.with_traceback(tb) + raise val + + def close(self): + """Raise GeneratorExit inside generator. 
+ """ + try: + self.throw(GeneratorExit) + except (GeneratorExit, StopIteration): + pass + else: + raise RuntimeError("generator ignored GeneratorExit") + + @classmethod + def __subclasshook__(cls, C): + if cls is Generator: + mro = C.__mro__ + for method in ('__iter__', '__next__', 'send', 'throw', 'close'): + for base in mro: + if method in base.__dict__: + break + else: + return NotImplemented + return True + return NotImplemented + + +Generator.register(generator) + + +class Sized(metaclass=ABCMeta): + + __slots__ = () + + @abstractmethod + def __len__(self): + return 0 + + @classmethod + def __subclasshook__(cls, C): + if cls is Sized: + if any("__len__" in B.__dict__ for B in C.__mro__): + return True + return NotImplemented + + +class Container(metaclass=ABCMeta): + + __slots__ = () + + @abstractmethod + def __contains__(self, x): + return False + + @classmethod + def __subclasshook__(cls, C): + if cls is Container: + if any("__contains__" in B.__dict__ for B in C.__mro__): + return True + return NotImplemented + + +class Callable(metaclass=ABCMeta): + + __slots__ = () + + @abstractmethod + def __call__(self, *args, **kwds): + return False + + @classmethod + def __subclasshook__(cls, C): + if cls is Callable: + if any("__call__" in B.__dict__ for B in C.__mro__): + return True + return NotImplemented + + +### SETS ### + + +class Set(Sized, Iterable, Container): + + """A set is a finite, iterable container. + + This class provides concrete generic implementations of all + methods except for __contains__, __iter__ and __len__. + + To override the comparisons (presumably for speed, as the + semantics are fixed), redefine __le__ and __ge__, + then the other operations will automatically follow suit. + """ + + __slots__ = () + + def __le__(self, other): + if not isinstance(other, Set): + return NotImplemented + if len(self) > len(other): + return False + for elem in self: + if elem not in other: + return False + return True + + def __lt__(self, other): + if not isinstance(other, Set): + return NotImplemented + return len(self) < len(other) and self.__le__(other) + + def __gt__(self, other): + if not isinstance(other, Set): + return NotImplemented + return len(self) > len(other) and self.__ge__(other) + + def __ge__(self, other): + if not isinstance(other, Set): + return NotImplemented + if len(self) < len(other): + return False + for elem in other: + if elem not in self: + return False + return True + + def __eq__(self, other): + if not isinstance(other, Set): + return NotImplemented + return len(self) == len(other) and self.__le__(other) + + @classmethod + def _from_iterable(cls, it): + '''Construct an instance of the class from any iterable input. + + Must override this method if the class constructor signature + does not accept an iterable for an input. + ''' + return cls(it) + + def __and__(self, other): + if not isinstance(other, Iterable): + return NotImplemented + return self._from_iterable(value for value in other if value in self) + + __rand__ = __and__ + + def isdisjoint(self, other): + 'Return True if two sets have a null intersection.' 
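+        # A worked example (names here are illustrative, not part of the
+        # vendored file): for a concrete subclass s = MySet({1, 2}),
+        # s.isdisjoint([3, 4]) is True and s.isdisjoint([2, 3]) is False;
+        # 'other' may be any iterable, not just another Set.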
+        for value in other:
+            if value in self:
+                return False
+        return True
+
+    def __or__(self, other):
+        if not isinstance(other, Iterable):
+            return NotImplemented
+        chain = (e for s in (self, other) for e in s)
+        return self._from_iterable(chain)
+
+    __ror__ = __or__
+
+    def __sub__(self, other):
+        if not isinstance(other, Set):
+            if not isinstance(other, Iterable):
+                return NotImplemented
+            other = self._from_iterable(other)
+        return self._from_iterable(value for value in self
+                                   if value not in other)
+
+    def __rsub__(self, other):
+        if not isinstance(other, Set):
+            if not isinstance(other, Iterable):
+                return NotImplemented
+            other = self._from_iterable(other)
+        return self._from_iterable(value for value in other
+                                   if value not in self)
+
+    def __xor__(self, other):
+        if not isinstance(other, Set):
+            if not isinstance(other, Iterable):
+                return NotImplemented
+            other = self._from_iterable(other)
+        return (self - other) | (other - self)
+
+    __rxor__ = __xor__
+
+    def _hash(self):
+        """Compute the hash value of a set.
+
+        Note that we don't define __hash__: not all sets are hashable.
+        But if you define a hashable set type, its __hash__ should
+        call this function.
+
+        This must be compatible with __eq__.
+
+        All sets ought to compare equal if they contain the same
+        elements, regardless of how they are implemented, and
+        regardless of the order of the elements; so there's not much
+        freedom for __eq__ or __hash__. We match the algorithm used
+        by the built-in frozenset type.
+        """
+        MAX = sys.maxsize
+        MASK = 2 * MAX + 1
+        n = len(self)
+        h = 1927868237 * (n + 1)
+        h &= MASK
+        for x in self:
+            hx = hash(x)
+            h ^= (hx ^ (hx << 16) ^ 89869747) * 3644798167
+            h &= MASK
+        h = h * 69069 + 907133923
+        h &= MASK
+        if h > MAX:
+            h -= MASK + 1
+        if h == -1:
+            h = 590923713
+        return h
+
+Set.register(frozenset)
+
+
+class MutableSet(Set):
+    """A mutable set is a finite, iterable container.
+
+    This class provides concrete generic implementations of all
+    methods except for __contains__, __iter__, __len__,
+    add(), and discard().
+
+    To override the comparisons (presumably for speed, as the
+    semantics are fixed), all you have to do is redefine __le__ and
+    then the other operations will automatically follow suit.
+    """
+
+    __slots__ = ()
+
+    @abstractmethod
+    def add(self, value):
+        """Add an element."""
+        raise NotImplementedError
+
+    @abstractmethod
+    def discard(self, value):
+        """Remove an element. Do not raise an exception if absent."""
+        raise NotImplementedError
+
+    def remove(self, value):
+        """Remove an element. If not a member, raise a KeyError."""
+        if value not in self:
+            raise KeyError(value)
+        self.discard(value)
+
+    def pop(self):
+        """Return the popped value. Raise KeyError if empty."""
+        it = iter(self)
+        try:
+            value = next(it)
+        except StopIteration:
+            raise KeyError
+        self.discard(value)
+        return value
+
+    def clear(self):
+        """This is slow (creates N new iterators!)
but effective.""" + try: + while True: + self.pop() + except KeyError: + pass + + def __ior__(self, it): + for value in it: + self.add(value) + return self + + def __iand__(self, it): + for value in (self - it): + self.discard(value) + return self + + def __ixor__(self, it): + if it is self: + self.clear() + else: + if not isinstance(it, Set): + it = self._from_iterable(it) + for value in it: + if value in self: + self.discard(value) + else: + self.add(value) + return self + + def __isub__(self, it): + if it is self: + self.clear() + else: + for value in it: + self.discard(value) + return self + +MutableSet.register(set) + + +### MAPPINGS ### + + +class Mapping(Sized, Iterable, Container): + + __slots__ = () + + """A Mapping is a generic container for associating key/value + pairs. + + This class provides concrete generic implementations of all + methods except for __getitem__, __iter__, and __len__. + + """ + + @abstractmethod + def __getitem__(self, key): + raise KeyError + + def get(self, key, default=None): + 'D.get(k[,d]) -> D[k] if k in D, else d. d defaults to None.' + try: + return self[key] + except KeyError: + return default + + def __contains__(self, key): + try: + self[key] + except KeyError: + return False + else: + return True + + def keys(self): + "D.keys() -> a set-like object providing a view on D's keys" + return KeysView(self) + + def items(self): + "D.items() -> a set-like object providing a view on D's items" + return ItemsView(self) + + def values(self): + "D.values() -> an object providing a view on D's values" + return ValuesView(self) + + def __eq__(self, other): + if not isinstance(other, Mapping): + return NotImplemented + return dict(self.items()) == dict(other.items()) + +Mapping.register(mappingproxy) + + +class MappingView(Sized): + + __slots__ = '_mapping', + + def __init__(self, mapping): + self._mapping = mapping + + def __len__(self): + return len(self._mapping) + + def __repr__(self): + return '{0.__class__.__name__}({0._mapping!r})'.format(self) + + +class KeysView(MappingView, Set): + + __slots__ = () + + @classmethod + def _from_iterable(self, it): + return set(it) + + def __contains__(self, key): + return key in self._mapping + + def __iter__(self): + yield from self._mapping + +KeysView.register(dict_keys) + + +class ItemsView(MappingView, Set): + + __slots__ = () + + @classmethod + def _from_iterable(self, it): + return set(it) + + def __contains__(self, item): + key, value = item + try: + v = self._mapping[key] + except KeyError: + return False + else: + return v == value + + def __iter__(self): + for key in self._mapping: + yield (key, self._mapping[key]) + +ItemsView.register(dict_items) + + +class ValuesView(MappingView): + + __slots__ = () + + def __contains__(self, value): + for key in self._mapping: + if value == self._mapping[key]: + return True + return False + + def __iter__(self): + for key in self._mapping: + yield self._mapping[key] + +ValuesView.register(dict_values) + + +class MutableMapping(Mapping): + + __slots__ = () + + """A MutableMapping is a generic container for associating + key/value pairs. + + This class provides concrete generic implementations of all + methods except for __getitem__, __setitem__, __delitem__, + __iter__, and __len__. 
+ + """ + + @abstractmethod + def __setitem__(self, key, value): + raise KeyError + + @abstractmethod + def __delitem__(self, key): + raise KeyError + + __marker = object() + + def pop(self, key, default=__marker): + '''D.pop(k[,d]) -> v, remove specified key and return the corresponding value. + If key is not found, d is returned if given, otherwise KeyError is raised. + ''' + try: + value = self[key] + except KeyError: + if default is self.__marker: + raise + return default + else: + del self[key] + return value + + def popitem(self): + '''D.popitem() -> (k, v), remove and return some (key, value) pair + as a 2-tuple; but raise KeyError if D is empty. + ''' + try: + key = next(iter(self)) + except StopIteration: + raise KeyError + value = self[key] + del self[key] + return key, value + + def clear(self): + 'D.clear() -> None. Remove all items from D.' + try: + while True: + self.popitem() + except KeyError: + pass + + def update(*args, **kwds): + ''' D.update([E, ]**F) -> None. Update D from mapping/iterable E and F. + If E present and has a .keys() method, does: for k in E: D[k] = E[k] + If E present and lacks .keys() method, does: for (k, v) in E: D[k] = v + In either case, this is followed by: for k, v in F.items(): D[k] = v + ''' + if not args: + raise TypeError("descriptor 'update' of 'MutableMapping' object " + "needs an argument") + self, *args = args + if len(args) > 1: + raise TypeError('update expected at most 1 arguments, got %d' % + len(args)) + if args: + other = args[0] + if isinstance(other, Mapping): + for key in other: + self[key] = other[key] + elif hasattr(other, "keys"): + for key in other.keys(): + self[key] = other[key] + else: + for key, value in other: + self[key] = value + for key, value in kwds.items(): + self[key] = value + + def setdefault(self, key, default=None): + 'D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D' + try: + return self[key] + except KeyError: + self[key] = default + return default + +MutableMapping.register(dict) + + +### SEQUENCES ### + + +class Sequence(Sized, Iterable, Container): + + """All the operations on a read-only sequence. + + Concrete subclasses must override __new__ or __init__, + __getitem__, and __len__. + """ + + __slots__ = () + + @abstractmethod + def __getitem__(self, index): + raise IndexError + + def __iter__(self): + i = 0 + try: + while True: + v = self[i] + yield v + i += 1 + except IndexError: + return + + def __contains__(self, value): + for v in self: + if v == value: + return True + return False + + def __reversed__(self): + for i in reversed(range(len(self))): + yield self[i] + + def index(self, value, start=0, stop=None): + '''S.index(value, [start, [stop]]) -> integer -- return first index of value. + Raises ValueError if the value is not present. + ''' + if start is not None and start < 0: + start = max(len(self) + start, 0) + if stop is not None and stop < 0: + stop += len(self) + + i = start + while stop is None or i < stop: + try: + if self[i] == value: + return i + except IndexError: + break + i += 1 + raise ValueError + + def count(self, value): + 'S.count(value) -> integer -- return number of occurrences of value' + return sum(1 for v in self if v == value) + +Sequence.register(tuple) +Sequence.register(str) +Sequence.register(range) +Sequence.register(memoryview) + + +class ByteString(Sequence): + + """This unifies bytes and bytearray. + + XXX Should add all their methods. 
+ """ + + __slots__ = () + +ByteString.register(bytes) +ByteString.register(bytearray) + + +class MutableSequence(Sequence): + + __slots__ = () + + """All the operations on a read-write sequence. + + Concrete subclasses must provide __new__ or __init__, + __getitem__, __setitem__, __delitem__, __len__, and insert(). + + """ + + @abstractmethod + def __setitem__(self, index, value): + raise IndexError + + @abstractmethod + def __delitem__(self, index): + raise IndexError + + @abstractmethod + def insert(self, index, value): + 'S.insert(index, value) -- insert value before index' + raise IndexError + + def append(self, value): + 'S.append(value) -- append value to the end of the sequence' + self.insert(len(self), value) + + def clear(self): + 'S.clear() -> None -- remove all items from S' + try: + while True: + self.pop() + except IndexError: + pass + + def reverse(self): + 'S.reverse() -- reverse *IN PLACE*' + n = len(self) + for i in range(n//2): + self[i], self[n-i-1] = self[n-i-1], self[i] + + def extend(self, values): + 'S.extend(iterable) -- extend sequence by appending elements from the iterable' + for v in values: + self.append(v) + + def pop(self, index=-1): + '''S.pop([index]) -> item -- remove and return item at index (default last). + Raise IndexError if list is empty or index is out of range. + ''' + v = self[index] + del self[index] + return v + + def remove(self, value): + '''S.remove(value) -- remove first occurrence of value. + Raise ValueError if the value is not present. + ''' + del self[self.index(value)] + + def __iadd__(self, values): + self.extend(values) + return self + +MutableSequence.register(list) +MutableSequence.register(bytearray) # Multiply inheriting, see ByteString diff --git a/typing_extensions/test_data/python-3.5.3/abc.py b/typing_extensions/test_data/python-3.5.3/abc.py new file mode 100644 index 00000000..1cbf96a6 --- /dev/null +++ b/typing_extensions/test_data/python-3.5.3/abc.py @@ -0,0 +1,248 @@ +# Copyright 2007 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Abstract Base Classes (ABCs) according to PEP 3119.""" + +from _weakrefset import WeakSet + + +def abstractmethod(funcobj): + """A decorator indicating abstract methods. + + Requires that the metaclass is ABCMeta or derived from it. A + class that has a metaclass derived from ABCMeta cannot be + instantiated unless all of its abstract methods are overridden. + The abstract methods can be called using any of the normal + 'super' call mechanisms. + + Usage: + + class C(metaclass=ABCMeta): + @abstractmethod + def my_abstract_method(self, ...): + ... + """ + funcobj.__isabstractmethod__ = True + return funcobj + + +class abstractclassmethod(classmethod): + """ + A decorator indicating abstract classmethods. + + Similar to abstractmethod. + + Usage: + + class C(metaclass=ABCMeta): + @abstractclassmethod + def my_abstract_classmethod(cls, ...): + ... + + 'abstractclassmethod' is deprecated. Use 'classmethod' with + 'abstractmethod' instead. + """ + + __isabstractmethod__ = True + + def __init__(self, callable): + callable.__isabstractmethod__ = True + super().__init__(callable) + + +class abstractstaticmethod(staticmethod): + """ + A decorator indicating abstract staticmethods. + + Similar to abstractmethod. + + Usage: + + class C(metaclass=ABCMeta): + @abstractstaticmethod + def my_abstract_staticmethod(...): + ... + + 'abstractstaticmethod' is deprecated. Use 'staticmethod' with + 'abstractmethod' instead. 
+ """ + + __isabstractmethod__ = True + + def __init__(self, callable): + callable.__isabstractmethod__ = True + super().__init__(callable) + + +class abstractproperty(property): + """ + A decorator indicating abstract properties. + + Requires that the metaclass is ABCMeta or derived from it. A + class that has a metaclass derived from ABCMeta cannot be + instantiated unless all of its abstract properties are overridden. + The abstract properties can be called using any of the normal + 'super' call mechanisms. + + Usage: + + class C(metaclass=ABCMeta): + @abstractproperty + def my_abstract_property(self): + ... + + This defines a read-only property; you can also define a read-write + abstract property using the 'long' form of property declaration: + + class C(metaclass=ABCMeta): + def getx(self): ... + def setx(self, value): ... + x = abstractproperty(getx, setx) + + 'abstractproperty' is deprecated. Use 'property' with 'abstractmethod' + instead. + """ + + __isabstractmethod__ = True + + +class ABCMeta(type): + + """Metaclass for defining Abstract Base Classes (ABCs). + + Use this metaclass to create an ABC. An ABC can be subclassed + directly, and then acts as a mix-in class. You can also register + unrelated concrete classes (even built-in classes) and unrelated + ABCs as 'virtual subclasses' -- these and their descendants will + be considered subclasses of the registering ABC by the built-in + issubclass() function, but the registering ABC won't show up in + their MRO (Method Resolution Order) nor will method + implementations defined by the registering ABC be callable (not + even via super()). + + """ + + # A global counter that is incremented each time a class is + # registered as a virtual subclass of anything. It forces the + # negative cache to be cleared before its next use. + # Note: this counter is private. Use `abc.get_cache_token()` for + # external code. + _abc_invalidation_counter = 0 + + def __new__(mcls, name, bases, namespace): + cls = super().__new__(mcls, name, bases, namespace) + # Compute set of abstract method names + abstracts = {name + for name, value in namespace.items() + if getattr(value, "__isabstractmethod__", False)} + for base in bases: + for name in getattr(base, "__abstractmethods__", set()): + value = getattr(cls, name, None) + if getattr(value, "__isabstractmethod__", False): + abstracts.add(name) + cls.__abstractmethods__ = frozenset(abstracts) + # Set up inheritance registry + cls._abc_registry = WeakSet() + cls._abc_cache = WeakSet() + cls._abc_negative_cache = WeakSet() + cls._abc_negative_cache_version = ABCMeta._abc_invalidation_counter + return cls + + def register(cls, subclass): + """Register a virtual subclass of an ABC. + + Returns the subclass, to allow usage as a class decorator. + """ + if not isinstance(subclass, type): + raise TypeError("Can only register classes") + if issubclass(subclass, cls): + return subclass # Already a subclass + # Subtle: test for cycles *after* testing for "already a subclass"; + # this means we allow X.register(X) and interpret it as a no-op. 
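+        # Concretely (with hypothetical, otherwise unrelated ABCs A and B):
+        # after A.register(B), a later B.register(A) is refused below, since
+        # mutual registration could send the recursive registry checks in
+        # __subclasscheck__ around in circles.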
+ if issubclass(cls, subclass): + # This would create a cycle, which is bad for the algorithm below + raise RuntimeError("Refusing to create an inheritance cycle") + cls._abc_registry.add(subclass) + ABCMeta._abc_invalidation_counter += 1 # Invalidate negative cache + return subclass + + def _dump_registry(cls, file=None): + """Debug helper to print the ABC registry.""" + print("Class: %s.%s" % (cls.__module__, cls.__qualname__), file=file) + print("Inv.counter: %s" % ABCMeta._abc_invalidation_counter, file=file) + for name in sorted(cls.__dict__.keys()): + if name.startswith("_abc_"): + value = getattr(cls, name) + print("%s: %r" % (name, value), file=file) + + def __instancecheck__(cls, instance): + """Override for isinstance(instance, cls).""" + # Inline the cache checking + subclass = instance.__class__ + if subclass in cls._abc_cache: + return True + subtype = type(instance) + if subtype is subclass: + if (cls._abc_negative_cache_version == + ABCMeta._abc_invalidation_counter and + subclass in cls._abc_negative_cache): + return False + # Fall back to the subclass check. + return cls.__subclasscheck__(subclass) + return any(cls.__subclasscheck__(c) for c in {subclass, subtype}) + + def __subclasscheck__(cls, subclass): + """Override for issubclass(subclass, cls).""" + # Check cache + if subclass in cls._abc_cache: + return True + # Check negative cache; may have to invalidate + if cls._abc_negative_cache_version < ABCMeta._abc_invalidation_counter: + # Invalidate the negative cache + cls._abc_negative_cache = WeakSet() + cls._abc_negative_cache_version = ABCMeta._abc_invalidation_counter + elif subclass in cls._abc_negative_cache: + return False + # Check the subclass hook + ok = cls.__subclasshook__(subclass) + if ok is not NotImplemented: + assert isinstance(ok, bool) + if ok: + cls._abc_cache.add(subclass) + else: + cls._abc_negative_cache.add(subclass) + return ok + # Check if it's a direct subclass + if cls in getattr(subclass, '__mro__', ()): + cls._abc_cache.add(subclass) + return True + # Check if it's a subclass of a registered class (recursive) + for rcls in cls._abc_registry: + if issubclass(subclass, rcls): + cls._abc_cache.add(subclass) + return True + # Check if it's a subclass of a subclass (recursive) + for scls in cls.__subclasses__(): + if issubclass(subclass, scls): + cls._abc_cache.add(subclass) + return True + # No dice; update negative cache + cls._abc_negative_cache.add(subclass) + return False + + +class ABC(metaclass=ABCMeta): + """Helper class that provides a standard way to create an ABC using + inheritance. + """ + pass + + +def get_cache_token(): + """Returns the current ABC cache token. + + The token is an opaque object (supporting equality testing) identifying the + current version of the ABC cache for virtual subclasses. The token changes + with every call to ``register()`` on any ABC. + """ + return ABCMeta._abc_invalidation_counter diff --git a/typing_extensions/test_data/python-3.5.3/collections/__init__.py b/typing_extensions/test_data/python-3.5.3/collections/__init__.py new file mode 100644 index 00000000..bea811db --- /dev/null +++ b/typing_extensions/test_data/python-3.5.3/collections/__init__.py @@ -0,0 +1,1242 @@ +'''This module implements specialized container datatypes providing +alternatives to Python's general purpose built-in containers, dict, +list, set, and tuple. 
+ +* namedtuple factory function for creating tuple subclasses with named fields +* deque list-like container with fast appends and pops on either end +* ChainMap dict-like class for creating a single view of multiple mappings +* Counter dict subclass for counting hashable objects +* OrderedDict dict subclass that remembers the order entries were added +* defaultdict dict subclass that calls a factory function to supply missing values +* UserDict wrapper around dictionary objects for easier dict subclassing +* UserList wrapper around list objects for easier list subclassing +* UserString wrapper around string objects for easier string subclassing + +''' + +__all__ = ['deque', 'defaultdict', 'namedtuple', 'UserDict', 'UserList', + 'UserString', 'Counter', 'OrderedDict', 'ChainMap'] + +# For backwards compatibility, continue to make the collections ABCs +# available through the collections module. +from _collections_abc import * +import _collections_abc +__all__ += _collections_abc.__all__ + +from operator import itemgetter as _itemgetter, eq as _eq +from keyword import iskeyword as _iskeyword +import sys as _sys +import heapq as _heapq +from _weakref import proxy as _proxy +from itertools import repeat as _repeat, chain as _chain, starmap as _starmap +from reprlib import recursive_repr as _recursive_repr + +try: + from _collections import deque +except ImportError: + pass +else: + MutableSequence.register(deque) + +try: + from _collections import defaultdict +except ImportError: + pass + + +################################################################################ +### OrderedDict +################################################################################ + +class _OrderedDictKeysView(KeysView): + + def __reversed__(self): + yield from reversed(self._mapping) + +class _OrderedDictItemsView(ItemsView): + + def __reversed__(self): + for key in reversed(self._mapping): + yield (key, self._mapping[key]) + +class _OrderedDictValuesView(ValuesView): + + def __reversed__(self): + for key in reversed(self._mapping): + yield self._mapping[key] + +class _Link(object): + __slots__ = 'prev', 'next', 'key', '__weakref__' + +class OrderedDict(dict): + 'Dictionary that remembers insertion order' + # An inherited dict maps keys to values. + # The inherited dict provides __getitem__, __len__, __contains__, and get. + # The remaining methods are order-aware. + # Big-O running times for all methods are the same as regular dictionaries. + + # The internal self.__map dict maps keys to links in a doubly linked list. + # The circular doubly linked list starts and ends with a sentinel element. + # The sentinel element never gets deleted (this simplifies the algorithm). + # The sentinel is in self.__hardroot with a weakref proxy in self.__root. + # The prev links are weakref proxies (to prevent circular references). + # Individual links are kept alive by the hard reference in self.__map. + # Those hard references disappear when a key is deleted from an OrderedDict. + + def __init__(*args, **kwds): + '''Initialize an ordered dictionary. The signature is the same as + regular dictionaries, but keyword arguments are not recommended because + their insertion order is arbitrary. 
+ + ''' + if not args: + raise TypeError("descriptor '__init__' of 'OrderedDict' object " + "needs an argument") + self, *args = args + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + try: + self.__root + except AttributeError: + self.__hardroot = _Link() + self.__root = root = _proxy(self.__hardroot) + root.prev = root.next = root + self.__map = {} + self.__update(*args, **kwds) + + def __setitem__(self, key, value, + dict_setitem=dict.__setitem__, proxy=_proxy, Link=_Link): + 'od.__setitem__(i, y) <==> od[i]=y' + # Setting a new item creates a new link at the end of the linked list, + # and the inherited dictionary is updated with the new key/value pair. + if key not in self: + self.__map[key] = link = Link() + root = self.__root + last = root.prev + link.prev, link.next, link.key = last, root, key + last.next = link + root.prev = proxy(link) + dict_setitem(self, key, value) + + def __delitem__(self, key, dict_delitem=dict.__delitem__): + 'od.__delitem__(y) <==> del od[y]' + # Deleting an existing item uses self.__map to find the link which gets + # removed by updating the links in the predecessor and successor nodes. + dict_delitem(self, key) + link = self.__map.pop(key) + link_prev = link.prev + link_next = link.next + link_prev.next = link_next + link_next.prev = link_prev + link.prev = None + link.next = None + + def __iter__(self): + 'od.__iter__() <==> iter(od)' + # Traverse the linked list in order. + root = self.__root + curr = root.next + while curr is not root: + yield curr.key + curr = curr.next + + def __reversed__(self): + 'od.__reversed__() <==> reversed(od)' + # Traverse the linked list in reverse order. + root = self.__root + curr = root.prev + while curr is not root: + yield curr.key + curr = curr.prev + + def clear(self): + 'od.clear() -> None. Remove all items from od.' + root = self.__root + root.prev = root.next = root + self.__map.clear() + dict.clear(self) + + def popitem(self, last=True): + '''od.popitem() -> (k, v), return and remove a (key, value) pair. + Pairs are returned in LIFO order if last is true or FIFO order if false. + + ''' + if not self: + raise KeyError('dictionary is empty') + root = self.__root + if last: + link = root.prev + link_prev = link.prev + link_prev.next = root + root.prev = link_prev + else: + link = root.next + link_next = link.next + root.next = link_next + link_next.prev = root + key = link.key + del self.__map[key] + value = dict.pop(self, key) + return key, value + + def move_to_end(self, key, last=True): + '''Move an existing element to the end (or beginning if last==False). + + Raises KeyError if the element does not exist. + When last=True, acts like a fast version of self[key]=self.pop(key). 
+ + ''' + link = self.__map[key] + link_prev = link.prev + link_next = link.next + soft_link = link_next.prev + link_prev.next = link_next + link_next.prev = link_prev + root = self.__root + if last: + last = root.prev + link.prev = last + link.next = root + root.prev = soft_link + last.next = link + else: + first = root.next + link.prev = root + link.next = first + first.prev = soft_link + root.next = link + + def __sizeof__(self): + sizeof = _sys.getsizeof + n = len(self) + 1 # number of links including root + size = sizeof(self.__dict__) # instance dictionary + size += sizeof(self.__map) * 2 # internal dict and inherited dict + size += sizeof(self.__hardroot) * n # link objects + size += sizeof(self.__root) * n # proxy objects + return size + + update = __update = MutableMapping.update + + def keys(self): + "D.keys() -> a set-like object providing a view on D's keys" + return _OrderedDictKeysView(self) + + def items(self): + "D.items() -> a set-like object providing a view on D's items" + return _OrderedDictItemsView(self) + + def values(self): + "D.values() -> an object providing a view on D's values" + return _OrderedDictValuesView(self) + + __ne__ = MutableMapping.__ne__ + + __marker = object() + + def pop(self, key, default=__marker): + '''od.pop(k[,d]) -> v, remove specified key and return the corresponding + value. If key is not found, d is returned if given, otherwise KeyError + is raised. + + ''' + if key in self: + result = self[key] + del self[key] + return result + if default is self.__marker: + raise KeyError(key) + return default + + def setdefault(self, key, default=None): + 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' + if key in self: + return self[key] + self[key] = default + return default + + @_recursive_repr() + def __repr__(self): + 'od.__repr__() <==> repr(od)' + if not self: + return '%s()' % (self.__class__.__name__,) + return '%s(%r)' % (self.__class__.__name__, list(self.items())) + + def __reduce__(self): + 'Return state information for pickling' + inst_dict = vars(self).copy() + for k in vars(OrderedDict()): + inst_dict.pop(k, None) + return self.__class__, (), inst_dict or None, None, iter(self.items()) + + def copy(self): + 'od.copy() -> a shallow copy of od' + return self.__class__(self) + + @classmethod + def fromkeys(cls, iterable, value=None): + '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S. + If not specified, the value defaults to None. + + ''' + self = cls() + for key in iterable: + self[key] = value + return self + + def __eq__(self, other): + '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive + while comparison to a regular mapping is order-insensitive. + + ''' + if isinstance(other, OrderedDict): + return dict.__eq__(self, other) and all(map(_eq, self, other)) + return dict.__eq__(self, other) + + +try: + from _collections import OrderedDict +except ImportError: + # Leave the pure Python version in place. 
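+    # On builds that ship the C accelerator (CPython normally does), the
+    # import above succeeds and _collections.OrderedDict shadows the pure
+    # Python class defined in this module.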
+ pass + + +################################################################################ +### namedtuple +################################################################################ + +_class_template = """\ +from builtins import property as _property, tuple as _tuple +from operator import itemgetter as _itemgetter +from collections import OrderedDict + +class {typename}(tuple): + '{typename}({arg_list})' + + __slots__ = () + + _fields = {field_names!r} + + def __new__(_cls, {arg_list}): + 'Create new instance of {typename}({arg_list})' + return _tuple.__new__(_cls, ({arg_list})) + + @classmethod + def _make(cls, iterable, new=tuple.__new__, len=len): + 'Make a new {typename} object from a sequence or iterable' + result = new(cls, iterable) + if len(result) != {num_fields:d}: + raise TypeError('Expected {num_fields:d} arguments, got %d' % len(result)) + return result + + def _replace(_self, **kwds): + 'Return a new {typename} object replacing specified fields with new values' + result = _self._make(map(kwds.pop, {field_names!r}, _self)) + if kwds: + raise ValueError('Got unexpected field names: %r' % list(kwds)) + return result + + def __repr__(self): + 'Return a nicely formatted representation string' + return self.__class__.__name__ + '({repr_fmt})' % self + + def _asdict(self): + 'Return a new OrderedDict which maps field names to their values.' + return OrderedDict(zip(self._fields, self)) + + def __getnewargs__(self): + 'Return self as a plain tuple. Used by copy and pickle.' + return tuple(self) + +{field_defs} +""" + +_repr_template = '{name}=%r' + +_field_template = '''\ + {name} = _property(_itemgetter({index:d}), doc='Alias for field number {index:d}') +''' + +def namedtuple(typename, field_names, verbose=False, rename=False): + """Returns a new subclass of tuple with named fields. + + >>> Point = namedtuple('Point', ['x', 'y']) + >>> Point.__doc__ # docstring for the new class + 'Point(x, y)' + >>> p = Point(11, y=22) # instantiate with positional args or keywords + >>> p[0] + p[1] # indexable like a plain tuple + 33 + >>> x, y = p # unpack like a regular tuple + >>> x, y + (11, 22) + >>> p.x + p.y # fields also accessible by name + 33 + >>> d = p._asdict() # convert to a dictionary + >>> d['x'] + 11 + >>> Point(**d) # convert from a dictionary + Point(x=11, y=22) + >>> p._replace(x=100) # _replace() is like str.replace() but targets named fields + Point(x=100, y=22) + + """ + + # Validate the field names. At the user's option, either generate an error + # message or automatically replace the field name with a valid name. 
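+    # For example, namedtuple('T', 'x class x', rename=True) yields the
+    # fields ('x', '_1', '_2'), whereas the same call with rename=False
+    # raises ValueError ('class' is a keyword and 'x' is duplicated).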
+ if isinstance(field_names, str): + field_names = field_names.replace(',', ' ').split() + field_names = list(map(str, field_names)) + typename = str(typename) + if rename: + seen = set() + for index, name in enumerate(field_names): + if (not name.isidentifier() + or _iskeyword(name) + or name.startswith('_') + or name in seen): + field_names[index] = '_%d' % index + seen.add(name) + for name in [typename] + field_names: + if type(name) != str: + raise TypeError('Type names and field names must be strings') + if not name.isidentifier(): + raise ValueError('Type names and field names must be valid ' + 'identifiers: %r' % name) + if _iskeyword(name): + raise ValueError('Type names and field names cannot be a ' + 'keyword: %r' % name) + seen = set() + for name in field_names: + if name.startswith('_') and not rename: + raise ValueError('Field names cannot start with an underscore: ' + '%r' % name) + if name in seen: + raise ValueError('Encountered duplicate field name: %r' % name) + seen.add(name) + + # Fill-in the class template + class_definition = _class_template.format( + typename = typename, + field_names = tuple(field_names), + num_fields = len(field_names), + arg_list = repr(tuple(field_names)).replace("'", "")[1:-1], + repr_fmt = ', '.join(_repr_template.format(name=name) + for name in field_names), + field_defs = '\n'.join(_field_template.format(index=index, name=name) + for index, name in enumerate(field_names)) + ) + + # Execute the template string in a temporary namespace and support + # tracing utilities by setting a value for frame.f_globals['__name__'] + namespace = dict(__name__='namedtuple_%s' % typename) + exec(class_definition, namespace) + result = namespace[typename] + result._source = class_definition + if verbose: + print(result._source) + + # For pickling to work, the __module__ variable needs to be set to the frame + # where the named tuple is created. Bypass this step in environments where + # sys._getframe is not defined (Jython for example) or sys._getframe is not + # defined for arguments greater than 0 (IronPython). + try: + result.__module__ = _sys._getframe(1).f_globals.get('__name__', '__main__') + except (AttributeError, ValueError): + pass + + return result + + +######################################################################## +### Counter +######################################################################## + +def _count_elements(mapping, iterable): + 'Tally elements from the iterable.' + mapping_get = mapping.get + for elem in iterable: + mapping[elem] = mapping_get(elem, 0) + 1 + +try: # Load C helper function if available + from _collections import _count_elements +except ImportError: + pass + +class Counter(dict): + '''Dict subclass for counting hashable items. Sometimes called a bag + or multiset. Elements are stored as dictionary keys and their counts + are stored as dictionary values. + + >>> c = Counter('abcdeabcdabcaba') # count elements from a string + + >>> c.most_common(3) # three most common elements + [('a', 5), ('b', 4), ('c', 3)] + >>> sorted(c) # list all unique elements + ['a', 'b', 'c', 'd', 'e'] + >>> ''.join(sorted(c.elements())) # list elements with repetitions + 'aaaaabbbbcccdde' + >>> sum(c.values()) # total of all counts + 15 + + >>> c['a'] # count of letter 'a' + 5 + >>> for elem in 'shazam': # update counts from an iterable + ... 
c[elem] += 1 # by adding 1 to each element's count + >>> c['a'] # now there are seven 'a' + 7 + >>> del c['b'] # remove all 'b' + >>> c['b'] # now there are zero 'b' + 0 + + >>> d = Counter('simsalabim') # make another counter + >>> c.update(d) # add in the second counter + >>> c['a'] # now there are nine 'a' + 9 + + >>> c.clear() # empty the counter + >>> c + Counter() + + Note: If a count is set to zero or reduced to zero, it will remain + in the counter until the entry is deleted or the counter is cleared: + + >>> c = Counter('aaabbc') + >>> c['b'] -= 2 # reduce the count of 'b' by two + >>> c.most_common() # 'b' is still in, but its count is zero + [('a', 3), ('c', 1), ('b', 0)] + + ''' + # References: + # http://en.wikipedia.org/wiki/Multiset + # http://www.gnu.org/software/smalltalk/manual-base/html_node/Bag.html + # http://www.demo2s.com/Tutorial/Cpp/0380__set-multiset/Catalog0380__set-multiset.htm + # http://code.activestate.com/recipes/259174/ + # Knuth, TAOCP Vol. II section 4.6.3 + + def __init__(*args, **kwds): + '''Create a new, empty Counter object. And if given, count elements + from an input iterable. Or, initialize the count from another mapping + of elements to their counts. + + >>> c = Counter() # a new, empty counter + >>> c = Counter('gallahad') # a new counter from an iterable + >>> c = Counter({'a': 4, 'b': 2}) # a new counter from a mapping + >>> c = Counter(a=4, b=2) # a new counter from keyword args + + ''' + if not args: + raise TypeError("descriptor '__init__' of 'Counter' object " + "needs an argument") + self, *args = args + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + super(Counter, self).__init__() + self.update(*args, **kwds) + + def __missing__(self, key): + 'The count of elements not in the Counter is zero.' + # Needed so that self[missing_item] does not raise KeyError + return 0 + + def most_common(self, n=None): + '''List the n most common elements and their counts from the most + common to the least. If n is None, then list all element counts. + + >>> Counter('abcdeabcdabcaba').most_common(3) + [('a', 5), ('b', 4), ('c', 3)] + + ''' + # Emulate Bag.sortedByCount from Smalltalk + if n is None: + return sorted(self.items(), key=_itemgetter(1), reverse=True) + return _heapq.nlargest(n, self.items(), key=_itemgetter(1)) + + def elements(self): + '''Iterator over elements repeating each as many times as its count. + + >>> c = Counter('ABCABC') + >>> sorted(c.elements()) + ['A', 'A', 'B', 'B', 'C', 'C'] + + # Knuth's example for prime factors of 1836: 2**2 * 3**3 * 17**1 + >>> prime_factors = Counter({2: 2, 3: 3, 17: 1}) + >>> product = 1 + >>> for factor in prime_factors.elements(): # loop over factors + ... product *= factor # and multiply them + >>> product + 1836 + + Note, if an element's count has been set to zero or is a negative + number, elements() will ignore it. + + ''' + # Emulate Bag.do from Smalltalk and Multiset.begin from C++. + return _chain.from_iterable(_starmap(_repeat, self.items())) + + # Override dict methods where necessary + + @classmethod + def fromkeys(cls, iterable, v=None): + # There is no equivalent method for counters because setting v=1 + # means that no element can have a count greater than one. + raise NotImplementedError( + 'Counter.fromkeys() is undefined. Use Counter(iterable) instead.') + + def update(*args, **kwds): + '''Like dict.update() but add counts instead of replacing them. + + Source can be an iterable, a dictionary, or another Counter instance. 
+
+        >>> c = Counter('which')
+        >>> c.update('witch')           # add elements from another iterable
+        >>> d = Counter('watch')
+        >>> c.update(d)                 # add elements from another counter
+        >>> c['h']                      # four 'h' in which, witch, and watch
+        4
+
+        '''
+        # The regular dict.update() operation makes no sense here because the
+        # replace behavior results in some of the original untouched counts
+        # being mixed in with all of the other counts for a mishmash that
+        # doesn't have a straightforward interpretation in most counting
+        # contexts. Instead, we implement straight-addition. Both the inputs
+        # and outputs are allowed to contain zero and negative counts.
+
+        if not args:
+            raise TypeError("descriptor 'update' of 'Counter' object "
+                            "needs an argument")
+        self, *args = args
+        if len(args) > 1:
+            raise TypeError('expected at most 1 arguments, got %d' % len(args))
+        iterable = args[0] if args else None
+        if iterable is not None:
+            if isinstance(iterable, Mapping):
+                if self:
+                    self_get = self.get
+                    for elem, count in iterable.items():
+                        self[elem] = count + self_get(elem, 0)
+                else:
+                    super(Counter, self).update(iterable) # fast path when counter is empty
+            else:
+                _count_elements(self, iterable)
+        if kwds:
+            self.update(kwds)
+
+    def subtract(*args, **kwds):
+        '''Like dict.update() but subtracts counts instead of replacing them.
+        Counts can be reduced below zero. Both the inputs and outputs are
+        allowed to contain zero and negative counts.
+
+        Source can be an iterable, a dictionary, or another Counter instance.
+
+        >>> c = Counter('which')
+        >>> c.subtract('witch')             # subtract elements from another iterable
+        >>> c.subtract(Counter('watch'))    # subtract elements from another counter
+        >>> c['h']                          # 2 in which, minus 1 in witch, minus 1 in watch
+        0
+        >>> c['w']                          # 1 in which, minus 1 in witch, minus 1 in watch
+        -1
+
+        '''
+        if not args:
+            raise TypeError("descriptor 'subtract' of 'Counter' object "
+                            "needs an argument")
+        self, *args = args
+        if len(args) > 1:
+            raise TypeError('expected at most 1 arguments, got %d' % len(args))
+        iterable = args[0] if args else None
+        if iterable is not None:
+            self_get = self.get
+            if isinstance(iterable, Mapping):
+                for elem, count in iterable.items():
+                    self[elem] = self_get(elem, 0) - count
+            else:
+                for elem in iterable:
+                    self[elem] = self_get(elem, 0) - 1
+        if kwds:
+            self.subtract(kwds)
+
+    def copy(self):
+        'Return a shallow copy.'
+        return self.__class__(self)
+
+    def __reduce__(self):
+        return self.__class__, (dict(self),)
+
+    def __delitem__(self, elem):
+        'Like dict.__delitem__() but does not raise KeyError for missing values.'
+        if elem in self:
+            super().__delitem__(elem)
+
+    def __repr__(self):
+        if not self:
+            return '%s()' % self.__class__.__name__
+        try:
+            items = ', '.join(map('%r: %r'.__mod__, self.most_common()))
+            return '%s({%s})' % (self.__class__.__name__, items)
+        except TypeError:
+            # handle case where values are not orderable
+            return '{0}({1!r})'.format(self.__class__.__name__, dict(self))
+
+    # Multiset-style mathematical operations discussed in:
+    #       Knuth TAOCP Volume II section 4.6.3 exercise 19
+    #       and at http://en.wikipedia.org/wiki/Multiset
+    #
+    # Outputs guaranteed to only include positive counts.
+    #
+    # To strip negative and zero counts, add-in an empty counter:
+    #       c += Counter()
+
+    def __add__(self, other):
+        '''Add counts from two counters.
+ + >>> Counter('abbb') + Counter('bcc') + Counter({'b': 4, 'c': 2, 'a': 1}) + + ''' + if not isinstance(other, Counter): + return NotImplemented + result = Counter() + for elem, count in self.items(): + newcount = count + other[elem] + if newcount > 0: + result[elem] = newcount + for elem, count in other.items(): + if elem not in self and count > 0: + result[elem] = count + return result + + def __sub__(self, other): + ''' Subtract count, but keep only results with positive counts. + + >>> Counter('abbbc') - Counter('bccd') + Counter({'b': 2, 'a': 1}) + + ''' + if not isinstance(other, Counter): + return NotImplemented + result = Counter() + for elem, count in self.items(): + newcount = count - other[elem] + if newcount > 0: + result[elem] = newcount + for elem, count in other.items(): + if elem not in self and count < 0: + result[elem] = 0 - count + return result + + def __or__(self, other): + '''Union is the maximum of value in either of the input counters. + + >>> Counter('abbb') | Counter('bcc') + Counter({'b': 3, 'c': 2, 'a': 1}) + + ''' + if not isinstance(other, Counter): + return NotImplemented + result = Counter() + for elem, count in self.items(): + other_count = other[elem] + newcount = other_count if count < other_count else count + if newcount > 0: + result[elem] = newcount + for elem, count in other.items(): + if elem not in self and count > 0: + result[elem] = count + return result + + def __and__(self, other): + ''' Intersection is the minimum of corresponding counts. + + >>> Counter('abbb') & Counter('bcc') + Counter({'b': 1}) + + ''' + if not isinstance(other, Counter): + return NotImplemented + result = Counter() + for elem, count in self.items(): + other_count = other[elem] + newcount = count if count < other_count else other_count + if newcount > 0: + result[elem] = newcount + return result + + def __pos__(self): + 'Adds an empty counter, effectively stripping negative and zero counts' + result = Counter() + for elem, count in self.items(): + if count > 0: + result[elem] = count + return result + + def __neg__(self): + '''Subtracts from an empty counter. Strips positive and zero counts, + and flips the sign on negative counts. + + ''' + result = Counter() + for elem, count in self.items(): + if count < 0: + result[elem] = 0 - count + return result + + def _keep_positive(self): + '''Internal method to strip elements with a negative or zero count''' + nonpositive = [elem for elem, count in self.items() if not count > 0] + for elem in nonpositive: + del self[elem] + return self + + def __iadd__(self, other): + '''Inplace add from another counter, keeping only positive counts. + + >>> c = Counter('abbb') + >>> c += Counter('bcc') + >>> c + Counter({'b': 4, 'c': 2, 'a': 1}) + + ''' + for elem, count in other.items(): + self[elem] += count + return self._keep_positive() + + def __isub__(self, other): + '''Inplace subtract counter, but keep only results with positive counts. + + >>> c = Counter('abbbc') + >>> c -= Counter('bccd') + >>> c + Counter({'b': 2, 'a': 1}) + + ''' + for elem, count in other.items(): + self[elem] -= count + return self._keep_positive() + + def __ior__(self, other): + '''Inplace union is the maximum of value from either counter. 
+
+        >>> c = Counter('abbb')
+        >>> c |= Counter('bcc')
+        >>> c
+        Counter({'b': 3, 'c': 2, 'a': 1})
+
+        '''
+        for elem, other_count in other.items():
+            count = self[elem]
+            if other_count > count:
+                self[elem] = other_count
+        return self._keep_positive()
+
+    def __iand__(self, other):
+        '''Inplace intersection is the minimum of corresponding counts.
+
+        >>> c = Counter('abbb')
+        >>> c &= Counter('bcc')
+        >>> c
+        Counter({'b': 1})
+
+        '''
+        for elem, count in self.items():
+            other_count = other[elem]
+            if other_count < count:
+                self[elem] = other_count
+        return self._keep_positive()
+
+
+########################################################################
+###  ChainMap (helper for configparser and string.Template)
+########################################################################
+
+class ChainMap(MutableMapping):
+    ''' A ChainMap groups multiple dicts (or other mappings) together
+    to create a single, updateable view.
+
+    The underlying mappings are stored in a list. That list is public and can
+    be accessed or updated using the *maps* attribute. There is no other
+    state.
+
+    Lookups search the underlying mappings successively until a key is found.
+    In contrast, writes, updates, and deletions only operate on the first
+    mapping.
+
+    '''
+
+    def __init__(self, *maps):
+        '''Initialize a ChainMap by setting *maps* to the given mappings.
+        If no mappings are provided, a single empty dictionary is used.
+
+        '''
+        self.maps = list(maps) or [{}]          # always at least one map
+
+    def __missing__(self, key):
+        raise KeyError(key)
+
+    def __getitem__(self, key):
+        for mapping in self.maps:
+            try:
+                return mapping[key]             # can't use 'key in mapping' with defaultdict
+            except KeyError:
+                pass
+        return self.__missing__(key)            # support subclasses that define __missing__
+
+    def get(self, key, default=None):
+        return self[key] if key in self else default
+
+    def __len__(self):
+        return len(set().union(*self.maps))     # reuses stored hash values if possible
+
+    def __iter__(self):
+        return iter(set().union(*self.maps))
+
+    def __contains__(self, key):
+        return any(key in m for m in self.maps)
+
+    def __bool__(self):
+        return any(self.maps)
+
+    @_recursive_repr()
+    def __repr__(self):
+        return '{0.__class__.__name__}({1})'.format(
+            self, ', '.join(map(repr, self.maps)))
+
+    @classmethod
+    def fromkeys(cls, iterable, *args):
+        'Create a ChainMap with a single dict created from the iterable.'
+        return cls(dict.fromkeys(iterable, *args))
+
+    def copy(self):
+        'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]'
+        return self.__class__(self.maps[0].copy(), *self.maps[1:])
+
+    __copy__ = copy
+
+    def new_child(self, m=None):                # like Django's Context.push()
+        '''New ChainMap with a new map followed by all previous maps.
+        If no map is provided, an empty dict is used.
+        '''
+        if m is None:
+            m = {}
+        return self.__class__(m, *self.maps)
+
+    @property
+    def parents(self):                          # like Django's Context.pop()
+        'New ChainMap from maps[1:].'
+        return self.__class__(*self.maps[1:])
+
+    def __setitem__(self, key, value):
+        self.maps[0][key] = value
+
+    def __delitem__(self, key):
+        try:
+            del self.maps[0][key]
+        except KeyError:
+            raise KeyError('Key not found in the first mapping: {!r}'.format(key))
+
+    def popitem(self):
+        'Remove and return an item pair from maps[0]. Raise KeyError if maps[0] is empty.'
+        try:
+            return self.maps[0].popitem()
+        except KeyError:
+            raise KeyError('No keys found in the first mapping.')
+
+    def pop(self, key, *args):
+        'Remove *key* from maps[0] and return its value.
Raise KeyError if *key* not in maps[0].' + try: + return self.maps[0].pop(key, *args) + except KeyError: + raise KeyError('Key not found in the first mapping: {!r}'.format(key)) + + def clear(self): + 'Clear maps[0], leaving maps[1:] intact.' + self.maps[0].clear() + + +################################################################################ +### UserDict +################################################################################ + +class UserDict(MutableMapping): + + # Start by filling-out the abstract methods + def __init__(*args, **kwargs): + if not args: + raise TypeError("descriptor '__init__' of 'UserDict' object " + "needs an argument") + self, *args = args + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + if args: + dict = args[0] + elif 'dict' in kwargs: + dict = kwargs.pop('dict') + import warnings + warnings.warn("Passing 'dict' as keyword argument is deprecated", + PendingDeprecationWarning, stacklevel=2) + else: + dict = None + self.data = {} + if dict is not None: + self.update(dict) + if len(kwargs): + self.update(kwargs) + def __len__(self): return len(self.data) + def __getitem__(self, key): + if key in self.data: + return self.data[key] + if hasattr(self.__class__, "__missing__"): + return self.__class__.__missing__(self, key) + raise KeyError(key) + def __setitem__(self, key, item): self.data[key] = item + def __delitem__(self, key): del self.data[key] + def __iter__(self): + return iter(self.data) + + # Modify __contains__ to work correctly when __missing__ is present + def __contains__(self, key): + return key in self.data + + # Now, add the methods in dicts but not in MutableMapping + def __repr__(self): return repr(self.data) + def copy(self): + if self.__class__ is UserDict: + return UserDict(self.data.copy()) + import copy + data = self.data + try: + self.data = {} + c = copy.copy(self) + finally: + self.data = data + c.update(self) + return c + @classmethod + def fromkeys(cls, iterable, value=None): + d = cls() + for key in iterable: + d[key] = value + return d + + + +################################################################################ +### UserList +################################################################################ + +class UserList(MutableSequence): + """A more or less complete user-defined wrapper around list objects.""" + def __init__(self, initlist=None): + self.data = [] + if initlist is not None: + # XXX should this accept an arbitrary sequence? 
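+            # For example, UserList([1, 2]) takes the first branch and copies
+            # via slice assignment, while UserList((1, 2)) falls through to
+            # the final branch and builds self.data with list(initlist).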
+ if type(initlist) == type(self.data): + self.data[:] = initlist + elif isinstance(initlist, UserList): + self.data[:] = initlist.data[:] + else: + self.data = list(initlist) + def __repr__(self): return repr(self.data) + def __lt__(self, other): return self.data < self.__cast(other) + def __le__(self, other): return self.data <= self.__cast(other) + def __eq__(self, other): return self.data == self.__cast(other) + def __gt__(self, other): return self.data > self.__cast(other) + def __ge__(self, other): return self.data >= self.__cast(other) + def __cast(self, other): + return other.data if isinstance(other, UserList) else other + def __contains__(self, item): return item in self.data + def __len__(self): return len(self.data) + def __getitem__(self, i): return self.data[i] + def __setitem__(self, i, item): self.data[i] = item + def __delitem__(self, i): del self.data[i] + def __add__(self, other): + if isinstance(other, UserList): + return self.__class__(self.data + other.data) + elif isinstance(other, type(self.data)): + return self.__class__(self.data + other) + return self.__class__(self.data + list(other)) + def __radd__(self, other): + if isinstance(other, UserList): + return self.__class__(other.data + self.data) + elif isinstance(other, type(self.data)): + return self.__class__(other + self.data) + return self.__class__(list(other) + self.data) + def __iadd__(self, other): + if isinstance(other, UserList): + self.data += other.data + elif isinstance(other, type(self.data)): + self.data += other + else: + self.data += list(other) + return self + def __mul__(self, n): + return self.__class__(self.data*n) + __rmul__ = __mul__ + def __imul__(self, n): + self.data *= n + return self + def append(self, item): self.data.append(item) + def insert(self, i, item): self.data.insert(i, item) + def pop(self, i=-1): return self.data.pop(i) + def remove(self, item): self.data.remove(item) + def clear(self): self.data.clear() + def copy(self): return self.__class__(self) + def count(self, item): return self.data.count(item) + def index(self, item, *args): return self.data.index(item, *args) + def reverse(self): self.data.reverse() + def sort(self, *args, **kwds): self.data.sort(*args, **kwds) + def extend(self, other): + if isinstance(other, UserList): + self.data.extend(other.data) + else: + self.data.extend(other) + + + +################################################################################ +### UserString +################################################################################ + +class UserString(Sequence): + def __init__(self, seq): + if isinstance(seq, str): + self.data = seq + elif isinstance(seq, UserString): + self.data = seq.data[:] + else: + self.data = str(seq) + def __str__(self): return str(self.data) + def __repr__(self): return repr(self.data) + def __int__(self): return int(self.data) + def __float__(self): return float(self.data) + def __complex__(self): return complex(self.data) + def __hash__(self): return hash(self.data) + def __getnewargs__(self): + return (self.data[:],) + + def __eq__(self, string): + if isinstance(string, UserString): + return self.data == string.data + return self.data == string + def __lt__(self, string): + if isinstance(string, UserString): + return self.data < string.data + return self.data < string + def __le__(self, string): + if isinstance(string, UserString): + return self.data <= string.data + return self.data <= string + def __gt__(self, string): + if isinstance(string, UserString): + return self.data > string.data + return 
self.data > string + def __ge__(self, string): + if isinstance(string, UserString): + return self.data >= string.data + return self.data >= string + + def __contains__(self, char): + if isinstance(char, UserString): + char = char.data + return char in self.data + + def __len__(self): return len(self.data) + def __getitem__(self, index): return self.__class__(self.data[index]) + def __add__(self, other): + if isinstance(other, UserString): + return self.__class__(self.data + other.data) + elif isinstance(other, str): + return self.__class__(self.data + other) + return self.__class__(self.data + str(other)) + def __radd__(self, other): + if isinstance(other, str): + return self.__class__(other + self.data) + return self.__class__(str(other) + self.data) + def __mul__(self, n): + return self.__class__(self.data*n) + __rmul__ = __mul__ + def __mod__(self, args): + return self.__class__(self.data % args) + def __rmod__(self, format): + return self.__class__(format % args) + + # the following methods are defined in alphabetical order: + def capitalize(self): return self.__class__(self.data.capitalize()) + def casefold(self): + return self.__class__(self.data.casefold()) + def center(self, width, *args): + return self.__class__(self.data.center(width, *args)) + def count(self, sub, start=0, end=_sys.maxsize): + if isinstance(sub, UserString): + sub = sub.data + return self.data.count(sub, start, end) + def encode(self, encoding=None, errors=None): # XXX improve this? + if encoding: + if errors: + return self.__class__(self.data.encode(encoding, errors)) + return self.__class__(self.data.encode(encoding)) + return self.__class__(self.data.encode()) + def endswith(self, suffix, start=0, end=_sys.maxsize): + return self.data.endswith(suffix, start, end) + def expandtabs(self, tabsize=8): + return self.__class__(self.data.expandtabs(tabsize)) + def find(self, sub, start=0, end=_sys.maxsize): + if isinstance(sub, UserString): + sub = sub.data + return self.data.find(sub, start, end) + def format(self, *args, **kwds): + return self.data.format(*args, **kwds) + def format_map(self, mapping): + return self.data.format_map(mapping) + def index(self, sub, start=0, end=_sys.maxsize): + return self.data.index(sub, start, end) + def isalpha(self): return self.data.isalpha() + def isalnum(self): return self.data.isalnum() + def isdecimal(self): return self.data.isdecimal() + def isdigit(self): return self.data.isdigit() + def isidentifier(self): return self.data.isidentifier() + def islower(self): return self.data.islower() + def isnumeric(self): return self.data.isnumeric() + def isprintable(self): return self.data.isprintable() + def isspace(self): return self.data.isspace() + def istitle(self): return self.data.istitle() + def isupper(self): return self.data.isupper() + def join(self, seq): return self.data.join(seq) + def ljust(self, width, *args): + return self.__class__(self.data.ljust(width, *args)) + def lower(self): return self.__class__(self.data.lower()) + def lstrip(self, chars=None): return self.__class__(self.data.lstrip(chars)) + maketrans = str.maketrans + def partition(self, sep): + return self.data.partition(sep) + def replace(self, old, new, maxsplit=-1): + if isinstance(old, UserString): + old = old.data + if isinstance(new, UserString): + new = new.data + return self.__class__(self.data.replace(old, new, maxsplit)) + def rfind(self, sub, start=0, end=_sys.maxsize): + if isinstance(sub, UserString): + sub = sub.data + return self.data.rfind(sub, start, end) + def rindex(self, sub, start=0, 
end=_sys.maxsize): + return self.data.rindex(sub, start, end) + def rjust(self, width, *args): + return self.__class__(self.data.rjust(width, *args)) + def rpartition(self, sep): + return self.data.rpartition(sep) + def rstrip(self, chars=None): + return self.__class__(self.data.rstrip(chars)) + def split(self, sep=None, maxsplit=-1): + return self.data.split(sep, maxsplit) + def rsplit(self, sep=None, maxsplit=-1): + return self.data.rsplit(sep, maxsplit) + def splitlines(self, keepends=False): return self.data.splitlines(keepends) + def startswith(self, prefix, start=0, end=_sys.maxsize): + return self.data.startswith(prefix, start, end) + def strip(self, chars=None): return self.__class__(self.data.strip(chars)) + def swapcase(self): return self.__class__(self.data.swapcase()) + def title(self): return self.__class__(self.data.title()) + def translate(self, *args): + return self.__class__(self.data.translate(*args)) + def upper(self): return self.__class__(self.data.upper()) + def zfill(self, width): return self.__class__(self.data.zfill(width)) diff --git a/typing_extensions/test_data/python-3.5.3/collections/__main__.py b/typing_extensions/test_data/python-3.5.3/collections/__main__.py new file mode 100644 index 00000000..763e38e0 --- /dev/null +++ b/typing_extensions/test_data/python-3.5.3/collections/__main__.py @@ -0,0 +1,38 @@ +################################################################################ +### Simple tests +################################################################################ + +# verify that instances can be pickled +from collections import namedtuple +from pickle import loads, dumps +Point = namedtuple('Point', 'x, y', True) +p = Point(x=10, y=20) +assert p == loads(dumps(p)) + +# test and demonstrate ability to override methods +class Point(namedtuple('Point', 'x y')): + __slots__ = () + @property + def hypot(self): + return (self.x ** 2 + self.y ** 2) ** 0.5 + def __str__(self): + return 'Point: x=%6.3f y=%6.3f hypot=%6.3f' % (self.x, self.y, self.hypot) + +for p in Point(3, 4), Point(14, 5/7.): + print (p) + +class Point(namedtuple('Point', 'x y')): + 'Point class with optimized _make() and _replace() without error-checking' + __slots__ = () + _make = classmethod(tuple.__new__) + def _replace(self, _map=map, **kwds): + return self._make(_map(kwds.get, ('x', 'y'), self)) + +print(Point(11, 22)._replace(x=100)) + +Point3D = namedtuple('Point3D', Point._fields + ('z',)) +print(Point3D.__doc__) + +import doctest, collections +TestResults = namedtuple('TestResults', 'failed attempted') +print(TestResults(*doctest.testmod(collections))) diff --git a/typing_extensions/test_data/python-3.5.3/collections/abc.py b/typing_extensions/test_data/python-3.5.3/collections/abc.py new file mode 100644 index 00000000..891600d1 --- /dev/null +++ b/typing_extensions/test_data/python-3.5.3/collections/abc.py @@ -0,0 +1,2 @@ +from _collections_abc import * +from _collections_abc import __all__ diff --git a/typing_extensions/test_data/python-3.5.3/typing.py b/typing_extensions/test_data/python-3.5.3/typing.py new file mode 100644 index 00000000..34845b74 --- /dev/null +++ b/typing_extensions/test_data/python-3.5.3/typing.py @@ -0,0 +1,2160 @@ +import abc +from abc import abstractmethod, abstractproperty +import collections +import contextlib +import functools +import re as stdlib_re # Avoid confusion with the re we export. 
+import sys +import types +try: + import collections.abc as collections_abc +except ImportError: + import collections as collections_abc # Fallback for PY3.2. + + +# Please keep __all__ alphabetized within each category. +__all__ = [ + # Super-special typing primitives. + 'Any', + 'Callable', + 'ClassVar', + 'Generic', + 'Optional', + 'Tuple', + 'Type', + 'TypeVar', + 'Union', + + # ABCs (from collections.abc). + 'AbstractSet', # collections.abc.Set. + 'ByteString', + 'Container', + 'Hashable', + 'ItemsView', + 'Iterable', + 'Iterator', + 'KeysView', + 'Mapping', + 'MappingView', + 'MutableMapping', + 'MutableSequence', + 'MutableSet', + 'Sequence', + 'Sized', + 'ValuesView', + # The following are added depending on presence + # of their non-generic counterparts in stdlib: + # Awaitable, + # AsyncIterator, + # AsyncIterable, + # Coroutine, + # Collection, + # ContextManager + + # Structural checks, a.k.a. protocols. + 'Reversible', + 'SupportsAbs', + 'SupportsFloat', + 'SupportsInt', + 'SupportsRound', + + # Concrete collection types. + 'Dict', + 'DefaultDict', + 'List', + 'Set', + 'FrozenSet', + 'NamedTuple', # Not really a type. + 'Generator', + + # One-off things. + 'AnyStr', + 'cast', + 'get_type_hints', + 'NewType', + 'no_type_check', + 'no_type_check_decorator', + 'overload', + 'Text', + 'TYPE_CHECKING', +] + +# The pseudo-submodules 're' and 'io' are part of the public +# namespace, but excluded from __all__ because they might stomp on +# legitimate imports of those modules. + + +def _qualname(x): + if sys.version_info[:2] >= (3, 3): + return x.__qualname__ + else: + # Fall back to just name. + return x.__name__ + + +def _trim_name(nm): + if nm.startswith('_') and nm not in ('_TypeAlias', + '_ForwardRef', '_TypingBase', '_FinalTypingBase'): + nm = nm[1:] + return nm + + +class TypingMeta(type): + """Metaclass for most types defined in typing module + (not a part of public API). + + This overrides __new__() to require an extra keyword parameter + '_root', which serves as a guard against naive subclassing of the + typing classes. Any legitimate class defined using a metaclass + derived from TypingMeta must pass _root=True. + + This also defines a dummy constructor (all the work for most typing + constructs is done in __new__) and a nicer repr(). + """ + + _is_protocol = False + + def __new__(cls, name, bases, namespace, *, _root=False): + if not _root: + raise TypeError("Cannot subclass %s" % + (', '.join(map(_type_repr, bases)) or '()')) + return super().__new__(cls, name, bases, namespace) + + def __init__(self, *args, **kwds): + pass + + def _eval_type(self, globalns, localns): + """Override this in subclasses to interpret forward references. + + For example, List['C'] is internally stored as + List[_ForwardRef('C')], which should evaluate to List[C], + where C is an object found in globalns or localns (searching + localns first, of course). + """ + return self + + def _get_type_vars(self, tvars): + pass + + def __repr__(self): + qname = _trim_name(_qualname(self)) + return '%s.%s' % (self.__module__, qname) + + +class _TypingBase(metaclass=TypingMeta, _root=True): + """Internal indicator of special typing constructs.""" + + __slots__ = () + + def __init__(self, *args, **kwds): + pass + + def __new__(cls, *args, **kwds): + """Constructor. + + This only exists to give a better error message in case + someone tries to subclass a special typing object (not a good idea). + """ + if (len(args) == 3 and + isinstance(args[0], str) and + isinstance(args[1], tuple)): + # Close enough. 
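+            # (When an instance such as Any appears in a class's bases list,
+            # type(instance) is called with (name, bases, namespace), so a
+            # three-argument call of this shape signals a subclass attempt.)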
+ raise TypeError("Cannot subclass %r" % cls) + return super().__new__(cls) + + # Things that are not classes also need these. + def _eval_type(self, globalns, localns): + return self + + def _get_type_vars(self, tvars): + pass + + def __repr__(self): + cls = type(self) + qname = _trim_name(_qualname(cls)) + return '%s.%s' % (cls.__module__, qname) + + def __call__(self, *args, **kwds): + raise TypeError("Cannot instantiate %r" % type(self)) + + +class _FinalTypingBase(_TypingBase, _root=True): + """Internal mix-in class to prevent instantiation. + + Prevents instantiation unless _root=True is given in class call. + It is used to create pseudo-singleton instances Any, Union, Optional, etc. + """ + + __slots__ = () + + def __new__(cls, *args, _root=False, **kwds): + self = super().__new__(cls, *args, **kwds) + if _root is True: + return self + raise TypeError("Cannot instantiate %r" % cls) + + def __reduce__(self): + return _trim_name(type(self).__name__) + + +class _ForwardRef(_TypingBase, _root=True): + """Internal wrapper to hold a forward reference.""" + + __slots__ = ('__forward_arg__', '__forward_code__', + '__forward_evaluated__', '__forward_value__') + + def __init__(self, arg): + super().__init__(arg) + if not isinstance(arg, str): + raise TypeError('Forward reference must be a string -- got %r' % (arg,)) + try: + code = compile(arg, '', 'eval') + except SyntaxError: + raise SyntaxError('Forward reference must be an expression -- got %r' % + (arg,)) + self.__forward_arg__ = arg + self.__forward_code__ = code + self.__forward_evaluated__ = False + self.__forward_value__ = None + + def _eval_type(self, globalns, localns): + if not self.__forward_evaluated__ or localns is not globalns: + if globalns is None and localns is None: + globalns = localns = {} + elif globalns is None: + globalns = localns + elif localns is None: + localns = globalns + self.__forward_value__ = _type_check( + eval(self.__forward_code__, globalns, localns), + "Forward references must evaluate to types.") + self.__forward_evaluated__ = True + return self.__forward_value__ + + def __eq__(self, other): + if not isinstance(other, _ForwardRef): + return NotImplemented + return (self.__forward_arg__ == other.__forward_arg__ and + self.__forward_value__ == other.__forward_value__) + + def __hash__(self): + return hash((self.__forward_arg__, self.__forward_value__)) + + def __instancecheck__(self, obj): + raise TypeError("Forward references cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + raise TypeError("Forward references cannot be used with issubclass().") + + def __repr__(self): + return '_ForwardRef(%r)' % (self.__forward_arg__,) + + +class _TypeAlias(_TypingBase, _root=True): + """Internal helper class for defining generic variants of concrete types. + + Note that this is not a type; let's call it a pseudo-type. It cannot + be used in instance and subclass checks in parameterized form, i.e. + ``isinstance(42, Match[str])`` raises ``TypeError`` instead of returning + ``False``. + """ + + __slots__ = ('name', 'type_var', 'impl_type', 'type_checker') + + def __init__(self, name, type_var, impl_type, type_checker): + """Initializer. + + Args: + name: The name, e.g. 'Pattern'. + type_var: The type parameter, e.g. AnyStr, or the + specific type, e.g. str. + impl_type: The implementation type. + type_checker: Function that takes an impl_type instance. + and returns a value that should be a type_var instance. 
+ """ + assert isinstance(name, str), repr(name) + assert isinstance(impl_type, type), repr(impl_type) + assert not isinstance(impl_type, TypingMeta), repr(impl_type) + assert isinstance(type_var, (type, _TypingBase)), repr(type_var) + self.name = name + self.type_var = type_var + self.impl_type = impl_type + self.type_checker = type_checker + + def __repr__(self): + return "%s[%s]" % (self.name, _type_repr(self.type_var)) + + def __getitem__(self, parameter): + if not isinstance(self.type_var, TypeVar): + raise TypeError("%s cannot be further parameterized." % self) + if self.type_var.__constraints__ and isinstance(parameter, type): + if not issubclass(parameter, self.type_var.__constraints__): + raise TypeError("%s is not a valid substitution for %s." % + (parameter, self.type_var)) + if isinstance(parameter, TypeVar) and parameter is not self.type_var: + raise TypeError("%s cannot be re-parameterized." % self) + return self.__class__(self.name, parameter, + self.impl_type, self.type_checker) + + def __eq__(self, other): + if not isinstance(other, _TypeAlias): + return NotImplemented + return self.name == other.name and self.type_var == other.type_var + + def __hash__(self): + return hash((self.name, self.type_var)) + + def __instancecheck__(self, obj): + if not isinstance(self.type_var, TypeVar): + raise TypeError("Parameterized type aliases cannot be used " + "with isinstance().") + return isinstance(obj, self.impl_type) + + def __subclasscheck__(self, cls): + if not isinstance(self.type_var, TypeVar): + raise TypeError("Parameterized type aliases cannot be used " + "with issubclass().") + return issubclass(cls, self.impl_type) + + +def _get_type_vars(types, tvars): + for t in types: + if isinstance(t, TypingMeta) or isinstance(t, _TypingBase): + t._get_type_vars(tvars) + + +def _type_vars(types): + tvars = [] + _get_type_vars(types, tvars) + return tuple(tvars) + + +def _eval_type(t, globalns, localns): + if isinstance(t, TypingMeta) or isinstance(t, _TypingBase): + return t._eval_type(globalns, localns) + return t + + +def _type_check(arg, msg): + """Check that the argument is a type, and return it (internal helper). + + As a special case, accept None and return type(None) instead. + Also, _TypeAlias instances (e.g. Match, Pattern) are acceptable. + + The msg argument is a human-readable error message, e.g. + + "Union[arg, ...]: arg should be a type." + + We append the repr() of the actual value (truncated to 100 chars). + """ + if arg is None: + return type(None) + if isinstance(arg, str): + arg = _ForwardRef(arg) + if (isinstance(arg, _TypingBase) and type(arg).__name__ == '_ClassVar' or + not isinstance(arg, (type, _TypingBase)) and not callable(arg)): + raise TypeError(msg + " Got %.100r." % (arg,)) + # Bare Union etc. are not valid as type arguments + if (type(arg).__name__ in ('_Union', '_Optional') + and not getattr(arg, '__origin__', None) + or isinstance(arg, TypingMeta) and _gorg(arg) in (Generic, _Protocol)): + raise TypeError("Plain %s is not valid as type argument" % arg) + return arg + + +def _type_repr(obj): + """Return the repr() of an object, special-casing types (internal helper). + + If obj is a type, we return a shorter version than the default + type.__repr__, based on the module and qualified name, which is + typically enough to uniquely identify a type. For everything + else, we fall back on repr(obj). 
+ """ + if isinstance(obj, type) and not isinstance(obj, TypingMeta): + if obj.__module__ == 'builtins': + return _qualname(obj) + return '%s.%s' % (obj.__module__, _qualname(obj)) + if obj is ...: + return('...') + if isinstance(obj, types.FunctionType): + return obj.__name__ + return repr(obj) + + +class _Any(_FinalTypingBase, _root=True): + """Special type indicating an unconstrained type. + + - Any is compatible with every type. + - Any assumed to have all methods. + - All values assumed to be instances of Any. + + Note that all the above statements are true from the point of view of + static type checkers. At runtime, Any should not be used with instance + or class checks. + """ + + __slots__ = () + + def __instancecheck__(self, obj): + raise TypeError("Any cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + raise TypeError("Any cannot be used with issubclass().") + + +Any = _Any(_root=True) + + +class TypeVar(_TypingBase, _root=True): + """Type variable. + + Usage:: + + T = TypeVar('T') # Can be anything + A = TypeVar('A', str, bytes) # Must be str or bytes + + Type variables exist primarily for the benefit of static type + checkers. They serve as the parameters for generic types as well + as for generic function definitions. See class Generic for more + information on generic types. Generic functions work as follows: + + def repeat(x: T, n: int) -> List[T]: + '''Return a list containing n references to x.''' + return [x]*n + + def longest(x: A, y: A) -> A: + '''Return the longest of two strings.''' + return x if len(x) >= len(y) else y + + The latter example's signature is essentially the overloading + of (str, str) -> str and (bytes, bytes) -> bytes. Also note + that if the arguments are instances of some subclass of str, + the return type is still plain str. + + At runtime, isinstance(x, T) and issubclass(C, T) will raise TypeError. + + Type variables defined with covariant=True or contravariant=True + can be used do declare covariant or contravariant generic types. + See PEP 484 for more details. By default generic types are invariant + in all type variables. + + Type variables can be introspected. e.g.: + + T.__name__ == 'T' + T.__constraints__ == () + T.__covariant__ == False + T.__contravariant__ = False + A.__constraints__ == (str, bytes) + """ + + __slots__ = ('__name__', '__bound__', '__constraints__', + '__covariant__', '__contravariant__') + + def __init__(self, name, *constraints, bound=None, + covariant=False, contravariant=False): + super().__init__(name, *constraints, bound=bound, + covariant=covariant, contravariant=contravariant) + self.__name__ = name + if covariant and contravariant: + raise ValueError("Bivariant types are not supported.") + self.__covariant__ = bool(covariant) + self.__contravariant__ = bool(contravariant) + if constraints and bound is not None: + raise TypeError("Constraints cannot be combined with bound=...") + if constraints and len(constraints) == 1: + raise TypeError("A single constraint is not allowed") + msg = "TypeVar(name, constraint, ...): constraints must be types." 
+ self.__constraints__ = tuple(_type_check(t, msg) for t in constraints) + if bound: + self.__bound__ = _type_check(bound, "Bound must be a type.") + else: + self.__bound__ = None + + def _get_type_vars(self, tvars): + if self not in tvars: + tvars.append(self) + + def __repr__(self): + if self.__covariant__: + prefix = '+' + elif self.__contravariant__: + prefix = '-' + else: + prefix = '~' + return prefix + self.__name__ + + def __instancecheck__(self, instance): + raise TypeError("Type variables cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + raise TypeError("Type variables cannot be used with issubclass().") + + +# Some unconstrained type variables. These are used by the container types. +# (These are not for export.) +T = TypeVar('T') # Any type. +KT = TypeVar('KT') # Key type. +VT = TypeVar('VT') # Value type. +T_co = TypeVar('T_co', covariant=True) # Any type covariant containers. +V_co = TypeVar('V_co', covariant=True) # Any type covariant containers. +VT_co = TypeVar('VT_co', covariant=True) # Value type covariant containers. +T_contra = TypeVar('T_contra', contravariant=True) # Ditto contravariant. + +# A useful type variable with constraints. This represents string types. +# (This one *is* for export!) +AnyStr = TypeVar('AnyStr', bytes, str) + + +def _replace_arg(arg, tvars, args): + """An internal helper function: replace arg if it is a type variable + found in tvars with corresponding substitution from args or + with corresponding substitution sub-tree if arg is a generic type. + """ + + if tvars is None: + tvars = [] + if hasattr(arg, '_subs_tree'): + return arg._subs_tree(tvars, args) + if isinstance(arg, TypeVar): + for i, tvar in enumerate(tvars): + if arg == tvar: + return args[i] + return arg + + +def _subs_tree(cls, tvars=None, args=None): + """An internal helper function: calculate substitution tree + for generic cls after replacing its type parameters with + substitutions in tvars -> args (if any). + Repeat the same following __origin__'s. + + Return a list of arguments with all possible substitutions + performed. Arguments that are generic classes themselves are represented + as tuples (so that no new classes are created by this function). + For example: _subs_tree(List[Tuple[int, T]][str]) == [(Tuple, int, str)] + """ + + if cls.__origin__ is None: + return cls + # Make of chain of origins (i.e. cls -> cls.__origin__) + current = cls.__origin__ + orig_chain = [] + while current.__origin__ is not None: + orig_chain.append(current) + current = current.__origin__ + # Replace type variables in __args__ if asked ... + tree_args = [] + for arg in cls.__args__: + tree_args.append(_replace_arg(arg, tvars, args)) + # ... then continue replacing down the origin chain. + for ocls in orig_chain: + new_tree_args = [] + for i, arg in enumerate(ocls.__args__): + new_tree_args.append(_replace_arg(arg, ocls.__parameters__, tree_args)) + tree_args = new_tree_args + return tree_args + + +def _remove_dups_flatten(parameters): + """An internal helper for Union creation and substitution: flatten Union's + among parameters, then remove duplicates and strict subclasses. + """ + + # Flatten out Union[Union[...], ...]. + params = [] + for p in parameters: + if isinstance(p, _Union) and p.__origin__ is Union: + params.extend(p.__args__) + elif isinstance(p, tuple) and len(p) > 0 and p[0] is Union: + params.extend(p[1:]) + else: + params.append(p) + # Weed out strict duplicates, preserving the first of each occurrence. 
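+    # e.g. (int, str, int) -> (int, str): set membership detects repeats,
+    # while the ordered walk below keeps the first occurrence of each.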
+ all_params = set(params) + if len(all_params) < len(params): + new_params = [] + for t in params: + if t in all_params: + new_params.append(t) + all_params.remove(t) + params = new_params + assert not all_params, all_params + # Weed out subclasses. + # E.g. Union[int, Employee, Manager] == Union[int, Employee]. + # If object is present it will be sole survivor among proper classes. + # Never discard type variables. + # (In particular, Union[str, AnyStr] != AnyStr.) + all_params = set(params) + for t1 in params: + if not isinstance(t1, type): + continue + if any(isinstance(t2, type) and issubclass(t1, t2) + for t2 in all_params - {t1} + if not (isinstance(t2, GenericMeta) and + t2.__origin__ is not None)): + all_params.remove(t1) + return tuple(t for t in params if t in all_params) + + +def _check_generic(cls, parameters): + # Check correct count for parameters of a generic cls (internal helper). + if not cls.__parameters__: + raise TypeError("%s is not a generic class" % repr(cls)) + alen = len(parameters) + elen = len(cls.__parameters__) + if alen != elen: + raise TypeError("Too %s parameters for %s; actual %s, expected %s" % + ("many" if alen > elen else "few", repr(cls), alen, elen)) + + +_cleanups = [] + + +def _tp_cache(func): + """Internal wrapper caching __getitem__ of generic types with a fallback to + original function for non-hashable arguments. + """ + + cached = functools.lru_cache()(func) + _cleanups.append(cached.cache_clear) + @functools.wraps(func) + def inner(*args, **kwds): + try: + return cached(*args, **kwds) + except TypeError: + pass # All real errors (not unhashable args) are raised below. + return func(*args, **kwds) + return inner + + +class _Union(_FinalTypingBase, _root=True): + """Union type; Union[X, Y] means either X or Y. + + To define a union, use e.g. Union[int, str]. Details: + + - The arguments must be types and there must be at least one. + + - None as an argument is a special case and is replaced by + type(None). + + - Unions of unions are flattened, e.g.:: + + Union[Union[int, str], float] == Union[int, str, float] + + - Unions of a single argument vanish, e.g.:: + + Union[int] == int # The constructor actually returns int + + - Redundant arguments are skipped, e.g.:: + + Union[int, str, int] == Union[int, str] + + - When comparing unions, the argument order is ignored, e.g.:: + + Union[int, str] == Union[str, int] + + - When two arguments have a subclass relationship, the least + derived argument is kept, e.g.:: + + class Employee: pass + class Manager(Employee): pass + Union[int, Employee, Manager] == Union[int, Employee] + Union[Manager, int, Employee] == Union[int, Employee] + Union[Employee, Manager] == Employee + + - Similar for object:: + + Union[int, object] == object + + - You cannot subclass or instantiate a union. + + - You can use Optional[X] as a shorthand for Union[X, None]. + """ + + __slots__ = ('__parameters__', '__args__', '__origin__', '__tree_hash__') + + def __new__(cls, parameters=None, origin=None, *args, _root=False): + self = super().__new__(cls, parameters, origin, *args, _root=_root) + if origin is None: + self.__parameters__ = None + self.__args__ = None + self.__origin__ = None + self.__tree_hash__ = hash(frozenset(('Union',))) + return self + if not isinstance(parameters, tuple): + raise TypeError("Expected parameters=") + if origin is Union: + parameters = _remove_dups_flatten(parameters) + # It's not a union if there's only one type left. 
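+            # e.g. Union[int] and Union[int, int] both collapse to plain int.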
+ if len(parameters) == 1: + return parameters[0] + self.__parameters__ = _type_vars(parameters) + self.__args__ = parameters + self.__origin__ = origin + # Pre-calculate the __hash__ on instantiation. + # This improves speed for complex substitutions. + subs_tree = self._subs_tree() + if isinstance(subs_tree, tuple): + self.__tree_hash__ = hash(frozenset(subs_tree)) + else: + self.__tree_hash__ = hash(subs_tree) + return self + + def _eval_type(self, globalns, localns): + if self.__args__ is None: + return self + ev_args = tuple(_eval_type(t, globalns, localns) for t in self.__args__) + ev_origin = _eval_type(self.__origin__, globalns, localns) + if ev_args == self.__args__ and ev_origin == self.__origin__: + # Everything is already evaluated. + return self + return self.__class__(ev_args, ev_origin, _root=True) + + def _get_type_vars(self, tvars): + if self.__origin__ and self.__parameters__: + _get_type_vars(self.__parameters__, tvars) + + def __repr__(self): + if self.__origin__ is None: + return super().__repr__() + tree = self._subs_tree() + if not isinstance(tree, tuple): + return repr(tree) + return tree[0]._tree_repr(tree) + + def _tree_repr(self, tree): + arg_list = [] + for arg in tree[1:]: + if not isinstance(arg, tuple): + arg_list.append(_type_repr(arg)) + else: + arg_list.append(arg[0]._tree_repr(arg)) + return super().__repr__() + '[%s]' % ', '.join(arg_list) + + @_tp_cache + def __getitem__(self, parameters): + if parameters == (): + raise TypeError("Cannot take a Union of no types.") + if not isinstance(parameters, tuple): + parameters = (parameters,) + if self.__origin__ is None: + msg = "Union[arg, ...]: each arg must be a type." + else: + msg = "Parameters to generic types must be types." + parameters = tuple(_type_check(p, msg) for p in parameters) + if self is not Union: + _check_generic(self, parameters) + return self.__class__(parameters, origin=self, _root=True) + + def _subs_tree(self, tvars=None, args=None): + if self is Union: + return Union # Nothing to substitute + tree_args = _subs_tree(self, tvars, args) + tree_args = _remove_dups_flatten(tree_args) + if len(tree_args) == 1: + return tree_args[0] # Union of a single type is that type + return (Union,) + tree_args + + def __eq__(self, other): + if not isinstance(other, _Union): + return self._subs_tree() == other + return self.__tree_hash__ == other.__tree_hash__ + + def __hash__(self): + return self.__tree_hash__ + + def __instancecheck__(self, obj): + raise TypeError("Unions cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + raise TypeError("Unions cannot be used with issubclass().") + + +Union = _Union(_root=True) + + +class _Optional(_FinalTypingBase, _root=True): + """Optional type. + + Optional[X] is equivalent to Union[X, None]. + """ + + __slots__ = () + + @_tp_cache + def __getitem__(self, arg): + arg = _type_check(arg, "Optional[t] requires a single type.") + return Union[arg, type(None)] + + +Optional = _Optional(_root=True) + + +def _gorg(a): + """Return the farthest origin of a generic class (internal helper).""" + assert isinstance(a, GenericMeta) + while a.__origin__ is not None: + a = a.__origin__ + return a + + +def _geqv(a, b): + """Return whether two generic classes are equivalent (internal helper). + + The intention is to consider generic class X and any of its + parameterized forms (X[T], X[int], etc.) as equivalent. + + However, X is not equivalent to a subclass of X. + + The relation is reflexive, symmetric and transitive. 
+ """ + assert isinstance(a, GenericMeta) and isinstance(b, GenericMeta) + # Reduce each to its origin. + return _gorg(a) is _gorg(b) + + +def _next_in_mro(cls): + """Helper for Generic.__new__. + + Returns the class after the last occurrence of Generic or + Generic[...] in cls.__mro__. + """ + next_in_mro = object + # Look for the last occurrence of Generic or Generic[...]. + for i, c in enumerate(cls.__mro__[:-1]): + if isinstance(c, GenericMeta) and _gorg(c) is Generic: + next_in_mro = cls.__mro__[i+1] + return next_in_mro + + +def _valid_for_check(cls): + """An internal helper to prohibit isinstance([1], List[str]) etc.""" + if cls is Generic: + raise TypeError("Class %r cannot be used with class " + "or instance checks" % cls) + if (cls.__origin__ is not None and + sys._getframe(3).f_globals['__name__'] not in ['abc', 'functools']): + raise TypeError("Parameterized generics cannot be used with class " + "or instance checks") + + +def _make_subclasshook(cls): + """Construct a __subclasshook__ callable that incorporates + the associated __extra__ class in subclass checks performed + against cls. + """ + if isinstance(cls.__extra__, abc.ABCMeta): + # The logic mirrors that of ABCMeta.__subclasscheck__. + # Registered classes need not be checked here because + # cls and its extra share the same _abc_registry. + def __extrahook__(subclass): + _valid_for_check(cls) + res = cls.__extra__.__subclasshook__(subclass) + if res is not NotImplemented: + return res + if cls.__extra__ in subclass.__mro__: + return True + for scls in cls.__extra__.__subclasses__(): + if isinstance(scls, GenericMeta): + continue + if issubclass(subclass, scls): + return True + return NotImplemented + else: + # For non-ABC extras we'll just call issubclass(). + def __extrahook__(subclass): + _valid_for_check(cls) + if cls.__extra__ and issubclass(subclass, cls.__extra__): + return True + return NotImplemented + return __extrahook__ + + +def _no_slots_copy(dct): + """Internal helper: copy class __dict__ and clean slots class variables. + (They will be re-created if necessary by normal class machinery.) + """ + dict_copy = dict(dct) + if '__slots__' in dict_copy: + for slot in dict_copy['__slots__']: + dict_copy.pop(slot, None) + return dict_copy + + +class GenericMeta(TypingMeta, abc.ABCMeta): + """Metaclass for generic types.""" + + def __new__(cls, name, bases, namespace, + tvars=None, args=None, origin=None, extra=None, orig_bases=None): + if tvars is not None: + # Called from __getitem__() below. + assert origin is not None + assert all(isinstance(t, TypeVar) for t in tvars), tvars + else: + # Called from class statement. + assert tvars is None, tvars + assert args is None, args + assert origin is None, origin + + # Get the full set of tvars from the bases. + tvars = _type_vars(bases) + # Look for Generic[T1, ..., Tn]. + # If found, tvars must be a subset of it. + # If not found, tvars is it. + # Also check for and reject plain Generic, + # and reject multiple Generic[...]. + gvars = None + for base in bases: + if base is Generic: + raise TypeError("Cannot inherit from plain Generic") + if (isinstance(base, GenericMeta) and + base.__origin__ is Generic): + if gvars is not None: + raise TypeError( + "Cannot inherit from Generic[...] 
multiple types.") + gvars = base.__parameters__ + if gvars is None: + gvars = tvars + else: + tvarset = set(tvars) + gvarset = set(gvars) + if not tvarset <= gvarset: + raise TypeError( + "Some type variables (%s) " + "are not listed in Generic[%s]" % + (", ".join(str(t) for t in tvars if t not in gvarset), + ", ".join(str(g) for g in gvars))) + tvars = gvars + + initial_bases = bases + if extra is not None and type(extra) is abc.ABCMeta and extra not in bases: + bases = (extra,) + bases + bases = tuple(_gorg(b) if isinstance(b, GenericMeta) else b for b in bases) + + # remove bare Generic from bases if there are other generic bases + if any(isinstance(b, GenericMeta) and b is not Generic for b in bases): + bases = tuple(b for b in bases if b is not Generic) + self = super().__new__(cls, name, bases, namespace, _root=True) + + self.__parameters__ = tvars + # Be prepared that GenericMeta will be subclassed by TupleMeta + # and CallableMeta, those two allow ..., (), or [] in __args___. + self.__args__ = tuple(... if a is _TypingEllipsis else + () if a is _TypingEmpty else + a for a in args) if args else None + self.__origin__ = origin + self.__extra__ = extra + # Speed hack (https://github.com/python/typing/issues/196). + self.__next_in_mro__ = _next_in_mro(self) + # Preserve base classes on subclassing (__bases__ are type erased now). + if orig_bases is None: + self.__orig_bases__ = initial_bases + + # This allows unparameterized generic collections to be used + # with issubclass() and isinstance() in the same way as their + # collections.abc counterparts (e.g., isinstance([], Iterable)). + if ('__subclasshook__' not in namespace and extra # allow overriding + or hasattr(self.__subclasshook__, '__name__') and + self.__subclasshook__.__name__ == '__extrahook__'): + self.__subclasshook__ = _make_subclasshook(self) + if isinstance(extra, abc.ABCMeta): + self._abc_registry = extra._abc_registry + + if origin and hasattr(origin, '__qualname__'): # Fix for Python 3.2. 
+ self.__qualname__ = origin.__qualname__ + self.__tree_hash__ = hash(self._subs_tree()) if origin else hash((self.__name__,)) + return self + + def _get_type_vars(self, tvars): + if self.__origin__ and self.__parameters__: + _get_type_vars(self.__parameters__, tvars) + + def _eval_type(self, globalns, localns): + ev_origin = (self.__origin__._eval_type(globalns, localns) + if self.__origin__ else None) + ev_args = tuple(_eval_type(a, globalns, localns) for a + in self.__args__) if self.__args__ else None + if ev_origin == self.__origin__ and ev_args == self.__args__: + return self + return self.__class__(self.__name__, + self.__bases__, + _no_slots_copy(self.__dict__), + tvars=_type_vars(ev_args) if ev_args else None, + args=ev_args, + origin=ev_origin, + extra=self.__extra__, + orig_bases=self.__orig_bases__) + + def __repr__(self): + if self.__origin__ is None: + return super().__repr__() + return self._tree_repr(self._subs_tree()) + + def _tree_repr(self, tree): + arg_list = [] + for arg in tree[1:]: + if arg == (): + arg_list.append('()') + elif not isinstance(arg, tuple): + arg_list.append(_type_repr(arg)) + else: + arg_list.append(arg[0]._tree_repr(arg)) + return super().__repr__() + '[%s]' % ', '.join(arg_list) + + def _subs_tree(self, tvars=None, args=None): + if self.__origin__ is None: + return self + tree_args = _subs_tree(self, tvars, args) + return (_gorg(self),) + tuple(tree_args) + + def __eq__(self, other): + if not isinstance(other, GenericMeta): + return NotImplemented + if self.__origin__ is None or other.__origin__ is None: + return self is other + return self.__tree_hash__ == other.__tree_hash__ + + def __hash__(self): + return self.__tree_hash__ + + @_tp_cache + def __getitem__(self, params): + if not isinstance(params, tuple): + params = (params,) + if not params and not _gorg(self) is Tuple: + raise TypeError( + "Parameter list to %s[...] cannot be empty" % _qualname(self)) + msg = "Parameters to generic types must be types." + params = tuple(_type_check(p, msg) for p in params) + if self is Generic: + # Generic can only be subscripted with unique type variables. + if not all(isinstance(p, TypeVar) for p in params): + raise TypeError( + "Parameters to Generic[...] must all be type variables") + if len(set(params)) != len(params): + raise TypeError( + "Parameters to Generic[...] must all be unique") + tvars = params + args = params + elif self in (Tuple, Callable): + tvars = _type_vars(params) + args = params + elif self is _Protocol: + # _Protocol is internal, don't check anything. + tvars = params + args = params + elif self.__origin__ in (Generic, _Protocol): + # Can't subscript Generic[...] or _Protocol[...]. + raise TypeError("Cannot subscript already-subscripted %s" % + repr(self)) + else: + # Subscripting a regular Generic subclass. + _check_generic(self, params) + tvars = _type_vars(params) + args = params + return self.__class__(self.__name__, + self.__bases__, + _no_slots_copy(self.__dict__), + tvars=tvars, + args=args, + origin=self, + extra=self.__extra__, + orig_bases=self.__orig_bases__) + + def __instancecheck__(self, instance): + # Since we extend ABC.__subclasscheck__ and + # ABC.__instancecheck__ inlines the cache checking done by the + # latter, we must extend __instancecheck__ too. For simplicity + # we just skip the cache check -- instance checks for generic + # classes are supposed to be rare anyways. 
+ return issubclass(instance.__class__, self) + + def __copy__(self): + return self.__class__(self.__name__, self.__bases__, + _no_slots_copy(self.__dict__), + self.__parameters__, self.__args__, self.__origin__, + self.__extra__, self.__orig_bases__) + + +# Prevent checks for Generic to crash when defining Generic. +Generic = None + + +def _generic_new(base_cls, cls, *args, **kwds): + # Assure type is erased on instantiation, + # but attempt to store it in __orig_class__ + if cls.__origin__ is None: + return base_cls.__new__(cls) + else: + origin = _gorg(cls) + obj = base_cls.__new__(origin) + try: + obj.__orig_class__ = cls + except AttributeError: + pass + obj.__init__(*args, **kwds) + return obj + + +class Generic(metaclass=GenericMeta): + """Abstract base class for generic types. + + A generic type is typically declared by inheriting from + this class parameterized with one or more type variables. + For example, a generic mapping type might be defined as:: + + class Mapping(Generic[KT, VT]): + def __getitem__(self, key: KT) -> VT: + ... + # Etc. + + This class can then be used as follows:: + + def lookup_name(mapping: Mapping[KT, VT], key: KT, default: VT) -> VT: + try: + return mapping[key] + except KeyError: + return default + """ + + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, Generic): + raise TypeError("Type Generic cannot be instantiated; " + "it can be used only as a base class") + return _generic_new(cls.__next_in_mro__, cls, *args, **kwds) + + +class _TypingEmpty: + """Internal placeholder for () or []. Used by TupleMeta and CallableMeta + to allow empty list/tuple in specific places, without allowing them + to sneak in where prohibited. + """ + + +class _TypingEllipsis: + """Internal placeholder for ... (ellipsis).""" + + +class TupleMeta(GenericMeta): + """Metaclass for Tuple (internal).""" + + @_tp_cache + def __getitem__(self, parameters): + if self.__origin__ is not None or not _geqv(self, Tuple): + # Normal generic rules apply if this is not the first subscription + # or a subscription of a subclass. + return super().__getitem__(parameters) + if parameters == (): + return super().__getitem__((_TypingEmpty,)) + if not isinstance(parameters, tuple): + parameters = (parameters,) + if len(parameters) == 2 and parameters[1] is ...: + msg = "Tuple[t, ...]: t must be a type." + p = _type_check(parameters[0], msg) + return super().__getitem__((p, _TypingEllipsis)) + msg = "Tuple[t0, t1, ...]: each t must be a type." + parameters = tuple(_type_check(p, msg) for p in parameters) + return super().__getitem__(parameters) + + def __instancecheck__(self, obj): + if self.__args__ == None: + return isinstance(obj, tuple) + raise TypeError("Parameterized Tuple cannot be used " + "with isinstance().") + + def __subclasscheck__(self, cls): + if self.__args__ == None: + return issubclass(cls, tuple) + raise TypeError("Parameterized Tuple cannot be used " + "with issubclass().") + + +class Tuple(tuple, extra=tuple, metaclass=TupleMeta): + """Tuple type; Tuple[X, Y] is the cross-product type of X and Y. + + Example: Tuple[T1, T2] is a tuple of two elements corresponding + to type variables T1 and T2. Tuple[int, float, str] is a tuple + of an int, a float and a string. + + To specify a variable-length tuple of homogeneous type, use Tuple[T, ...]. 
+ """ + + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, Tuple): + raise TypeError("Type Tuple cannot be instantiated; " + "use tuple() instead") + return _generic_new(tuple, cls, *args, **kwds) + + +class CallableMeta(GenericMeta): + """Metaclass for Callable (internal).""" + + def __repr__(self): + if self.__origin__ is None: + return super().__repr__() + return self._tree_repr(self._subs_tree()) + + def _tree_repr(self, tree): + if _gorg(self) is not Callable: + return super()._tree_repr(tree) + # For actual Callable (not its subclass) we override + # super()._tree_repr() for nice formatting. + arg_list = [] + for arg in tree[1:]: + if not isinstance(arg, tuple): + arg_list.append(_type_repr(arg)) + else: + arg_list.append(arg[0]._tree_repr(arg)) + if arg_list[0] == '...': + return repr(tree[0]) + '[..., %s]' % arg_list[1] + return (repr(tree[0]) + + '[[%s], %s]' % (', '.join(arg_list[:-1]), arg_list[-1])) + + def __getitem__(self, parameters): + """A thin wrapper around __getitem_inner__ to provide the latter + with hashable arguments to improve speed. + """ + + if self.__origin__ is not None or not _geqv(self, Callable): + return super().__getitem__(parameters) + if not isinstance(parameters, tuple) or len(parameters) != 2: + raise TypeError("Callable must be used as " + "Callable[[arg, ...], result].") + args, result = parameters + if args is Ellipsis: + parameters = (Ellipsis, result) + else: + if not isinstance(args, list): + raise TypeError("Callable[args, result]: args must be a list." + " Got %.100r." % (args,)) + parameters = (tuple(args), result) + return self.__getitem_inner__(parameters) + + @_tp_cache + def __getitem_inner__(self, parameters): + args, result = parameters + msg = "Callable[args, result]: result must be a type." + result = _type_check(result, msg) + if args is Ellipsis: + return super().__getitem__((_TypingEllipsis, result)) + msg = "Callable[[arg, ...], result]: each arg must be a type." + args = tuple(_type_check(arg, msg) for arg in args) + parameters = args + (result,) + return super().__getitem__(parameters) + + +class Callable(extra=collections_abc.Callable, metaclass = CallableMeta): + """Callable type; Callable[[int], str] is a function of (int) -> str. + + The subscription syntax must always be used with exactly two + values: the argument list and the return type. The argument list + must be a list of types or ellipsis; the return type must be a single type. + + There is no syntax to indicate optional or keyword arguments, + such function types are rarely used as callback types. + """ + + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, Callable): + raise TypeError("Type Callable cannot be instantiated; " + "use a non-abstract subclass instead") + return _generic_new(cls.__next_in_mro__, cls, *args, **kwds) + + +class _ClassVar(_FinalTypingBase, _root=True): + """Special type construct to mark class variables. + + An annotation wrapped in ClassVar indicates that a given + attribute is intended to be used as a class variable and + should not be set on instances of that class. Usage:: + + class Starship: + stats: ClassVar[Dict[str, int]] = {} # class variable + damage: int = 10 # instance variable + + ClassVar accepts only types and cannot be further subscribed. + + Note that ClassVar is not a class itself, and should not + be used with isinstance() or issubclass(). 
+ """ + + __slots__ = ('__type__',) + + def __init__(self, tp=None, **kwds): + self.__type__ = tp + + def __getitem__(self, item): + cls = type(self) + if self.__type__ is None: + return cls(_type_check(item, + '{} accepts only single type.'.format(cls.__name__[1:])), + _root=True) + raise TypeError('{} cannot be further subscripted' + .format(cls.__name__[1:])) + + def _eval_type(self, globalns, localns): + new_tp = _eval_type(self.__type__, globalns, localns) + if new_tp == self.__type__: + return self + return type(self)(new_tp, _root=True) + + def __repr__(self): + r = super().__repr__() + if self.__type__ is not None: + r += '[{}]'.format(_type_repr(self.__type__)) + return r + + def __hash__(self): + return hash((type(self).__name__, self.__type__)) + + def __eq__(self, other): + if not isinstance(other, _ClassVar): + return NotImplemented + if self.__type__ is not None: + return self.__type__ == other.__type__ + return self is other + + +ClassVar = _ClassVar(_root=True) + + +def cast(typ, val): + """Cast a value to a type. + + This returns the value unchanged. To the type checker this + signals that the return value has the designated type, but at + runtime we intentionally don't check anything (we want this + to be as fast as possible). + """ + return val + + +def _get_defaults(func): + """Internal helper to extract the default arguments, by name.""" + try: + code = func.__code__ + except AttributeError: + # Some built-in functions don't have __code__, __defaults__, etc. + return {} + pos_count = code.co_argcount + arg_names = code.co_varnames + arg_names = arg_names[:pos_count] + defaults = func.__defaults__ or () + kwdefaults = func.__kwdefaults__ + res = dict(kwdefaults) if kwdefaults else {} + pos_offset = pos_count - len(defaults) + for name, value in zip(arg_names[pos_offset:], defaults): + assert name not in res + res[name] = value + return res + + +def get_type_hints(obj, globalns=None, localns=None): + """Return type hints for an object. + + This is often the same as obj.__annotations__, but it handles + forward references encoded as string literals, and if necessary + adds Optional[t] if a default value equal to None is set. + + The argument may be a module, class, method, or function. The annotations + are returned as a dictionary. For classes, annotations include also + inherited members. + + TypeError is raised if the argument is not of a type that can contain + annotations, and an empty dictionary is returned if no annotations are + present. + + BEWARE -- the behavior of globalns and localns is counterintuitive + (unless you are familiar with how eval() and exec() work). The + search order is locals first, then globals. + + - If no dict arguments are passed, an attempt is made to use the + globals from obj, and these are also used as the locals. If the + object does not appear to have globals, an exception is raised. + + - If one dict argument is passed, it is used for both globals and + locals. + + - If two dict arguments are passed, they specify globals and + locals, respectively. + """ + + if getattr(obj, '__no_type_check__', None): + return {} + if globalns is None: + globalns = getattr(obj, '__globals__', {}) + if localns is None: + localns = globalns + elif localns is None: + localns = globalns + # Classes require a special treatment. 
+ if isinstance(obj, type): + hints = {} + for base in reversed(obj.__mro__): + ann = base.__dict__.get('__annotations__', {}) + for name, value in ann.items(): + if value is None: + value = type(None) + if isinstance(value, str): + value = _ForwardRef(value) + value = _eval_type(value, globalns, localns) + hints[name] = value + return hints + hints = getattr(obj, '__annotations__', None) + if hints is None: + # Return empty annotations for something that _could_ have them. + if (isinstance(obj, types.FunctionType) or + isinstance(obj, types.BuiltinFunctionType) or + isinstance(obj, types.MethodType) or + isinstance(obj, types.ModuleType)): + return {} + else: + raise TypeError('{!r} is not a module, class, method, ' + 'or function.'.format(obj)) + defaults = _get_defaults(obj) + hints = dict(hints) + for name, value in hints.items(): + if value is None: + value = type(None) + if isinstance(value, str): + value = _ForwardRef(value) + value = _eval_type(value, globalns, localns) + if name in defaults and defaults[name] is None: + value = Optional[value] + hints[name] = value + return hints + + +def no_type_check(arg): + """Decorator to indicate that annotations are not type hints. + + The argument must be a class or function; if it is a class, it + applies recursively to all methods and classes defined in that class + (but not to methods defined in its superclasses or subclasses). + + This mutates the function(s) or class(es) in place. + """ + if isinstance(arg, type): + arg_attrs = arg.__dict__.copy() + for attr, val in arg.__dict__.items(): + if val in arg.__bases__: + arg_attrs.pop(attr) + for obj in arg_attrs.values(): + if isinstance(obj, types.FunctionType): + obj.__no_type_check__ = True + if isinstance(obj, type): + no_type_check(obj) + try: + arg.__no_type_check__ = True + except TypeError: # built-in classes + pass + return arg + + +def no_type_check_decorator(decorator): + """Decorator to give another decorator the @no_type_check effect. + + This wraps the decorator with something that wraps the decorated + function in @no_type_check. + """ + + @functools.wraps(decorator) + def wrapped_decorator(*args, **kwds): + func = decorator(*args, **kwds) + func = no_type_check(func) + return func + + return wrapped_decorator + + +def _overload_dummy(*args, **kwds): + """Helper for @overload to raise when called.""" + raise NotImplementedError( + "You should not call an overloaded function. " + "A series of @overload-decorated functions " + "outside a stub module should always be followed " + "by an implementation that is not @overload-ed.") + + +def overload(func): + """Decorator for overloaded functions/methods. + + In a stub file, place two or more stub definitions for the same + function in a row, each decorated with @overload. For example: + + @overload + def utf8(value: None) -> None: ... + @overload + def utf8(value: bytes) -> bytes: ... + @overload + def utf8(value: str) -> bytes: ... + + In a non-stub file (i.e. a regular .py file), do the same but + follow it with an implementation. The implementation should *not* + be decorated with @overload. For example: + + @overload + def utf8(value: None) -> None: ... + @overload + def utf8(value: bytes) -> bytes: ... + @overload + def utf8(value: str) -> bytes: ... + def utf8(value): + # implementation goes here + """ + return _overload_dummy + + +class _ProtocolMeta(GenericMeta): + """Internal metaclass for _Protocol. + + This exists so _Protocol classes can be generic without deriving + from Generic. 
+ """ + + def __instancecheck__(self, obj): + if _Protocol not in self.__bases__: + return super().__instancecheck__(obj) + raise TypeError("Protocols cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + if not self._is_protocol: + # No structural checks since this isn't a protocol. + return NotImplemented + + if self is _Protocol: + # Every class is a subclass of the empty protocol. + return True + + # Find all attributes defined in the protocol. + attrs = self._get_protocol_attrs() + + for attr in attrs: + if not any(attr in d.__dict__ for d in cls.__mro__): + return False + return True + + def _get_protocol_attrs(self): + # Get all Protocol base classes. + protocol_bases = [] + for c in self.__mro__: + if getattr(c, '_is_protocol', False) and c.__name__ != '_Protocol': + protocol_bases.append(c) + + # Get attributes included in protocol. + attrs = set() + for base in protocol_bases: + for attr in base.__dict__.keys(): + # Include attributes not defined in any non-protocol bases. + for c in self.__mro__: + if (c is not base and attr in c.__dict__ and + not getattr(c, '_is_protocol', False)): + break + else: + if (not attr.startswith('_abc_') and + attr != '__abstractmethods__' and + attr != '__annotations__' and + attr != '__weakref__' and + attr != '_is_protocol' and + attr != '__dict__' and + attr != '__args__' and + attr != '__slots__' and + attr != '_get_protocol_attrs' and + attr != '__next_in_mro__' and + attr != '__parameters__' and + attr != '__origin__' and + attr != '__orig_bases__' and + attr != '__extra__' and + attr != '__tree_hash__' and + attr != '__module__'): + attrs.add(attr) + + return attrs + + +class _Protocol(metaclass=_ProtocolMeta): + """Internal base class for protocol classes. + + This implements a simple-minded structural issubclass check + (similar but more general than the one-offs in collections.abc + such as Hashable). + """ + + __slots__ = () + + _is_protocol = True + + +# Various ABCs mimicking those in collections.abc. +# A few are simply re-exported for completeness. + +Hashable = collections_abc.Hashable # Not generic. 
+ + +if hasattr(collections_abc, 'Awaitable'): + class Awaitable(Generic[T_co], extra=collections_abc.Awaitable): + __slots__ = () + + __all__.append('Awaitable') + + +if hasattr(collections_abc, 'Coroutine'): + class Coroutine(Awaitable[V_co], Generic[T_co, T_contra, V_co], + extra=collections_abc.Coroutine): + __slots__ = () + + __all__.append('Coroutine') + + +if hasattr(collections_abc, 'AsyncIterable'): + + class AsyncIterable(Generic[T_co], extra=collections_abc.AsyncIterable): + __slots__ = () + + class AsyncIterator(AsyncIterable[T_co], + extra=collections_abc.AsyncIterator): + __slots__ = () + + __all__.append('AsyncIterable') + __all__.append('AsyncIterator') + + +class Iterable(Generic[T_co], extra=collections_abc.Iterable): + __slots__ = () + + +class Iterator(Iterable[T_co], extra=collections_abc.Iterator): + __slots__ = () + + +class SupportsInt(_Protocol): + __slots__ = () + + @abstractmethod + def __int__(self) -> int: + pass + + +class SupportsFloat(_Protocol): + __slots__ = () + + @abstractmethod + def __float__(self) -> float: + pass + + +class SupportsComplex(_Protocol): + __slots__ = () + + @abstractmethod + def __complex__(self) -> complex: + pass + + +class SupportsBytes(_Protocol): + __slots__ = () + + @abstractmethod + def __bytes__(self) -> bytes: + pass + + +class SupportsAbs(_Protocol[T_co]): + __slots__ = () + + @abstractmethod + def __abs__(self) -> T_co: + pass + + +class SupportsRound(_Protocol[T_co]): + __slots__ = () + + @abstractmethod + def __round__(self, ndigits: int = 0) -> T_co: + pass + + +if hasattr(collections_abc, 'Reversible'): + class Reversible(Iterable[T_co], extra=collections_abc.Reversible): + __slots__ = () +else: + class Reversible(_Protocol[T_co]): + __slots__ = () + + @abstractmethod + def __reversed__(self) -> 'Iterator[T_co]': + pass + + +Sized = collections_abc.Sized # Not generic. + + +class Container(Generic[T_co], extra=collections_abc.Container): + __slots__ = () + + +if hasattr(collections_abc, 'Collection'): + class Collection(Sized, Iterable[T_co], Container[T_co], + extra=collections_abc.Collection): + __slots__ = () + + __all__.append('Collection') + + +# Callable was defined earlier. + +if hasattr(collections_abc, 'Collection'): + class AbstractSet(Collection[T_co], + extra=collections_abc.Set): + __slots__ = () +else: + class AbstractSet(Sized, Iterable[T_co], Container[T_co], + extra=collections_abc.Set): + __slots__ = () + + +class MutableSet(AbstractSet[T], extra=collections_abc.MutableSet): + __slots__ = () + + +# NOTE: It is only covariant in the value type. 
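+# (Keys occur in parameter position, e.g. __getitem__(self, key: KT), so KT
+# cannot safely be covariant; only the value type VT_co varies.)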
+if hasattr(collections_abc, 'Collection'): + class Mapping(Collection[KT], Generic[KT, VT_co], + extra=collections_abc.Mapping): + __slots__ = () +else: + class Mapping(Sized, Iterable[KT], Container[KT], Generic[KT, VT_co], + extra=collections_abc.Mapping): + __slots__ = () + + +class MutableMapping(Mapping[KT, VT], extra=collections_abc.MutableMapping): + __slots__ = () + +if hasattr(collections_abc, 'Reversible'): + if hasattr(collections_abc, 'Collection'): + class Sequence(Reversible[T_co], Collection[T_co], + extra=collections_abc.Sequence): + __slots__ = () + else: + class Sequence(Sized, Reversible[T_co], Container[T_co], + extra=collections_abc.Sequence): + __slots__ = () +else: + class Sequence(Sized, Iterable[T_co], Container[T_co], + extra=collections_abc.Sequence): + __slots__ = () + + +class MutableSequence(Sequence[T], extra=collections_abc.MutableSequence): + __slots__ = () + + +class ByteString(Sequence[int], extra=collections_abc.ByteString): + __slots__ = () + + +class List(list, MutableSequence[T], extra=list): + + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, List): + raise TypeError("Type List cannot be instantiated; " + "use list() instead") + return _generic_new(list, cls, *args, **kwds) + + +class Set(set, MutableSet[T], extra=set): + + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, Set): + raise TypeError("Type Set cannot be instantiated; " + "use set() instead") + return _generic_new(set, cls, *args, **kwds) + + +class FrozenSet(frozenset, AbstractSet[T_co], extra=frozenset): + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, FrozenSet): + raise TypeError("Type FrozenSet cannot be instantiated; " + "use frozenset() instead") + return _generic_new(frozenset, cls, *args, **kwds) + + +class MappingView(Sized, Iterable[T_co], extra=collections_abc.MappingView): + __slots__ = () + + +class KeysView(MappingView[KT], AbstractSet[KT], + extra=collections_abc.KeysView): + __slots__ = () + + +class ItemsView(MappingView[Tuple[KT, VT_co]], + AbstractSet[Tuple[KT, VT_co]], + Generic[KT, VT_co], + extra=collections_abc.ItemsView): + __slots__ = () + + +class ValuesView(MappingView[VT_co], extra=collections_abc.ValuesView): + __slots__ = () + + +if hasattr(contextlib, 'AbstractContextManager'): + class ContextManager(Generic[T_co], extra=contextlib.AbstractContextManager): + __slots__ = () + __all__.append('ContextManager') + + +class Dict(dict, MutableMapping[KT, VT], extra=dict): + + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, Dict): + raise TypeError("Type Dict cannot be instantiated; " + "use dict() instead") + return _generic_new(dict, cls, *args, **kwds) + +class DefaultDict(collections.defaultdict, MutableMapping[KT, VT], + extra=collections.defaultdict): + + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, DefaultDict): + raise TypeError("Type DefaultDict cannot be instantiated; " + "use collections.defaultdict() instead") + return _generic_new(collections.defaultdict, cls, *args, **kwds) + +# Determine what base class to use for Generator. +if hasattr(collections_abc, 'Generator'): + # Sufficiently recent versions of 3.5 have a Generator ABC. + _G_base = collections_abc.Generator +else: + # Fall back on the exact type. 
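+    # (Without the ABC, only concrete generator objects match at runtime;
+    # generator-like classes that merely implement send/throw/close won't.)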
+ _G_base = types.GeneratorType + + +class Generator(Iterator[T_co], Generic[T_co, T_contra, V_co], + extra=_G_base): + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, Generator): + raise TypeError("Type Generator cannot be instantiated; " + "create a subclass instead") + return _generic_new(_G_base, cls, *args, **kwds) + + +# Internal type variable used for Type[]. +CT_co = TypeVar('CT_co', covariant=True, bound=type) + + +# This is not a real generic class. Don't use outside annotations. +class Type(Generic[CT_co], extra=type): + """A special construct usable to annotate class objects. + + For example, suppose we have the following classes:: + + class User: ... # Abstract base for User classes + class BasicUser(User): ... + class ProUser(User): ... + class TeamUser(User): ... + + And a function that takes a class argument that's a subclass of + User and returns an instance of the corresponding class:: + + U = TypeVar('U', bound=User) + def new_user(user_class: Type[U]) -> U: + user = user_class() + # (Here we could write the user object to a database) + return user + + joe = new_user(BasicUser) + + At this point the type checker knows that joe has type BasicUser. + """ + + __slots__ = () + + +def _make_nmtuple(name, types): + msg = "NamedTuple('Name', [(f0, t0), (f1, t1), ...]); each t must be a type" + types = [(n, _type_check(t, msg)) for n, t in types] + nm_tpl = collections.namedtuple(name, [n for n, t in types]) + nm_tpl._field_types = dict(types) + try: + nm_tpl.__module__ = sys._getframe(2).f_globals.get('__name__', '__main__') + except (AttributeError, ValueError): + pass + return nm_tpl + + +_PY36 = sys.version_info[:2] >= (3, 6) + + +class NamedTupleMeta(type): + + def __new__(cls, typename, bases, ns): + if ns.get('_root', False): + return super().__new__(cls, typename, bases, ns) + if not _PY36: + raise TypeError("Class syntax for NamedTuple is only supported" + " in Python 3.6+") + types = ns.get('__annotations__', {}) + return _make_nmtuple(typename, types.items()) + +class NamedTuple(metaclass=NamedTupleMeta): + """Typed version of namedtuple. + + Usage in Python versions >= 3.6:: + + class Employee(NamedTuple): + name: str + id: int + + This is equivalent to:: + + Employee = collections.namedtuple('Employee', ['name', 'id']) + + The resulting class has one extra attribute: _field_types, + giving a dict mapping field names to types. (The field names + are in the _fields attribute, which is part of the namedtuple + API.) Alternative equivalent keyword syntax is also accepted:: + + Employee = NamedTuple('Employee', name=str, id=int) + + In Python versions <= 3.5 use:: + + Employee = NamedTuple('Employee', [('name', str), ('id', int)]) + """ + _root = True + + def __new__(self, typename, fields=None, **kwargs): + if kwargs and not _PY36: + raise TypeError("Keyword syntax for NamedTuple is only supported" + " in Python 3.6+") + if fields is None: + fields = kwargs.items() + elif kwargs: + raise TypeError("Either list of fields or keywords" + " can be provided to NamedTuple, not both") + return _make_nmtuple(typename, fields) + + +def NewType(name, tp): + """NewType creates simple unique types with almost zero + runtime overhead. NewType(name, tp) is considered a subtype of tp + by static type checkers. At runtime, NewType(name, tp) returns + a dummy function that simply returns its argument. Usage:: + + UserId = NewType('UserId', int) + + def name_by_id(user_id: UserId) -> str: + ... 
+ + UserId('user') # Fails type check + + name_by_id(42) # Fails type check + name_by_id(UserId(42)) # OK + + num = UserId(5) + 1 # type: int + """ + + def new_type(x): + return x + + new_type.__name__ = name + new_type.__supertype__ = tp + return new_type + + +# Python-version-specific alias (Python 2: unicode; Python 3: str) +Text = str + + +# Constant that's True when type checking, but False here. +TYPE_CHECKING = False + + +class IO(Generic[AnyStr]): + """Generic base class for TextIO and BinaryIO. + + This is an abstract, generic version of the return of open(). + + NOTE: This does not distinguish between the different possible + classes (text vs. binary, read vs. write vs. read/write, + append-only, unbuffered). The TextIO and BinaryIO subclasses + below capture the distinctions between text vs. binary, which is + pervasive in the interface; however we currently do not offer a + way to track the other distinctions in the type system. + """ + + __slots__ = () + + @abstractproperty + def mode(self) -> str: + pass + + @abstractproperty + def name(self) -> str: + pass + + @abstractmethod + def close(self) -> None: + pass + + @abstractmethod + def closed(self) -> bool: + pass + + @abstractmethod + def fileno(self) -> int: + pass + + @abstractmethod + def flush(self) -> None: + pass + + @abstractmethod + def isatty(self) -> bool: + pass + + @abstractmethod + def read(self, n: int = -1) -> AnyStr: + pass + + @abstractmethod + def readable(self) -> bool: + pass + + @abstractmethod + def readline(self, limit: int = -1) -> AnyStr: + pass + + @abstractmethod + def readlines(self, hint: int = -1) -> List[AnyStr]: + pass + + @abstractmethod + def seek(self, offset: int, whence: int = 0) -> int: + pass + + @abstractmethod + def seekable(self) -> bool: + pass + + @abstractmethod + def tell(self) -> int: + pass + + @abstractmethod + def truncate(self, size: int = None) -> int: + pass + + @abstractmethod + def writable(self) -> bool: + pass + + @abstractmethod + def write(self, s: AnyStr) -> int: + pass + + @abstractmethod + def writelines(self, lines: List[AnyStr]) -> None: + pass + + @abstractmethod + def __enter__(self) -> 'IO[AnyStr]': + pass + + @abstractmethod + def __exit__(self, type, value, traceback) -> None: + pass + + +class BinaryIO(IO[bytes]): + """Typed version of the return of open() in binary mode.""" + + __slots__ = () + + @abstractmethod + def write(self, s: Union[bytes, bytearray]) -> int: + pass + + @abstractmethod + def __enter__(self) -> 'BinaryIO': + pass + + +class TextIO(IO[str]): + """Typed version of the return of open() in text mode.""" + + __slots__ = () + + @abstractproperty + def buffer(self) -> BinaryIO: + pass + + @abstractproperty + def encoding(self) -> str: + pass + + @abstractproperty + def errors(self) -> Optional[str]: + pass + + @abstractproperty + def line_buffering(self) -> bool: + pass + + @abstractproperty + def newlines(self) -> Any: + pass + + @abstractmethod + def __enter__(self) -> 'TextIO': + pass + + +class io: + """Wrapper namespace for IO generic classes.""" + + __all__ = ['IO', 'TextIO', 'BinaryIO'] + IO = IO + TextIO = TextIO + BinaryIO = BinaryIO + +io.__name__ = __name__ + '.io' +sys.modules[io.__name__] = io + + +Pattern = _TypeAlias('Pattern', AnyStr, type(stdlib_re.compile('')), + lambda p: p.pattern) +Match = _TypeAlias('Match', AnyStr, type(stdlib_re.match('', '')), + lambda m: m.re.pattern) + + +class re: + """Wrapper namespace for re type aliases.""" + + __all__ = ['Pattern', 'Match'] + Pattern = Pattern + Match = Match + +re.__name__ = 
__name__ + '.re' +sys.modules[re.__name__] = re diff --git a/typing_extensions/test_data/python-3.6.0/_collections_abc.py b/typing_extensions/test_data/python-3.6.0/_collections_abc.py new file mode 100644 index 00000000..b172f3f3 --- /dev/null +++ b/typing_extensions/test_data/python-3.6.0/_collections_abc.py @@ -0,0 +1,1007 @@ +# Copyright 2007 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Abstract Base Classes (ABCs) for collections, according to PEP 3119. + +Unit tests are in test_collections. +""" + +from abc import ABCMeta, abstractmethod +import sys + +__all__ = ["Awaitable", "Coroutine", + "AsyncIterable", "AsyncIterator", "AsyncGenerator", + "Hashable", "Iterable", "Iterator", "Generator", "Reversible", + "Sized", "Container", "Callable", "Collection", + "Set", "MutableSet", + "Mapping", "MutableMapping", + "MappingView", "KeysView", "ItemsView", "ValuesView", + "Sequence", "MutableSequence", + "ByteString", + ] + +# This module has been renamed from collections.abc to _collections_abc to +# speed up interpreter startup. Some of the types such as MutableMapping are +# required early but collections module imports a lot of other modules. +# See issue #19218 +__name__ = "collections.abc" + +# Private list of types that we want to register with the various ABCs +# so that they will pass tests like: +# it = iter(somebytearray) +# assert isinstance(it, Iterable) +# Note: in other implementations, these types might not be distinct +# and they may have their own implementation specific types that +# are not included on this list. +bytes_iterator = type(iter(b'')) +bytearray_iterator = type(iter(bytearray())) +#callable_iterator = ??? +dict_keyiterator = type(iter({}.keys())) +dict_valueiterator = type(iter({}.values())) +dict_itemiterator = type(iter({}.items())) +list_iterator = type(iter([])) +list_reverseiterator = type(iter(reversed([]))) +range_iterator = type(iter(range(0))) +longrange_iterator = type(iter(range(1 << 1000))) +set_iterator = type(iter(set())) +str_iterator = type(iter("")) +tuple_iterator = type(iter(())) +zip_iterator = type(iter(zip())) +## views ## +dict_keys = type({}.keys()) +dict_values = type({}.values()) +dict_items = type({}.items()) +## misc ## +mappingproxy = type(type.__dict__) +generator = type((lambda: (yield))()) +## coroutine ## +async def _coro(): pass +_coro = _coro() +coroutine = type(_coro) +_coro.close() # Prevent ResourceWarning +del _coro +## asynchronous generator ## +async def _ag(): yield +_ag = _ag() +async_generator = type(_ag) +del _ag + + +### ONE-TRICK PONIES ### + +def _check_methods(C, *methods): + mro = C.__mro__ + for method in methods: + for B in mro: + if method in B.__dict__: + if B.__dict__[method] is None: + return NotImplemented + break + else: + return NotImplemented + return True + +class Hashable(metaclass=ABCMeta): + + __slots__ = () + + @abstractmethod + def __hash__(self): + return 0 + + @classmethod + def __subclasshook__(cls, C): + if cls is Hashable: + return _check_methods(C, "__hash__") + return NotImplemented + + +class Awaitable(metaclass=ABCMeta): + + __slots__ = () + + @abstractmethod + def __await__(self): + yield + + @classmethod + def __subclasshook__(cls, C): + if cls is Awaitable: + return _check_methods(C, "__await__") + return NotImplemented + + +class Coroutine(Awaitable): + + __slots__ = () + + @abstractmethod + def send(self, value): + """Send a value into the coroutine. + Return next yielded value or raise StopIteration. 
+ """ + raise StopIteration + + @abstractmethod + def throw(self, typ, val=None, tb=None): + """Raise an exception in the coroutine. + Return next yielded value or raise StopIteration. + """ + if val is None: + if tb is None: + raise typ + val = typ() + if tb is not None: + val = val.with_traceback(tb) + raise val + + def close(self): + """Raise GeneratorExit inside coroutine. + """ + try: + self.throw(GeneratorExit) + except (GeneratorExit, StopIteration): + pass + else: + raise RuntimeError("coroutine ignored GeneratorExit") + + @classmethod + def __subclasshook__(cls, C): + if cls is Coroutine: + return _check_methods(C, '__await__', 'send', 'throw', 'close') + return NotImplemented + + +Coroutine.register(coroutine) + + +class AsyncIterable(metaclass=ABCMeta): + + __slots__ = () + + @abstractmethod + def __aiter__(self): + return AsyncIterator() + + @classmethod + def __subclasshook__(cls, C): + if cls is AsyncIterable: + return _check_methods(C, "__aiter__") + return NotImplemented + + +class AsyncIterator(AsyncIterable): + + __slots__ = () + + @abstractmethod + async def __anext__(self): + """Return the next item or raise StopAsyncIteration when exhausted.""" + raise StopAsyncIteration + + def __aiter__(self): + return self + + @classmethod + def __subclasshook__(cls, C): + if cls is AsyncIterator: + return _check_methods(C, "__anext__", "__aiter__") + return NotImplemented + + +class AsyncGenerator(AsyncIterator): + + __slots__ = () + + async def __anext__(self): + """Return the next item from the asynchronous generator. + When exhausted, raise StopAsyncIteration. + """ + return await self.asend(None) + + @abstractmethod + async def asend(self, value): + """Send a value into the asynchronous generator. + Return next yielded value or raise StopAsyncIteration. + """ + raise StopAsyncIteration + + @abstractmethod + async def athrow(self, typ, val=None, tb=None): + """Raise an exception in the asynchronous generator. + Return next yielded value or raise StopAsyncIteration. + """ + if val is None: + if tb is None: + raise typ + val = typ() + if tb is not None: + val = val.with_traceback(tb) + raise val + + async def aclose(self): + """Raise GeneratorExit inside coroutine. + """ + try: + await self.athrow(GeneratorExit) + except (GeneratorExit, StopAsyncIteration): + pass + else: + raise RuntimeError("asynchronous generator ignored GeneratorExit") + + @classmethod + def __subclasshook__(cls, C): + if cls is AsyncGenerator: + return _check_methods(C, '__aiter__', '__anext__', + 'asend', 'athrow', 'aclose') + return NotImplemented + + +AsyncGenerator.register(async_generator) + + +class Iterable(metaclass=ABCMeta): + + __slots__ = () + + @abstractmethod + def __iter__(self): + while False: + yield None + + @classmethod + def __subclasshook__(cls, C): + if cls is Iterable: + return _check_methods(C, "__iter__") + return NotImplemented + + +class Iterator(Iterable): + + __slots__ = () + + @abstractmethod + def __next__(self): + 'Return the next item from the iterator. 
When exhausted, raise StopIteration' + raise StopIteration + + def __iter__(self): + return self + + @classmethod + def __subclasshook__(cls, C): + if cls is Iterator: + return _check_methods(C, '__iter__', '__next__') + return NotImplemented + +Iterator.register(bytes_iterator) +Iterator.register(bytearray_iterator) +#Iterator.register(callable_iterator) +Iterator.register(dict_keyiterator) +Iterator.register(dict_valueiterator) +Iterator.register(dict_itemiterator) +Iterator.register(list_iterator) +Iterator.register(list_reverseiterator) +Iterator.register(range_iterator) +Iterator.register(longrange_iterator) +Iterator.register(set_iterator) +Iterator.register(str_iterator) +Iterator.register(tuple_iterator) +Iterator.register(zip_iterator) + + +class Reversible(Iterable): + + __slots__ = () + + @abstractmethod + def __reversed__(self): + while False: + yield None + + @classmethod + def __subclasshook__(cls, C): + if cls is Reversible: + return _check_methods(C, "__reversed__", "__iter__") + return NotImplemented + + +class Generator(Iterator): + + __slots__ = () + + def __next__(self): + """Return the next item from the generator. + When exhausted, raise StopIteration. + """ + return self.send(None) + + @abstractmethod + def send(self, value): + """Send a value into the generator. + Return next yielded value or raise StopIteration. + """ + raise StopIteration + + @abstractmethod + def throw(self, typ, val=None, tb=None): + """Raise an exception in the generator. + Return next yielded value or raise StopIteration. + """ + if val is None: + if tb is None: + raise typ + val = typ() + if tb is not None: + val = val.with_traceback(tb) + raise val + + def close(self): + """Raise GeneratorExit inside generator. + """ + try: + self.throw(GeneratorExit) + except (GeneratorExit, StopIteration): + pass + else: + raise RuntimeError("generator ignored GeneratorExit") + + @classmethod + def __subclasshook__(cls, C): + if cls is Generator: + return _check_methods(C, '__iter__', '__next__', + 'send', 'throw', 'close') + return NotImplemented + +Generator.register(generator) + + +class Sized(metaclass=ABCMeta): + + __slots__ = () + + @abstractmethod + def __len__(self): + return 0 + + @classmethod + def __subclasshook__(cls, C): + if cls is Sized: + return _check_methods(C, "__len__") + return NotImplemented + + +class Container(metaclass=ABCMeta): + + __slots__ = () + + @abstractmethod + def __contains__(self, x): + return False + + @classmethod + def __subclasshook__(cls, C): + if cls is Container: + return _check_methods(C, "__contains__") + return NotImplemented + +class Collection(Sized, Iterable, Container): + + __slots__ = () + + @classmethod + def __subclasshook__(cls, C): + if cls is Collection: + return _check_methods(C, "__len__", "__iter__", "__contains__") + return NotImplemented + +class Callable(metaclass=ABCMeta): + + __slots__ = () + + @abstractmethod + def __call__(self, *args, **kwds): + return False + + @classmethod + def __subclasshook__(cls, C): + if cls is Callable: + return _check_methods(C, "__call__") + return NotImplemented + + +### SETS ### + + +class Set(Collection): + + """A set is a finite, iterable container. + + This class provides concrete generic implementations of all + methods except for __contains__, __iter__ and __len__. + + To override the comparisons (presumably for speed, as the + semantics are fixed), redefine __le__ and __ge__, + then the other operations will automatically follow suit. 
+ """ + + __slots__ = () + + def __le__(self, other): + if not isinstance(other, Set): + return NotImplemented + if len(self) > len(other): + return False + for elem in self: + if elem not in other: + return False + return True + + def __lt__(self, other): + if not isinstance(other, Set): + return NotImplemented + return len(self) < len(other) and self.__le__(other) + + def __gt__(self, other): + if not isinstance(other, Set): + return NotImplemented + return len(self) > len(other) and self.__ge__(other) + + def __ge__(self, other): + if not isinstance(other, Set): + return NotImplemented + if len(self) < len(other): + return False + for elem in other: + if elem not in self: + return False + return True + + def __eq__(self, other): + if not isinstance(other, Set): + return NotImplemented + return len(self) == len(other) and self.__le__(other) + + @classmethod + def _from_iterable(cls, it): + '''Construct an instance of the class from any iterable input. + + Must override this method if the class constructor signature + does not accept an iterable for an input. + ''' + return cls(it) + + def __and__(self, other): + if not isinstance(other, Iterable): + return NotImplemented + return self._from_iterable(value for value in other if value in self) + + __rand__ = __and__ + + def isdisjoint(self, other): + 'Return True if two sets have a null intersection.' + for value in other: + if value in self: + return False + return True + + def __or__(self, other): + if not isinstance(other, Iterable): + return NotImplemented + chain = (e for s in (self, other) for e in s) + return self._from_iterable(chain) + + __ror__ = __or__ + + def __sub__(self, other): + if not isinstance(other, Set): + if not isinstance(other, Iterable): + return NotImplemented + other = self._from_iterable(other) + return self._from_iterable(value for value in self + if value not in other) + + def __rsub__(self, other): + if not isinstance(other, Set): + if not isinstance(other, Iterable): + return NotImplemented + other = self._from_iterable(other) + return self._from_iterable(value for value in other + if value not in self) + + def __xor__(self, other): + if not isinstance(other, Set): + if not isinstance(other, Iterable): + return NotImplemented + other = self._from_iterable(other) + return (self - other) | (other - self) + + __rxor__ = __xor__ + + def _hash(self): + """Compute the hash value of a set. + + Note that we don't define __hash__: not all sets are hashable. + But if you define a hashable set type, its __hash__ should + call this function. + + This must be compatible __eq__. + + All sets ought to compare equal if they contain the same + elements, regardless of how they are implemented, and + regardless of the order of the elements; so there's not much + freedom for __eq__ or __hash__. We match the algorithm used + by the built-in frozenset type. + """ + MAX = sys.maxsize + MASK = 2 * MAX + 1 + n = len(self) + h = 1927868237 * (n + 1) + h &= MASK + for x in self: + hx = hash(x) + h ^= (hx ^ (hx << 16) ^ 89869747) * 3644798167 + h &= MASK + h = h * 69069 + 907133923 + h &= MASK + if h > MAX: + h -= MASK + 1 + if h == -1: + h = 590923713 + return h + +Set.register(frozenset) + + +class MutableSet(Set): + """A mutable set is a finite, iterable container. + + This class provides concrete generic implementations of all + methods except for __contains__, __iter__, __len__, + add(), and discard(). 
+ + To override the comparisons (presumably for speed, as the + semantics are fixed), all you have to do is redefine __le__ and + then the other operations will automatically follow suit. + """ + + __slots__ = () + + @abstractmethod + def add(self, value): + """Add an element.""" + raise NotImplementedError + + @abstractmethod + def discard(self, value): + """Remove an element. Do not raise an exception if absent.""" + raise NotImplementedError + + def remove(self, value): + """Remove an element. If not a member, raise a KeyError.""" + if value not in self: + raise KeyError(value) + self.discard(value) + + def pop(self): + """Return the popped value. Raise KeyError if empty.""" + it = iter(self) + try: + value = next(it) + except StopIteration: + raise KeyError + self.discard(value) + return value + + def clear(self): + """This is slow (creates N new iterators!) but effective.""" + try: + while True: + self.pop() + except KeyError: + pass + + def __ior__(self, it): + for value in it: + self.add(value) + return self + + def __iand__(self, it): + for value in (self - it): + self.discard(value) + return self + + def __ixor__(self, it): + if it is self: + self.clear() + else: + if not isinstance(it, Set): + it = self._from_iterable(it) + for value in it: + if value in self: + self.discard(value) + else: + self.add(value) + return self + + def __isub__(self, it): + if it is self: + self.clear() + else: + for value in it: + self.discard(value) + return self + +MutableSet.register(set) + + +### MAPPINGS ### + + +class Mapping(Collection): + + __slots__ = () + + """A Mapping is a generic container for associating key/value + pairs. + + This class provides concrete generic implementations of all + methods except for __getitem__, __iter__, and __len__. + + """ + + @abstractmethod + def __getitem__(self, key): + raise KeyError + + def get(self, key, default=None): + 'D.get(k[,d]) -> D[k] if k in D, else d. d defaults to None.' 
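+        # EAFP: attempt the lookup and fall back to the default on KeyError,
+        # so subclasses only need to supply __getitem__.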
+ try: + return self[key] + except KeyError: + return default + + def __contains__(self, key): + try: + self[key] + except KeyError: + return False + else: + return True + + def keys(self): + "D.keys() -> a set-like object providing a view on D's keys" + return KeysView(self) + + def items(self): + "D.items() -> a set-like object providing a view on D's items" + return ItemsView(self) + + def values(self): + "D.values() -> an object providing a view on D's values" + return ValuesView(self) + + def __eq__(self, other): + if not isinstance(other, Mapping): + return NotImplemented + return dict(self.items()) == dict(other.items()) + + __reversed__ = None + +Mapping.register(mappingproxy) + + +class MappingView(Sized): + + __slots__ = '_mapping', + + def __init__(self, mapping): + self._mapping = mapping + + def __len__(self): + return len(self._mapping) + + def __repr__(self): + return '{0.__class__.__name__}({0._mapping!r})'.format(self) + + +class KeysView(MappingView, Set): + + __slots__ = () + + @classmethod + def _from_iterable(self, it): + return set(it) + + def __contains__(self, key): + return key in self._mapping + + def __iter__(self): + yield from self._mapping + +KeysView.register(dict_keys) + + +class ItemsView(MappingView, Set): + + __slots__ = () + + @classmethod + def _from_iterable(self, it): + return set(it) + + def __contains__(self, item): + key, value = item + try: + v = self._mapping[key] + except KeyError: + return False + else: + return v is value or v == value + + def __iter__(self): + for key in self._mapping: + yield (key, self._mapping[key]) + +ItemsView.register(dict_items) + + +class ValuesView(MappingView): + + __slots__ = () + + def __contains__(self, value): + for key in self._mapping: + v = self._mapping[key] + if v is value or v == value: + return True + return False + + def __iter__(self): + for key in self._mapping: + yield self._mapping[key] + +ValuesView.register(dict_values) + + +class MutableMapping(Mapping): + + __slots__ = () + + """A MutableMapping is a generic container for associating + key/value pairs. + + This class provides concrete generic implementations of all + methods except for __getitem__, __setitem__, __delitem__, + __iter__, and __len__. + + """ + + @abstractmethod + def __setitem__(self, key, value): + raise KeyError + + @abstractmethod + def __delitem__(self, key): + raise KeyError + + __marker = object() + + def pop(self, key, default=__marker): + '''D.pop(k[,d]) -> v, remove specified key and return the corresponding value. + If key is not found, d is returned if given, otherwise KeyError is raised. + ''' + try: + value = self[key] + except KeyError: + if default is self.__marker: + raise + return default + else: + del self[key] + return value + + def popitem(self): + '''D.popitem() -> (k, v), remove and return some (key, value) pair + as a 2-tuple; but raise KeyError if D is empty. + ''' + try: + key = next(iter(self)) + except StopIteration: + raise KeyError + value = self[key] + del self[key] + return key, value + + def clear(self): + 'D.clear() -> None. Remove all items from D.' + try: + while True: + self.popitem() + except KeyError: + pass + + def update(*args, **kwds): + ''' D.update([E, ]**F) -> None. Update D from mapping/iterable E and F. 
+ If E present and has a .keys() method, does: for k in E: D[k] = E[k] + If E present and lacks .keys() method, does: for (k, v) in E: D[k] = v + In either case, this is followed by: for k, v in F.items(): D[k] = v + ''' + if not args: + raise TypeError("descriptor 'update' of 'MutableMapping' object " + "needs an argument") + self, *args = args + if len(args) > 1: + raise TypeError('update expected at most 1 arguments, got %d' % + len(args)) + if args: + other = args[0] + if isinstance(other, Mapping): + for key in other: + self[key] = other[key] + elif hasattr(other, "keys"): + for key in other.keys(): + self[key] = other[key] + else: + for key, value in other: + self[key] = value + for key, value in kwds.items(): + self[key] = value + + def setdefault(self, key, default=None): + 'D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D' + try: + return self[key] + except KeyError: + self[key] = default + return default + +MutableMapping.register(dict) + + +### SEQUENCES ### + + +class Sequence(Reversible, Collection): + + """All the operations on a read-only sequence. + + Concrete subclasses must override __new__ or __init__, + __getitem__, and __len__. + """ + + __slots__ = () + + @abstractmethod + def __getitem__(self, index): + raise IndexError + + def __iter__(self): + i = 0 + try: + while True: + v = self[i] + yield v + i += 1 + except IndexError: + return + + def __contains__(self, value): + for v in self: + if v is value or v == value: + return True + return False + + def __reversed__(self): + for i in reversed(range(len(self))): + yield self[i] + + def index(self, value, start=0, stop=None): + '''S.index(value, [start, [stop]]) -> integer -- return first index of value. + Raises ValueError if the value is not present. + ''' + if start is not None and start < 0: + start = max(len(self) + start, 0) + if stop is not None and stop < 0: + stop += len(self) + + i = start + while stop is None or i < stop: + try: + if self[i] == value: + return i + except IndexError: + break + i += 1 + raise ValueError + + def count(self, value): + 'S.count(value) -> integer -- return number of occurrences of value' + return sum(1 for v in self if v == value) + +Sequence.register(tuple) +Sequence.register(str) +Sequence.register(range) +Sequence.register(memoryview) + + +class ByteString(Sequence): + + """This unifies bytes and bytearray. + + XXX Should add all their methods. + """ + + __slots__ = () + +ByteString.register(bytes) +ByteString.register(bytearray) + + +class MutableSequence(Sequence): + + __slots__ = () + + """All the operations on a read-write sequence. + + Concrete subclasses must provide __new__ or __init__, + __getitem__, __setitem__, __delitem__, __len__, and insert(). 
+ + """ + + @abstractmethod + def __setitem__(self, index, value): + raise IndexError + + @abstractmethod + def __delitem__(self, index): + raise IndexError + + @abstractmethod + def insert(self, index, value): + 'S.insert(index, value) -- insert value before index' + raise IndexError + + def append(self, value): + 'S.append(value) -- append value to the end of the sequence' + self.insert(len(self), value) + + def clear(self): + 'S.clear() -> None -- remove all items from S' + try: + while True: + self.pop() + except IndexError: + pass + + def reverse(self): + 'S.reverse() -- reverse *IN PLACE*' + n = len(self) + for i in range(n//2): + self[i], self[n-i-1] = self[n-i-1], self[i] + + def extend(self, values): + 'S.extend(iterable) -- extend sequence by appending elements from the iterable' + for v in values: + self.append(v) + + def pop(self, index=-1): + '''S.pop([index]) -> item -- remove and return item at index (default last). + Raise IndexError if list is empty or index is out of range. + ''' + v = self[index] + del self[index] + return v + + def remove(self, value): + '''S.remove(value) -- remove first occurrence of value. + Raise ValueError if the value is not present. + ''' + del self[self.index(value)] + + def __iadd__(self, values): + self.extend(values) + return self + +MutableSequence.register(list) +MutableSequence.register(bytearray) # Multiply inheriting, see ByteString diff --git a/typing_extensions/test_data/python-3.6.0/abc.py b/typing_extensions/test_data/python-3.6.0/abc.py new file mode 100644 index 00000000..1cbf96a6 --- /dev/null +++ b/typing_extensions/test_data/python-3.6.0/abc.py @@ -0,0 +1,248 @@ +# Copyright 2007 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Abstract Base Classes (ABCs) according to PEP 3119.""" + +from _weakrefset import WeakSet + + +def abstractmethod(funcobj): + """A decorator indicating abstract methods. + + Requires that the metaclass is ABCMeta or derived from it. A + class that has a metaclass derived from ABCMeta cannot be + instantiated unless all of its abstract methods are overridden. + The abstract methods can be called using any of the normal + 'super' call mechanisms. + + Usage: + + class C(metaclass=ABCMeta): + @abstractmethod + def my_abstract_method(self, ...): + ... + """ + funcobj.__isabstractmethod__ = True + return funcobj + + +class abstractclassmethod(classmethod): + """ + A decorator indicating abstract classmethods. + + Similar to abstractmethod. + + Usage: + + class C(metaclass=ABCMeta): + @abstractclassmethod + def my_abstract_classmethod(cls, ...): + ... + + 'abstractclassmethod' is deprecated. Use 'classmethod' with + 'abstractmethod' instead. + """ + + __isabstractmethod__ = True + + def __init__(self, callable): + callable.__isabstractmethod__ = True + super().__init__(callable) + + +class abstractstaticmethod(staticmethod): + """ + A decorator indicating abstract staticmethods. + + Similar to abstractmethod. + + Usage: + + class C(metaclass=ABCMeta): + @abstractstaticmethod + def my_abstract_staticmethod(...): + ... + + 'abstractstaticmethod' is deprecated. Use 'staticmethod' with + 'abstractmethod' instead. + """ + + __isabstractmethod__ = True + + def __init__(self, callable): + callable.__isabstractmethod__ = True + super().__init__(callable) + + +class abstractproperty(property): + """ + A decorator indicating abstract properties. + + Requires that the metaclass is ABCMeta or derived from it. 
A + class that has a metaclass derived from ABCMeta cannot be + instantiated unless all of its abstract properties are overridden. + The abstract properties can be called using any of the normal + 'super' call mechanisms. + + Usage: + + class C(metaclass=ABCMeta): + @abstractproperty + def my_abstract_property(self): + ... + + This defines a read-only property; you can also define a read-write + abstract property using the 'long' form of property declaration: + + class C(metaclass=ABCMeta): + def getx(self): ... + def setx(self, value): ... + x = abstractproperty(getx, setx) + + 'abstractproperty' is deprecated. Use 'property' with 'abstractmethod' + instead. + """ + + __isabstractmethod__ = True + + +class ABCMeta(type): + + """Metaclass for defining Abstract Base Classes (ABCs). + + Use this metaclass to create an ABC. An ABC can be subclassed + directly, and then acts as a mix-in class. You can also register + unrelated concrete classes (even built-in classes) and unrelated + ABCs as 'virtual subclasses' -- these and their descendants will + be considered subclasses of the registering ABC by the built-in + issubclass() function, but the registering ABC won't show up in + their MRO (Method Resolution Order) nor will method + implementations defined by the registering ABC be callable (not + even via super()). + + """ + + # A global counter that is incremented each time a class is + # registered as a virtual subclass of anything. It forces the + # negative cache to be cleared before its next use. + # Note: this counter is private. Use `abc.get_cache_token()` for + # external code. + _abc_invalidation_counter = 0 + + def __new__(mcls, name, bases, namespace): + cls = super().__new__(mcls, name, bases, namespace) + # Compute set of abstract method names + abstracts = {name + for name, value in namespace.items() + if getattr(value, "__isabstractmethod__", False)} + for base in bases: + for name in getattr(base, "__abstractmethods__", set()): + value = getattr(cls, name, None) + if getattr(value, "__isabstractmethod__", False): + abstracts.add(name) + cls.__abstractmethods__ = frozenset(abstracts) + # Set up inheritance registry + cls._abc_registry = WeakSet() + cls._abc_cache = WeakSet() + cls._abc_negative_cache = WeakSet() + cls._abc_negative_cache_version = ABCMeta._abc_invalidation_counter + return cls + + def register(cls, subclass): + """Register a virtual subclass of an ABC. + + Returns the subclass, to allow usage as a class decorator. + """ + if not isinstance(subclass, type): + raise TypeError("Can only register classes") + if issubclass(subclass, cls): + return subclass # Already a subclass + # Subtle: test for cycles *after* testing for "already a subclass"; + # this means we allow X.register(X) and interpret it as a no-op. 
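+        # e.g. after B.register(A), a later A.register(B) is refused below,
+        # since each class would otherwise be a virtual subclass of the other.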
+ if issubclass(cls, subclass): + # This would create a cycle, which is bad for the algorithm below + raise RuntimeError("Refusing to create an inheritance cycle") + cls._abc_registry.add(subclass) + ABCMeta._abc_invalidation_counter += 1 # Invalidate negative cache + return subclass + + def _dump_registry(cls, file=None): + """Debug helper to print the ABC registry.""" + print("Class: %s.%s" % (cls.__module__, cls.__qualname__), file=file) + print("Inv.counter: %s" % ABCMeta._abc_invalidation_counter, file=file) + for name in sorted(cls.__dict__.keys()): + if name.startswith("_abc_"): + value = getattr(cls, name) + print("%s: %r" % (name, value), file=file) + + def __instancecheck__(cls, instance): + """Override for isinstance(instance, cls).""" + # Inline the cache checking + subclass = instance.__class__ + if subclass in cls._abc_cache: + return True + subtype = type(instance) + if subtype is subclass: + if (cls._abc_negative_cache_version == + ABCMeta._abc_invalidation_counter and + subclass in cls._abc_negative_cache): + return False + # Fall back to the subclass check. + return cls.__subclasscheck__(subclass) + return any(cls.__subclasscheck__(c) for c in {subclass, subtype}) + + def __subclasscheck__(cls, subclass): + """Override for issubclass(subclass, cls).""" + # Check cache + if subclass in cls._abc_cache: + return True + # Check negative cache; may have to invalidate + if cls._abc_negative_cache_version < ABCMeta._abc_invalidation_counter: + # Invalidate the negative cache + cls._abc_negative_cache = WeakSet() + cls._abc_negative_cache_version = ABCMeta._abc_invalidation_counter + elif subclass in cls._abc_negative_cache: + return False + # Check the subclass hook + ok = cls.__subclasshook__(subclass) + if ok is not NotImplemented: + assert isinstance(ok, bool) + if ok: + cls._abc_cache.add(subclass) + else: + cls._abc_negative_cache.add(subclass) + return ok + # Check if it's a direct subclass + if cls in getattr(subclass, '__mro__', ()): + cls._abc_cache.add(subclass) + return True + # Check if it's a subclass of a registered class (recursive) + for rcls in cls._abc_registry: + if issubclass(subclass, rcls): + cls._abc_cache.add(subclass) + return True + # Check if it's a subclass of a subclass (recursive) + for scls in cls.__subclasses__(): + if issubclass(subclass, scls): + cls._abc_cache.add(subclass) + return True + # No dice; update negative cache + cls._abc_negative_cache.add(subclass) + return False + + +class ABC(metaclass=ABCMeta): + """Helper class that provides a standard way to create an ABC using + inheritance. + """ + pass + + +def get_cache_token(): + """Returns the current ABC cache token. + + The token is an opaque object (supporting equality testing) identifying the + current version of the ABC cache for virtual subclasses. The token changes + with every call to ``register()`` on any ABC. + """ + return ABCMeta._abc_invalidation_counter diff --git a/typing_extensions/test_data/python-3.6.0/collections/__init__.py b/typing_extensions/test_data/python-3.6.0/collections/__init__.py new file mode 100644 index 00000000..bcc42919 --- /dev/null +++ b/typing_extensions/test_data/python-3.6.0/collections/__init__.py @@ -0,0 +1,1243 @@ +'''This module implements specialized container datatypes providing +alternatives to Python's general purpose built-in containers, dict, +list, set, and tuple. 
+ +* namedtuple factory function for creating tuple subclasses with named fields +* deque list-like container with fast appends and pops on either end +* ChainMap dict-like class for creating a single view of multiple mappings +* Counter dict subclass for counting hashable objects +* OrderedDict dict subclass that remembers the order entries were added +* defaultdict dict subclass that calls a factory function to supply missing values +* UserDict wrapper around dictionary objects for easier dict subclassing +* UserList wrapper around list objects for easier list subclassing +* UserString wrapper around string objects for easier string subclassing + +''' + +__all__ = ['deque', 'defaultdict', 'namedtuple', 'UserDict', 'UserList', + 'UserString', 'Counter', 'OrderedDict', 'ChainMap'] + +# For backwards compatibility, continue to make the collections ABCs +# available through the collections module. +from _collections_abc import * +import _collections_abc +__all__ += _collections_abc.__all__ + +from operator import itemgetter as _itemgetter, eq as _eq +from keyword import iskeyword as _iskeyword +import sys as _sys +import heapq as _heapq +from _weakref import proxy as _proxy +from itertools import repeat as _repeat, chain as _chain, starmap as _starmap +from reprlib import recursive_repr as _recursive_repr + +try: + from _collections import deque +except ImportError: + pass +else: + MutableSequence.register(deque) + +try: + from _collections import defaultdict +except ImportError: + pass + + +################################################################################ +### OrderedDict +################################################################################ + +class _OrderedDictKeysView(KeysView): + + def __reversed__(self): + yield from reversed(self._mapping) + +class _OrderedDictItemsView(ItemsView): + + def __reversed__(self): + for key in reversed(self._mapping): + yield (key, self._mapping[key]) + +class _OrderedDictValuesView(ValuesView): + + def __reversed__(self): + for key in reversed(self._mapping): + yield self._mapping[key] + +class _Link(object): + __slots__ = 'prev', 'next', 'key', '__weakref__' + +class OrderedDict(dict): + 'Dictionary that remembers insertion order' + # An inherited dict maps keys to values. + # The inherited dict provides __getitem__, __len__, __contains__, and get. + # The remaining methods are order-aware. + # Big-O running times for all methods are the same as regular dictionaries. + + # The internal self.__map dict maps keys to links in a doubly linked list. + # The circular doubly linked list starts and ends with a sentinel element. + # The sentinel element never gets deleted (this simplifies the algorithm). + # The sentinel is in self.__hardroot with a weakref proxy in self.__root. + # The prev links are weakref proxies (to prevent circular references). + # Individual links are kept alive by the hard reference in self.__map. + # Those hard references disappear when a key is deleted from an OrderedDict. + + def __init__(*args, **kwds): + '''Initialize an ordered dictionary. The signature is the same as + regular dictionaries, but keyword arguments are not recommended because + their insertion order is arbitrary. 
+ + ''' + if not args: + raise TypeError("descriptor '__init__' of 'OrderedDict' object " + "needs an argument") + self, *args = args + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + try: + self.__root + except AttributeError: + self.__hardroot = _Link() + self.__root = root = _proxy(self.__hardroot) + root.prev = root.next = root + self.__map = {} + self.__update(*args, **kwds) + + def __setitem__(self, key, value, + dict_setitem=dict.__setitem__, proxy=_proxy, Link=_Link): + 'od.__setitem__(i, y) <==> od[i]=y' + # Setting a new item creates a new link at the end of the linked list, + # and the inherited dictionary is updated with the new key/value pair. + if key not in self: + self.__map[key] = link = Link() + root = self.__root + last = root.prev + link.prev, link.next, link.key = last, root, key + last.next = link + root.prev = proxy(link) + dict_setitem(self, key, value) + + def __delitem__(self, key, dict_delitem=dict.__delitem__): + 'od.__delitem__(y) <==> del od[y]' + # Deleting an existing item uses self.__map to find the link which gets + # removed by updating the links in the predecessor and successor nodes. + dict_delitem(self, key) + link = self.__map.pop(key) + link_prev = link.prev + link_next = link.next + link_prev.next = link_next + link_next.prev = link_prev + link.prev = None + link.next = None + + def __iter__(self): + 'od.__iter__() <==> iter(od)' + # Traverse the linked list in order. + root = self.__root + curr = root.next + while curr is not root: + yield curr.key + curr = curr.next + + def __reversed__(self): + 'od.__reversed__() <==> reversed(od)' + # Traverse the linked list in reverse order. + root = self.__root + curr = root.prev + while curr is not root: + yield curr.key + curr = curr.prev + + def clear(self): + 'od.clear() -> None. Remove all items from od.' + root = self.__root + root.prev = root.next = root + self.__map.clear() + dict.clear(self) + + def popitem(self, last=True): + '''od.popitem() -> (k, v), return and remove a (key, value) pair. + Pairs are returned in LIFO order if last is true or FIFO order if false. + + ''' + if not self: + raise KeyError('dictionary is empty') + root = self.__root + if last: + link = root.prev + link_prev = link.prev + link_prev.next = root + root.prev = link_prev + else: + link = root.next + link_next = link.next + root.next = link_next + link_next.prev = root + key = link.key + del self.__map[key] + value = dict.pop(self, key) + return key, value + + def move_to_end(self, key, last=True): + '''Move an existing element to the end (or beginning if last==False). + + Raises KeyError if the element does not exist. + When last=True, acts like a fast version of self[key]=self.pop(key). 
+ + ''' + link = self.__map[key] + link_prev = link.prev + link_next = link.next + link_prev.next = link_next + link_next.prev = link_prev + root = self.__root + if last: + last = root.prev + link.prev = last + link.next = root + last.next = root.prev = link + else: + first = root.next + link.prev = root + link.next = first + root.next = first.prev = link + + def __sizeof__(self): + sizeof = _sys.getsizeof + n = len(self) + 1 # number of links including root + size = sizeof(self.__dict__) # instance dictionary + size += sizeof(self.__map) * 2 # internal dict and inherited dict + size += sizeof(self.__hardroot) * n # link objects + size += sizeof(self.__root) * n # proxy objects + return size + + update = __update = MutableMapping.update + + def keys(self): + "D.keys() -> a set-like object providing a view on D's keys" + return _OrderedDictKeysView(self) + + def items(self): + "D.items() -> a set-like object providing a view on D's items" + return _OrderedDictItemsView(self) + + def values(self): + "D.values() -> an object providing a view on D's values" + return _OrderedDictValuesView(self) + + __ne__ = MutableMapping.__ne__ + + __marker = object() + + def pop(self, key, default=__marker): + '''od.pop(k[,d]) -> v, remove specified key and return the corresponding + value. If key is not found, d is returned if given, otherwise KeyError + is raised. + + ''' + if key in self: + result = self[key] + del self[key] + return result + if default is self.__marker: + raise KeyError(key) + return default + + def setdefault(self, key, default=None): + 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' + if key in self: + return self[key] + self[key] = default + return default + + @_recursive_repr() + def __repr__(self): + 'od.__repr__() <==> repr(od)' + if not self: + return '%s()' % (self.__class__.__name__,) + return '%s(%r)' % (self.__class__.__name__, list(self.items())) + + def __reduce__(self): + 'Return state information for pickling' + inst_dict = vars(self).copy() + for k in vars(OrderedDict()): + inst_dict.pop(k, None) + return self.__class__, (), inst_dict or None, None, iter(self.items()) + + def copy(self): + 'od.copy() -> a shallow copy of od' + return self.__class__(self) + + @classmethod + def fromkeys(cls, iterable, value=None): + '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S. + If not specified, the value defaults to None. + + ''' + self = cls() + for key in iterable: + self[key] = value + return self + + def __eq__(self, other): + '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive + while comparison to a regular mapping is order-insensitive. + + ''' + if isinstance(other, OrderedDict): + return dict.__eq__(self, other) and all(map(_eq, self, other)) + return dict.__eq__(self, other) + + +try: + from _collections import OrderedDict +except ImportError: + # Leave the pure Python version in place. 
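+    # (CPython provides an accelerated C implementation of OrderedDict;
+    # the pure Python class above is the fallback for other interpreters.)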
+ pass + + +################################################################################ +### namedtuple +################################################################################ + +_class_template = """\ +from builtins import property as _property, tuple as _tuple +from operator import itemgetter as _itemgetter +from collections import OrderedDict + +class {typename}(tuple): + '{typename}({arg_list})' + + __slots__ = () + + _fields = {field_names!r} + + def __new__(_cls, {arg_list}): + 'Create new instance of {typename}({arg_list})' + return _tuple.__new__(_cls, ({arg_list})) + + @classmethod + def _make(cls, iterable, new=tuple.__new__, len=len): + 'Make a new {typename} object from a sequence or iterable' + result = new(cls, iterable) + if len(result) != {num_fields:d}: + raise TypeError('Expected {num_fields:d} arguments, got %d' % len(result)) + return result + + def _replace(_self, **kwds): + 'Return a new {typename} object replacing specified fields with new values' + result = _self._make(map(kwds.pop, {field_names!r}, _self)) + if kwds: + raise ValueError('Got unexpected field names: %r' % list(kwds)) + return result + + def __repr__(self): + 'Return a nicely formatted representation string' + return self.__class__.__name__ + '({repr_fmt})' % self + + def _asdict(self): + 'Return a new OrderedDict which maps field names to their values.' + return OrderedDict(zip(self._fields, self)) + + def __getnewargs__(self): + 'Return self as a plain tuple. Used by copy and pickle.' + return tuple(self) + +{field_defs} +""" + +_repr_template = '{name}=%r' + +_field_template = '''\ + {name} = _property(_itemgetter({index:d}), doc='Alias for field number {index:d}') +''' + +def namedtuple(typename, field_names, *, verbose=False, rename=False, module=None): + """Returns a new subclass of tuple with named fields. + + >>> Point = namedtuple('Point', ['x', 'y']) + >>> Point.__doc__ # docstring for the new class + 'Point(x, y)' + >>> p = Point(11, y=22) # instantiate with positional args or keywords + >>> p[0] + p[1] # indexable like a plain tuple + 33 + >>> x, y = p # unpack like a regular tuple + >>> x, y + (11, 22) + >>> p.x + p.y # fields also accessible by name + 33 + >>> d = p._asdict() # convert to a dictionary + >>> d['x'] + 11 + >>> Point(**d) # convert from a dictionary + Point(x=11, y=22) + >>> p._replace(x=100) # _replace() is like str.replace() but targets named fields + Point(x=100, y=22) + + """ + + # Validate the field names. At the user's option, either generate an error + # message or automatically replace the field name with a valid name. 
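+    # e.g. with rename=True, namedtuple('T', ['abc', 'def', 'abc']) yields
+    # fields ('abc', '_1', '_2'): 'def' is a keyword and the second 'abc'
+    # is a duplicate, so each is replaced by its positional placeholder.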
+ if isinstance(field_names, str): + field_names = field_names.replace(',', ' ').split() + field_names = list(map(str, field_names)) + typename = str(typename) + if rename: + seen = set() + for index, name in enumerate(field_names): + if (not name.isidentifier() + or _iskeyword(name) + or name.startswith('_') + or name in seen): + field_names[index] = '_%d' % index + seen.add(name) + for name in [typename] + field_names: + if type(name) is not str: + raise TypeError('Type names and field names must be strings') + if not name.isidentifier(): + raise ValueError('Type names and field names must be valid ' + 'identifiers: %r' % name) + if _iskeyword(name): + raise ValueError('Type names and field names cannot be a ' + 'keyword: %r' % name) + seen = set() + for name in field_names: + if name.startswith('_') and not rename: + raise ValueError('Field names cannot start with an underscore: ' + '%r' % name) + if name in seen: + raise ValueError('Encountered duplicate field name: %r' % name) + seen.add(name) + + # Fill-in the class template + class_definition = _class_template.format( + typename = typename, + field_names = tuple(field_names), + num_fields = len(field_names), + arg_list = repr(tuple(field_names)).replace("'", "")[1:-1], + repr_fmt = ', '.join(_repr_template.format(name=name) + for name in field_names), + field_defs = '\n'.join(_field_template.format(index=index, name=name) + for index, name in enumerate(field_names)) + ) + + # Execute the template string in a temporary namespace and support + # tracing utilities by setting a value for frame.f_globals['__name__'] + namespace = dict(__name__='namedtuple_%s' % typename) + exec(class_definition, namespace) + result = namespace[typename] + result._source = class_definition + if verbose: + print(result._source) + + # For pickling to work, the __module__ variable needs to be set to the frame + # where the named tuple is created. Bypass this step in environments where + # sys._getframe is not defined (Jython for example) or sys._getframe is not + # defined for arguments greater than 0 (IronPython), or where the user has + # specified a particular module. + if module is None: + try: + module = _sys._getframe(1).f_globals.get('__name__', '__main__') + except (AttributeError, ValueError): + pass + if module is not None: + result.__module__ = module + + return result + + +######################################################################## +### Counter +######################################################################## + +def _count_elements(mapping, iterable): + 'Tally elements from the iterable.' + mapping_get = mapping.get + for elem in iterable: + mapping[elem] = mapping_get(elem, 0) + 1 + +try: # Load C helper function if available + from _collections import _count_elements +except ImportError: + pass + +class Counter(dict): + '''Dict subclass for counting hashable items. Sometimes called a bag + or multiset. Elements are stored as dictionary keys and their counts + are stored as dictionary values. + + >>> c = Counter('abcdeabcdabcaba') # count elements from a string + + >>> c.most_common(3) # three most common elements + [('a', 5), ('b', 4), ('c', 3)] + >>> sorted(c) # list all unique elements + ['a', 'b', 'c', 'd', 'e'] + >>> ''.join(sorted(c.elements())) # list elements with repetitions + 'aaaaabbbbcccdde' + >>> sum(c.values()) # total of all counts + 15 + + >>> c['a'] # count of letter 'a' + 5 + >>> for elem in 'shazam': # update counts from an iterable + ... 
c[elem] += 1 # by adding 1 to each element's count + >>> c['a'] # now there are seven 'a' + 7 + >>> del c['b'] # remove all 'b' + >>> c['b'] # now there are zero 'b' + 0 + + >>> d = Counter('simsalabim') # make another counter + >>> c.update(d) # add in the second counter + >>> c['a'] # now there are nine 'a' + 9 + + >>> c.clear() # empty the counter + >>> c + Counter() + + Note: If a count is set to zero or reduced to zero, it will remain + in the counter until the entry is deleted or the counter is cleared: + + >>> c = Counter('aaabbc') + >>> c['b'] -= 2 # reduce the count of 'b' by two + >>> c.most_common() # 'b' is still in, but its count is zero + [('a', 3), ('c', 1), ('b', 0)] + + ''' + # References: + # http://en.wikipedia.org/wiki/Multiset + # http://www.gnu.org/software/smalltalk/manual-base/html_node/Bag.html + # http://www.demo2s.com/Tutorial/Cpp/0380__set-multiset/Catalog0380__set-multiset.htm + # http://code.activestate.com/recipes/259174/ + # Knuth, TAOCP Vol. II section 4.6.3 + + def __init__(*args, **kwds): + '''Create a new, empty Counter object. And if given, count elements + from an input iterable. Or, initialize the count from another mapping + of elements to their counts. + + >>> c = Counter() # a new, empty counter + >>> c = Counter('gallahad') # a new counter from an iterable + >>> c = Counter({'a': 4, 'b': 2}) # a new counter from a mapping + >>> c = Counter(a=4, b=2) # a new counter from keyword args + + ''' + if not args: + raise TypeError("descriptor '__init__' of 'Counter' object " + "needs an argument") + self, *args = args + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + super(Counter, self).__init__() + self.update(*args, **kwds) + + def __missing__(self, key): + 'The count of elements not in the Counter is zero.' + # Needed so that self[missing_item] does not raise KeyError + return 0 + + def most_common(self, n=None): + '''List the n most common elements and their counts from the most + common to the least. If n is None, then list all element counts. + + >>> Counter('abcdeabcdabcaba').most_common(3) + [('a', 5), ('b', 4), ('c', 3)] + + ''' + # Emulate Bag.sortedByCount from Smalltalk + if n is None: + return sorted(self.items(), key=_itemgetter(1), reverse=True) + return _heapq.nlargest(n, self.items(), key=_itemgetter(1)) + + def elements(self): + '''Iterator over elements repeating each as many times as its count. + + >>> c = Counter('ABCABC') + >>> sorted(c.elements()) + ['A', 'A', 'B', 'B', 'C', 'C'] + + # Knuth's example for prime factors of 1836: 2**2 * 3**3 * 17**1 + >>> prime_factors = Counter({2: 2, 3: 3, 17: 1}) + >>> product = 1 + >>> for factor in prime_factors.elements(): # loop over factors + ... product *= factor # and multiply them + >>> product + 1836 + + Note, if an element's count has been set to zero or is a negative + number, elements() will ignore it. + + ''' + # Emulate Bag.do from Smalltalk and Multiset.begin from C++. + return _chain.from_iterable(_starmap(_repeat, self.items())) + + # Override dict methods where necessary + + @classmethod + def fromkeys(cls, iterable, v=None): + # There is no equivalent method for counters because setting v=1 + # means that no element can have a count greater than one. + raise NotImplementedError( + 'Counter.fromkeys() is undefined. Use Counter(iterable) instead.') + + def update(*args, **kwds): + '''Like dict.update() but add counts instead of replacing them. + + Source can be an iterable, a dictionary, or another Counter instance. 
+ + >>> c = Counter('which') + >>> c.update('witch') # add elements from another iterable + >>> d = Counter('watch') + >>> c.update(d) # add elements from another counter + >>> c['h'] # four 'h' in which, witch, and watch + 4 + + ''' + # The regular dict.update() operation makes no sense here because the + # replace behavior results in the some of original untouched counts + # being mixed-in with all of the other counts for a mismash that + # doesn't have a straight-forward interpretation in most counting + # contexts. Instead, we implement straight-addition. Both the inputs + # and outputs are allowed to contain zero and negative counts. + + if not args: + raise TypeError("descriptor 'update' of 'Counter' object " + "needs an argument") + self, *args = args + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + iterable = args[0] if args else None + if iterable is not None: + if isinstance(iterable, Mapping): + if self: + self_get = self.get + for elem, count in iterable.items(): + self[elem] = count + self_get(elem, 0) + else: + super(Counter, self).update(iterable) # fast path when counter is empty + else: + _count_elements(self, iterable) + if kwds: + self.update(kwds) + + def subtract(*args, **kwds): + '''Like dict.update() but subtracts counts instead of replacing them. + Counts can be reduced below zero. Both the inputs and outputs are + allowed to contain zero and negative counts. + + Source can be an iterable, a dictionary, or another Counter instance. + + >>> c = Counter('which') + >>> c.subtract('witch') # subtract elements from another iterable + >>> c.subtract(Counter('watch')) # subtract elements from another counter + >>> c['h'] # 2 in which, minus 1 in witch, minus 1 in watch + 0 + >>> c['w'] # 1 in which, minus 1 in witch, minus 1 in watch + -1 + + ''' + if not args: + raise TypeError("descriptor 'subtract' of 'Counter' object " + "needs an argument") + self, *args = args + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + iterable = args[0] if args else None + if iterable is not None: + self_get = self.get + if isinstance(iterable, Mapping): + for elem, count in iterable.items(): + self[elem] = self_get(elem, 0) - count + else: + for elem in iterable: + self[elem] = self_get(elem, 0) - 1 + if kwds: + self.subtract(kwds) + + def copy(self): + 'Return a shallow copy.' + return self.__class__(self) + + def __reduce__(self): + return self.__class__, (dict(self),) + + def __delitem__(self, elem): + 'Like dict.__delitem__() but does not raise KeyError for missing values.' + if elem in self: + super().__delitem__(elem) + + def __repr__(self): + if not self: + return '%s()' % self.__class__.__name__ + try: + items = ', '.join(map('%r: %r'.__mod__, self.most_common())) + return '%s({%s})' % (self.__class__.__name__, items) + except TypeError: + # handle case where values are not orderable + return '{0}({1!r})'.format(self.__class__.__name__, dict(self)) + + # Multiset-style mathematical operations discussed in: + # Knuth TAOCP Volume II section 4.6.3 exercise 19 + # and at http://en.wikipedia.org/wiki/Multiset + # + # Outputs guaranteed to only include positive counts. + # + # To strip negative and zero counts, add-in an empty counter: + # c += Counter() + + def __add__(self, other): + '''Add counts from two counters. 
+ + >>> Counter('abbb') + Counter('bcc') + Counter({'b': 4, 'c': 2, 'a': 1}) + + ''' + if not isinstance(other, Counter): + return NotImplemented + result = Counter() + for elem, count in self.items(): + newcount = count + other[elem] + if newcount > 0: + result[elem] = newcount + for elem, count in other.items(): + if elem not in self and count > 0: + result[elem] = count + return result + + def __sub__(self, other): + ''' Subtract count, but keep only results with positive counts. + + >>> Counter('abbbc') - Counter('bccd') + Counter({'b': 2, 'a': 1}) + + ''' + if not isinstance(other, Counter): + return NotImplemented + result = Counter() + for elem, count in self.items(): + newcount = count - other[elem] + if newcount > 0: + result[elem] = newcount + for elem, count in other.items(): + if elem not in self and count < 0: + result[elem] = 0 - count + return result + + def __or__(self, other): + '''Union is the maximum of value in either of the input counters. + + >>> Counter('abbb') | Counter('bcc') + Counter({'b': 3, 'c': 2, 'a': 1}) + + ''' + if not isinstance(other, Counter): + return NotImplemented + result = Counter() + for elem, count in self.items(): + other_count = other[elem] + newcount = other_count if count < other_count else count + if newcount > 0: + result[elem] = newcount + for elem, count in other.items(): + if elem not in self and count > 0: + result[elem] = count + return result + + def __and__(self, other): + ''' Intersection is the minimum of corresponding counts. + + >>> Counter('abbb') & Counter('bcc') + Counter({'b': 1}) + + ''' + if not isinstance(other, Counter): + return NotImplemented + result = Counter() + for elem, count in self.items(): + other_count = other[elem] + newcount = count if count < other_count else other_count + if newcount > 0: + result[elem] = newcount + return result + + def __pos__(self): + 'Adds an empty counter, effectively stripping negative and zero counts' + result = Counter() + for elem, count in self.items(): + if count > 0: + result[elem] = count + return result + + def __neg__(self): + '''Subtracts from an empty counter. Strips positive and zero counts, + and flips the sign on negative counts. + + ''' + result = Counter() + for elem, count in self.items(): + if count < 0: + result[elem] = 0 - count + return result + + def _keep_positive(self): + '''Internal method to strip elements with a negative or zero count''' + nonpositive = [elem for elem, count in self.items() if not count > 0] + for elem in nonpositive: + del self[elem] + return self + + def __iadd__(self, other): + '''Inplace add from another counter, keeping only positive counts. + + >>> c = Counter('abbb') + >>> c += Counter('bcc') + >>> c + Counter({'b': 4, 'c': 2, 'a': 1}) + + ''' + for elem, count in other.items(): + self[elem] += count + return self._keep_positive() + + def __isub__(self, other): + '''Inplace subtract counter, but keep only results with positive counts. + + >>> c = Counter('abbbc') + >>> c -= Counter('bccd') + >>> c + Counter({'b': 2, 'a': 1}) + + ''' + for elem, count in other.items(): + self[elem] -= count + return self._keep_positive() + + def __ior__(self, other): + '''Inplace union is the maximum of value from either counter. 
+ + >>> c = Counter('abbb') + >>> c |= Counter('bcc') + >>> c + Counter({'b': 3, 'c': 2, 'a': 1}) + + ''' + for elem, other_count in other.items(): + count = self[elem] + if other_count > count: + self[elem] = other_count + return self._keep_positive() + + def __iand__(self, other): + '''Inplace intersection is the minimum of corresponding counts. + + >>> c = Counter('abbb') + >>> c &= Counter('bcc') + >>> c + Counter({'b': 1}) + + ''' + for elem, count in self.items(): + other_count = other[elem] + if other_count < count: + self[elem] = other_count + return self._keep_positive() + + +######################################################################## +### ChainMap +######################################################################## + +class ChainMap(MutableMapping): + ''' A ChainMap groups multiple dicts (or other mappings) together + to create a single, updateable view. + + The underlying mappings are stored in a list. That list is public and can + be accessed or updated using the *maps* attribute. There is no other + state. + + Lookups search the underlying mappings successively until a key is found. + In contrast, writes, updates, and deletions only operate on the first + mapping. + + ''' + + def __init__(self, *maps): + '''Initialize a ChainMap by setting *maps* to the given mappings. + If no mappings are provided, a single empty dictionary is used. + + ''' + self.maps = list(maps) or [{}] # always at least one map + + def __missing__(self, key): + raise KeyError(key) + + def __getitem__(self, key): + for mapping in self.maps: + try: + return mapping[key] # can't use 'key in mapping' with defaultdict + except KeyError: + pass + return self.__missing__(key) # support subclasses that define __missing__ + + def get(self, key, default=None): + return self[key] if key in self else default + + def __len__(self): + return len(set().union(*self.maps)) # reuses stored hash values if possible + + def __iter__(self): + return iter(set().union(*self.maps)) + + def __contains__(self, key): + return any(key in m for m in self.maps) + + def __bool__(self): + return any(self.maps) + + @_recursive_repr() + def __repr__(self): + return '{0.__class__.__name__}({1})'.format( + self, ', '.join(map(repr, self.maps))) + + @classmethod + def fromkeys(cls, iterable, *args): + 'Create a ChainMap with a single dict created from the iterable.' + return cls(dict.fromkeys(iterable, *args)) + + def copy(self): + 'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]' + return self.__class__(self.maps[0].copy(), *self.maps[1:]) + + __copy__ = copy + + def new_child(self, m=None): # like Django's Context.push() + '''New ChainMap with a new map followed by all previous maps. + If no map is provided, an empty dict is used. + ''' + if m is None: + m = {} + return self.__class__(m, *self.maps) + + @property + def parents(self): # like Django's Context.pop() + 'New ChainMap from maps[1:].' + return self.__class__(*self.maps[1:]) + + def __setitem__(self, key, value): + self.maps[0][key] = value + + def __delitem__(self, key): + try: + del self.maps[0][key] + except KeyError: + raise KeyError('Key not found in the first mapping: {!r}'.format(key)) + + def popitem(self): + 'Remove and return an item pair from maps[0]. Raise KeyError is maps[0] is empty.' + try: + return self.maps[0].popitem() + except KeyError: + raise KeyError('No keys found in the first mapping.') + + def pop(self, key, *args): + 'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].' 
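+        # Illustrative sketch of the ChainMap semantics described above:
+        # reads search every mapping in order, while writes and deletions
+        # touch only maps[0].  The names `defaults` and `overrides` are
+        # hypothetical:
+        #
+        #   >>> defaults = {'color': 'red', 'user': 'guest'}
+        #   >>> overrides = {'user': 'admin'}
+        #   >>> cm = ChainMap(overrides, defaults)
+        #   >>> cm['user'], cm['color']
+        #   ('admin', 'red')
+        #   >>> cm['color'] = 'blue'   # lands in overrides, not defaults
+        #   >>> defaults['color']
+        #   'red'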
+ try: + return self.maps[0].pop(key, *args) + except KeyError: + raise KeyError('Key not found in the first mapping: {!r}'.format(key)) + + def clear(self): + 'Clear maps[0], leaving maps[1:] intact.' + self.maps[0].clear() + + +################################################################################ +### UserDict +################################################################################ + +class UserDict(MutableMapping): + + # Start by filling-out the abstract methods + def __init__(*args, **kwargs): + if not args: + raise TypeError("descriptor '__init__' of 'UserDict' object " + "needs an argument") + self, *args = args + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + if args: + dict = args[0] + elif 'dict' in kwargs: + dict = kwargs.pop('dict') + import warnings + warnings.warn("Passing 'dict' as keyword argument is deprecated", + DeprecationWarning, stacklevel=2) + else: + dict = None + self.data = {} + if dict is not None: + self.update(dict) + if len(kwargs): + self.update(kwargs) + def __len__(self): return len(self.data) + def __getitem__(self, key): + if key in self.data: + return self.data[key] + if hasattr(self.__class__, "__missing__"): + return self.__class__.__missing__(self, key) + raise KeyError(key) + def __setitem__(self, key, item): self.data[key] = item + def __delitem__(self, key): del self.data[key] + def __iter__(self): + return iter(self.data) + + # Modify __contains__ to work correctly when __missing__ is present + def __contains__(self, key): + return key in self.data + + # Now, add the methods in dicts but not in MutableMapping + def __repr__(self): return repr(self.data) + def copy(self): + if self.__class__ is UserDict: + return UserDict(self.data.copy()) + import copy + data = self.data + try: + self.data = {} + c = copy.copy(self) + finally: + self.data = data + c.update(self) + return c + @classmethod + def fromkeys(cls, iterable, value=None): + d = cls() + for key in iterable: + d[key] = value + return d + + + +################################################################################ +### UserList +################################################################################ + +class UserList(MutableSequence): + """A more or less complete user-defined wrapper around list objects.""" + def __init__(self, initlist=None): + self.data = [] + if initlist is not None: + # XXX should this accept an arbitrary sequence? 
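+            # Illustrative sketch: UserDict keeps its contents in self.data
+            # and funnels every write through __setitem__, which makes it a
+            # safer subclassing target than dict itself.  `LowerDict` is a
+            # hypothetical example class:
+            #
+            #   >>> class LowerDict(UserDict):
+            #   ...     def __setitem__(self, key, value):
+            #   ...         self.data[key.lower()] = value
+            #   >>> d = LowerDict({'KEY': 1})   # update() calls __setitem__
+            #   >>> d
+            #   {'key': 1}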
+ if type(initlist) == type(self.data): + self.data[:] = initlist + elif isinstance(initlist, UserList): + self.data[:] = initlist.data[:] + else: + self.data = list(initlist) + def __repr__(self): return repr(self.data) + def __lt__(self, other): return self.data < self.__cast(other) + def __le__(self, other): return self.data <= self.__cast(other) + def __eq__(self, other): return self.data == self.__cast(other) + def __gt__(self, other): return self.data > self.__cast(other) + def __ge__(self, other): return self.data >= self.__cast(other) + def __cast(self, other): + return other.data if isinstance(other, UserList) else other + def __contains__(self, item): return item in self.data + def __len__(self): return len(self.data) + def __getitem__(self, i): return self.data[i] + def __setitem__(self, i, item): self.data[i] = item + def __delitem__(self, i): del self.data[i] + def __add__(self, other): + if isinstance(other, UserList): + return self.__class__(self.data + other.data) + elif isinstance(other, type(self.data)): + return self.__class__(self.data + other) + return self.__class__(self.data + list(other)) + def __radd__(self, other): + if isinstance(other, UserList): + return self.__class__(other.data + self.data) + elif isinstance(other, type(self.data)): + return self.__class__(other + self.data) + return self.__class__(list(other) + self.data) + def __iadd__(self, other): + if isinstance(other, UserList): + self.data += other.data + elif isinstance(other, type(self.data)): + self.data += other + else: + self.data += list(other) + return self + def __mul__(self, n): + return self.__class__(self.data*n) + __rmul__ = __mul__ + def __imul__(self, n): + self.data *= n + return self + def append(self, item): self.data.append(item) + def insert(self, i, item): self.data.insert(i, item) + def pop(self, i=-1): return self.data.pop(i) + def remove(self, item): self.data.remove(item) + def clear(self): self.data.clear() + def copy(self): return self.__class__(self) + def count(self, item): return self.data.count(item) + def index(self, item, *args): return self.data.index(item, *args) + def reverse(self): self.data.reverse() + def sort(self, *args, **kwds): self.data.sort(*args, **kwds) + def extend(self, other): + if isinstance(other, UserList): + self.data.extend(other.data) + else: + self.data.extend(other) + + + +################################################################################ +### UserString +################################################################################ + +class UserString(Sequence): + def __init__(self, seq): + if isinstance(seq, str): + self.data = seq + elif isinstance(seq, UserString): + self.data = seq.data[:] + else: + self.data = str(seq) + def __str__(self): return str(self.data) + def __repr__(self): return repr(self.data) + def __int__(self): return int(self.data) + def __float__(self): return float(self.data) + def __complex__(self): return complex(self.data) + def __hash__(self): return hash(self.data) + def __getnewargs__(self): + return (self.data[:],) + + def __eq__(self, string): + if isinstance(string, UserString): + return self.data == string.data + return self.data == string + def __lt__(self, string): + if isinstance(string, UserString): + return self.data < string.data + return self.data < string + def __le__(self, string): + if isinstance(string, UserString): + return self.data <= string.data + return self.data <= string + def __gt__(self, string): + if isinstance(string, UserString): + return self.data > string.data + return 
self.data > string + def __ge__(self, string): + if isinstance(string, UserString): + return self.data >= string.data + return self.data >= string + + def __contains__(self, char): + if isinstance(char, UserString): + char = char.data + return char in self.data + + def __len__(self): return len(self.data) + def __getitem__(self, index): return self.__class__(self.data[index]) + def __add__(self, other): + if isinstance(other, UserString): + return self.__class__(self.data + other.data) + elif isinstance(other, str): + return self.__class__(self.data + other) + return self.__class__(self.data + str(other)) + def __radd__(self, other): + if isinstance(other, str): + return self.__class__(other + self.data) + return self.__class__(str(other) + self.data) + def __mul__(self, n): + return self.__class__(self.data*n) + __rmul__ = __mul__ + def __mod__(self, args): + return self.__class__(self.data % args) + def __rmod__(self, format): + return self.__class__(format % args) + + # the following methods are defined in alphabetical order: + def capitalize(self): return self.__class__(self.data.capitalize()) + def casefold(self): + return self.__class__(self.data.casefold()) + def center(self, width, *args): + return self.__class__(self.data.center(width, *args)) + def count(self, sub, start=0, end=_sys.maxsize): + if isinstance(sub, UserString): + sub = sub.data + return self.data.count(sub, start, end) + def encode(self, encoding=None, errors=None): # XXX improve this? + if encoding: + if errors: + return self.__class__(self.data.encode(encoding, errors)) + return self.__class__(self.data.encode(encoding)) + return self.__class__(self.data.encode()) + def endswith(self, suffix, start=0, end=_sys.maxsize): + return self.data.endswith(suffix, start, end) + def expandtabs(self, tabsize=8): + return self.__class__(self.data.expandtabs(tabsize)) + def find(self, sub, start=0, end=_sys.maxsize): + if isinstance(sub, UserString): + sub = sub.data + return self.data.find(sub, start, end) + def format(self, *args, **kwds): + return self.data.format(*args, **kwds) + def format_map(self, mapping): + return self.data.format_map(mapping) + def index(self, sub, start=0, end=_sys.maxsize): + return self.data.index(sub, start, end) + def isalpha(self): return self.data.isalpha() + def isalnum(self): return self.data.isalnum() + def isdecimal(self): return self.data.isdecimal() + def isdigit(self): return self.data.isdigit() + def isidentifier(self): return self.data.isidentifier() + def islower(self): return self.data.islower() + def isnumeric(self): return self.data.isnumeric() + def isprintable(self): return self.data.isprintable() + def isspace(self): return self.data.isspace() + def istitle(self): return self.data.istitle() + def isupper(self): return self.data.isupper() + def join(self, seq): return self.data.join(seq) + def ljust(self, width, *args): + return self.__class__(self.data.ljust(width, *args)) + def lower(self): return self.__class__(self.data.lower()) + def lstrip(self, chars=None): return self.__class__(self.data.lstrip(chars)) + maketrans = str.maketrans + def partition(self, sep): + return self.data.partition(sep) + def replace(self, old, new, maxsplit=-1): + if isinstance(old, UserString): + old = old.data + if isinstance(new, UserString): + new = new.data + return self.__class__(self.data.replace(old, new, maxsplit)) + def rfind(self, sub, start=0, end=_sys.maxsize): + if isinstance(sub, UserString): + sub = sub.data + return self.data.rfind(sub, start, end) + def rindex(self, sub, start=0, 
end=_sys.maxsize): + return self.data.rindex(sub, start, end) + def rjust(self, width, *args): + return self.__class__(self.data.rjust(width, *args)) + def rpartition(self, sep): + return self.data.rpartition(sep) + def rstrip(self, chars=None): + return self.__class__(self.data.rstrip(chars)) + def split(self, sep=None, maxsplit=-1): + return self.data.split(sep, maxsplit) + def rsplit(self, sep=None, maxsplit=-1): + return self.data.rsplit(sep, maxsplit) + def splitlines(self, keepends=False): return self.data.splitlines(keepends) + def startswith(self, prefix, start=0, end=_sys.maxsize): + return self.data.startswith(prefix, start, end) + def strip(self, chars=None): return self.__class__(self.data.strip(chars)) + def swapcase(self): return self.__class__(self.data.swapcase()) + def title(self): return self.__class__(self.data.title()) + def translate(self, *args): + return self.__class__(self.data.translate(*args)) + def upper(self): return self.__class__(self.data.upper()) + def zfill(self, width): return self.__class__(self.data.zfill(width)) diff --git a/typing_extensions/test_data/python-3.6.0/collections/abc.py b/typing_extensions/test_data/python-3.6.0/collections/abc.py new file mode 100644 index 00000000..891600d1 --- /dev/null +++ b/typing_extensions/test_data/python-3.6.0/collections/abc.py @@ -0,0 +1,2 @@ +from _collections_abc import * +from _collections_abc import __all__ diff --git a/typing_extensions/test_data/python-3.6.0/typing.py b/typing_extensions/test_data/python-3.6.0/typing.py new file mode 100644 index 00000000..34845b74 --- /dev/null +++ b/typing_extensions/test_data/python-3.6.0/typing.py @@ -0,0 +1,2160 @@ +import abc +from abc import abstractmethod, abstractproperty +import collections +import contextlib +import functools +import re as stdlib_re # Avoid confusion with the re we export. +import sys +import types +try: + import collections.abc as collections_abc +except ImportError: + import collections as collections_abc # Fallback for PY3.2. + + +# Please keep __all__ alphabetized within each category. +__all__ = [ + # Super-special typing primitives. + 'Any', + 'Callable', + 'ClassVar', + 'Generic', + 'Optional', + 'Tuple', + 'Type', + 'TypeVar', + 'Union', + + # ABCs (from collections.abc). + 'AbstractSet', # collections.abc.Set. + 'ByteString', + 'Container', + 'Hashable', + 'ItemsView', + 'Iterable', + 'Iterator', + 'KeysView', + 'Mapping', + 'MappingView', + 'MutableMapping', + 'MutableSequence', + 'MutableSet', + 'Sequence', + 'Sized', + 'ValuesView', + # The following are added depending on presence + # of their non-generic counterparts in stdlib: + # Awaitable, + # AsyncIterator, + # AsyncIterable, + # Coroutine, + # Collection, + # ContextManager + + # Structural checks, a.k.a. protocols. + 'Reversible', + 'SupportsAbs', + 'SupportsFloat', + 'SupportsInt', + 'SupportsRound', + + # Concrete collection types. + 'Dict', + 'DefaultDict', + 'List', + 'Set', + 'FrozenSet', + 'NamedTuple', # Not really a type. + 'Generator', + + # One-off things. + 'AnyStr', + 'cast', + 'get_type_hints', + 'NewType', + 'no_type_check', + 'no_type_check_decorator', + 'overload', + 'Text', + 'TYPE_CHECKING', +] + +# The pseudo-submodules 're' and 'io' are part of the public +# namespace, but excluded from __all__ because they might stomp on +# legitimate imports of those modules. + + +def _qualname(x): + if sys.version_info[:2] >= (3, 3): + return x.__qualname__ + else: + # Fall back to just name. 
+ return x.__name__ + + +def _trim_name(nm): + if nm.startswith('_') and nm not in ('_TypeAlias', + '_ForwardRef', '_TypingBase', '_FinalTypingBase'): + nm = nm[1:] + return nm + + +class TypingMeta(type): + """Metaclass for most types defined in typing module + (not a part of public API). + + This overrides __new__() to require an extra keyword parameter + '_root', which serves as a guard against naive subclassing of the + typing classes. Any legitimate class defined using a metaclass + derived from TypingMeta must pass _root=True. + + This also defines a dummy constructor (all the work for most typing + constructs is done in __new__) and a nicer repr(). + """ + + _is_protocol = False + + def __new__(cls, name, bases, namespace, *, _root=False): + if not _root: + raise TypeError("Cannot subclass %s" % + (', '.join(map(_type_repr, bases)) or '()')) + return super().__new__(cls, name, bases, namespace) + + def __init__(self, *args, **kwds): + pass + + def _eval_type(self, globalns, localns): + """Override this in subclasses to interpret forward references. + + For example, List['C'] is internally stored as + List[_ForwardRef('C')], which should evaluate to List[C], + where C is an object found in globalns or localns (searching + localns first, of course). + """ + return self + + def _get_type_vars(self, tvars): + pass + + def __repr__(self): + qname = _trim_name(_qualname(self)) + return '%s.%s' % (self.__module__, qname) + + +class _TypingBase(metaclass=TypingMeta, _root=True): + """Internal indicator of special typing constructs.""" + + __slots__ = () + + def __init__(self, *args, **kwds): + pass + + def __new__(cls, *args, **kwds): + """Constructor. + + This only exists to give a better error message in case + someone tries to subclass a special typing object (not a good idea). + """ + if (len(args) == 3 and + isinstance(args[0], str) and + isinstance(args[1], tuple)): + # Close enough. + raise TypeError("Cannot subclass %r" % cls) + return super().__new__(cls) + + # Things that are not classes also need these. + def _eval_type(self, globalns, localns): + return self + + def _get_type_vars(self, tvars): + pass + + def __repr__(self): + cls = type(self) + qname = _trim_name(_qualname(cls)) + return '%s.%s' % (cls.__module__, qname) + + def __call__(self, *args, **kwds): + raise TypeError("Cannot instantiate %r" % type(self)) + + +class _FinalTypingBase(_TypingBase, _root=True): + """Internal mix-in class to prevent instantiation. + + Prevents instantiation unless _root=True is given in class call. + It is used to create pseudo-singleton instances Any, Union, Optional, etc. 
+ """ + + __slots__ = () + + def __new__(cls, *args, _root=False, **kwds): + self = super().__new__(cls, *args, **kwds) + if _root is True: + return self + raise TypeError("Cannot instantiate %r" % cls) + + def __reduce__(self): + return _trim_name(type(self).__name__) + + +class _ForwardRef(_TypingBase, _root=True): + """Internal wrapper to hold a forward reference.""" + + __slots__ = ('__forward_arg__', '__forward_code__', + '__forward_evaluated__', '__forward_value__') + + def __init__(self, arg): + super().__init__(arg) + if not isinstance(arg, str): + raise TypeError('Forward reference must be a string -- got %r' % (arg,)) + try: + code = compile(arg, '', 'eval') + except SyntaxError: + raise SyntaxError('Forward reference must be an expression -- got %r' % + (arg,)) + self.__forward_arg__ = arg + self.__forward_code__ = code + self.__forward_evaluated__ = False + self.__forward_value__ = None + + def _eval_type(self, globalns, localns): + if not self.__forward_evaluated__ or localns is not globalns: + if globalns is None and localns is None: + globalns = localns = {} + elif globalns is None: + globalns = localns + elif localns is None: + localns = globalns + self.__forward_value__ = _type_check( + eval(self.__forward_code__, globalns, localns), + "Forward references must evaluate to types.") + self.__forward_evaluated__ = True + return self.__forward_value__ + + def __eq__(self, other): + if not isinstance(other, _ForwardRef): + return NotImplemented + return (self.__forward_arg__ == other.__forward_arg__ and + self.__forward_value__ == other.__forward_value__) + + def __hash__(self): + return hash((self.__forward_arg__, self.__forward_value__)) + + def __instancecheck__(self, obj): + raise TypeError("Forward references cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + raise TypeError("Forward references cannot be used with issubclass().") + + def __repr__(self): + return '_ForwardRef(%r)' % (self.__forward_arg__,) + + +class _TypeAlias(_TypingBase, _root=True): + """Internal helper class for defining generic variants of concrete types. + + Note that this is not a type; let's call it a pseudo-type. It cannot + be used in instance and subclass checks in parameterized form, i.e. + ``isinstance(42, Match[str])`` raises ``TypeError`` instead of returning + ``False``. + """ + + __slots__ = ('name', 'type_var', 'impl_type', 'type_checker') + + def __init__(self, name, type_var, impl_type, type_checker): + """Initializer. + + Args: + name: The name, e.g. 'Pattern'. + type_var: The type parameter, e.g. AnyStr, or the + specific type, e.g. str. + impl_type: The implementation type. + type_checker: Function that takes an impl_type instance. + and returns a value that should be a type_var instance. + """ + assert isinstance(name, str), repr(name) + assert isinstance(impl_type, type), repr(impl_type) + assert not isinstance(impl_type, TypingMeta), repr(impl_type) + assert isinstance(type_var, (type, _TypingBase)), repr(type_var) + self.name = name + self.type_var = type_var + self.impl_type = impl_type + self.type_checker = type_checker + + def __repr__(self): + return "%s[%s]" % (self.name, _type_repr(self.type_var)) + + def __getitem__(self, parameter): + if not isinstance(self.type_var, TypeVar): + raise TypeError("%s cannot be further parameterized." % self) + if self.type_var.__constraints__ and isinstance(parameter, type): + if not issubclass(parameter, self.type_var.__constraints__): + raise TypeError("%s is not a valid substitution for %s." 
% + (parameter, self.type_var)) + if isinstance(parameter, TypeVar) and parameter is not self.type_var: + raise TypeError("%s cannot be re-parameterized." % self) + return self.__class__(self.name, parameter, + self.impl_type, self.type_checker) + + def __eq__(self, other): + if not isinstance(other, _TypeAlias): + return NotImplemented + return self.name == other.name and self.type_var == other.type_var + + def __hash__(self): + return hash((self.name, self.type_var)) + + def __instancecheck__(self, obj): + if not isinstance(self.type_var, TypeVar): + raise TypeError("Parameterized type aliases cannot be used " + "with isinstance().") + return isinstance(obj, self.impl_type) + + def __subclasscheck__(self, cls): + if not isinstance(self.type_var, TypeVar): + raise TypeError("Parameterized type aliases cannot be used " + "with issubclass().") + return issubclass(cls, self.impl_type) + + +def _get_type_vars(types, tvars): + for t in types: + if isinstance(t, TypingMeta) or isinstance(t, _TypingBase): + t._get_type_vars(tvars) + + +def _type_vars(types): + tvars = [] + _get_type_vars(types, tvars) + return tuple(tvars) + + +def _eval_type(t, globalns, localns): + if isinstance(t, TypingMeta) or isinstance(t, _TypingBase): + return t._eval_type(globalns, localns) + return t + + +def _type_check(arg, msg): + """Check that the argument is a type, and return it (internal helper). + + As a special case, accept None and return type(None) instead. + Also, _TypeAlias instances (e.g. Match, Pattern) are acceptable. + + The msg argument is a human-readable error message, e.g. + + "Union[arg, ...]: arg should be a type." + + We append the repr() of the actual value (truncated to 100 chars). + """ + if arg is None: + return type(None) + if isinstance(arg, str): + arg = _ForwardRef(arg) + if (isinstance(arg, _TypingBase) and type(arg).__name__ == '_ClassVar' or + not isinstance(arg, (type, _TypingBase)) and not callable(arg)): + raise TypeError(msg + " Got %.100r." % (arg,)) + # Bare Union etc. are not valid as type arguments + if (type(arg).__name__ in ('_Union', '_Optional') + and not getattr(arg, '__origin__', None) + or isinstance(arg, TypingMeta) and _gorg(arg) in (Generic, _Protocol)): + raise TypeError("Plain %s is not valid as type argument" % arg) + return arg + + +def _type_repr(obj): + """Return the repr() of an object, special-casing types (internal helper). + + If obj is a type, we return a shorter version than the default + type.__repr__, based on the module and qualified name, which is + typically enough to uniquely identify a type. For everything + else, we fall back on repr(obj). + """ + if isinstance(obj, type) and not isinstance(obj, TypingMeta): + if obj.__module__ == 'builtins': + return _qualname(obj) + return '%s.%s' % (obj.__module__, _qualname(obj)) + if obj is ...: + return('...') + if isinstance(obj, types.FunctionType): + return obj.__name__ + return repr(obj) + + +class _Any(_FinalTypingBase, _root=True): + """Special type indicating an unconstrained type. + + - Any is compatible with every type. + - Any assumed to have all methods. + - All values assumed to be instances of Any. + + Note that all the above statements are true from the point of view of + static type checkers. At runtime, Any should not be used with instance + or class checks. 
+ """ + + __slots__ = () + + def __instancecheck__(self, obj): + raise TypeError("Any cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + raise TypeError("Any cannot be used with issubclass().") + + +Any = _Any(_root=True) + + +class TypeVar(_TypingBase, _root=True): + """Type variable. + + Usage:: + + T = TypeVar('T') # Can be anything + A = TypeVar('A', str, bytes) # Must be str or bytes + + Type variables exist primarily for the benefit of static type + checkers. They serve as the parameters for generic types as well + as for generic function definitions. See class Generic for more + information on generic types. Generic functions work as follows: + + def repeat(x: T, n: int) -> List[T]: + '''Return a list containing n references to x.''' + return [x]*n + + def longest(x: A, y: A) -> A: + '''Return the longest of two strings.''' + return x if len(x) >= len(y) else y + + The latter example's signature is essentially the overloading + of (str, str) -> str and (bytes, bytes) -> bytes. Also note + that if the arguments are instances of some subclass of str, + the return type is still plain str. + + At runtime, isinstance(x, T) and issubclass(C, T) will raise TypeError. + + Type variables defined with covariant=True or contravariant=True + can be used do declare covariant or contravariant generic types. + See PEP 484 for more details. By default generic types are invariant + in all type variables. + + Type variables can be introspected. e.g.: + + T.__name__ == 'T' + T.__constraints__ == () + T.__covariant__ == False + T.__contravariant__ = False + A.__constraints__ == (str, bytes) + """ + + __slots__ = ('__name__', '__bound__', '__constraints__', + '__covariant__', '__contravariant__') + + def __init__(self, name, *constraints, bound=None, + covariant=False, contravariant=False): + super().__init__(name, *constraints, bound=bound, + covariant=covariant, contravariant=contravariant) + self.__name__ = name + if covariant and contravariant: + raise ValueError("Bivariant types are not supported.") + self.__covariant__ = bool(covariant) + self.__contravariant__ = bool(contravariant) + if constraints and bound is not None: + raise TypeError("Constraints cannot be combined with bound=...") + if constraints and len(constraints) == 1: + raise TypeError("A single constraint is not allowed") + msg = "TypeVar(name, constraint, ...): constraints must be types." + self.__constraints__ = tuple(_type_check(t, msg) for t in constraints) + if bound: + self.__bound__ = _type_check(bound, "Bound must be a type.") + else: + self.__bound__ = None + + def _get_type_vars(self, tvars): + if self not in tvars: + tvars.append(self) + + def __repr__(self): + if self.__covariant__: + prefix = '+' + elif self.__contravariant__: + prefix = '-' + else: + prefix = '~' + return prefix + self.__name__ + + def __instancecheck__(self, instance): + raise TypeError("Type variables cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + raise TypeError("Type variables cannot be used with issubclass().") + + +# Some unconstrained type variables. These are used by the container types. +# (These are not for export.) +T = TypeVar('T') # Any type. +KT = TypeVar('KT') # Key type. +VT = TypeVar('VT') # Value type. +T_co = TypeVar('T_co', covariant=True) # Any type covariant containers. +V_co = TypeVar('V_co', covariant=True) # Any type covariant containers. +VT_co = TypeVar('VT_co', covariant=True) # Value type covariant containers. 
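+# Illustrative sketch of TypeVar in a generic signature, assuming the
+# Sequence alias defined later in this module; the function `first` is
+# hypothetical.  The introspection attributes match the TypeVar docstring:
+#
+#   >>> E = TypeVar('E')
+#   >>> def first(seq: Sequence[E]) -> E:
+#   ...     return seq[0]
+#   >>> E.__name__, E.__constraints__, E.__covariant__
+#   ('E', (), False)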
+T_contra = TypeVar('T_contra', contravariant=True) # Ditto contravariant. + +# A useful type variable with constraints. This represents string types. +# (This one *is* for export!) +AnyStr = TypeVar('AnyStr', bytes, str) + + +def _replace_arg(arg, tvars, args): + """An internal helper function: replace arg if it is a type variable + found in tvars with corresponding substitution from args or + with corresponding substitution sub-tree if arg is a generic type. + """ + + if tvars is None: + tvars = [] + if hasattr(arg, '_subs_tree'): + return arg._subs_tree(tvars, args) + if isinstance(arg, TypeVar): + for i, tvar in enumerate(tvars): + if arg == tvar: + return args[i] + return arg + + +def _subs_tree(cls, tvars=None, args=None): + """An internal helper function: calculate substitution tree + for generic cls after replacing its type parameters with + substitutions in tvars -> args (if any). + Repeat the same following __origin__'s. + + Return a list of arguments with all possible substitutions + performed. Arguments that are generic classes themselves are represented + as tuples (so that no new classes are created by this function). + For example: _subs_tree(List[Tuple[int, T]][str]) == [(Tuple, int, str)] + """ + + if cls.__origin__ is None: + return cls + # Make of chain of origins (i.e. cls -> cls.__origin__) + current = cls.__origin__ + orig_chain = [] + while current.__origin__ is not None: + orig_chain.append(current) + current = current.__origin__ + # Replace type variables in __args__ if asked ... + tree_args = [] + for arg in cls.__args__: + tree_args.append(_replace_arg(arg, tvars, args)) + # ... then continue replacing down the origin chain. + for ocls in orig_chain: + new_tree_args = [] + for i, arg in enumerate(ocls.__args__): + new_tree_args.append(_replace_arg(arg, ocls.__parameters__, tree_args)) + tree_args = new_tree_args + return tree_args + + +def _remove_dups_flatten(parameters): + """An internal helper for Union creation and substitution: flatten Union's + among parameters, then remove duplicates and strict subclasses. + """ + + # Flatten out Union[Union[...], ...]. + params = [] + for p in parameters: + if isinstance(p, _Union) and p.__origin__ is Union: + params.extend(p.__args__) + elif isinstance(p, tuple) and len(p) > 0 and p[0] is Union: + params.extend(p[1:]) + else: + params.append(p) + # Weed out strict duplicates, preserving the first of each occurrence. + all_params = set(params) + if len(all_params) < len(params): + new_params = [] + for t in params: + if t in all_params: + new_params.append(t) + all_params.remove(t) + params = new_params + assert not all_params, all_params + # Weed out subclasses. + # E.g. Union[int, Employee, Manager] == Union[int, Employee]. + # If object is present it will be sole survivor among proper classes. + # Never discard type variables. + # (In particular, Union[str, AnyStr] != AnyStr.) + all_params = set(params) + for t1 in params: + if not isinstance(t1, type): + continue + if any(isinstance(t2, type) and issubclass(t1, t2) + for t2 in all_params - {t1} + if not (isinstance(t2, GenericMeta) and + t2.__origin__ is not None)): + all_params.remove(t1) + return tuple(t for t in params if t in all_params) + + +def _check_generic(cls, parameters): + # Check correct count for parameters of a generic cls (internal helper). 
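+    # Illustrative sketch of the flattening and deduplication rules that
+    # _remove_dups_flatten enforces for Union (see _Union below):
+    #
+    #   >>> Union[Union[int, str], float] == Union[int, str, float]
+    #   True
+    #   >>> Union[int, str, int] == Union[int, str]
+    #   True
+    #   >>> Union[int] == int    # a one-type Union collapses
+    #   True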
+ if not cls.__parameters__: + raise TypeError("%s is not a generic class" % repr(cls)) + alen = len(parameters) + elen = len(cls.__parameters__) + if alen != elen: + raise TypeError("Too %s parameters for %s; actual %s, expected %s" % + ("many" if alen > elen else "few", repr(cls), alen, elen)) + + +_cleanups = [] + + +def _tp_cache(func): + """Internal wrapper caching __getitem__ of generic types with a fallback to + original function for non-hashable arguments. + """ + + cached = functools.lru_cache()(func) + _cleanups.append(cached.cache_clear) + @functools.wraps(func) + def inner(*args, **kwds): + try: + return cached(*args, **kwds) + except TypeError: + pass # All real errors (not unhashable args) are raised below. + return func(*args, **kwds) + return inner + + +class _Union(_FinalTypingBase, _root=True): + """Union type; Union[X, Y] means either X or Y. + + To define a union, use e.g. Union[int, str]. Details: + + - The arguments must be types and there must be at least one. + + - None as an argument is a special case and is replaced by + type(None). + + - Unions of unions are flattened, e.g.:: + + Union[Union[int, str], float] == Union[int, str, float] + + - Unions of a single argument vanish, e.g.:: + + Union[int] == int # The constructor actually returns int + + - Redundant arguments are skipped, e.g.:: + + Union[int, str, int] == Union[int, str] + + - When comparing unions, the argument order is ignored, e.g.:: + + Union[int, str] == Union[str, int] + + - When two arguments have a subclass relationship, the least + derived argument is kept, e.g.:: + + class Employee: pass + class Manager(Employee): pass + Union[int, Employee, Manager] == Union[int, Employee] + Union[Manager, int, Employee] == Union[int, Employee] + Union[Employee, Manager] == Employee + + - Similar for object:: + + Union[int, object] == object + + - You cannot subclass or instantiate a union. + + - You can use Optional[X] as a shorthand for Union[X, None]. + """ + + __slots__ = ('__parameters__', '__args__', '__origin__', '__tree_hash__') + + def __new__(cls, parameters=None, origin=None, *args, _root=False): + self = super().__new__(cls, parameters, origin, *args, _root=_root) + if origin is None: + self.__parameters__ = None + self.__args__ = None + self.__origin__ = None + self.__tree_hash__ = hash(frozenset(('Union',))) + return self + if not isinstance(parameters, tuple): + raise TypeError("Expected parameters=") + if origin is Union: + parameters = _remove_dups_flatten(parameters) + # It's not a union if there's only one type left. + if len(parameters) == 1: + return parameters[0] + self.__parameters__ = _type_vars(parameters) + self.__args__ = parameters + self.__origin__ = origin + # Pre-calculate the __hash__ on instantiation. + # This improves speed for complex substitutions. + subs_tree = self._subs_tree() + if isinstance(subs_tree, tuple): + self.__tree_hash__ = hash(frozenset(subs_tree)) + else: + self.__tree_hash__ = hash(subs_tree) + return self + + def _eval_type(self, globalns, localns): + if self.__args__ is None: + return self + ev_args = tuple(_eval_type(t, globalns, localns) for t in self.__args__) + ev_origin = _eval_type(self.__origin__, globalns, localns) + if ev_args == self.__args__ and ev_origin == self.__origin__: + # Everything is already evaluated. 
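+            # Illustrative sketch of the subclass rule from the _Union
+            # docstring: the least derived argument survives (the classes
+            # `Employee` and `Manager` are hypothetical):
+            #
+            #   >>> class Employee: pass
+            #   >>> class Manager(Employee): pass
+            #   >>> Union[int, Employee, Manager] == Union[int, Employee]
+            #   True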
+ return self + return self.__class__(ev_args, ev_origin, _root=True) + + def _get_type_vars(self, tvars): + if self.__origin__ and self.__parameters__: + _get_type_vars(self.__parameters__, tvars) + + def __repr__(self): + if self.__origin__ is None: + return super().__repr__() + tree = self._subs_tree() + if not isinstance(tree, tuple): + return repr(tree) + return tree[0]._tree_repr(tree) + + def _tree_repr(self, tree): + arg_list = [] + for arg in tree[1:]: + if not isinstance(arg, tuple): + arg_list.append(_type_repr(arg)) + else: + arg_list.append(arg[0]._tree_repr(arg)) + return super().__repr__() + '[%s]' % ', '.join(arg_list) + + @_tp_cache + def __getitem__(self, parameters): + if parameters == (): + raise TypeError("Cannot take a Union of no types.") + if not isinstance(parameters, tuple): + parameters = (parameters,) + if self.__origin__ is None: + msg = "Union[arg, ...]: each arg must be a type." + else: + msg = "Parameters to generic types must be types." + parameters = tuple(_type_check(p, msg) for p in parameters) + if self is not Union: + _check_generic(self, parameters) + return self.__class__(parameters, origin=self, _root=True) + + def _subs_tree(self, tvars=None, args=None): + if self is Union: + return Union # Nothing to substitute + tree_args = _subs_tree(self, tvars, args) + tree_args = _remove_dups_flatten(tree_args) + if len(tree_args) == 1: + return tree_args[0] # Union of a single type is that type + return (Union,) + tree_args + + def __eq__(self, other): + if not isinstance(other, _Union): + return self._subs_tree() == other + return self.__tree_hash__ == other.__tree_hash__ + + def __hash__(self): + return self.__tree_hash__ + + def __instancecheck__(self, obj): + raise TypeError("Unions cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + raise TypeError("Unions cannot be used with issubclass().") + + +Union = _Union(_root=True) + + +class _Optional(_FinalTypingBase, _root=True): + """Optional type. + + Optional[X] is equivalent to Union[X, None]. + """ + + __slots__ = () + + @_tp_cache + def __getitem__(self, arg): + arg = _type_check(arg, "Optional[t] requires a single type.") + return Union[arg, type(None)] + + +Optional = _Optional(_root=True) + + +def _gorg(a): + """Return the farthest origin of a generic class (internal helper).""" + assert isinstance(a, GenericMeta) + while a.__origin__ is not None: + a = a.__origin__ + return a + + +def _geqv(a, b): + """Return whether two generic classes are equivalent (internal helper). + + The intention is to consider generic class X and any of its + parameterized forms (X[T], X[int], etc.) as equivalent. + + However, X is not equivalent to a subclass of X. + + The relation is reflexive, symmetric and transitive. + """ + assert isinstance(a, GenericMeta) and isinstance(b, GenericMeta) + # Reduce each to its origin. + return _gorg(a) is _gorg(b) + + +def _next_in_mro(cls): + """Helper for Generic.__new__. + + Returns the class after the last occurrence of Generic or + Generic[...] in cls.__mro__. + """ + next_in_mro = object + # Look for the last occurrence of Generic or Generic[...]. 
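+    # Illustrative sketch of the helpers above: Optional[X] is shorthand
+    # for Union[X, None], and _gorg follows __origin__ links back to the
+    # unsubscripted class (List is the alias defined later in this module):
+    #
+    #   >>> Optional[str] == Union[str, None]
+    #   True
+    #   >>> _gorg(List[int]) is List
+    #   True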
+ for i, c in enumerate(cls.__mro__[:-1]): + if isinstance(c, GenericMeta) and _gorg(c) is Generic: + next_in_mro = cls.__mro__[i+1] + return next_in_mro + + +def _valid_for_check(cls): + """An internal helper to prohibit isinstance([1], List[str]) etc.""" + if cls is Generic: + raise TypeError("Class %r cannot be used with class " + "or instance checks" % cls) + if (cls.__origin__ is not None and + sys._getframe(3).f_globals['__name__'] not in ['abc', 'functools']): + raise TypeError("Parameterized generics cannot be used with class " + "or instance checks") + + +def _make_subclasshook(cls): + """Construct a __subclasshook__ callable that incorporates + the associated __extra__ class in subclass checks performed + against cls. + """ + if isinstance(cls.__extra__, abc.ABCMeta): + # The logic mirrors that of ABCMeta.__subclasscheck__. + # Registered classes need not be checked here because + # cls and its extra share the same _abc_registry. + def __extrahook__(subclass): + _valid_for_check(cls) + res = cls.__extra__.__subclasshook__(subclass) + if res is not NotImplemented: + return res + if cls.__extra__ in subclass.__mro__: + return True + for scls in cls.__extra__.__subclasses__(): + if isinstance(scls, GenericMeta): + continue + if issubclass(subclass, scls): + return True + return NotImplemented + else: + # For non-ABC extras we'll just call issubclass(). + def __extrahook__(subclass): + _valid_for_check(cls) + if cls.__extra__ and issubclass(subclass, cls.__extra__): + return True + return NotImplemented + return __extrahook__ + + +def _no_slots_copy(dct): + """Internal helper: copy class __dict__ and clean slots class variables. + (They will be re-created if necessary by normal class machinery.) + """ + dict_copy = dict(dct) + if '__slots__' in dict_copy: + for slot in dict_copy['__slots__']: + dict_copy.pop(slot, None) + return dict_copy + + +class GenericMeta(TypingMeta, abc.ABCMeta): + """Metaclass for generic types.""" + + def __new__(cls, name, bases, namespace, + tvars=None, args=None, origin=None, extra=None, orig_bases=None): + if tvars is not None: + # Called from __getitem__() below. + assert origin is not None + assert all(isinstance(t, TypeVar) for t in tvars), tvars + else: + # Called from class statement. + assert tvars is None, tvars + assert args is None, args + assert origin is None, origin + + # Get the full set of tvars from the bases. + tvars = _type_vars(bases) + # Look for Generic[T1, ..., Tn]. + # If found, tvars must be a subset of it. + # If not found, tvars is it. + # Also check for and reject plain Generic, + # and reject multiple Generic[...]. + gvars = None + for base in bases: + if base is Generic: + raise TypeError("Cannot inherit from plain Generic") + if (isinstance(base, GenericMeta) and + base.__origin__ is Generic): + if gvars is not None: + raise TypeError( + "Cannot inherit from Generic[...] 
multiple types.") + gvars = base.__parameters__ + if gvars is None: + gvars = tvars + else: + tvarset = set(tvars) + gvarset = set(gvars) + if not tvarset <= gvarset: + raise TypeError( + "Some type variables (%s) " + "are not listed in Generic[%s]" % + (", ".join(str(t) for t in tvars if t not in gvarset), + ", ".join(str(g) for g in gvars))) + tvars = gvars + + initial_bases = bases + if extra is not None and type(extra) is abc.ABCMeta and extra not in bases: + bases = (extra,) + bases + bases = tuple(_gorg(b) if isinstance(b, GenericMeta) else b for b in bases) + + # remove bare Generic from bases if there are other generic bases + if any(isinstance(b, GenericMeta) and b is not Generic for b in bases): + bases = tuple(b for b in bases if b is not Generic) + self = super().__new__(cls, name, bases, namespace, _root=True) + + self.__parameters__ = tvars + # Be prepared that GenericMeta will be subclassed by TupleMeta + # and CallableMeta, those two allow ..., (), or [] in __args___. + self.__args__ = tuple(... if a is _TypingEllipsis else + () if a is _TypingEmpty else + a for a in args) if args else None + self.__origin__ = origin + self.__extra__ = extra + # Speed hack (https://github.com/python/typing/issues/196). + self.__next_in_mro__ = _next_in_mro(self) + # Preserve base classes on subclassing (__bases__ are type erased now). + if orig_bases is None: + self.__orig_bases__ = initial_bases + + # This allows unparameterized generic collections to be used + # with issubclass() and isinstance() in the same way as their + # collections.abc counterparts (e.g., isinstance([], Iterable)). + if ('__subclasshook__' not in namespace and extra # allow overriding + or hasattr(self.__subclasshook__, '__name__') and + self.__subclasshook__.__name__ == '__extrahook__'): + self.__subclasshook__ = _make_subclasshook(self) + if isinstance(extra, abc.ABCMeta): + self._abc_registry = extra._abc_registry + + if origin and hasattr(origin, '__qualname__'): # Fix for Python 3.2. 
+ self.__qualname__ = origin.__qualname__ + self.__tree_hash__ = hash(self._subs_tree()) if origin else hash((self.__name__,)) + return self + + def _get_type_vars(self, tvars): + if self.__origin__ and self.__parameters__: + _get_type_vars(self.__parameters__, tvars) + + def _eval_type(self, globalns, localns): + ev_origin = (self.__origin__._eval_type(globalns, localns) + if self.__origin__ else None) + ev_args = tuple(_eval_type(a, globalns, localns) for a + in self.__args__) if self.__args__ else None + if ev_origin == self.__origin__ and ev_args == self.__args__: + return self + return self.__class__(self.__name__, + self.__bases__, + _no_slots_copy(self.__dict__), + tvars=_type_vars(ev_args) if ev_args else None, + args=ev_args, + origin=ev_origin, + extra=self.__extra__, + orig_bases=self.__orig_bases__) + + def __repr__(self): + if self.__origin__ is None: + return super().__repr__() + return self._tree_repr(self._subs_tree()) + + def _tree_repr(self, tree): + arg_list = [] + for arg in tree[1:]: + if arg == (): + arg_list.append('()') + elif not isinstance(arg, tuple): + arg_list.append(_type_repr(arg)) + else: + arg_list.append(arg[0]._tree_repr(arg)) + return super().__repr__() + '[%s]' % ', '.join(arg_list) + + def _subs_tree(self, tvars=None, args=None): + if self.__origin__ is None: + return self + tree_args = _subs_tree(self, tvars, args) + return (_gorg(self),) + tuple(tree_args) + + def __eq__(self, other): + if not isinstance(other, GenericMeta): + return NotImplemented + if self.__origin__ is None or other.__origin__ is None: + return self is other + return self.__tree_hash__ == other.__tree_hash__ + + def __hash__(self): + return self.__tree_hash__ + + @_tp_cache + def __getitem__(self, params): + if not isinstance(params, tuple): + params = (params,) + if not params and not _gorg(self) is Tuple: + raise TypeError( + "Parameter list to %s[...] cannot be empty" % _qualname(self)) + msg = "Parameters to generic types must be types." + params = tuple(_type_check(p, msg) for p in params) + if self is Generic: + # Generic can only be subscripted with unique type variables. + if not all(isinstance(p, TypeVar) for p in params): + raise TypeError( + "Parameters to Generic[...] must all be type variables") + if len(set(params)) != len(params): + raise TypeError( + "Parameters to Generic[...] must all be unique") + tvars = params + args = params + elif self in (Tuple, Callable): + tvars = _type_vars(params) + args = params + elif self is _Protocol: + # _Protocol is internal, don't check anything. + tvars = params + args = params + elif self.__origin__ in (Generic, _Protocol): + # Can't subscript Generic[...] or _Protocol[...]. + raise TypeError("Cannot subscript already-subscripted %s" % + repr(self)) + else: + # Subscripting a regular Generic subclass. + _check_generic(self, params) + tvars = _type_vars(params) + args = params + return self.__class__(self.__name__, + self.__bases__, + _no_slots_copy(self.__dict__), + tvars=tvars, + args=args, + origin=self, + extra=self.__extra__, + orig_bases=self.__orig_bases__) + + def __instancecheck__(self, instance): + # Since we extend ABC.__subclasscheck__ and + # ABC.__instancecheck__ inlines the cache checking done by the + # latter, we must extend __instancecheck__ too. For simplicity + # we just skip the cache check -- instance checks for generic + # classes are supposed to be rare anyways. 
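+        # Illustrative sketch: an unparameterized generic behaves like its
+        # collections.abc counterpart under isinstance(), while the
+        # parameterized form refuses the check via _valid_for_check():
+        #
+        #   >>> isinstance([], List)
+        #   True
+        #   >>> isinstance([], List[int])
+        #   Traceback (most recent call last):
+        #     ...
+        #   TypeError: Parameterized generics cannot be used with class or instance checks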
+ return issubclass(instance.__class__, self) + + def __copy__(self): + return self.__class__(self.__name__, self.__bases__, + _no_slots_copy(self.__dict__), + self.__parameters__, self.__args__, self.__origin__, + self.__extra__, self.__orig_bases__) + + +# Prevent checks for Generic to crash when defining Generic. +Generic = None + + +def _generic_new(base_cls, cls, *args, **kwds): + # Assure type is erased on instantiation, + # but attempt to store it in __orig_class__ + if cls.__origin__ is None: + return base_cls.__new__(cls) + else: + origin = _gorg(cls) + obj = base_cls.__new__(origin) + try: + obj.__orig_class__ = cls + except AttributeError: + pass + obj.__init__(*args, **kwds) + return obj + + +class Generic(metaclass=GenericMeta): + """Abstract base class for generic types. + + A generic type is typically declared by inheriting from + this class parameterized with one or more type variables. + For example, a generic mapping type might be defined as:: + + class Mapping(Generic[KT, VT]): + def __getitem__(self, key: KT) -> VT: + ... + # Etc. + + This class can then be used as follows:: + + def lookup_name(mapping: Mapping[KT, VT], key: KT, default: VT) -> VT: + try: + return mapping[key] + except KeyError: + return default + """ + + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, Generic): + raise TypeError("Type Generic cannot be instantiated; " + "it can be used only as a base class") + return _generic_new(cls.__next_in_mro__, cls, *args, **kwds) + + +class _TypingEmpty: + """Internal placeholder for () or []. Used by TupleMeta and CallableMeta + to allow empty list/tuple in specific places, without allowing them + to sneak in where prohibited. + """ + + +class _TypingEllipsis: + """Internal placeholder for ... (ellipsis).""" + + +class TupleMeta(GenericMeta): + """Metaclass for Tuple (internal).""" + + @_tp_cache + def __getitem__(self, parameters): + if self.__origin__ is not None or not _geqv(self, Tuple): + # Normal generic rules apply if this is not the first subscription + # or a subscription of a subclass. + return super().__getitem__(parameters) + if parameters == (): + return super().__getitem__((_TypingEmpty,)) + if not isinstance(parameters, tuple): + parameters = (parameters,) + if len(parameters) == 2 and parameters[1] is ...: + msg = "Tuple[t, ...]: t must be a type." + p = _type_check(parameters[0], msg) + return super().__getitem__((p, _TypingEllipsis)) + msg = "Tuple[t0, t1, ...]: each t must be a type." + parameters = tuple(_type_check(p, msg) for p in parameters) + return super().__getitem__(parameters) + + def __instancecheck__(self, obj): + if self.__args__ == None: + return isinstance(obj, tuple) + raise TypeError("Parameterized Tuple cannot be used " + "with isinstance().") + + def __subclasscheck__(self, cls): + if self.__args__ == None: + return issubclass(cls, tuple) + raise TypeError("Parameterized Tuple cannot be used " + "with issubclass().") + + +class Tuple(tuple, extra=tuple, metaclass=TupleMeta): + """Tuple type; Tuple[X, Y] is the cross-product type of X and Y. + + Example: Tuple[T1, T2] is a tuple of two elements corresponding + to type variables T1 and T2. Tuple[int, float, str] is a tuple + of an int, a float and a string. + + To specify a variable-length tuple of homogeneous type, use Tuple[T, ...]. 
+ """ + + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, Tuple): + raise TypeError("Type Tuple cannot be instantiated; " + "use tuple() instead") + return _generic_new(tuple, cls, *args, **kwds) + + +class CallableMeta(GenericMeta): + """Metaclass for Callable (internal).""" + + def __repr__(self): + if self.__origin__ is None: + return super().__repr__() + return self._tree_repr(self._subs_tree()) + + def _tree_repr(self, tree): + if _gorg(self) is not Callable: + return super()._tree_repr(tree) + # For actual Callable (not its subclass) we override + # super()._tree_repr() for nice formatting. + arg_list = [] + for arg in tree[1:]: + if not isinstance(arg, tuple): + arg_list.append(_type_repr(arg)) + else: + arg_list.append(arg[0]._tree_repr(arg)) + if arg_list[0] == '...': + return repr(tree[0]) + '[..., %s]' % arg_list[1] + return (repr(tree[0]) + + '[[%s], %s]' % (', '.join(arg_list[:-1]), arg_list[-1])) + + def __getitem__(self, parameters): + """A thin wrapper around __getitem_inner__ to provide the latter + with hashable arguments to improve speed. + """ + + if self.__origin__ is not None or not _geqv(self, Callable): + return super().__getitem__(parameters) + if not isinstance(parameters, tuple) or len(parameters) != 2: + raise TypeError("Callable must be used as " + "Callable[[arg, ...], result].") + args, result = parameters + if args is Ellipsis: + parameters = (Ellipsis, result) + else: + if not isinstance(args, list): + raise TypeError("Callable[args, result]: args must be a list." + " Got %.100r." % (args,)) + parameters = (tuple(args), result) + return self.__getitem_inner__(parameters) + + @_tp_cache + def __getitem_inner__(self, parameters): + args, result = parameters + msg = "Callable[args, result]: result must be a type." + result = _type_check(result, msg) + if args is Ellipsis: + return super().__getitem__((_TypingEllipsis, result)) + msg = "Callable[[arg, ...], result]: each arg must be a type." + args = tuple(_type_check(arg, msg) for arg in args) + parameters = args + (result,) + return super().__getitem__(parameters) + + +class Callable(extra=collections_abc.Callable, metaclass = CallableMeta): + """Callable type; Callable[[int], str] is a function of (int) -> str. + + The subscription syntax must always be used with exactly two + values: the argument list and the return type. The argument list + must be a list of types or ellipsis; the return type must be a single type. + + There is no syntax to indicate optional or keyword arguments, + such function types are rarely used as callback types. + """ + + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, Callable): + raise TypeError("Type Callable cannot be instantiated; " + "use a non-abstract subclass instead") + return _generic_new(cls.__next_in_mro__, cls, *args, **kwds) + + +class _ClassVar(_FinalTypingBase, _root=True): + """Special type construct to mark class variables. + + An annotation wrapped in ClassVar indicates that a given + attribute is intended to be used as a class variable and + should not be set on instances of that class. Usage:: + + class Starship: + stats: ClassVar[Dict[str, int]] = {} # class variable + damage: int = 10 # instance variable + + ClassVar accepts only types and cannot be further subscribed. + + Note that ClassVar is not a class itself, and should not + be used with isinstance() or issubclass(). 
+ """ + + __slots__ = ('__type__',) + + def __init__(self, tp=None, **kwds): + self.__type__ = tp + + def __getitem__(self, item): + cls = type(self) + if self.__type__ is None: + return cls(_type_check(item, + '{} accepts only single type.'.format(cls.__name__[1:])), + _root=True) + raise TypeError('{} cannot be further subscripted' + .format(cls.__name__[1:])) + + def _eval_type(self, globalns, localns): + new_tp = _eval_type(self.__type__, globalns, localns) + if new_tp == self.__type__: + return self + return type(self)(new_tp, _root=True) + + def __repr__(self): + r = super().__repr__() + if self.__type__ is not None: + r += '[{}]'.format(_type_repr(self.__type__)) + return r + + def __hash__(self): + return hash((type(self).__name__, self.__type__)) + + def __eq__(self, other): + if not isinstance(other, _ClassVar): + return NotImplemented + if self.__type__ is not None: + return self.__type__ == other.__type__ + return self is other + + +ClassVar = _ClassVar(_root=True) + + +def cast(typ, val): + """Cast a value to a type. + + This returns the value unchanged. To the type checker this + signals that the return value has the designated type, but at + runtime we intentionally don't check anything (we want this + to be as fast as possible). + """ + return val + + +def _get_defaults(func): + """Internal helper to extract the default arguments, by name.""" + try: + code = func.__code__ + except AttributeError: + # Some built-in functions don't have __code__, __defaults__, etc. + return {} + pos_count = code.co_argcount + arg_names = code.co_varnames + arg_names = arg_names[:pos_count] + defaults = func.__defaults__ or () + kwdefaults = func.__kwdefaults__ + res = dict(kwdefaults) if kwdefaults else {} + pos_offset = pos_count - len(defaults) + for name, value in zip(arg_names[pos_offset:], defaults): + assert name not in res + res[name] = value + return res + + +def get_type_hints(obj, globalns=None, localns=None): + """Return type hints for an object. + + This is often the same as obj.__annotations__, but it handles + forward references encoded as string literals, and if necessary + adds Optional[t] if a default value equal to None is set. + + The argument may be a module, class, method, or function. The annotations + are returned as a dictionary. For classes, annotations include also + inherited members. + + TypeError is raised if the argument is not of a type that can contain + annotations, and an empty dictionary is returned if no annotations are + present. + + BEWARE -- the behavior of globalns and localns is counterintuitive + (unless you are familiar with how eval() and exec() work). The + search order is locals first, then globals. + + - If no dict arguments are passed, an attempt is made to use the + globals from obj, and these are also used as the locals. If the + object does not appear to have globals, an exception is raised. + + - If one dict argument is passed, it is used for both globals and + locals. + + - If two dict arguments are passed, they specify globals and + locals, respectively. + """ + + if getattr(obj, '__no_type_check__', None): + return {} + if globalns is None: + globalns = getattr(obj, '__globals__', {}) + if localns is None: + localns = globalns + elif localns is None: + localns = globalns + # Classes require a special treatment. 
+ if isinstance(obj, type): + hints = {} + for base in reversed(obj.__mro__): + ann = base.__dict__.get('__annotations__', {}) + for name, value in ann.items(): + if value is None: + value = type(None) + if isinstance(value, str): + value = _ForwardRef(value) + value = _eval_type(value, globalns, localns) + hints[name] = value + return hints + hints = getattr(obj, '__annotations__', None) + if hints is None: + # Return empty annotations for something that _could_ have them. + if (isinstance(obj, types.FunctionType) or + isinstance(obj, types.BuiltinFunctionType) or + isinstance(obj, types.MethodType) or + isinstance(obj, types.ModuleType)): + return {} + else: + raise TypeError('{!r} is not a module, class, method, ' + 'or function.'.format(obj)) + defaults = _get_defaults(obj) + hints = dict(hints) + for name, value in hints.items(): + if value is None: + value = type(None) + if isinstance(value, str): + value = _ForwardRef(value) + value = _eval_type(value, globalns, localns) + if name in defaults and defaults[name] is None: + value = Optional[value] + hints[name] = value + return hints + + +def no_type_check(arg): + """Decorator to indicate that annotations are not type hints. + + The argument must be a class or function; if it is a class, it + applies recursively to all methods and classes defined in that class + (but not to methods defined in its superclasses or subclasses). + + This mutates the function(s) or class(es) in place. + """ + if isinstance(arg, type): + arg_attrs = arg.__dict__.copy() + for attr, val in arg.__dict__.items(): + if val in arg.__bases__: + arg_attrs.pop(attr) + for obj in arg_attrs.values(): + if isinstance(obj, types.FunctionType): + obj.__no_type_check__ = True + if isinstance(obj, type): + no_type_check(obj) + try: + arg.__no_type_check__ = True + except TypeError: # built-in classes + pass + return arg + + +def no_type_check_decorator(decorator): + """Decorator to give another decorator the @no_type_check effect. + + This wraps the decorator with something that wraps the decorated + function in @no_type_check. + """ + + @functools.wraps(decorator) + def wrapped_decorator(*args, **kwds): + func = decorator(*args, **kwds) + func = no_type_check(func) + return func + + return wrapped_decorator + + +def _overload_dummy(*args, **kwds): + """Helper for @overload to raise when called.""" + raise NotImplementedError( + "You should not call an overloaded function. " + "A series of @overload-decorated functions " + "outside a stub module should always be followed " + "by an implementation that is not @overload-ed.") + + +def overload(func): + """Decorator for overloaded functions/methods. + + In a stub file, place two or more stub definitions for the same + function in a row, each decorated with @overload. For example: + + @overload + def utf8(value: None) -> None: ... + @overload + def utf8(value: bytes) -> bytes: ... + @overload + def utf8(value: str) -> bytes: ... + + In a non-stub file (i.e. a regular .py file), do the same but + follow it with an implementation. The implementation should *not* + be decorated with @overload. For example: + + @overload + def utf8(value: None) -> None: ... + @overload + def utf8(value: bytes) -> bytes: ... + @overload + def utf8(value: str) -> bytes: ... + def utf8(value): + # implementation goes here + """ + return _overload_dummy + + +class _ProtocolMeta(GenericMeta): + """Internal metaclass for _Protocol. + + This exists so _Protocol classes can be generic without deriving + from Generic. 
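+ + For example, issubclass(int, SupportsInt) is True because int + defines __int__, even though int does not inherit from SupportsInt.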
+ """ + + def __instancecheck__(self, obj): + if _Protocol not in self.__bases__: + return super().__instancecheck__(obj) + raise TypeError("Protocols cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + if not self._is_protocol: + # No structural checks since this isn't a protocol. + return NotImplemented + + if self is _Protocol: + # Every class is a subclass of the empty protocol. + return True + + # Find all attributes defined in the protocol. + attrs = self._get_protocol_attrs() + + for attr in attrs: + if not any(attr in d.__dict__ for d in cls.__mro__): + return False + return True + + def _get_protocol_attrs(self): + # Get all Protocol base classes. + protocol_bases = [] + for c in self.__mro__: + if getattr(c, '_is_protocol', False) and c.__name__ != '_Protocol': + protocol_bases.append(c) + + # Get attributes included in protocol. + attrs = set() + for base in protocol_bases: + for attr in base.__dict__.keys(): + # Include attributes not defined in any non-protocol bases. + for c in self.__mro__: + if (c is not base and attr in c.__dict__ and + not getattr(c, '_is_protocol', False)): + break + else: + if (not attr.startswith('_abc_') and + attr != '__abstractmethods__' and + attr != '__annotations__' and + attr != '__weakref__' and + attr != '_is_protocol' and + attr != '__dict__' and + attr != '__args__' and + attr != '__slots__' and + attr != '_get_protocol_attrs' and + attr != '__next_in_mro__' and + attr != '__parameters__' and + attr != '__origin__' and + attr != '__orig_bases__' and + attr != '__extra__' and + attr != '__tree_hash__' and + attr != '__module__'): + attrs.add(attr) + + return attrs + + +class _Protocol(metaclass=_ProtocolMeta): + """Internal base class for protocol classes. + + This implements a simple-minded structural issubclass check + (similar but more general than the one-offs in collections.abc + such as Hashable). + """ + + __slots__ = () + + _is_protocol = True + + +# Various ABCs mimicking those in collections.abc. +# A few are simply re-exported for completeness. + +Hashable = collections_abc.Hashable # Not generic. 
+ + +if hasattr(collections_abc, 'Awaitable'): + class Awaitable(Generic[T_co], extra=collections_abc.Awaitable): + __slots__ = () + + __all__.append('Awaitable') + + +if hasattr(collections_abc, 'Coroutine'): + class Coroutine(Awaitable[V_co], Generic[T_co, T_contra, V_co], + extra=collections_abc.Coroutine): + __slots__ = () + + __all__.append('Coroutine') + + +if hasattr(collections_abc, 'AsyncIterable'): + + class AsyncIterable(Generic[T_co], extra=collections_abc.AsyncIterable): + __slots__ = () + + class AsyncIterator(AsyncIterable[T_co], + extra=collections_abc.AsyncIterator): + __slots__ = () + + __all__.append('AsyncIterable') + __all__.append('AsyncIterator') + + +class Iterable(Generic[T_co], extra=collections_abc.Iterable): + __slots__ = () + + +class Iterator(Iterable[T_co], extra=collections_abc.Iterator): + __slots__ = () + + +class SupportsInt(_Protocol): + __slots__ = () + + @abstractmethod + def __int__(self) -> int: + pass + + +class SupportsFloat(_Protocol): + __slots__ = () + + @abstractmethod + def __float__(self) -> float: + pass + + +class SupportsComplex(_Protocol): + __slots__ = () + + @abstractmethod + def __complex__(self) -> complex: + pass + + +class SupportsBytes(_Protocol): + __slots__ = () + + @abstractmethod + def __bytes__(self) -> bytes: + pass + + +class SupportsAbs(_Protocol[T_co]): + __slots__ = () + + @abstractmethod + def __abs__(self) -> T_co: + pass + + +class SupportsRound(_Protocol[T_co]): + __slots__ = () + + @abstractmethod + def __round__(self, ndigits: int = 0) -> T_co: + pass + + +if hasattr(collections_abc, 'Reversible'): + class Reversible(Iterable[T_co], extra=collections_abc.Reversible): + __slots__ = () +else: + class Reversible(_Protocol[T_co]): + __slots__ = () + + @abstractmethod + def __reversed__(self) -> 'Iterator[T_co]': + pass + + +Sized = collections_abc.Sized # Not generic. + + +class Container(Generic[T_co], extra=collections_abc.Container): + __slots__ = () + + +if hasattr(collections_abc, 'Collection'): + class Collection(Sized, Iterable[T_co], Container[T_co], + extra=collections_abc.Collection): + __slots__ = () + + __all__.append('Collection') + + +# Callable was defined earlier. + +if hasattr(collections_abc, 'Collection'): + class AbstractSet(Collection[T_co], + extra=collections_abc.Set): + __slots__ = () +else: + class AbstractSet(Sized, Iterable[T_co], Container[T_co], + extra=collections_abc.Set): + __slots__ = () + + +class MutableSet(AbstractSet[T], extra=collections_abc.MutableSet): + __slots__ = () + + +# NOTE: It is only covariant in the value type. 
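+# (Values only flow out of a read-only mapping, so VT_co can safely be +# covariant; keys appear in argument position in __getitem__, so KT must +# remain invariant.)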
+if hasattr(collections_abc, 'Collection'): + class Mapping(Collection[KT], Generic[KT, VT_co], + extra=collections_abc.Mapping): + __slots__ = () +else: + class Mapping(Sized, Iterable[KT], Container[KT], Generic[KT, VT_co], + extra=collections_abc.Mapping): + __slots__ = () + + +class MutableMapping(Mapping[KT, VT], extra=collections_abc.MutableMapping): + __slots__ = () + +if hasattr(collections_abc, 'Reversible'): + if hasattr(collections_abc, 'Collection'): + class Sequence(Reversible[T_co], Collection[T_co], + extra=collections_abc.Sequence): + __slots__ = () + else: + class Sequence(Sized, Reversible[T_co], Container[T_co], + extra=collections_abc.Sequence): + __slots__ = () +else: + class Sequence(Sized, Iterable[T_co], Container[T_co], + extra=collections_abc.Sequence): + __slots__ = () + + +class MutableSequence(Sequence[T], extra=collections_abc.MutableSequence): + __slots__ = () + + +class ByteString(Sequence[int], extra=collections_abc.ByteString): + __slots__ = () + + +class List(list, MutableSequence[T], extra=list): + + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, List): + raise TypeError("Type List cannot be instantiated; " + "use list() instead") + return _generic_new(list, cls, *args, **kwds) + + +class Set(set, MutableSet[T], extra=set): + + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, Set): + raise TypeError("Type Set cannot be instantiated; " + "use set() instead") + return _generic_new(set, cls, *args, **kwds) + + +class FrozenSet(frozenset, AbstractSet[T_co], extra=frozenset): + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, FrozenSet): + raise TypeError("Type FrozenSet cannot be instantiated; " + "use frozenset() instead") + return _generic_new(frozenset, cls, *args, **kwds) + + +class MappingView(Sized, Iterable[T_co], extra=collections_abc.MappingView): + __slots__ = () + + +class KeysView(MappingView[KT], AbstractSet[KT], + extra=collections_abc.KeysView): + __slots__ = () + + +class ItemsView(MappingView[Tuple[KT, VT_co]], + AbstractSet[Tuple[KT, VT_co]], + Generic[KT, VT_co], + extra=collections_abc.ItemsView): + __slots__ = () + + +class ValuesView(MappingView[VT_co], extra=collections_abc.ValuesView): + __slots__ = () + + +if hasattr(contextlib, 'AbstractContextManager'): + class ContextManager(Generic[T_co], extra=contextlib.AbstractContextManager): + __slots__ = () + __all__.append('ContextManager') + + +class Dict(dict, MutableMapping[KT, VT], extra=dict): + + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, Dict): + raise TypeError("Type Dict cannot be instantiated; " + "use dict() instead") + return _generic_new(dict, cls, *args, **kwds) + +class DefaultDict(collections.defaultdict, MutableMapping[KT, VT], + extra=collections.defaultdict): + + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, DefaultDict): + raise TypeError("Type DefaultDict cannot be instantiated; " + "use collections.defaultdict() instead") + return _generic_new(collections.defaultdict, cls, *args, **kwds) + +# Determine what base class to use for Generator. +if hasattr(collections_abc, 'Generator'): + # Sufficiently recent versions of 3.5 have a Generator ABC. + _G_base = collections_abc.Generator +else: + # Fall back on the exact type. 
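+ # (The concrete type matches only real generator objects; unlike the + # ABC it cannot have virtual subclasses registered.)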
+ _G_base = types.GeneratorType + + +class Generator(Iterator[T_co], Generic[T_co, T_contra, V_co], + extra=_G_base): + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, Generator): + raise TypeError("Type Generator cannot be instantiated; " + "create a subclass instead") + return _generic_new(_G_base, cls, *args, **kwds) + + +# Internal type variable used for Type[]. +CT_co = TypeVar('CT_co', covariant=True, bound=type) + + +# This is not a real generic class. Don't use outside annotations. +class Type(Generic[CT_co], extra=type): + """A special construct usable to annotate class objects. + + For example, suppose we have the following classes:: + + class User: ... # Abstract base for User classes + class BasicUser(User): ... + class ProUser(User): ... + class TeamUser(User): ... + + And a function that takes a class argument that's a subclass of + User and returns an instance of the corresponding class:: + + U = TypeVar('U', bound=User) + def new_user(user_class: Type[U]) -> U: + user = user_class() + # (Here we could write the user object to a database) + return user + + joe = new_user(BasicUser) + + At this point the type checker knows that joe has type BasicUser. + """ + + __slots__ = () + + +def _make_nmtuple(name, types): + msg = "NamedTuple('Name', [(f0, t0), (f1, t1), ...]); each t must be a type" + types = [(n, _type_check(t, msg)) for n, t in types] + nm_tpl = collections.namedtuple(name, [n for n, t in types]) + nm_tpl._field_types = dict(types) + try: + nm_tpl.__module__ = sys._getframe(2).f_globals.get('__name__', '__main__') + except (AttributeError, ValueError): + pass + return nm_tpl + + +_PY36 = sys.version_info[:2] >= (3, 6) + + +class NamedTupleMeta(type): + + def __new__(cls, typename, bases, ns): + if ns.get('_root', False): + return super().__new__(cls, typename, bases, ns) + if not _PY36: + raise TypeError("Class syntax for NamedTuple is only supported" + " in Python 3.6+") + types = ns.get('__annotations__', {}) + return _make_nmtuple(typename, types.items()) + +class NamedTuple(metaclass=NamedTupleMeta): + """Typed version of namedtuple. + + Usage in Python versions >= 3.6:: + + class Employee(NamedTuple): + name: str + id: int + + This is equivalent to:: + + Employee = collections.namedtuple('Employee', ['name', 'id']) + + The resulting class has one extra attribute: _field_types, + giving a dict mapping field names to types. (The field names + are in the _fields attribute, which is part of the namedtuple + API.) Alternative equivalent keyword syntax is also accepted:: + + Employee = NamedTuple('Employee', name=str, id=int) + + In Python versions <= 3.5 use:: + + Employee = NamedTuple('Employee', [('name', str), ('id', int)]) + """ + _root = True + + def __new__(self, typename, fields=None, **kwargs): + if kwargs and not _PY36: + raise TypeError("Keyword syntax for NamedTuple is only supported" + " in Python 3.6+") + if fields is None: + fields = kwargs.items() + elif kwargs: + raise TypeError("Either list of fields or keywords" + " can be provided to NamedTuple, not both") + return _make_nmtuple(typename, fields) + + +def NewType(name, tp): + """NewType creates simple unique types with almost zero + runtime overhead. NewType(name, tp) is considered a subtype of tp + by static type checkers. At runtime, NewType(name, tp) returns + a dummy function that simply returns its argument. Usage:: + + UserId = NewType('UserId', int) + + def name_by_id(user_id: UserId) -> str: + ... 
+ + UserId('user') # Fails type check + + name_by_id(42) # Fails type check + name_by_id(UserId(42)) # OK + + num = UserId(5) + 1 # type: int + """ + + def new_type(x): + return x + + new_type.__name__ = name + new_type.__supertype__ = tp + return new_type + + +# Python-version-specific alias (Python 2: unicode; Python 3: str) +Text = str + + +# Constant that's True when type checking, but False here. +TYPE_CHECKING = False + + +class IO(Generic[AnyStr]): + """Generic base class for TextIO and BinaryIO. + + This is an abstract, generic version of the return of open(). + + NOTE: This does not distinguish between the different possible + classes (text vs. binary, read vs. write vs. read/write, + append-only, unbuffered). The TextIO and BinaryIO subclasses + below capture the distinctions between text vs. binary, which is + pervasive in the interface; however we currently do not offer a + way to track the other distinctions in the type system. + """ + + __slots__ = () + + @abstractproperty + def mode(self) -> str: + pass + + @abstractproperty + def name(self) -> str: + pass + + @abstractmethod + def close(self) -> None: + pass + + @abstractmethod + def closed(self) -> bool: + pass + + @abstractmethod + def fileno(self) -> int: + pass + + @abstractmethod + def flush(self) -> None: + pass + + @abstractmethod + def isatty(self) -> bool: + pass + + @abstractmethod + def read(self, n: int = -1) -> AnyStr: + pass + + @abstractmethod + def readable(self) -> bool: + pass + + @abstractmethod + def readline(self, limit: int = -1) -> AnyStr: + pass + + @abstractmethod + def readlines(self, hint: int = -1) -> List[AnyStr]: + pass + + @abstractmethod + def seek(self, offset: int, whence: int = 0) -> int: + pass + + @abstractmethod + def seekable(self) -> bool: + pass + + @abstractmethod + def tell(self) -> int: + pass + + @abstractmethod + def truncate(self, size: int = None) -> int: + pass + + @abstractmethod + def writable(self) -> bool: + pass + + @abstractmethod + def write(self, s: AnyStr) -> int: + pass + + @abstractmethod + def writelines(self, lines: List[AnyStr]) -> None: + pass + + @abstractmethod + def __enter__(self) -> 'IO[AnyStr]': + pass + + @abstractmethod + def __exit__(self, type, value, traceback) -> None: + pass + + +class BinaryIO(IO[bytes]): + """Typed version of the return of open() in binary mode.""" + + __slots__ = () + + @abstractmethod + def write(self, s: Union[bytes, bytearray]) -> int: + pass + + @abstractmethod + def __enter__(self) -> 'BinaryIO': + pass + + +class TextIO(IO[str]): + """Typed version of the return of open() in text mode.""" + + __slots__ = () + + @abstractproperty + def buffer(self) -> BinaryIO: + pass + + @abstractproperty + def encoding(self) -> str: + pass + + @abstractproperty + def errors(self) -> Optional[str]: + pass + + @abstractproperty + def line_buffering(self) -> bool: + pass + + @abstractproperty + def newlines(self) -> Any: + pass + + @abstractmethod + def __enter__(self) -> 'TextIO': + pass + + +class io: + """Wrapper namespace for IO generic classes.""" + + __all__ = ['IO', 'TextIO', 'BinaryIO'] + IO = IO + TextIO = TextIO + BinaryIO = BinaryIO + +io.__name__ = __name__ + '.io' +sys.modules[io.__name__] = io + + +Pattern = _TypeAlias('Pattern', AnyStr, type(stdlib_re.compile('')), + lambda p: p.pattern) +Match = _TypeAlias('Match', AnyStr, type(stdlib_re.match('', '')), + lambda m: m.re.pattern) + + +class re: + """Wrapper namespace for re type aliases.""" + + __all__ = ['Pattern', 'Match'] + Pattern = Pattern + Match = Match + +re.__name__ = 
__name__ + '.re' +sys.modules[re.__name__] = re diff --git a/typing_extensions/test_data/python-3.6.1/_collections_abc.py b/typing_extensions/test_data/python-3.6.1/_collections_abc.py new file mode 100644 index 00000000..b172f3f3 --- /dev/null +++ b/typing_extensions/test_data/python-3.6.1/_collections_abc.py @@ -0,0 +1,1007 @@ +# Copyright 2007 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Abstract Base Classes (ABCs) for collections, according to PEP 3119. + +Unit tests are in test_collections. +""" + +from abc import ABCMeta, abstractmethod +import sys + +__all__ = ["Awaitable", "Coroutine", + "AsyncIterable", "AsyncIterator", "AsyncGenerator", + "Hashable", "Iterable", "Iterator", "Generator", "Reversible", + "Sized", "Container", "Callable", "Collection", + "Set", "MutableSet", + "Mapping", "MutableMapping", + "MappingView", "KeysView", "ItemsView", "ValuesView", + "Sequence", "MutableSequence", + "ByteString", + ] + +# This module has been renamed from collections.abc to _collections_abc to +# speed up interpreter startup. Some of the types such as MutableMapping are +# required early but collections module imports a lot of other modules. +# See issue #19218 +__name__ = "collections.abc" + +# Private list of types that we want to register with the various ABCs +# so that they will pass tests like: +# it = iter(somebytearray) +# assert isinstance(it, Iterable) +# Note: in other implementations, these types might not be distinct +# and they may have their own implementation specific types that +# are not included on this list. +bytes_iterator = type(iter(b'')) +bytearray_iterator = type(iter(bytearray())) +#callable_iterator = ??? +dict_keyiterator = type(iter({}.keys())) +dict_valueiterator = type(iter({}.values())) +dict_itemiterator = type(iter({}.items())) +list_iterator = type(iter([])) +list_reverseiterator = type(iter(reversed([]))) +range_iterator = type(iter(range(0))) +longrange_iterator = type(iter(range(1 << 1000))) +set_iterator = type(iter(set())) +str_iterator = type(iter("")) +tuple_iterator = type(iter(())) +zip_iterator = type(iter(zip())) +## views ## +dict_keys = type({}.keys()) +dict_values = type({}.values()) +dict_items = type({}.items()) +## misc ## +mappingproxy = type(type.__dict__) +generator = type((lambda: (yield))()) +## coroutine ## +async def _coro(): pass +_coro = _coro() +coroutine = type(_coro) +_coro.close() # Prevent ResourceWarning +del _coro +## asynchronous generator ## +async def _ag(): yield +_ag = _ag() +async_generator = type(_ag) +del _ag + + +### ONE-TRICK PONIES ### + +def _check_methods(C, *methods): + mro = C.__mro__ + for method in methods: + for B in mro: + if method in B.__dict__: + if B.__dict__[method] is None: + return NotImplemented + break + else: + return NotImplemented + return True + +class Hashable(metaclass=ABCMeta): + + __slots__ = () + + @abstractmethod + def __hash__(self): + return 0 + + @classmethod + def __subclasshook__(cls, C): + if cls is Hashable: + return _check_methods(C, "__hash__") + return NotImplemented + + +class Awaitable(metaclass=ABCMeta): + + __slots__ = () + + @abstractmethod + def __await__(self): + yield + + @classmethod + def __subclasshook__(cls, C): + if cls is Awaitable: + return _check_methods(C, "__await__") + return NotImplemented + + +class Coroutine(Awaitable): + + __slots__ = () + + @abstractmethod + def send(self, value): + """Send a value into the coroutine. + Return next yielded value or raise StopIteration. 
+ """ + raise StopIteration + + @abstractmethod + def throw(self, typ, val=None, tb=None): + """Raise an exception in the coroutine. + Return next yielded value or raise StopIteration. + """ + if val is None: + if tb is None: + raise typ + val = typ() + if tb is not None: + val = val.with_traceback(tb) + raise val + + def close(self): + """Raise GeneratorExit inside coroutine. + """ + try: + self.throw(GeneratorExit) + except (GeneratorExit, StopIteration): + pass + else: + raise RuntimeError("coroutine ignored GeneratorExit") + + @classmethod + def __subclasshook__(cls, C): + if cls is Coroutine: + return _check_methods(C, '__await__', 'send', 'throw', 'close') + return NotImplemented + + +Coroutine.register(coroutine) + + +class AsyncIterable(metaclass=ABCMeta): + + __slots__ = () + + @abstractmethod + def __aiter__(self): + return AsyncIterator() + + @classmethod + def __subclasshook__(cls, C): + if cls is AsyncIterable: + return _check_methods(C, "__aiter__") + return NotImplemented + + +class AsyncIterator(AsyncIterable): + + __slots__ = () + + @abstractmethod + async def __anext__(self): + """Return the next item or raise StopAsyncIteration when exhausted.""" + raise StopAsyncIteration + + def __aiter__(self): + return self + + @classmethod + def __subclasshook__(cls, C): + if cls is AsyncIterator: + return _check_methods(C, "__anext__", "__aiter__") + return NotImplemented + + +class AsyncGenerator(AsyncIterator): + + __slots__ = () + + async def __anext__(self): + """Return the next item from the asynchronous generator. + When exhausted, raise StopAsyncIteration. + """ + return await self.asend(None) + + @abstractmethod + async def asend(self, value): + """Send a value into the asynchronous generator. + Return next yielded value or raise StopAsyncIteration. + """ + raise StopAsyncIteration + + @abstractmethod + async def athrow(self, typ, val=None, tb=None): + """Raise an exception in the asynchronous generator. + Return next yielded value or raise StopAsyncIteration. + """ + if val is None: + if tb is None: + raise typ + val = typ() + if tb is not None: + val = val.with_traceback(tb) + raise val + + async def aclose(self): + """Raise GeneratorExit inside coroutine. + """ + try: + await self.athrow(GeneratorExit) + except (GeneratorExit, StopAsyncIteration): + pass + else: + raise RuntimeError("asynchronous generator ignored GeneratorExit") + + @classmethod + def __subclasshook__(cls, C): + if cls is AsyncGenerator: + return _check_methods(C, '__aiter__', '__anext__', + 'asend', 'athrow', 'aclose') + return NotImplemented + + +AsyncGenerator.register(async_generator) + + +class Iterable(metaclass=ABCMeta): + + __slots__ = () + + @abstractmethod + def __iter__(self): + while False: + yield None + + @classmethod + def __subclasshook__(cls, C): + if cls is Iterable: + return _check_methods(C, "__iter__") + return NotImplemented + + +class Iterator(Iterable): + + __slots__ = () + + @abstractmethod + def __next__(self): + 'Return the next item from the iterator. 
When exhausted, raise StopIteration' + raise StopIteration + + def __iter__(self): + return self + + @classmethod + def __subclasshook__(cls, C): + if cls is Iterator: + return _check_methods(C, '__iter__', '__next__') + return NotImplemented + +Iterator.register(bytes_iterator) +Iterator.register(bytearray_iterator) +#Iterator.register(callable_iterator) +Iterator.register(dict_keyiterator) +Iterator.register(dict_valueiterator) +Iterator.register(dict_itemiterator) +Iterator.register(list_iterator) +Iterator.register(list_reverseiterator) +Iterator.register(range_iterator) +Iterator.register(longrange_iterator) +Iterator.register(set_iterator) +Iterator.register(str_iterator) +Iterator.register(tuple_iterator) +Iterator.register(zip_iterator) + + +class Reversible(Iterable): + + __slots__ = () + + @abstractmethod + def __reversed__(self): + while False: + yield None + + @classmethod + def __subclasshook__(cls, C): + if cls is Reversible: + return _check_methods(C, "__reversed__", "__iter__") + return NotImplemented + + +class Generator(Iterator): + + __slots__ = () + + def __next__(self): + """Return the next item from the generator. + When exhausted, raise StopIteration. + """ + return self.send(None) + + @abstractmethod + def send(self, value): + """Send a value into the generator. + Return next yielded value or raise StopIteration. + """ + raise StopIteration + + @abstractmethod + def throw(self, typ, val=None, tb=None): + """Raise an exception in the generator. + Return next yielded value or raise StopIteration. + """ + if val is None: + if tb is None: + raise typ + val = typ() + if tb is not None: + val = val.with_traceback(tb) + raise val + + def close(self): + """Raise GeneratorExit inside generator. + """ + try: + self.throw(GeneratorExit) + except (GeneratorExit, StopIteration): + pass + else: + raise RuntimeError("generator ignored GeneratorExit") + + @classmethod + def __subclasshook__(cls, C): + if cls is Generator: + return _check_methods(C, '__iter__', '__next__', + 'send', 'throw', 'close') + return NotImplemented + +Generator.register(generator) + + +class Sized(metaclass=ABCMeta): + + __slots__ = () + + @abstractmethod + def __len__(self): + return 0 + + @classmethod + def __subclasshook__(cls, C): + if cls is Sized: + return _check_methods(C, "__len__") + return NotImplemented + + +class Container(metaclass=ABCMeta): + + __slots__ = () + + @abstractmethod + def __contains__(self, x): + return False + + @classmethod + def __subclasshook__(cls, C): + if cls is Container: + return _check_methods(C, "__contains__") + return NotImplemented + +class Collection(Sized, Iterable, Container): + + __slots__ = () + + @classmethod + def __subclasshook__(cls, C): + if cls is Collection: + return _check_methods(C, "__len__", "__iter__", "__contains__") + return NotImplemented + +class Callable(metaclass=ABCMeta): + + __slots__ = () + + @abstractmethod + def __call__(self, *args, **kwds): + return False + + @classmethod + def __subclasshook__(cls, C): + if cls is Callable: + return _check_methods(C, "__call__") + return NotImplemented + + +### SETS ### + + +class Set(Collection): + + """A set is a finite, iterable container. + + This class provides concrete generic implementations of all + methods except for __contains__, __iter__ and __len__. + + To override the comparisons (presumably for speed, as the + semantics are fixed), redefine __le__ and __ge__, + then the other operations will automatically follow suit. 
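+ + The set-algebra mixins build new sets with the _from_iterable() + classmethod defined below; override it if the constructor does not + accept a single iterable argument.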
+ """ + + __slots__ = () + + def __le__(self, other): + if not isinstance(other, Set): + return NotImplemented + if len(self) > len(other): + return False + for elem in self: + if elem not in other: + return False + return True + + def __lt__(self, other): + if not isinstance(other, Set): + return NotImplemented + return len(self) < len(other) and self.__le__(other) + + def __gt__(self, other): + if not isinstance(other, Set): + return NotImplemented + return len(self) > len(other) and self.__ge__(other) + + def __ge__(self, other): + if not isinstance(other, Set): + return NotImplemented + if len(self) < len(other): + return False + for elem in other: + if elem not in self: + return False + return True + + def __eq__(self, other): + if not isinstance(other, Set): + return NotImplemented + return len(self) == len(other) and self.__le__(other) + + @classmethod + def _from_iterable(cls, it): + '''Construct an instance of the class from any iterable input. + + Must override this method if the class constructor signature + does not accept an iterable for an input. + ''' + return cls(it) + + def __and__(self, other): + if not isinstance(other, Iterable): + return NotImplemented + return self._from_iterable(value for value in other if value in self) + + __rand__ = __and__ + + def isdisjoint(self, other): + 'Return True if two sets have a null intersection.' + for value in other: + if value in self: + return False + return True + + def __or__(self, other): + if not isinstance(other, Iterable): + return NotImplemented + chain = (e for s in (self, other) for e in s) + return self._from_iterable(chain) + + __ror__ = __or__ + + def __sub__(self, other): + if not isinstance(other, Set): + if not isinstance(other, Iterable): + return NotImplemented + other = self._from_iterable(other) + return self._from_iterable(value for value in self + if value not in other) + + def __rsub__(self, other): + if not isinstance(other, Set): + if not isinstance(other, Iterable): + return NotImplemented + other = self._from_iterable(other) + return self._from_iterable(value for value in other + if value not in self) + + def __xor__(self, other): + if not isinstance(other, Set): + if not isinstance(other, Iterable): + return NotImplemented + other = self._from_iterable(other) + return (self - other) | (other - self) + + __rxor__ = __xor__ + + def _hash(self): + """Compute the hash value of a set. + + Note that we don't define __hash__: not all sets are hashable. + But if you define a hashable set type, its __hash__ should + call this function. + + This must be compatible __eq__. + + All sets ought to compare equal if they contain the same + elements, regardless of how they are implemented, and + regardless of the order of the elements; so there's not much + freedom for __eq__ or __hash__. We match the algorithm used + by the built-in frozenset type. + """ + MAX = sys.maxsize + MASK = 2 * MAX + 1 + n = len(self) + h = 1927868237 * (n + 1) + h &= MASK + for x in self: + hx = hash(x) + h ^= (hx ^ (hx << 16) ^ 89869747) * 3644798167 + h &= MASK + h = h * 69069 + 907133923 + h &= MASK + if h > MAX: + h -= MASK + 1 + if h == -1: + h = 590923713 + return h + +Set.register(frozenset) + + +class MutableSet(Set): + """A mutable set is a finite, iterable container. + + This class provides concrete generic implementations of all + methods except for __contains__, __iter__, __len__, + add(), and discard(). 
+ + To override the comparisons (presumably for speed, as the + semantics are fixed), all you have to do is redefine __le__ and + then the other operations will automatically follow suit. + """ + + __slots__ = () + + @abstractmethod + def add(self, value): + """Add an element.""" + raise NotImplementedError + + @abstractmethod + def discard(self, value): + """Remove an element. Do not raise an exception if absent.""" + raise NotImplementedError + + def remove(self, value): + """Remove an element. If not a member, raise a KeyError.""" + if value not in self: + raise KeyError(value) + self.discard(value) + + def pop(self): + """Return the popped value. Raise KeyError if empty.""" + it = iter(self) + try: + value = next(it) + except StopIteration: + raise KeyError + self.discard(value) + return value + + def clear(self): + """This is slow (creates N new iterators!) but effective.""" + try: + while True: + self.pop() + except KeyError: + pass + + def __ior__(self, it): + for value in it: + self.add(value) + return self + + def __iand__(self, it): + for value in (self - it): + self.discard(value) + return self + + def __ixor__(self, it): + if it is self: + self.clear() + else: + if not isinstance(it, Set): + it = self._from_iterable(it) + for value in it: + if value in self: + self.discard(value) + else: + self.add(value) + return self + + def __isub__(self, it): + if it is self: + self.clear() + else: + for value in it: + self.discard(value) + return self + +MutableSet.register(set) + + +### MAPPINGS ### + + +class Mapping(Collection): + + __slots__ = () + + """A Mapping is a generic container for associating key/value + pairs. + + This class provides concrete generic implementations of all + methods except for __getitem__, __iter__, and __len__. + + """ + + @abstractmethod + def __getitem__(self, key): + raise KeyError + + def get(self, key, default=None): + 'D.get(k[,d]) -> D[k] if k in D, else d. d defaults to None.' 
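+ # Mirrors dict.get(): a missing key yields default instead of raising.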
+ try: + return self[key] + except KeyError: + return default + + def __contains__(self, key): + try: + self[key] + except KeyError: + return False + else: + return True + + def keys(self): + "D.keys() -> a set-like object providing a view on D's keys" + return KeysView(self) + + def items(self): + "D.items() -> a set-like object providing a view on D's items" + return ItemsView(self) + + def values(self): + "D.values() -> an object providing a view on D's values" + return ValuesView(self) + + def __eq__(self, other): + if not isinstance(other, Mapping): + return NotImplemented + return dict(self.items()) == dict(other.items()) + + __reversed__ = None + +Mapping.register(mappingproxy) + + +class MappingView(Sized): + + __slots__ = '_mapping', + + def __init__(self, mapping): + self._mapping = mapping + + def __len__(self): + return len(self._mapping) + + def __repr__(self): + return '{0.__class__.__name__}({0._mapping!r})'.format(self) + + +class KeysView(MappingView, Set): + + __slots__ = () + + @classmethod + def _from_iterable(self, it): + return set(it) + + def __contains__(self, key): + return key in self._mapping + + def __iter__(self): + yield from self._mapping + +KeysView.register(dict_keys) + + +class ItemsView(MappingView, Set): + + __slots__ = () + + @classmethod + def _from_iterable(self, it): + return set(it) + + def __contains__(self, item): + key, value = item + try: + v = self._mapping[key] + except KeyError: + return False + else: + return v is value or v == value + + def __iter__(self): + for key in self._mapping: + yield (key, self._mapping[key]) + +ItemsView.register(dict_items) + + +class ValuesView(MappingView): + + __slots__ = () + + def __contains__(self, value): + for key in self._mapping: + v = self._mapping[key] + if v is value or v == value: + return True + return False + + def __iter__(self): + for key in self._mapping: + yield self._mapping[key] + +ValuesView.register(dict_values) + + +class MutableMapping(Mapping): + + __slots__ = () + + """A MutableMapping is a generic container for associating + key/value pairs. + + This class provides concrete generic implementations of all + methods except for __getitem__, __setitem__, __delitem__, + __iter__, and __len__. + + """ + + @abstractmethod + def __setitem__(self, key, value): + raise KeyError + + @abstractmethod + def __delitem__(self, key): + raise KeyError + + __marker = object() + + def pop(self, key, default=__marker): + '''D.pop(k[,d]) -> v, remove specified key and return the corresponding value. + If key is not found, d is returned if given, otherwise KeyError is raised. + ''' + try: + value = self[key] + except KeyError: + if default is self.__marker: + raise + return default + else: + del self[key] + return value + + def popitem(self): + '''D.popitem() -> (k, v), remove and return some (key, value) pair + as a 2-tuple; but raise KeyError if D is empty. + ''' + try: + key = next(iter(self)) + except StopIteration: + raise KeyError + value = self[key] + del self[key] + return key, value + + def clear(self): + 'D.clear() -> None. Remove all items from D.' + try: + while True: + self.popitem() + except KeyError: + pass + + def update(*args, **kwds): + ''' D.update([E, ]**F) -> None. Update D from mapping/iterable E and F. 
+ If E present and has a .keys() method, does: for k in E: D[k] = E[k] + If E present and lacks .keys() method, does: for (k, v) in E: D[k] = v + In either case, this is followed by: for k, v in F.items(): D[k] = v + ''' + if not args: + raise TypeError("descriptor 'update' of 'MutableMapping' object " + "needs an argument") + self, *args = args + if len(args) > 1: + raise TypeError('update expected at most 1 arguments, got %d' % + len(args)) + if args: + other = args[0] + if isinstance(other, Mapping): + for key in other: + self[key] = other[key] + elif hasattr(other, "keys"): + for key in other.keys(): + self[key] = other[key] + else: + for key, value in other: + self[key] = value + for key, value in kwds.items(): + self[key] = value + + def setdefault(self, key, default=None): + 'D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D' + try: + return self[key] + except KeyError: + self[key] = default + return default + +MutableMapping.register(dict) + + +### SEQUENCES ### + + +class Sequence(Reversible, Collection): + + """All the operations on a read-only sequence. + + Concrete subclasses must override __new__ or __init__, + __getitem__, and __len__. + """ + + __slots__ = () + + @abstractmethod + def __getitem__(self, index): + raise IndexError + + def __iter__(self): + i = 0 + try: + while True: + v = self[i] + yield v + i += 1 + except IndexError: + return + + def __contains__(self, value): + for v in self: + if v is value or v == value: + return True + return False + + def __reversed__(self): + for i in reversed(range(len(self))): + yield self[i] + + def index(self, value, start=0, stop=None): + '''S.index(value, [start, [stop]]) -> integer -- return first index of value. + Raises ValueError if the value is not present. + ''' + if start is not None and start < 0: + start = max(len(self) + start, 0) + if stop is not None and stop < 0: + stop += len(self) + + i = start + while stop is None or i < stop: + try: + if self[i] == value: + return i + except IndexError: + break + i += 1 + raise ValueError + + def count(self, value): + 'S.count(value) -> integer -- return number of occurrences of value' + return sum(1 for v in self if v == value) + +Sequence.register(tuple) +Sequence.register(str) +Sequence.register(range) +Sequence.register(memoryview) + + +class ByteString(Sequence): + + """This unifies bytes and bytearray. + + XXX Should add all their methods. + """ + + __slots__ = () + +ByteString.register(bytes) +ByteString.register(bytearray) + + +class MutableSequence(Sequence): + + __slots__ = () + + """All the operations on a read-write sequence. + + Concrete subclasses must provide __new__ or __init__, + __getitem__, __setitem__, __delitem__, __len__, and insert(). 
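+ + The mixin methods append(), clear(), reverse(), extend(), pop(), + remove(), and __iadd__() are all implemented in terms of these.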
+ + """ + + @abstractmethod + def __setitem__(self, index, value): + raise IndexError + + @abstractmethod + def __delitem__(self, index): + raise IndexError + + @abstractmethod + def insert(self, index, value): + 'S.insert(index, value) -- insert value before index' + raise IndexError + + def append(self, value): + 'S.append(value) -- append value to the end of the sequence' + self.insert(len(self), value) + + def clear(self): + 'S.clear() -> None -- remove all items from S' + try: + while True: + self.pop() + except IndexError: + pass + + def reverse(self): + 'S.reverse() -- reverse *IN PLACE*' + n = len(self) + for i in range(n//2): + self[i], self[n-i-1] = self[n-i-1], self[i] + + def extend(self, values): + 'S.extend(iterable) -- extend sequence by appending elements from the iterable' + for v in values: + self.append(v) + + def pop(self, index=-1): + '''S.pop([index]) -> item -- remove and return item at index (default last). + Raise IndexError if list is empty or index is out of range. + ''' + v = self[index] + del self[index] + return v + + def remove(self, value): + '''S.remove(value) -- remove first occurrence of value. + Raise ValueError if the value is not present. + ''' + del self[self.index(value)] + + def __iadd__(self, values): + self.extend(values) + return self + +MutableSequence.register(list) +MutableSequence.register(bytearray) # Multiply inheriting, see ByteString diff --git a/typing_extensions/test_data/python-3.6.1/abc.py b/typing_extensions/test_data/python-3.6.1/abc.py new file mode 100644 index 00000000..1cbf96a6 --- /dev/null +++ b/typing_extensions/test_data/python-3.6.1/abc.py @@ -0,0 +1,248 @@ +# Copyright 2007 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Abstract Base Classes (ABCs) according to PEP 3119.""" + +from _weakrefset import WeakSet + + +def abstractmethod(funcobj): + """A decorator indicating abstract methods. + + Requires that the metaclass is ABCMeta or derived from it. A + class that has a metaclass derived from ABCMeta cannot be + instantiated unless all of its abstract methods are overridden. + The abstract methods can be called using any of the normal + 'super' call mechanisms. + + Usage: + + class C(metaclass=ABCMeta): + @abstractmethod + def my_abstract_method(self, ...): + ... + """ + funcobj.__isabstractmethod__ = True + return funcobj + + +class abstractclassmethod(classmethod): + """ + A decorator indicating abstract classmethods. + + Similar to abstractmethod. + + Usage: + + class C(metaclass=ABCMeta): + @abstractclassmethod + def my_abstract_classmethod(cls, ...): + ... + + 'abstractclassmethod' is deprecated. Use 'classmethod' with + 'abstractmethod' instead. + """ + + __isabstractmethod__ = True + + def __init__(self, callable): + callable.__isabstractmethod__ = True + super().__init__(callable) + + +class abstractstaticmethod(staticmethod): + """ + A decorator indicating abstract staticmethods. + + Similar to abstractmethod. + + Usage: + + class C(metaclass=ABCMeta): + @abstractstaticmethod + def my_abstract_staticmethod(...): + ... + + 'abstractstaticmethod' is deprecated. Use 'staticmethod' with + 'abstractmethod' instead. + """ + + __isabstractmethod__ = True + + def __init__(self, callable): + callable.__isabstractmethod__ = True + super().__init__(callable) + + +class abstractproperty(property): + """ + A decorator indicating abstract properties. + + Requires that the metaclass is ABCMeta or derived from it. 
A + class that has a metaclass derived from ABCMeta cannot be + instantiated unless all of its abstract properties are overridden. + The abstract properties can be called using any of the normal + 'super' call mechanisms. + + Usage: + + class C(metaclass=ABCMeta): + @abstractproperty + def my_abstract_property(self): + ... + + This defines a read-only property; you can also define a read-write + abstract property using the 'long' form of property declaration: + + class C(metaclass=ABCMeta): + def getx(self): ... + def setx(self, value): ... + x = abstractproperty(getx, setx) + + 'abstractproperty' is deprecated. Use 'property' with 'abstractmethod' + instead. + """ + + __isabstractmethod__ = True + + +class ABCMeta(type): + + """Metaclass for defining Abstract Base Classes (ABCs). + + Use this metaclass to create an ABC. An ABC can be subclassed + directly, and then acts as a mix-in class. You can also register + unrelated concrete classes (even built-in classes) and unrelated + ABCs as 'virtual subclasses' -- these and their descendants will + be considered subclasses of the registering ABC by the built-in + issubclass() function, but the registering ABC won't show up in + their MRO (Method Resolution Order) nor will method + implementations defined by the registering ABC be callable (not + even via super()). + + """ + + # A global counter that is incremented each time a class is + # registered as a virtual subclass of anything. It forces the + # negative cache to be cleared before its next use. + # Note: this counter is private. Use `abc.get_cache_token()` for + # external code. + _abc_invalidation_counter = 0 + + def __new__(mcls, name, bases, namespace): + cls = super().__new__(mcls, name, bases, namespace) + # Compute set of abstract method names + abstracts = {name + for name, value in namespace.items() + if getattr(value, "__isabstractmethod__", False)} + for base in bases: + for name in getattr(base, "__abstractmethods__", set()): + value = getattr(cls, name, None) + if getattr(value, "__isabstractmethod__", False): + abstracts.add(name) + cls.__abstractmethods__ = frozenset(abstracts) + # Set up inheritance registry + cls._abc_registry = WeakSet() + cls._abc_cache = WeakSet() + cls._abc_negative_cache = WeakSet() + cls._abc_negative_cache_version = ABCMeta._abc_invalidation_counter + return cls + + def register(cls, subclass): + """Register a virtual subclass of an ABC. + + Returns the subclass, to allow usage as a class decorator. + """ + if not isinstance(subclass, type): + raise TypeError("Can only register classes") + if issubclass(subclass, cls): + return subclass # Already a subclass + # Subtle: test for cycles *after* testing for "already a subclass"; + # this means we allow X.register(X) and interpret it as a no-op. 
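+ # e.g. after Sequence.register(MyList), issubclass(MyList, Sequence) is + # True without any real inheritance ('MyList' is illustrative).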
+ if issubclass(cls, subclass): + # This would create a cycle, which is bad for the algorithm below + raise RuntimeError("Refusing to create an inheritance cycle") + cls._abc_registry.add(subclass) + ABCMeta._abc_invalidation_counter += 1 # Invalidate negative cache + return subclass + + def _dump_registry(cls, file=None): + """Debug helper to print the ABC registry.""" + print("Class: %s.%s" % (cls.__module__, cls.__qualname__), file=file) + print("Inv.counter: %s" % ABCMeta._abc_invalidation_counter, file=file) + for name in sorted(cls.__dict__.keys()): + if name.startswith("_abc_"): + value = getattr(cls, name) + print("%s: %r" % (name, value), file=file) + + def __instancecheck__(cls, instance): + """Override for isinstance(instance, cls).""" + # Inline the cache checking + subclass = instance.__class__ + if subclass in cls._abc_cache: + return True + subtype = type(instance) + if subtype is subclass: + if (cls._abc_negative_cache_version == + ABCMeta._abc_invalidation_counter and + subclass in cls._abc_negative_cache): + return False + # Fall back to the subclass check. + return cls.__subclasscheck__(subclass) + return any(cls.__subclasscheck__(c) for c in {subclass, subtype}) + + def __subclasscheck__(cls, subclass): + """Override for issubclass(subclass, cls).""" + # Check cache + if subclass in cls._abc_cache: + return True + # Check negative cache; may have to invalidate + if cls._abc_negative_cache_version < ABCMeta._abc_invalidation_counter: + # Invalidate the negative cache + cls._abc_negative_cache = WeakSet() + cls._abc_negative_cache_version = ABCMeta._abc_invalidation_counter + elif subclass in cls._abc_negative_cache: + return False + # Check the subclass hook + ok = cls.__subclasshook__(subclass) + if ok is not NotImplemented: + assert isinstance(ok, bool) + if ok: + cls._abc_cache.add(subclass) + else: + cls._abc_negative_cache.add(subclass) + return ok + # Check if it's a direct subclass + if cls in getattr(subclass, '__mro__', ()): + cls._abc_cache.add(subclass) + return True + # Check if it's a subclass of a registered class (recursive) + for rcls in cls._abc_registry: + if issubclass(subclass, rcls): + cls._abc_cache.add(subclass) + return True + # Check if it's a subclass of a subclass (recursive) + for scls in cls.__subclasses__(): + if issubclass(subclass, scls): + cls._abc_cache.add(subclass) + return True + # No dice; update negative cache + cls._abc_negative_cache.add(subclass) + return False + + +class ABC(metaclass=ABCMeta): + """Helper class that provides a standard way to create an ABC using + inheritance. + """ + pass + + +def get_cache_token(): + """Returns the current ABC cache token. + + The token is an opaque object (supporting equality testing) identifying the + current version of the ABC cache for virtual subclasses. The token changes + with every call to ``register()`` on any ABC. + """ + return ABCMeta._abc_invalidation_counter diff --git a/typing_extensions/test_data/python-3.6.1/collections/__init__.py b/typing_extensions/test_data/python-3.6.1/collections/__init__.py new file mode 100644 index 00000000..85b4c3c1 --- /dev/null +++ b/typing_extensions/test_data/python-3.6.1/collections/__init__.py @@ -0,0 +1,1246 @@ +'''This module implements specialized container datatypes providing +alternatives to Python's general purpose built-in containers, dict, +list, set, and tuple. 
+ +* namedtuple factory function for creating tuple subclasses with named fields +* deque list-like container with fast appends and pops on either end +* ChainMap dict-like class for creating a single view of multiple mappings +* Counter dict subclass for counting hashable objects +* OrderedDict dict subclass that remembers the order entries were added +* defaultdict dict subclass that calls a factory function to supply missing values +* UserDict wrapper around dictionary objects for easier dict subclassing +* UserList wrapper around list objects for easier list subclassing +* UserString wrapper around string objects for easier string subclassing + +''' + +__all__ = ['deque', 'defaultdict', 'namedtuple', 'UserDict', 'UserList', + 'UserString', 'Counter', 'OrderedDict', 'ChainMap'] + +# For backwards compatibility, continue to make the collections ABCs +# available through the collections module. +from _collections_abc import * +import _collections_abc +__all__ += _collections_abc.__all__ + +from operator import itemgetter as _itemgetter, eq as _eq +from keyword import iskeyword as _iskeyword +import sys as _sys +import heapq as _heapq +from _weakref import proxy as _proxy +from itertools import repeat as _repeat, chain as _chain, starmap as _starmap +from reprlib import recursive_repr as _recursive_repr + +try: + from _collections import deque +except ImportError: + pass +else: + MutableSequence.register(deque) + +try: + from _collections import defaultdict +except ImportError: + pass + + +################################################################################ +### OrderedDict +################################################################################ + +class _OrderedDictKeysView(KeysView): + + def __reversed__(self): + yield from reversed(self._mapping) + +class _OrderedDictItemsView(ItemsView): + + def __reversed__(self): + for key in reversed(self._mapping): + yield (key, self._mapping[key]) + +class _OrderedDictValuesView(ValuesView): + + def __reversed__(self): + for key in reversed(self._mapping): + yield self._mapping[key] + +class _Link(object): + __slots__ = 'prev', 'next', 'key', '__weakref__' + +class OrderedDict(dict): + 'Dictionary that remembers insertion order' + # An inherited dict maps keys to values. + # The inherited dict provides __getitem__, __len__, __contains__, and get. + # The remaining methods are order-aware. + # Big-O running times for all methods are the same as regular dictionaries. + + # The internal self.__map dict maps keys to links in a doubly linked list. + # The circular doubly linked list starts and ends with a sentinel element. + # The sentinel element never gets deleted (this simplifies the algorithm). + # The sentinel is in self.__hardroot with a weakref proxy in self.__root. + # The prev links are weakref proxies (to prevent circular references). + # Individual links are kept alive by the hard reference in self.__map. + # Those hard references disappear when a key is deleted from an OrderedDict. + + def __init__(*args, **kwds): + '''Initialize an ordered dictionary. The signature is the same as + regular dictionaries, but keyword arguments are not recommended because + their insertion order is arbitrary. 
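+ + For example:: + + OrderedDict([('a', 1), ('b', 2)]) # pair order is preserved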
+ + ''' + if not args: + raise TypeError("descriptor '__init__' of 'OrderedDict' object " + "needs an argument") + self, *args = args + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + try: + self.__root + except AttributeError: + self.__hardroot = _Link() + self.__root = root = _proxy(self.__hardroot) + root.prev = root.next = root + self.__map = {} + self.__update(*args, **kwds) + + def __setitem__(self, key, value, + dict_setitem=dict.__setitem__, proxy=_proxy, Link=_Link): + 'od.__setitem__(i, y) <==> od[i]=y' + # Setting a new item creates a new link at the end of the linked list, + # and the inherited dictionary is updated with the new key/value pair. + if key not in self: + self.__map[key] = link = Link() + root = self.__root + last = root.prev + link.prev, link.next, link.key = last, root, key + last.next = link + root.prev = proxy(link) + dict_setitem(self, key, value) + + def __delitem__(self, key, dict_delitem=dict.__delitem__): + 'od.__delitem__(y) <==> del od[y]' + # Deleting an existing item uses self.__map to find the link which gets + # removed by updating the links in the predecessor and successor nodes. + dict_delitem(self, key) + link = self.__map.pop(key) + link_prev = link.prev + link_next = link.next + link_prev.next = link_next + link_next.prev = link_prev + link.prev = None + link.next = None + + def __iter__(self): + 'od.__iter__() <==> iter(od)' + # Traverse the linked list in order. + root = self.__root + curr = root.next + while curr is not root: + yield curr.key + curr = curr.next + + def __reversed__(self): + 'od.__reversed__() <==> reversed(od)' + # Traverse the linked list in reverse order. + root = self.__root + curr = root.prev + while curr is not root: + yield curr.key + curr = curr.prev + + def clear(self): + 'od.clear() -> None. Remove all items from od.' + root = self.__root + root.prev = root.next = root + self.__map.clear() + dict.clear(self) + + def popitem(self, last=True): + '''od.popitem() -> (k, v), return and remove a (key, value) pair. + Pairs are returned in LIFO order if last is true or FIFO order if false. + + ''' + if not self: + raise KeyError('dictionary is empty') + root = self.__root + if last: + link = root.prev + link_prev = link.prev + link_prev.next = root + root.prev = link_prev + else: + link = root.next + link_next = link.next + root.next = link_next + link_next.prev = root + key = link.key + del self.__map[key] + value = dict.pop(self, key) + return key, value + + def move_to_end(self, key, last=True): + '''Move an existing element to the end (or beginning if last==False). + + Raises KeyError if the element does not exist. + When last=True, acts like a fast version of self[key]=self.pop(key). 
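+
+        A doctest-style sketch of both directions:
+
+        >>> d = OrderedDict.fromkeys('abcde')
+        >>> d.move_to_end('b')
+        >>> ''.join(d.keys())
+        'acdeb'
+        >>> d.move_to_end('b', last=False)
+        >>> ''.join(d.keys())
+        'bacde'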
+ + ''' + link = self.__map[key] + link_prev = link.prev + link_next = link.next + soft_link = link_next.prev + link_prev.next = link_next + link_next.prev = link_prev + root = self.__root + if last: + last = root.prev + link.prev = last + link.next = root + root.prev = soft_link + last.next = link + else: + first = root.next + link.prev = root + link.next = first + first.prev = soft_link + root.next = link + + def __sizeof__(self): + sizeof = _sys.getsizeof + n = len(self) + 1 # number of links including root + size = sizeof(self.__dict__) # instance dictionary + size += sizeof(self.__map) * 2 # internal dict and inherited dict + size += sizeof(self.__hardroot) * n # link objects + size += sizeof(self.__root) * n # proxy objects + return size + + update = __update = MutableMapping.update + + def keys(self): + "D.keys() -> a set-like object providing a view on D's keys" + return _OrderedDictKeysView(self) + + def items(self): + "D.items() -> a set-like object providing a view on D's items" + return _OrderedDictItemsView(self) + + def values(self): + "D.values() -> an object providing a view on D's values" + return _OrderedDictValuesView(self) + + __ne__ = MutableMapping.__ne__ + + __marker = object() + + def pop(self, key, default=__marker): + '''od.pop(k[,d]) -> v, remove specified key and return the corresponding + value. If key is not found, d is returned if given, otherwise KeyError + is raised. + + ''' + if key in self: + result = self[key] + del self[key] + return result + if default is self.__marker: + raise KeyError(key) + return default + + def setdefault(self, key, default=None): + 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' + if key in self: + return self[key] + self[key] = default + return default + + @_recursive_repr() + def __repr__(self): + 'od.__repr__() <==> repr(od)' + if not self: + return '%s()' % (self.__class__.__name__,) + return '%s(%r)' % (self.__class__.__name__, list(self.items())) + + def __reduce__(self): + 'Return state information for pickling' + inst_dict = vars(self).copy() + for k in vars(OrderedDict()): + inst_dict.pop(k, None) + return self.__class__, (), inst_dict or None, None, iter(self.items()) + + def copy(self): + 'od.copy() -> a shallow copy of od' + return self.__class__(self) + + @classmethod + def fromkeys(cls, iterable, value=None): + '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S. + If not specified, the value defaults to None. + + ''' + self = cls() + for key in iterable: + self[key] = value + return self + + def __eq__(self, other): + '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive + while comparison to a regular mapping is order-insensitive. + + ''' + if isinstance(other, OrderedDict): + return dict.__eq__(self, other) and all(map(_eq, self, other)) + return dict.__eq__(self, other) + + +try: + from _collections import OrderedDict +except ImportError: + # Leave the pure Python version in place. 
+ pass + + +################################################################################ +### namedtuple +################################################################################ + +_class_template = """\ +from builtins import property as _property, tuple as _tuple +from operator import itemgetter as _itemgetter +from collections import OrderedDict + +class {typename}(tuple): + '{typename}({arg_list})' + + __slots__ = () + + _fields = {field_names!r} + + def __new__(_cls, {arg_list}): + 'Create new instance of {typename}({arg_list})' + return _tuple.__new__(_cls, ({arg_list})) + + @classmethod + def _make(cls, iterable, new=tuple.__new__, len=len): + 'Make a new {typename} object from a sequence or iterable' + result = new(cls, iterable) + if len(result) != {num_fields:d}: + raise TypeError('Expected {num_fields:d} arguments, got %d' % len(result)) + return result + + def _replace(_self, **kwds): + 'Return a new {typename} object replacing specified fields with new values' + result = _self._make(map(kwds.pop, {field_names!r}, _self)) + if kwds: + raise ValueError('Got unexpected field names: %r' % list(kwds)) + return result + + def __repr__(self): + 'Return a nicely formatted representation string' + return self.__class__.__name__ + '({repr_fmt})' % self + + def _asdict(self): + 'Return a new OrderedDict which maps field names to their values.' + return OrderedDict(zip(self._fields, self)) + + def __getnewargs__(self): + 'Return self as a plain tuple. Used by copy and pickle.' + return tuple(self) + +{field_defs} +""" + +_repr_template = '{name}=%r' + +_field_template = '''\ + {name} = _property(_itemgetter({index:d}), doc='Alias for field number {index:d}') +''' + +def namedtuple(typename, field_names, *, verbose=False, rename=False, module=None): + """Returns a new subclass of tuple with named fields. + + >>> Point = namedtuple('Point', ['x', 'y']) + >>> Point.__doc__ # docstring for the new class + 'Point(x, y)' + >>> p = Point(11, y=22) # instantiate with positional args or keywords + >>> p[0] + p[1] # indexable like a plain tuple + 33 + >>> x, y = p # unpack like a regular tuple + >>> x, y + (11, 22) + >>> p.x + p.y # fields also accessible by name + 33 + >>> d = p._asdict() # convert to a dictionary + >>> d['x'] + 11 + >>> Point(**d) # convert from a dictionary + Point(x=11, y=22) + >>> p._replace(x=100) # _replace() is like str.replace() but targets named fields + Point(x=100, y=22) + + """ + + # Validate the field names. At the user's option, either generate an error + # message or automatically replace the field name with a valid name. 
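+    # For example, per the rename logic below:
+    #   namedtuple('N', ['abc', 'def', 'abc'], rename=True)
+    # produces fields ('abc', '_1', '_2') -- 'def' is a keyword and the
+    # second 'abc' is a duplicate, so each is replaced by '_<index>'.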
+ if isinstance(field_names, str): + field_names = field_names.replace(',', ' ').split() + field_names = list(map(str, field_names)) + typename = str(typename) + if rename: + seen = set() + for index, name in enumerate(field_names): + if (not name.isidentifier() + or _iskeyword(name) + or name.startswith('_') + or name in seen): + field_names[index] = '_%d' % index + seen.add(name) + for name in [typename] + field_names: + if type(name) is not str: + raise TypeError('Type names and field names must be strings') + if not name.isidentifier(): + raise ValueError('Type names and field names must be valid ' + 'identifiers: %r' % name) + if _iskeyword(name): + raise ValueError('Type names and field names cannot be a ' + 'keyword: %r' % name) + seen = set() + for name in field_names: + if name.startswith('_') and not rename: + raise ValueError('Field names cannot start with an underscore: ' + '%r' % name) + if name in seen: + raise ValueError('Encountered duplicate field name: %r' % name) + seen.add(name) + + # Fill-in the class template + class_definition = _class_template.format( + typename = typename, + field_names = tuple(field_names), + num_fields = len(field_names), + arg_list = repr(tuple(field_names)).replace("'", "")[1:-1], + repr_fmt = ', '.join(_repr_template.format(name=name) + for name in field_names), + field_defs = '\n'.join(_field_template.format(index=index, name=name) + for index, name in enumerate(field_names)) + ) + + # Execute the template string in a temporary namespace and support + # tracing utilities by setting a value for frame.f_globals['__name__'] + namespace = dict(__name__='namedtuple_%s' % typename) + exec(class_definition, namespace) + result = namespace[typename] + result._source = class_definition + if verbose: + print(result._source) + + # For pickling to work, the __module__ variable needs to be set to the frame + # where the named tuple is created. Bypass this step in environments where + # sys._getframe is not defined (Jython for example) or sys._getframe is not + # defined for arguments greater than 0 (IronPython), or where the user has + # specified a particular module. + if module is None: + try: + module = _sys._getframe(1).f_globals.get('__name__', '__main__') + except (AttributeError, ValueError): + pass + if module is not None: + result.__module__ = module + + return result + + +######################################################################## +### Counter +######################################################################## + +def _count_elements(mapping, iterable): + 'Tally elements from the iterable.' + mapping_get = mapping.get + for elem in iterable: + mapping[elem] = mapping_get(elem, 0) + 1 + +try: # Load C helper function if available + from _collections import _count_elements +except ImportError: + pass + +class Counter(dict): + '''Dict subclass for counting hashable items. Sometimes called a bag + or multiset. Elements are stored as dictionary keys and their counts + are stored as dictionary values. + + >>> c = Counter('abcdeabcdabcaba') # count elements from a string + + >>> c.most_common(3) # three most common elements + [('a', 5), ('b', 4), ('c', 3)] + >>> sorted(c) # list all unique elements + ['a', 'b', 'c', 'd', 'e'] + >>> ''.join(sorted(c.elements())) # list elements with repetitions + 'aaaaabbbbcccdde' + >>> sum(c.values()) # total of all counts + 15 + + >>> c['a'] # count of letter 'a' + 5 + >>> for elem in 'shazam': # update counts from an iterable + ... 
c[elem] += 1 # by adding 1 to each element's count + >>> c['a'] # now there are seven 'a' + 7 + >>> del c['b'] # remove all 'b' + >>> c['b'] # now there are zero 'b' + 0 + + >>> d = Counter('simsalabim') # make another counter + >>> c.update(d) # add in the second counter + >>> c['a'] # now there are nine 'a' + 9 + + >>> c.clear() # empty the counter + >>> c + Counter() + + Note: If a count is set to zero or reduced to zero, it will remain + in the counter until the entry is deleted or the counter is cleared: + + >>> c = Counter('aaabbc') + >>> c['b'] -= 2 # reduce the count of 'b' by two + >>> c.most_common() # 'b' is still in, but its count is zero + [('a', 3), ('c', 1), ('b', 0)] + + ''' + # References: + # http://en.wikipedia.org/wiki/Multiset + # http://www.gnu.org/software/smalltalk/manual-base/html_node/Bag.html + # http://www.demo2s.com/Tutorial/Cpp/0380__set-multiset/Catalog0380__set-multiset.htm + # http://code.activestate.com/recipes/259174/ + # Knuth, TAOCP Vol. II section 4.6.3 + + def __init__(*args, **kwds): + '''Create a new, empty Counter object. And if given, count elements + from an input iterable. Or, initialize the count from another mapping + of elements to their counts. + + >>> c = Counter() # a new, empty counter + >>> c = Counter('gallahad') # a new counter from an iterable + >>> c = Counter({'a': 4, 'b': 2}) # a new counter from a mapping + >>> c = Counter(a=4, b=2) # a new counter from keyword args + + ''' + if not args: + raise TypeError("descriptor '__init__' of 'Counter' object " + "needs an argument") + self, *args = args + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + super(Counter, self).__init__() + self.update(*args, **kwds) + + def __missing__(self, key): + 'The count of elements not in the Counter is zero.' + # Needed so that self[missing_item] does not raise KeyError + return 0 + + def most_common(self, n=None): + '''List the n most common elements and their counts from the most + common to the least. If n is None, then list all element counts. + + >>> Counter('abcdeabcdabcaba').most_common(3) + [('a', 5), ('b', 4), ('c', 3)] + + ''' + # Emulate Bag.sortedByCount from Smalltalk + if n is None: + return sorted(self.items(), key=_itemgetter(1), reverse=True) + return _heapq.nlargest(n, self.items(), key=_itemgetter(1)) + + def elements(self): + '''Iterator over elements repeating each as many times as its count. + + >>> c = Counter('ABCABC') + >>> sorted(c.elements()) + ['A', 'A', 'B', 'B', 'C', 'C'] + + # Knuth's example for prime factors of 1836: 2**2 * 3**3 * 17**1 + >>> prime_factors = Counter({2: 2, 3: 3, 17: 1}) + >>> product = 1 + >>> for factor in prime_factors.elements(): # loop over factors + ... product *= factor # and multiply them + >>> product + 1836 + + Note, if an element's count has been set to zero or is a negative + number, elements() will ignore it. + + ''' + # Emulate Bag.do from Smalltalk and Multiset.begin from C++. + return _chain.from_iterable(_starmap(_repeat, self.items())) + + # Override dict methods where necessary + + @classmethod + def fromkeys(cls, iterable, v=None): + # There is no equivalent method for counters because setting v=1 + # means that no element can have a count greater than one. + raise NotImplementedError( + 'Counter.fromkeys() is undefined. Use Counter(iterable) instead.') + + def update(*args, **kwds): + '''Like dict.update() but add counts instead of replacing them. + + Source can be an iterable, a dictionary, or another Counter instance. 
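+
+        Keyword arguments are counted as well, consistent with the kwds
+        handling in the body below:
+
+        >>> c = Counter(a=1)
+        >>> c.update(a=2, b=1)
+        >>> c['a'], c['b']
+        (3, 1)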
+
+        >>> c = Counter('which')
+        >>> c.update('witch')           # add elements from another iterable
+        >>> d = Counter('watch')
+        >>> c.update(d)                 # add elements from another counter
+        >>> c['h']                      # four 'h' in which, witch, and watch
+        4
+
+        '''
+        # The regular dict.update() operation makes no sense here because the
+        # replace behavior results in some of the original untouched counts
+        # being mixed-in with all of the other counts for a mishmash that
+        # doesn't have a straightforward interpretation in most counting
+        # contexts.  Instead, we implement straight-addition.  Both the inputs
+        # and outputs are allowed to contain zero and negative counts.
+
+        if not args:
+            raise TypeError("descriptor 'update' of 'Counter' object "
+                            "needs an argument")
+        self, *args = args
+        if len(args) > 1:
+            raise TypeError('expected at most 1 arguments, got %d' % len(args))
+        iterable = args[0] if args else None
+        if iterable is not None:
+            if isinstance(iterable, Mapping):
+                if self:
+                    self_get = self.get
+                    for elem, count in iterable.items():
+                        self[elem] = count + self_get(elem, 0)
+                else:
+                    super(Counter, self).update(iterable) # fast path when counter is empty
+            else:
+                _count_elements(self, iterable)
+        if kwds:
+            self.update(kwds)
+
+    def subtract(*args, **kwds):
+        '''Like dict.update() but subtracts counts instead of replacing them.
+        Counts can be reduced below zero.  Both the inputs and outputs are
+        allowed to contain zero and negative counts.
+
+        Source can be an iterable, a dictionary, or another Counter instance.
+
+        >>> c = Counter('which')
+        >>> c.subtract('witch')             # subtract elements from another iterable
+        >>> c.subtract(Counter('watch'))    # subtract elements from another counter
+        >>> c['h']                          # 2 in which, minus 1 in witch, minus 1 in watch
+        0
+        >>> c['w']                          # 1 in which, minus 1 in witch, minus 1 in watch
+        -1
+
+        '''
+        if not args:
+            raise TypeError("descriptor 'subtract' of 'Counter' object "
+                            "needs an argument")
+        self, *args = args
+        if len(args) > 1:
+            raise TypeError('expected at most 1 arguments, got %d' % len(args))
+        iterable = args[0] if args else None
+        if iterable is not None:
+            self_get = self.get
+            if isinstance(iterable, Mapping):
+                for elem, count in iterable.items():
+                    self[elem] = self_get(elem, 0) - count
+            else:
+                for elem in iterable:
+                    self[elem] = self_get(elem, 0) - 1
+        if kwds:
+            self.subtract(kwds)
+
+    def copy(self):
+        'Return a shallow copy.'
+        return self.__class__(self)
+
+    def __reduce__(self):
+        return self.__class__, (dict(self),)
+
+    def __delitem__(self, elem):
+        'Like dict.__delitem__() but does not raise KeyError for missing values.'
+        if elem in self:
+            super().__delitem__(elem)
+
+    def __repr__(self):
+        if not self:
+            return '%s()' % self.__class__.__name__
+        try:
+            items = ', '.join(map('%r: %r'.__mod__, self.most_common()))
+            return '%s({%s})' % (self.__class__.__name__, items)
+        except TypeError:
+            # handle case where values are not orderable
+            return '{0}({1!r})'.format(self.__class__.__name__, dict(self))
+
+    # Multiset-style mathematical operations discussed in:
+    #       Knuth TAOCP Volume II section 4.6.3 exercise 19
+    #       and at http://en.wikipedia.org/wiki/Multiset
+    #
+    # Outputs guaranteed to only include positive counts.
+    #
+    # To strip negative and zero counts, add-in an empty counter:
+    #       c += Counter()
+
+    def __add__(self, other):
+        '''Add counts from two counters.
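+        Only elements whose resulting count is positive are kept; for
+        instance, Counter(a=1, b=-2) + Counter(b=1) == Counter(a=1).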
+ + >>> Counter('abbb') + Counter('bcc') + Counter({'b': 4, 'c': 2, 'a': 1}) + + ''' + if not isinstance(other, Counter): + return NotImplemented + result = Counter() + for elem, count in self.items(): + newcount = count + other[elem] + if newcount > 0: + result[elem] = newcount + for elem, count in other.items(): + if elem not in self and count > 0: + result[elem] = count + return result + + def __sub__(self, other): + ''' Subtract count, but keep only results with positive counts. + + >>> Counter('abbbc') - Counter('bccd') + Counter({'b': 2, 'a': 1}) + + ''' + if not isinstance(other, Counter): + return NotImplemented + result = Counter() + for elem, count in self.items(): + newcount = count - other[elem] + if newcount > 0: + result[elem] = newcount + for elem, count in other.items(): + if elem not in self and count < 0: + result[elem] = 0 - count + return result + + def __or__(self, other): + '''Union is the maximum of value in either of the input counters. + + >>> Counter('abbb') | Counter('bcc') + Counter({'b': 3, 'c': 2, 'a': 1}) + + ''' + if not isinstance(other, Counter): + return NotImplemented + result = Counter() + for elem, count in self.items(): + other_count = other[elem] + newcount = other_count if count < other_count else count + if newcount > 0: + result[elem] = newcount + for elem, count in other.items(): + if elem not in self and count > 0: + result[elem] = count + return result + + def __and__(self, other): + ''' Intersection is the minimum of corresponding counts. + + >>> Counter('abbb') & Counter('bcc') + Counter({'b': 1}) + + ''' + if not isinstance(other, Counter): + return NotImplemented + result = Counter() + for elem, count in self.items(): + other_count = other[elem] + newcount = count if count < other_count else other_count + if newcount > 0: + result[elem] = newcount + return result + + def __pos__(self): + 'Adds an empty counter, effectively stripping negative and zero counts' + result = Counter() + for elem, count in self.items(): + if count > 0: + result[elem] = count + return result + + def __neg__(self): + '''Subtracts from an empty counter. Strips positive and zero counts, + and flips the sign on negative counts. + + ''' + result = Counter() + for elem, count in self.items(): + if count < 0: + result[elem] = 0 - count + return result + + def _keep_positive(self): + '''Internal method to strip elements with a negative or zero count''' + nonpositive = [elem for elem, count in self.items() if not count > 0] + for elem in nonpositive: + del self[elem] + return self + + def __iadd__(self, other): + '''Inplace add from another counter, keeping only positive counts. + + >>> c = Counter('abbb') + >>> c += Counter('bcc') + >>> c + Counter({'b': 4, 'c': 2, 'a': 1}) + + ''' + for elem, count in other.items(): + self[elem] += count + return self._keep_positive() + + def __isub__(self, other): + '''Inplace subtract counter, but keep only results with positive counts. + + >>> c = Counter('abbbc') + >>> c -= Counter('bccd') + >>> c + Counter({'b': 2, 'a': 1}) + + ''' + for elem, count in other.items(): + self[elem] -= count + return self._keep_positive() + + def __ior__(self, other): + '''Inplace union is the maximum of value from either counter. 
+
+        >>> c = Counter('abbb')
+        >>> c |= Counter('bcc')
+        >>> c
+        Counter({'b': 3, 'c': 2, 'a': 1})
+
+        '''
+        for elem, other_count in other.items():
+            count = self[elem]
+            if other_count > count:
+                self[elem] = other_count
+        return self._keep_positive()
+
+    def __iand__(self, other):
+        '''Inplace intersection is the minimum of corresponding counts.
+
+        >>> c = Counter('abbb')
+        >>> c &= Counter('bcc')
+        >>> c
+        Counter({'b': 1})
+
+        '''
+        for elem, count in self.items():
+            other_count = other[elem]
+            if other_count < count:
+                self[elem] = other_count
+        return self._keep_positive()
+
+
+########################################################################
+###  ChainMap
+########################################################################
+
+class ChainMap(MutableMapping):
+    ''' A ChainMap groups multiple dicts (or other mappings) together
+    to create a single, updateable view.
+
+    The underlying mappings are stored in a list.  That list is public and can
+    be accessed or updated using the *maps* attribute.  There is no other
+    state.
+
+    Lookups search the underlying mappings successively until a key is found.
+    In contrast, writes, updates, and deletions only operate on the first
+    mapping.
+
+    '''
+
+    def __init__(self, *maps):
+        '''Initialize a ChainMap by setting *maps* to the given mappings.
+        If no mappings are provided, a single empty dictionary is used.
+
+        '''
+        self.maps = list(maps) or [{}]          # always at least one map
+
+    def __missing__(self, key):
+        raise KeyError(key)
+
+    def __getitem__(self, key):
+        for mapping in self.maps:
+            try:
+                return mapping[key]             # can't use 'key in mapping' with defaultdict
+            except KeyError:
+                pass
+        return self.__missing__(key)            # support subclasses that define __missing__
+
+    def get(self, key, default=None):
+        return self[key] if key in self else default
+
+    def __len__(self):
+        return len(set().union(*self.maps))     # reuses stored hash values if possible
+
+    def __iter__(self):
+        return iter(set().union(*self.maps))
+
+    def __contains__(self, key):
+        return any(key in m for m in self.maps)
+
+    def __bool__(self):
+        return any(self.maps)
+
+    @_recursive_repr()
+    def __repr__(self):
+        return '{0.__class__.__name__}({1})'.format(
+            self, ', '.join(map(repr, self.maps)))
+
+    @classmethod
+    def fromkeys(cls, iterable, *args):
+        'Create a ChainMap with a single dict created from the iterable.'
+        return cls(dict.fromkeys(iterable, *args))
+
+    def copy(self):
+        'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]'
+        return self.__class__(self.maps[0].copy(), *self.maps[1:])
+
+    __copy__ = copy
+
+    def new_child(self, m=None):                # like Django's Context.push()
+        '''New ChainMap with a new map followed by all previous maps.
+        If no map is provided, an empty dict is used.
+        '''
+        if m is None:
+            m = {}
+        return self.__class__(m, *self.maps)
+
+    @property
+    def parents(self):                          # like Django's Context.pop()
+        'New ChainMap from maps[1:].'
+        return self.__class__(*self.maps[1:])
+
+    def __setitem__(self, key, value):
+        self.maps[0][key] = value
+
+    def __delitem__(self, key):
+        try:
+            del self.maps[0][key]
+        except KeyError:
+            raise KeyError('Key not found in the first mapping: {!r}'.format(key))
+
+    def popitem(self):
+        'Remove and return an item pair from maps[0]. Raise KeyError if maps[0] is empty.'
+        try:
+            return self.maps[0].popitem()
+        except KeyError:
+            raise KeyError('No keys found in the first mapping.')
+
+    def pop(self, key, *args):
+        'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].'
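+        # Illustrative example: with
+        #   cm = ChainMap({'x': 1}, {'x': 9, 'y': 2})
+        # cm['x'] == 1 and cm['y'] == 2, but cm.pop('y') raises KeyError,
+        # since 'y' lives in maps[1], not maps[0].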
+ try: + return self.maps[0].pop(key, *args) + except KeyError: + raise KeyError('Key not found in the first mapping: {!r}'.format(key)) + + def clear(self): + 'Clear maps[0], leaving maps[1:] intact.' + self.maps[0].clear() + + +################################################################################ +### UserDict +################################################################################ + +class UserDict(MutableMapping): + + # Start by filling-out the abstract methods + def __init__(*args, **kwargs): + if not args: + raise TypeError("descriptor '__init__' of 'UserDict' object " + "needs an argument") + self, *args = args + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + if args: + dict = args[0] + elif 'dict' in kwargs: + dict = kwargs.pop('dict') + import warnings + warnings.warn("Passing 'dict' as keyword argument is deprecated", + DeprecationWarning, stacklevel=2) + else: + dict = None + self.data = {} + if dict is not None: + self.update(dict) + if len(kwargs): + self.update(kwargs) + def __len__(self): return len(self.data) + def __getitem__(self, key): + if key in self.data: + return self.data[key] + if hasattr(self.__class__, "__missing__"): + return self.__class__.__missing__(self, key) + raise KeyError(key) + def __setitem__(self, key, item): self.data[key] = item + def __delitem__(self, key): del self.data[key] + def __iter__(self): + return iter(self.data) + + # Modify __contains__ to work correctly when __missing__ is present + def __contains__(self, key): + return key in self.data + + # Now, add the methods in dicts but not in MutableMapping + def __repr__(self): return repr(self.data) + def copy(self): + if self.__class__ is UserDict: + return UserDict(self.data.copy()) + import copy + data = self.data + try: + self.data = {} + c = copy.copy(self) + finally: + self.data = data + c.update(self) + return c + @classmethod + def fromkeys(cls, iterable, value=None): + d = cls() + for key in iterable: + d[key] = value + return d + + + +################################################################################ +### UserList +################################################################################ + +class UserList(MutableSequence): + """A more or less complete user-defined wrapper around list objects.""" + def __init__(self, initlist=None): + self.data = [] + if initlist is not None: + # XXX should this accept an arbitrary sequence? 
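+            # (It does, via the final branch below; e.g. UserList(range(3))
+            # ends up with self.data == [0, 1, 2].)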
+ if type(initlist) == type(self.data): + self.data[:] = initlist + elif isinstance(initlist, UserList): + self.data[:] = initlist.data[:] + else: + self.data = list(initlist) + def __repr__(self): return repr(self.data) + def __lt__(self, other): return self.data < self.__cast(other) + def __le__(self, other): return self.data <= self.__cast(other) + def __eq__(self, other): return self.data == self.__cast(other) + def __gt__(self, other): return self.data > self.__cast(other) + def __ge__(self, other): return self.data >= self.__cast(other) + def __cast(self, other): + return other.data if isinstance(other, UserList) else other + def __contains__(self, item): return item in self.data + def __len__(self): return len(self.data) + def __getitem__(self, i): return self.data[i] + def __setitem__(self, i, item): self.data[i] = item + def __delitem__(self, i): del self.data[i] + def __add__(self, other): + if isinstance(other, UserList): + return self.__class__(self.data + other.data) + elif isinstance(other, type(self.data)): + return self.__class__(self.data + other) + return self.__class__(self.data + list(other)) + def __radd__(self, other): + if isinstance(other, UserList): + return self.__class__(other.data + self.data) + elif isinstance(other, type(self.data)): + return self.__class__(other + self.data) + return self.__class__(list(other) + self.data) + def __iadd__(self, other): + if isinstance(other, UserList): + self.data += other.data + elif isinstance(other, type(self.data)): + self.data += other + else: + self.data += list(other) + return self + def __mul__(self, n): + return self.__class__(self.data*n) + __rmul__ = __mul__ + def __imul__(self, n): + self.data *= n + return self + def append(self, item): self.data.append(item) + def insert(self, i, item): self.data.insert(i, item) + def pop(self, i=-1): return self.data.pop(i) + def remove(self, item): self.data.remove(item) + def clear(self): self.data.clear() + def copy(self): return self.__class__(self) + def count(self, item): return self.data.count(item) + def index(self, item, *args): return self.data.index(item, *args) + def reverse(self): self.data.reverse() + def sort(self, *args, **kwds): self.data.sort(*args, **kwds) + def extend(self, other): + if isinstance(other, UserList): + self.data.extend(other.data) + else: + self.data.extend(other) + + + +################################################################################ +### UserString +################################################################################ + +class UserString(Sequence): + def __init__(self, seq): + if isinstance(seq, str): + self.data = seq + elif isinstance(seq, UserString): + self.data = seq.data[:] + else: + self.data = str(seq) + def __str__(self): return str(self.data) + def __repr__(self): return repr(self.data) + def __int__(self): return int(self.data) + def __float__(self): return float(self.data) + def __complex__(self): return complex(self.data) + def __hash__(self): return hash(self.data) + def __getnewargs__(self): + return (self.data[:],) + + def __eq__(self, string): + if isinstance(string, UserString): + return self.data == string.data + return self.data == string + def __lt__(self, string): + if isinstance(string, UserString): + return self.data < string.data + return self.data < string + def __le__(self, string): + if isinstance(string, UserString): + return self.data <= string.data + return self.data <= string + def __gt__(self, string): + if isinstance(string, UserString): + return self.data > string.data + return 
self.data > string
+    def __ge__(self, string):
+        if isinstance(string, UserString):
+            return self.data >= string.data
+        return self.data >= string
+
+    def __contains__(self, char):
+        if isinstance(char, UserString):
+            char = char.data
+        return char in self.data
+
+    def __len__(self): return len(self.data)
+    def __getitem__(self, index): return self.__class__(self.data[index])
+    def __add__(self, other):
+        if isinstance(other, UserString):
+            return self.__class__(self.data + other.data)
+        elif isinstance(other, str):
+            return self.__class__(self.data + other)
+        return self.__class__(self.data + str(other))
+    def __radd__(self, other):
+        if isinstance(other, str):
+            return self.__class__(other + self.data)
+        return self.__class__(str(other) + self.data)
+    def __mul__(self, n):
+        return self.__class__(self.data*n)
+    __rmul__ = __mul__
+    def __mod__(self, args):
+        return self.__class__(self.data % args)
+    def __rmod__(self, format):
+        # Note: the vendored source had 'format % args' here, which raises
+        # NameError ('args' is undefined); format against the wrapped string.
+        return self.__class__(format % self.data)
+
+    # the following methods are defined in alphabetical order:
+    def capitalize(self): return self.__class__(self.data.capitalize())
+    def casefold(self):
+        return self.__class__(self.data.casefold())
+    def center(self, width, *args):
+        return self.__class__(self.data.center(width, *args))
+    def count(self, sub, start=0, end=_sys.maxsize):
+        if isinstance(sub, UserString):
+            sub = sub.data
+        return self.data.count(sub, start, end)
+    def encode(self, encoding=None, errors=None): # XXX improve this?
+        if encoding:
+            if errors:
+                return self.__class__(self.data.encode(encoding, errors))
+            return self.__class__(self.data.encode(encoding))
+        return self.__class__(self.data.encode())
+    def endswith(self, suffix, start=0, end=_sys.maxsize):
+        return self.data.endswith(suffix, start, end)
+    def expandtabs(self, tabsize=8):
+        return self.__class__(self.data.expandtabs(tabsize))
+    def find(self, sub, start=0, end=_sys.maxsize):
+        if isinstance(sub, UserString):
+            sub = sub.data
+        return self.data.find(sub, start, end)
+    def format(self, *args, **kwds):
+        return self.data.format(*args, **kwds)
+    def format_map(self, mapping):
+        return self.data.format_map(mapping)
+    def index(self, sub, start=0, end=_sys.maxsize):
+        return self.data.index(sub, start, end)
+    def isalpha(self): return self.data.isalpha()
+    def isalnum(self): return self.data.isalnum()
+    def isdecimal(self): return self.data.isdecimal()
+    def isdigit(self): return self.data.isdigit()
+    def isidentifier(self): return self.data.isidentifier()
+    def islower(self): return self.data.islower()
+    def isnumeric(self): return self.data.isnumeric()
+    def isprintable(self): return self.data.isprintable()
+    def isspace(self): return self.data.isspace()
+    def istitle(self): return self.data.istitle()
+    def isupper(self): return self.data.isupper()
+    def join(self, seq): return self.data.join(seq)
+    def ljust(self, width, *args):
+        return self.__class__(self.data.ljust(width, *args))
+    def lower(self): return self.__class__(self.data.lower())
+    def lstrip(self, chars=None): return self.__class__(self.data.lstrip(chars))
+    maketrans = str.maketrans
+    def partition(self, sep):
+        return self.data.partition(sep)
+    def replace(self, old, new, maxsplit=-1):
+        if isinstance(old, UserString):
+            old = old.data
+        if isinstance(new, UserString):
+            new = new.data
+        return self.__class__(self.data.replace(old, new, maxsplit))
+    def rfind(self, sub, start=0, end=_sys.maxsize):
+        if isinstance(sub, UserString):
+            sub = sub.data
+        return self.data.rfind(sub, start, end)
+    def rindex(self, sub, start=0,
end=_sys.maxsize): + return self.data.rindex(sub, start, end) + def rjust(self, width, *args): + return self.__class__(self.data.rjust(width, *args)) + def rpartition(self, sep): + return self.data.rpartition(sep) + def rstrip(self, chars=None): + return self.__class__(self.data.rstrip(chars)) + def split(self, sep=None, maxsplit=-1): + return self.data.split(sep, maxsplit) + def rsplit(self, sep=None, maxsplit=-1): + return self.data.rsplit(sep, maxsplit) + def splitlines(self, keepends=False): return self.data.splitlines(keepends) + def startswith(self, prefix, start=0, end=_sys.maxsize): + return self.data.startswith(prefix, start, end) + def strip(self, chars=None): return self.__class__(self.data.strip(chars)) + def swapcase(self): return self.__class__(self.data.swapcase()) + def title(self): return self.__class__(self.data.title()) + def translate(self, *args): + return self.__class__(self.data.translate(*args)) + def upper(self): return self.__class__(self.data.upper()) + def zfill(self, width): return self.__class__(self.data.zfill(width)) diff --git a/typing_extensions/test_data/python-3.6.1/collections/abc.py b/typing_extensions/test_data/python-3.6.1/collections/abc.py new file mode 100644 index 00000000..891600d1 --- /dev/null +++ b/typing_extensions/test_data/python-3.6.1/collections/abc.py @@ -0,0 +1,2 @@ +from _collections_abc import * +from _collections_abc import __all__ diff --git a/typing_extensions/test_data/python-3.6.1/typing.py b/typing_extensions/test_data/python-3.6.1/typing.py new file mode 100644 index 00000000..9a0f4909 --- /dev/null +++ b/typing_extensions/test_data/python-3.6.1/typing.py @@ -0,0 +1,2335 @@ +import abc +from abc import abstractmethod, abstractproperty +import collections +import contextlib +import functools +import re as stdlib_re # Avoid confusion with the re we export. +import sys +import types +try: + import collections.abc as collections_abc +except ImportError: + import collections as collections_abc # Fallback for PY3.2. +try: + from types import SlotWrapperType, MethodWrapperType, MethodDescriptorType +except ImportError: + SlotWrapperType = type(object.__init__) + MethodWrapperType = type(object().__str__) + MethodDescriptorType = type(str.join) + + +# Please keep __all__ alphabetized within each category. +__all__ = [ + # Super-special typing primitives. + 'Any', + 'Callable', + 'ClassVar', + 'Generic', + 'Optional', + 'Tuple', + 'Type', + 'TypeVar', + 'Union', + + # ABCs (from collections.abc). + 'AbstractSet', # collections.abc.Set. + 'GenericMeta', # subclass of abc.ABCMeta and a metaclass + # for 'Generic' and ABCs below. + 'ByteString', + 'Container', + 'Hashable', + 'ItemsView', + 'Iterable', + 'Iterator', + 'KeysView', + 'Mapping', + 'MappingView', + 'MutableMapping', + 'MutableSequence', + 'MutableSet', + 'Sequence', + 'Sized', + 'ValuesView', + # The following are added depending on presence + # of their non-generic counterparts in stdlib: + # Awaitable, + # AsyncIterator, + # AsyncIterable, + # Coroutine, + # Collection, + # ContextManager, + # AsyncGenerator, + + # Structural checks, a.k.a. protocols. + 'Reversible', + 'SupportsAbs', + 'SupportsFloat', + 'SupportsInt', + 'SupportsRound', + + # Concrete collection types. + 'Counter', + 'Deque', + 'Dict', + 'DefaultDict', + 'List', + 'Set', + 'FrozenSet', + 'NamedTuple', # Not really a type. + 'Generator', + + # One-off things. 
+ 'AnyStr', + 'cast', + 'get_type_hints', + 'NewType', + 'no_type_check', + 'no_type_check_decorator', + 'overload', + 'Text', + 'TYPE_CHECKING', +] + +# The pseudo-submodules 're' and 'io' are part of the public +# namespace, but excluded from __all__ because they might stomp on +# legitimate imports of those modules. + + +def _qualname(x): + if sys.version_info[:2] >= (3, 3): + return x.__qualname__ + else: + # Fall back to just name. + return x.__name__ + + +def _trim_name(nm): + whitelist = ('_TypeAlias', '_ForwardRef', '_TypingBase', '_FinalTypingBase') + if nm.startswith('_') and nm not in whitelist: + nm = nm[1:] + return nm + + +class TypingMeta(type): + """Metaclass for most types defined in typing module + (not a part of public API). + + This overrides __new__() to require an extra keyword parameter + '_root', which serves as a guard against naive subclassing of the + typing classes. Any legitimate class defined using a metaclass + derived from TypingMeta must pass _root=True. + + This also defines a dummy constructor (all the work for most typing + constructs is done in __new__) and a nicer repr(). + """ + + _is_protocol = False + + def __new__(cls, name, bases, namespace, *, _root=False): + if not _root: + raise TypeError("Cannot subclass %s" % + (', '.join(map(_type_repr, bases)) or '()')) + return super().__new__(cls, name, bases, namespace) + + def __init__(self, *args, **kwds): + pass + + def _eval_type(self, globalns, localns): + """Override this in subclasses to interpret forward references. + + For example, List['C'] is internally stored as + List[_ForwardRef('C')], which should evaluate to List[C], + where C is an object found in globalns or localns (searching + localns first, of course). + """ + return self + + def _get_type_vars(self, tvars): + pass + + def __repr__(self): + qname = _trim_name(_qualname(self)) + return '%s.%s' % (self.__module__, qname) + + +class _TypingBase(metaclass=TypingMeta, _root=True): + """Internal indicator of special typing constructs.""" + + __slots__ = ('__weakref__',) + + def __init__(self, *args, **kwds): + pass + + def __new__(cls, *args, **kwds): + """Constructor. + + This only exists to give a better error message in case + someone tries to subclass a special typing object (not a good idea). + """ + if (len(args) == 3 and + isinstance(args[0], str) and + isinstance(args[1], tuple)): + # Close enough. + raise TypeError("Cannot subclass %r" % cls) + return super().__new__(cls) + + # Things that are not classes also need these. + def _eval_type(self, globalns, localns): + return self + + def _get_type_vars(self, tvars): + pass + + def __repr__(self): + cls = type(self) + qname = _trim_name(_qualname(cls)) + return '%s.%s' % (cls.__module__, qname) + + def __call__(self, *args, **kwds): + raise TypeError("Cannot instantiate %r" % type(self)) + + +class _FinalTypingBase(_TypingBase, _root=True): + """Internal mix-in class to prevent instantiation. + + Prevents instantiation unless _root=True is given in class call. + It is used to create pseudo-singleton instances Any, Union, Optional, etc. 
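+
+    For example, ``Any`` is created below as ``_Any(_root=True)``;
+    calling ``_Any()`` without ``_root=True`` raises ``TypeError``.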
+    """
+
+    __slots__ = ()
+
+    def __new__(cls, *args, _root=False, **kwds):
+        self = super().__new__(cls, *args, **kwds)
+        if _root is True:
+            return self
+        raise TypeError("Cannot instantiate %r" % cls)
+
+    def __reduce__(self):
+        return _trim_name(type(self).__name__)
+
+
+class _ForwardRef(_TypingBase, _root=True):
+    """Internal wrapper to hold a forward reference."""
+
+    __slots__ = ('__forward_arg__', '__forward_code__',
+                 '__forward_evaluated__', '__forward_value__')
+
+    def __init__(self, arg):
+        super().__init__(arg)
+        if not isinstance(arg, str):
+            raise TypeError('Forward reference must be a string -- got %r' % (arg,))
+        try:
+            code = compile(arg, '<string>', 'eval')
+        except SyntaxError:
+            raise SyntaxError('Forward reference must be an expression -- got %r' %
+                              (arg,))
+        self.__forward_arg__ = arg
+        self.__forward_code__ = code
+        self.__forward_evaluated__ = False
+        self.__forward_value__ = None
+
+    def _eval_type(self, globalns, localns):
+        if not self.__forward_evaluated__ or localns is not globalns:
+            if globalns is None and localns is None:
+                globalns = localns = {}
+            elif globalns is None:
+                globalns = localns
+            elif localns is None:
+                localns = globalns
+            self.__forward_value__ = _type_check(
+                eval(self.__forward_code__, globalns, localns),
+                "Forward references must evaluate to types.")
+            self.__forward_evaluated__ = True
+        return self.__forward_value__
+
+    def __eq__(self, other):
+        if not isinstance(other, _ForwardRef):
+            return NotImplemented
+        return (self.__forward_arg__ == other.__forward_arg__ and
+                self.__forward_value__ == other.__forward_value__)
+
+    def __hash__(self):
+        return hash((self.__forward_arg__, self.__forward_value__))
+
+    def __instancecheck__(self, obj):
+        raise TypeError("Forward references cannot be used with isinstance().")
+
+    def __subclasscheck__(self, cls):
+        raise TypeError("Forward references cannot be used with issubclass().")
+
+    def __repr__(self):
+        return '_ForwardRef(%r)' % (self.__forward_arg__,)
+
+
+class _TypeAlias(_TypingBase, _root=True):
+    """Internal helper class for defining generic variants of concrete types.
+
+    Note that this is not a type; let's call it a pseudo-type.  It cannot
+    be used in instance and subclass checks in parameterized form, i.e.
+    ``isinstance(42, Match[str])`` raises ``TypeError`` instead of returning
+    ``False``.
+    """
+
+    __slots__ = ('name', 'type_var', 'impl_type', 'type_checker')
+
+    def __init__(self, name, type_var, impl_type, type_checker):
+        """Initializer.
+
+        Args:
+            name: The name, e.g. 'Pattern'.
+            type_var: The type parameter, e.g. AnyStr, or the
+                specific type, e.g. str.
+            impl_type: The implementation type.
+            type_checker: Function that takes an impl_type instance
+                and returns a value that should be a type_var instance.
+        """
+        assert isinstance(name, str), repr(name)
+        assert isinstance(impl_type, type), repr(impl_type)
+        assert not isinstance(impl_type, TypingMeta), repr(impl_type)
+        assert isinstance(type_var, (type, _TypingBase)), repr(type_var)
+        self.name = name
+        self.type_var = type_var
+        self.impl_type = impl_type
+        self.type_checker = type_checker
+
+    def __repr__(self):
+        return "%s[%s]" % (self.name, _type_repr(self.type_var))
+
+    def __getitem__(self, parameter):
+        if not isinstance(self.type_var, TypeVar):
+            raise TypeError("%s cannot be further parameterized." % self)
+        if self.type_var.__constraints__ and isinstance(parameter, type):
+            if not issubclass(parameter, self.type_var.__constraints__):
+                raise TypeError("%s is not a valid substitution for %s."
% + (parameter, self.type_var)) + if isinstance(parameter, TypeVar) and parameter is not self.type_var: + raise TypeError("%s cannot be re-parameterized." % self) + return self.__class__(self.name, parameter, + self.impl_type, self.type_checker) + + def __eq__(self, other): + if not isinstance(other, _TypeAlias): + return NotImplemented + return self.name == other.name and self.type_var == other.type_var + + def __hash__(self): + return hash((self.name, self.type_var)) + + def __instancecheck__(self, obj): + if not isinstance(self.type_var, TypeVar): + raise TypeError("Parameterized type aliases cannot be used " + "with isinstance().") + return isinstance(obj, self.impl_type) + + def __subclasscheck__(self, cls): + if not isinstance(self.type_var, TypeVar): + raise TypeError("Parameterized type aliases cannot be used " + "with issubclass().") + return issubclass(cls, self.impl_type) + + +def _get_type_vars(types, tvars): + for t in types: + if isinstance(t, TypingMeta) or isinstance(t, _TypingBase): + t._get_type_vars(tvars) + + +def _type_vars(types): + tvars = [] + _get_type_vars(types, tvars) + return tuple(tvars) + + +def _eval_type(t, globalns, localns): + if isinstance(t, TypingMeta) or isinstance(t, _TypingBase): + return t._eval_type(globalns, localns) + return t + + +def _type_check(arg, msg): + """Check that the argument is a type, and return it (internal helper). + + As a special case, accept None and return type(None) instead. + Also, _TypeAlias instances (e.g. Match, Pattern) are acceptable. + + The msg argument is a human-readable error message, e.g. + + "Union[arg, ...]: arg should be a type." + + We append the repr() of the actual value (truncated to 100 chars). + """ + if arg is None: + return type(None) + if isinstance(arg, str): + arg = _ForwardRef(arg) + if ( + isinstance(arg, _TypingBase) and type(arg).__name__ == '_ClassVar' or + not isinstance(arg, (type, _TypingBase)) and not callable(arg) + ): + raise TypeError(msg + " Got %.100r." % (arg,)) + # Bare Union etc. are not valid as type arguments + if ( + type(arg).__name__ in ('_Union', '_Optional') and + not getattr(arg, '__origin__', None) or + isinstance(arg, TypingMeta) and _gorg(arg) in (Generic, _Protocol) + ): + raise TypeError("Plain %s is not valid as type argument" % arg) + return arg + + +def _type_repr(obj): + """Return the repr() of an object, special-casing types (internal helper). + + If obj is a type, we return a shorter version than the default + type.__repr__, based on the module and qualified name, which is + typically enough to uniquely identify a type. For everything + else, we fall back on repr(obj). + """ + if isinstance(obj, type) and not isinstance(obj, TypingMeta): + if obj.__module__ == 'builtins': + return _qualname(obj) + return '%s.%s' % (obj.__module__, _qualname(obj)) + if obj is ...: + return('...') + if isinstance(obj, types.FunctionType): + return obj.__name__ + return repr(obj) + + +class _Any(_FinalTypingBase, _root=True): + """Special type indicating an unconstrained type. + + - Any is compatible with every type. + - Any assumed to have all methods. + - All values assumed to be instances of Any. + + Note that all the above statements are true from the point of view of + static type checkers. At runtime, Any should not be used with instance + or class checks. 
+    """
+
+    __slots__ = ()
+
+    def __instancecheck__(self, obj):
+        raise TypeError("Any cannot be used with isinstance().")
+
+    def __subclasscheck__(self, cls):
+        raise TypeError("Any cannot be used with issubclass().")
+
+
+Any = _Any(_root=True)
+
+
+class TypeVar(_TypingBase, _root=True):
+    """Type variable.
+
+    Usage::
+
+      T = TypeVar('T')  # Can be anything
+      A = TypeVar('A', str, bytes)  # Must be str or bytes
+
+    Type variables exist primarily for the benefit of static type
+    checkers.  They serve as the parameters for generic types as well
+    as for generic function definitions.  See class Generic for more
+    information on generic types.  Generic functions work as follows:
+
+      def repeat(x: T, n: int) -> List[T]:
+          '''Return a list containing n references to x.'''
+          return [x]*n
+
+      def longest(x: A, y: A) -> A:
+          '''Return the longest of two strings.'''
+          return x if len(x) >= len(y) else y
+
+    The latter example's signature is essentially the overloading
+    of (str, str) -> str and (bytes, bytes) -> bytes.  Also note
+    that if the arguments are instances of some subclass of str,
+    the return type is still plain str.
+
+    At runtime, isinstance(x, T) and issubclass(C, T) will raise TypeError.
+
+    Type variables defined with covariant=True or contravariant=True
+    can be used to declare covariant or contravariant generic types.
+    See PEP 484 for more details. By default generic types are invariant
+    in all type variables.
+
+    Type variables can be introspected. e.g.:
+
+      T.__name__ == 'T'
+      T.__constraints__ == ()
+      T.__covariant__ == False
+      T.__contravariant__ == False
+      A.__constraints__ == (str, bytes)
+    """
+
+    __slots__ = ('__name__', '__bound__', '__constraints__',
+                 '__covariant__', '__contravariant__')
+
+    def __init__(self, name, *constraints, bound=None,
+                 covariant=False, contravariant=False):
+        super().__init__(name, *constraints, bound=bound,
+                         covariant=covariant, contravariant=contravariant)
+        self.__name__ = name
+        if covariant and contravariant:
+            raise ValueError("Bivariant types are not supported.")
+        self.__covariant__ = bool(covariant)
+        self.__contravariant__ = bool(contravariant)
+        if constraints and bound is not None:
+            raise TypeError("Constraints cannot be combined with bound=...")
+        if constraints and len(constraints) == 1:
+            raise TypeError("A single constraint is not allowed")
+        msg = "TypeVar(name, constraint, ...): constraints must be types."
+        self.__constraints__ = tuple(_type_check(t, msg) for t in constraints)
+        if bound:
+            self.__bound__ = _type_check(bound, "Bound must be a type.")
+        else:
+            self.__bound__ = None
+
+    def _get_type_vars(self, tvars):
+        if self not in tvars:
+            tvars.append(self)
+
+    def __repr__(self):
+        if self.__covariant__:
+            prefix = '+'
+        elif self.__contravariant__:
+            prefix = '-'
+        else:
+            prefix = '~'
+        return prefix + self.__name__
+
+    def __instancecheck__(self, instance):
+        raise TypeError("Type variables cannot be used with isinstance().")
+
+    def __subclasscheck__(self, cls):
+        raise TypeError("Type variables cannot be used with issubclass().")
+
+
+# Some unconstrained type variables.  These are used by the container types.
+# (These are not for export.)
+T = TypeVar('T')  # Any type.
+KT = TypeVar('KT')  # Key type.
+VT = TypeVar('VT')  # Value type.
+T_co = TypeVar('T_co', covariant=True)  # Any type covariant containers.
+V_co = TypeVar('V_co', covariant=True)  # Any type covariant containers.
+VT_co = TypeVar('VT_co', covariant=True)  # Value type covariant containers.
+T_contra = TypeVar('T_contra', contravariant=True)  # Ditto contravariant.
+
+# A useful type variable with constraints.  This represents string types.
+# (This one *is* for export!)
+AnyStr = TypeVar('AnyStr', bytes, str)
+
+
+def _replace_arg(arg, tvars, args):
+    """An internal helper function: replace arg if it is a type variable
+    found in tvars with corresponding substitution from args or
+    with corresponding substitution sub-tree if arg is a generic type.
+    """
+
+    if tvars is None:
+        tvars = []
+    if hasattr(arg, '_subs_tree') and isinstance(arg, (GenericMeta, _TypingBase)):
+        return arg._subs_tree(tvars, args)
+    if isinstance(arg, TypeVar):
+        for i, tvar in enumerate(tvars):
+            if arg == tvar:
+                return args[i]
+    return arg
+
+
+# Special typing constructs Union, Optional, Generic, Callable and Tuple
+# use three special attributes for internal bookkeeping of generic types:
+# * __parameters__ is a tuple of unique free type parameters of a generic
+#   type, for example, Dict[T, T].__parameters__ == (T,);
+# * __origin__ keeps a reference to a type that was subscripted,
+#   e.g., Union[T, int].__origin__ == Union;
+# * __args__ is a tuple of all arguments used in subscripting,
+#   e.g., Dict[T, int].__args__ == (T, int).
+
+
+def _subs_tree(cls, tvars=None, args=None):
+    """An internal helper function: calculate substitution tree
+    for generic cls after replacing its type parameters with
+    substitutions in tvars -> args (if any).
+    Repeat the same following __origin__'s.
+
+    Return a list of arguments with all possible substitutions
+    performed.  Arguments that are generic classes themselves are represented
+    as tuples (so that no new classes are created by this function).
+    For example: _subs_tree(List[Tuple[int, T]][str]) == [(Tuple, int, str)]
+    """
+
+    if cls.__origin__ is None:
+        return cls
+    # Make a chain of origins (i.e. cls -> cls.__origin__)
+    current = cls.__origin__
+    orig_chain = []
+    while current.__origin__ is not None:
+        orig_chain.append(current)
+        current = current.__origin__
+    # Replace type variables in __args__ if asked ...
+    tree_args = []
+    for arg in cls.__args__:
+        tree_args.append(_replace_arg(arg, tvars, args))
+    # ... then continue replacing down the origin chain.
+    for ocls in orig_chain:
+        new_tree_args = []
+        for arg in ocls.__args__:
+            new_tree_args.append(_replace_arg(arg, ocls.__parameters__, tree_args))
+        tree_args = new_tree_args
+    return tree_args
+
+
+def _remove_dups_flatten(parameters):
+    """An internal helper for Union creation and substitution: flatten Union's
+    among parameters, then remove duplicates and strict subclasses.
+    """
+
+    # Flatten out Union[Union[...], ...].
+    params = []
+    for p in parameters:
+        if isinstance(p, _Union) and p.__origin__ is Union:
+            params.extend(p.__args__)
+        elif isinstance(p, tuple) and len(p) > 0 and p[0] is Union:
+            params.extend(p[1:])
+        else:
+            params.append(p)
+    # Weed out strict duplicates, preserving the first of each occurrence.
+    all_params = set(params)
+    if len(all_params) < len(params):
+        new_params = []
+        for t in params:
+            if t in all_params:
+                new_params.append(t)
+                all_params.remove(t)
+        params = new_params
+        assert not all_params, all_params
+    # Weed out subclasses.
+    # E.g. Union[int, Employee, Manager] == Union[int, Employee].
+    # If object is present it will be sole survivor among proper classes.
+    # Never discard type variables.
+    # (In particular, Union[str, AnyStr] != AnyStr.)
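+    # Example of the weeding below, assuming only the builtin bool < int
+    # subclass relation:
+    #   _remove_dups_flatten((int, bool, str)) -> (int, str)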
+    all_params = set(params)
+    for t1 in params:
+        if not isinstance(t1, type):
+            continue
+        if any(isinstance(t2, type) and issubclass(t1, t2)
+               for t2 in all_params - {t1}
+               if not (isinstance(t2, GenericMeta) and
+                       t2.__origin__ is not None)):
+            all_params.remove(t1)
+    return tuple(t for t in params if t in all_params)
+
+
+def _check_generic(cls, parameters):
+    # Check correct count for parameters of a generic cls (internal helper).
+    if not cls.__parameters__:
+        raise TypeError("%s is not a generic class" % repr(cls))
+    alen = len(parameters)
+    elen = len(cls.__parameters__)
+    if alen != elen:
+        raise TypeError("Too %s parameters for %s; actual %s, expected %s" %
+                        ("many" if alen > elen else "few", repr(cls), alen, elen))
+
+
+_cleanups = []
+
+
+def _tp_cache(func):
+    """Internal wrapper caching __getitem__ of generic types with a fallback to
+    original function for non-hashable arguments.
+    """
+
+    cached = functools.lru_cache()(func)
+    _cleanups.append(cached.cache_clear)
+
+    @functools.wraps(func)
+    def inner(*args, **kwds):
+        try:
+            return cached(*args, **kwds)
+        except TypeError:
+            pass  # All real errors (not unhashable args) are raised below.
+        return func(*args, **kwds)
+    return inner
+
+
+class _Union(_FinalTypingBase, _root=True):
+    """Union type; Union[X, Y] means either X or Y.
+
+    To define a union, use e.g. Union[int, str].  Details:
+
+    - The arguments must be types and there must be at least one.
+
+    - None as an argument is a special case and is replaced by
+      type(None).
+
+    - Unions of unions are flattened, e.g.::
+
+        Union[Union[int, str], float] == Union[int, str, float]
+
+    - Unions of a single argument vanish, e.g.::
+
+        Union[int] == int  # The constructor actually returns int
+
+    - Redundant arguments are skipped, e.g.::
+
+        Union[int, str, int] == Union[int, str]
+
+    - When comparing unions, the argument order is ignored, e.g.::
+
+        Union[int, str] == Union[str, int]
+
+    - When two arguments have a subclass relationship, the least
+      derived argument is kept, e.g.::
+
+        class Employee: pass
+        class Manager(Employee): pass
+        Union[int, Employee, Manager] == Union[int, Employee]
+        Union[Manager, int, Employee] == Union[int, Employee]
+        Union[Employee, Manager] == Employee
+
+    - Similar for object::
+
+        Union[int, object] == object
+
+    - You cannot subclass or instantiate a union.
+
+    - You can use Optional[X] as a shorthand for Union[X, None].
+    """
+
+    __slots__ = ('__parameters__', '__args__', '__origin__', '__tree_hash__')
+
+    def __new__(cls, parameters=None, origin=None, *args, _root=False):
+        self = super().__new__(cls, parameters, origin, *args, _root=_root)
+        if origin is None:
+            self.__parameters__ = None
+            self.__args__ = None
+            self.__origin__ = None
+            self.__tree_hash__ = hash(frozenset(('Union',)))
+            return self
+        if not isinstance(parameters, tuple):
+            raise TypeError("Expected parameters=<tuple>")
+        if origin is Union:
+            parameters = _remove_dups_flatten(parameters)
+            # It's not a union if there's only one type left.
+            if len(parameters) == 1:
+                return parameters[0]
+        self.__parameters__ = _type_vars(parameters)
+        self.__args__ = parameters
+        self.__origin__ = origin
+        # Pre-calculate the __hash__ on instantiation.
+        # This improves speed for complex substitutions.
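+        # (A frozenset is used here presumably so that the hash is
+        # insensitive to argument order, matching the order-insensitive
+        # comparison semantics documented for Union above.)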
+ subs_tree = self._subs_tree() + if isinstance(subs_tree, tuple): + self.__tree_hash__ = hash(frozenset(subs_tree)) + else: + self.__tree_hash__ = hash(subs_tree) + return self + + def _eval_type(self, globalns, localns): + if self.__args__ is None: + return self + ev_args = tuple(_eval_type(t, globalns, localns) for t in self.__args__) + ev_origin = _eval_type(self.__origin__, globalns, localns) + if ev_args == self.__args__ and ev_origin == self.__origin__: + # Everything is already evaluated. + return self + return self.__class__(ev_args, ev_origin, _root=True) + + def _get_type_vars(self, tvars): + if self.__origin__ and self.__parameters__: + _get_type_vars(self.__parameters__, tvars) + + def __repr__(self): + if self.__origin__ is None: + return super().__repr__() + tree = self._subs_tree() + if not isinstance(tree, tuple): + return repr(tree) + return tree[0]._tree_repr(tree) + + def _tree_repr(self, tree): + arg_list = [] + for arg in tree[1:]: + if not isinstance(arg, tuple): + arg_list.append(_type_repr(arg)) + else: + arg_list.append(arg[0]._tree_repr(arg)) + return super().__repr__() + '[%s]' % ', '.join(arg_list) + + @_tp_cache + def __getitem__(self, parameters): + if parameters == (): + raise TypeError("Cannot take a Union of no types.") + if not isinstance(parameters, tuple): + parameters = (parameters,) + if self.__origin__ is None: + msg = "Union[arg, ...]: each arg must be a type." + else: + msg = "Parameters to generic types must be types." + parameters = tuple(_type_check(p, msg) for p in parameters) + if self is not Union: + _check_generic(self, parameters) + return self.__class__(parameters, origin=self, _root=True) + + def _subs_tree(self, tvars=None, args=None): + if self is Union: + return Union # Nothing to substitute + tree_args = _subs_tree(self, tvars, args) + tree_args = _remove_dups_flatten(tree_args) + if len(tree_args) == 1: + return tree_args[0] # Union of a single type is that type + return (Union,) + tree_args + + def __eq__(self, other): + if isinstance(other, _Union): + return self.__tree_hash__ == other.__tree_hash__ + elif self is not Union: + return self._subs_tree() == other + else: + return self is other + + def __hash__(self): + return self.__tree_hash__ + + def __instancecheck__(self, obj): + raise TypeError("Unions cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + raise TypeError("Unions cannot be used with issubclass().") + + +Union = _Union(_root=True) + + +class _Optional(_FinalTypingBase, _root=True): + """Optional type. + + Optional[X] is equivalent to Union[X, None]. + """ + + __slots__ = () + + @_tp_cache + def __getitem__(self, arg): + arg = _type_check(arg, "Optional[t] requires a single type.") + return Union[arg, type(None)] + + +Optional = _Optional(_root=True) + + +def _gorg(a): + """Return the farthest origin of a generic class (internal helper).""" + assert isinstance(a, GenericMeta) + while a.__origin__ is not None: + a = a.__origin__ + return a + + +def _geqv(a, b): + """Return whether two generic classes are equivalent (internal helper). + + The intention is to consider generic class X and any of its + parameterized forms (X[T], X[int], etc.) as equivalent. + + However, X is not equivalent to a subclass of X. + + The relation is reflexive, symmetric and transitive. + """ + assert isinstance(a, GenericMeta) and isinstance(b, GenericMeta) + # Reduce each to its origin. + return _gorg(a) is _gorg(b) + + +def _next_in_mro(cls): + """Helper for Generic.__new__. 
+ + Returns the class after the last occurrence of Generic or + Generic[...] in cls.__mro__. + """ + next_in_mro = object + # Look for the last occurrence of Generic or Generic[...]. + for i, c in enumerate(cls.__mro__[:-1]): + if isinstance(c, GenericMeta) and _gorg(c) is Generic: + next_in_mro = cls.__mro__[i + 1] + return next_in_mro + + +def _make_subclasshook(cls): + """Construct a __subclasshook__ callable that incorporates + the associated __extra__ class in subclass checks performed + against cls. + """ + if isinstance(cls.__extra__, abc.ABCMeta): + # The logic mirrors that of ABCMeta.__subclasscheck__. + # Registered classes need not be checked here because + # cls and its extra share the same _abc_registry. + def __extrahook__(subclass): + res = cls.__extra__.__subclasshook__(subclass) + if res is not NotImplemented: + return res + if cls.__extra__ in subclass.__mro__: + return True + for scls in cls.__extra__.__subclasses__(): + if isinstance(scls, GenericMeta): + continue + if issubclass(subclass, scls): + return True + return NotImplemented + else: + # For non-ABC extras we'll just call issubclass(). + def __extrahook__(subclass): + if cls.__extra__ and issubclass(subclass, cls.__extra__): + return True + return NotImplemented + return __extrahook__ + + +def _no_slots_copy(dct): + """Internal helper: copy class __dict__ and clean slots class variables. + (They will be re-created if necessary by normal class machinery.) + """ + dict_copy = dict(dct) + if '__slots__' in dict_copy: + for slot in dict_copy['__slots__']: + dict_copy.pop(slot, None) + return dict_copy + + +class GenericMeta(TypingMeta, abc.ABCMeta): + """Metaclass for generic types. + + This is a metaclass for typing.Generic and generic ABCs defined in + typing module. User defined subclasses of GenericMeta can override + __new__ and invoke super().__new__. Note that GenericMeta.__new__ + has strict rules on what is allowed in its bases argument: + * plain Generic is disallowed in bases; + * Generic[...] should appear in bases at most once; + * if Generic[...] is present, then it should list all type variables + that appear in other bases. + In addition, type of all generic bases is erased, e.g., C[int] is + stripped to plain C. + """ + + def __new__(cls, name, bases, namespace, + tvars=None, args=None, origin=None, extra=None, orig_bases=None): + """Create a new generic class. GenericMeta.__new__ accepts + keyword arguments that are used for internal bookkeeping, therefore + an override should pass unused keyword arguments to super(). + """ + if tvars is not None: + # Called from __getitem__() below. + assert origin is not None + assert all(isinstance(t, TypeVar) for t in tvars), tvars + else: + # Called from class statement. + assert tvars is None, tvars + assert args is None, args + assert origin is None, origin + + # Get the full set of tvars from the bases. + tvars = _type_vars(bases) + # Look for Generic[T1, ..., Tn]. + # If found, tvars must be a subset of it. + # If not found, tvars is it. + # Also check for and reject plain Generic, + # and reject multiple Generic[...]. + gvars = None + for base in bases: + if base is Generic: + raise TypeError("Cannot inherit from plain Generic") + if (isinstance(base, GenericMeta) and + base.__origin__ is Generic): + if gvars is not None: + raise TypeError( + "Cannot inherit from Generic[...] 
multiple types.") + gvars = base.__parameters__ + if gvars is None: + gvars = tvars + else: + tvarset = set(tvars) + gvarset = set(gvars) + if not tvarset <= gvarset: + raise TypeError( + "Some type variables (%s) " + "are not listed in Generic[%s]" % + (", ".join(str(t) for t in tvars if t not in gvarset), + ", ".join(str(g) for g in gvars))) + tvars = gvars + + initial_bases = bases + if extra is not None and type(extra) is abc.ABCMeta and extra not in bases: + bases = (extra,) + bases + bases = tuple(_gorg(b) if isinstance(b, GenericMeta) else b for b in bases) + + # remove bare Generic from bases if there are other generic bases + if any(isinstance(b, GenericMeta) and b is not Generic for b in bases): + bases = tuple(b for b in bases if b is not Generic) + namespace.update({'__origin__': origin, '__extra__': extra}) + self = super().__new__(cls, name, bases, namespace, _root=True) + + self.__parameters__ = tvars + # Be prepared that GenericMeta will be subclassed by TupleMeta + # and CallableMeta, those two allow ..., (), or [] in __args___. + self.__args__ = tuple(... if a is _TypingEllipsis else + () if a is _TypingEmpty else + a for a in args) if args else None + # Speed hack (https://github.com/python/typing/issues/196). + self.__next_in_mro__ = _next_in_mro(self) + # Preserve base classes on subclassing (__bases__ are type erased now). + if orig_bases is None: + self.__orig_bases__ = initial_bases + + # This allows unparameterized generic collections to be used + # with issubclass() and isinstance() in the same way as their + # collections.abc counterparts (e.g., isinstance([], Iterable)). + if ( + '__subclasshook__' not in namespace and extra or + # allow overriding + getattr(self.__subclasshook__, '__name__', '') == '__extrahook__' + ): + self.__subclasshook__ = _make_subclasshook(self) + if isinstance(extra, abc.ABCMeta): + self._abc_registry = extra._abc_registry + self._abc_cache = extra._abc_cache + elif origin is not None: + self._abc_registry = origin._abc_registry + self._abc_cache = origin._abc_cache + + if origin and hasattr(origin, '__qualname__'): # Fix for Python 3.2. + self.__qualname__ = origin.__qualname__ + self.__tree_hash__ = (hash(self._subs_tree()) if origin else + super(GenericMeta, self).__hash__()) + return self + + # _abc_negative_cache and _abc_negative_cache_version + # realised as descriptors, since GenClass[t1, t2, ...] always + # share subclass info with GenClass. + # This is an important memory optimization. 
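+    # Editor's note (illustrative, not in the upstream source): for a
+    # hypothetical generic class C, the descriptors below redirect every
+    # read to _gorg(C) or to C.__extra__, and the setters deliberately drop
+    # writes on subscripted forms, so C, C[int] and C[str] all end up
+    # sharing a single negative cache.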
+ @property + def _abc_negative_cache(self): + if isinstance(self.__extra__, abc.ABCMeta): + return self.__extra__._abc_negative_cache + return _gorg(self)._abc_generic_negative_cache + + @_abc_negative_cache.setter + def _abc_negative_cache(self, value): + if self.__origin__ is None: + if isinstance(self.__extra__, abc.ABCMeta): + self.__extra__._abc_negative_cache = value + else: + self._abc_generic_negative_cache = value + + @property + def _abc_negative_cache_version(self): + if isinstance(self.__extra__, abc.ABCMeta): + return self.__extra__._abc_negative_cache_version + return _gorg(self)._abc_generic_negative_cache_version + + @_abc_negative_cache_version.setter + def _abc_negative_cache_version(self, value): + if self.__origin__ is None: + if isinstance(self.__extra__, abc.ABCMeta): + self.__extra__._abc_negative_cache_version = value + else: + self._abc_generic_negative_cache_version = value + + def _get_type_vars(self, tvars): + if self.__origin__ and self.__parameters__: + _get_type_vars(self.__parameters__, tvars) + + def _eval_type(self, globalns, localns): + ev_origin = (self.__origin__._eval_type(globalns, localns) + if self.__origin__ else None) + ev_args = tuple(_eval_type(a, globalns, localns) for a + in self.__args__) if self.__args__ else None + if ev_origin == self.__origin__ and ev_args == self.__args__: + return self + return self.__class__(self.__name__, + self.__bases__, + _no_slots_copy(self.__dict__), + tvars=_type_vars(ev_args) if ev_args else None, + args=ev_args, + origin=ev_origin, + extra=self.__extra__, + orig_bases=self.__orig_bases__) + + def __repr__(self): + if self.__origin__ is None: + return super().__repr__() + return self._tree_repr(self._subs_tree()) + + def _tree_repr(self, tree): + arg_list = [] + for arg in tree[1:]: + if arg == (): + arg_list.append('()') + elif not isinstance(arg, tuple): + arg_list.append(_type_repr(arg)) + else: + arg_list.append(arg[0]._tree_repr(arg)) + return super().__repr__() + '[%s]' % ', '.join(arg_list) + + def _subs_tree(self, tvars=None, args=None): + if self.__origin__ is None: + return self + tree_args = _subs_tree(self, tvars, args) + return (_gorg(self),) + tuple(tree_args) + + def __eq__(self, other): + if not isinstance(other, GenericMeta): + return NotImplemented + if self.__origin__ is None or other.__origin__ is None: + return self is other + return self.__tree_hash__ == other.__tree_hash__ + + def __hash__(self): + return self.__tree_hash__ + + @_tp_cache + def __getitem__(self, params): + if not isinstance(params, tuple): + params = (params,) + if not params and not _gorg(self) is Tuple: + raise TypeError( + "Parameter list to %s[...] cannot be empty" % _qualname(self)) + msg = "Parameters to generic types must be types." + params = tuple(_type_check(p, msg) for p in params) + if self is Generic: + # Generic can only be subscripted with unique type variables. + if not all(isinstance(p, TypeVar) for p in params): + raise TypeError( + "Parameters to Generic[...] must all be type variables") + if len(set(params)) != len(params): + raise TypeError( + "Parameters to Generic[...] must all be unique") + tvars = params + args = params + elif self in (Tuple, Callable): + tvars = _type_vars(params) + args = params + elif self is _Protocol: + # _Protocol is internal, don't check anything. + tvars = params + args = params + elif self.__origin__ in (Generic, _Protocol): + # Can't subscript Generic[...] or _Protocol[...]. 
+ raise TypeError("Cannot subscript already-subscripted %s" % + repr(self)) + else: + # Subscripting a regular Generic subclass. + _check_generic(self, params) + tvars = _type_vars(params) + args = params + + prepend = (self,) if self.__origin__ is None else () + return self.__class__(self.__name__, + prepend + self.__bases__, + _no_slots_copy(self.__dict__), + tvars=tvars, + args=args, + origin=self, + extra=self.__extra__, + orig_bases=self.__orig_bases__) + + def __subclasscheck__(self, cls): + if self.__origin__ is not None: + if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools']: + raise TypeError("Parameterized generics cannot be used with class " + "or instance checks") + return False + if self is Generic: + raise TypeError("Class %r cannot be used with class " + "or instance checks" % self) + return super().__subclasscheck__(cls) + + def __instancecheck__(self, instance): + # Since we extend ABC.__subclasscheck__ and + # ABC.__instancecheck__ inlines the cache checking done by the + # latter, we must extend __instancecheck__ too. For simplicity + # we just skip the cache check -- instance checks for generic + # classes are supposed to be rare anyways. + return issubclass(instance.__class__, self) + + def __copy__(self): + return self.__class__(self.__name__, self.__bases__, + _no_slots_copy(self.__dict__), + self.__parameters__, self.__args__, self.__origin__, + self.__extra__, self.__orig_bases__) + + def __setattr__(self, attr, value): + # We consider all the subscripted genrics as proxies for original class + if ( + attr.startswith('__') and attr.endswith('__') or + attr.startswith('_abc_') + ): + super(GenericMeta, self).__setattr__(attr, value) + else: + super(GenericMeta, _gorg(self)).__setattr__(attr, value) + + +# Prevent checks for Generic to crash when defining Generic. +Generic = None + + +def _generic_new(base_cls, cls, *args, **kwds): + # Assure type is erased on instantiation, + # but attempt to store it in __orig_class__ + if cls.__origin__ is None: + return base_cls.__new__(cls) + else: + origin = _gorg(cls) + obj = base_cls.__new__(origin) + try: + obj.__orig_class__ = cls + except AttributeError: + pass + obj.__init__(*args, **kwds) + return obj + + +class Generic(metaclass=GenericMeta): + """Abstract base class for generic types. + + A generic type is typically declared by inheriting from + this class parameterized with one or more type variables. + For example, a generic mapping type might be defined as:: + + class Mapping(Generic[KT, VT]): + def __getitem__(self, key: KT) -> VT: + ... + # Etc. + + This class can then be used as follows:: + + def lookup_name(mapping: Mapping[KT, VT], key: KT, default: VT) -> VT: + try: + return mapping[key] + except KeyError: + return default + """ + + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, Generic): + raise TypeError("Type Generic cannot be instantiated; " + "it can be used only as a base class") + return _generic_new(cls.__next_in_mro__, cls, *args, **kwds) + + +class _TypingEmpty: + """Internal placeholder for () or []. Used by TupleMeta and CallableMeta + to allow empty list/tuple in specific places, without allowing them + to sneak in where prohibited. + """ + + +class _TypingEllipsis: + """Internal placeholder for ... 
(ellipsis).""" + + +class TupleMeta(GenericMeta): + """Metaclass for Tuple (internal).""" + + @_tp_cache + def __getitem__(self, parameters): + if self.__origin__ is not None or not _geqv(self, Tuple): + # Normal generic rules apply if this is not the first subscription + # or a subscription of a subclass. + return super().__getitem__(parameters) + if parameters == (): + return super().__getitem__((_TypingEmpty,)) + if not isinstance(parameters, tuple): + parameters = (parameters,) + if len(parameters) == 2 and parameters[1] is ...: + msg = "Tuple[t, ...]: t must be a type." + p = _type_check(parameters[0], msg) + return super().__getitem__((p, _TypingEllipsis)) + msg = "Tuple[t0, t1, ...]: each t must be a type." + parameters = tuple(_type_check(p, msg) for p in parameters) + return super().__getitem__(parameters) + + def __instancecheck__(self, obj): + if self.__args__ is None: + return isinstance(obj, tuple) + raise TypeError("Parameterized Tuple cannot be used " + "with isinstance().") + + def __subclasscheck__(self, cls): + if self.__args__ is None: + return issubclass(cls, tuple) + raise TypeError("Parameterized Tuple cannot be used " + "with issubclass().") + + +class Tuple(tuple, extra=tuple, metaclass=TupleMeta): + """Tuple type; Tuple[X, Y] is the cross-product type of X and Y. + + Example: Tuple[T1, T2] is a tuple of two elements corresponding + to type variables T1 and T2. Tuple[int, float, str] is a tuple + of an int, a float and a string. + + To specify a variable-length tuple of homogeneous type, use Tuple[T, ...]. + """ + + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, Tuple): + raise TypeError("Type Tuple cannot be instantiated; " + "use tuple() instead") + return _generic_new(tuple, cls, *args, **kwds) + + +class CallableMeta(GenericMeta): + """Metaclass for Callable (internal).""" + + def __repr__(self): + if self.__origin__ is None: + return super().__repr__() + return self._tree_repr(self._subs_tree()) + + def _tree_repr(self, tree): + if _gorg(self) is not Callable: + return super()._tree_repr(tree) + # For actual Callable (not its subclass) we override + # super()._tree_repr() for nice formatting. + arg_list = [] + for arg in tree[1:]: + if not isinstance(arg, tuple): + arg_list.append(_type_repr(arg)) + else: + arg_list.append(arg[0]._tree_repr(arg)) + if arg_list[0] == '...': + return repr(tree[0]) + '[..., %s]' % arg_list[1] + return (repr(tree[0]) + + '[[%s], %s]' % (', '.join(arg_list[:-1]), arg_list[-1])) + + def __getitem__(self, parameters): + """A thin wrapper around __getitem_inner__ to provide the latter + with hashable arguments to improve speed. + """ + + if self.__origin__ is not None or not _geqv(self, Callable): + return super().__getitem__(parameters) + if not isinstance(parameters, tuple) or len(parameters) != 2: + raise TypeError("Callable must be used as " + "Callable[[arg, ...], result].") + args, result = parameters + if args is Ellipsis: + parameters = (Ellipsis, result) + else: + if not isinstance(args, list): + raise TypeError("Callable[args, result]: args must be a list." + " Got %.100r." % (args,)) + parameters = (tuple(args), result) + return self.__getitem_inner__(parameters) + + @_tp_cache + def __getitem_inner__(self, parameters): + args, result = parameters + msg = "Callable[args, result]: result must be a type." + result = _type_check(result, msg) + if args is Ellipsis: + return super().__getitem__((_TypingEllipsis, result)) + msg = "Callable[[arg, ...], result]: each arg must be a type." 
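+        # Editor's note (illustrative, not in the upstream source): by this
+        # point __getitem__ has already normalised e.g.
+        # Callable[[int, str], bool] into the hashable parameters
+        # ((int, str), bool), which is what lets the @_tp_cache decorator
+        # above memoise this method.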
+ args = tuple(_type_check(arg, msg) for arg in args) + parameters = args + (result,) + return super().__getitem__(parameters) + + +class Callable(extra=collections_abc.Callable, metaclass=CallableMeta): + """Callable type; Callable[[int], str] is a function of (int) -> str. + + The subscription syntax must always be used with exactly two + values: the argument list and the return type. The argument list + must be a list of types or ellipsis; the return type must be a single type. + + There is no syntax to indicate optional or keyword arguments, + such function types are rarely used as callback types. + """ + + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, Callable): + raise TypeError("Type Callable cannot be instantiated; " + "use a non-abstract subclass instead") + return _generic_new(cls.__next_in_mro__, cls, *args, **kwds) + + +class _ClassVar(_FinalTypingBase, _root=True): + """Special type construct to mark class variables. + + An annotation wrapped in ClassVar indicates that a given + attribute is intended to be used as a class variable and + should not be set on instances of that class. Usage:: + + class Starship: + stats: ClassVar[Dict[str, int]] = {} # class variable + damage: int = 10 # instance variable + + ClassVar accepts only types and cannot be further subscribed. + + Note that ClassVar is not a class itself, and should not + be used with isinstance() or issubclass(). + """ + + __slots__ = ('__type__',) + + def __init__(self, tp=None, **kwds): + self.__type__ = tp + + def __getitem__(self, item): + cls = type(self) + if self.__type__ is None: + return cls(_type_check(item, + '{} accepts only single type.'.format(cls.__name__[1:])), + _root=True) + raise TypeError('{} cannot be further subscripted' + .format(cls.__name__[1:])) + + def _eval_type(self, globalns, localns): + new_tp = _eval_type(self.__type__, globalns, localns) + if new_tp == self.__type__: + return self + return type(self)(new_tp, _root=True) + + def __repr__(self): + r = super().__repr__() + if self.__type__ is not None: + r += '[{}]'.format(_type_repr(self.__type__)) + return r + + def __hash__(self): + return hash((type(self).__name__, self.__type__)) + + def __eq__(self, other): + if not isinstance(other, _ClassVar): + return NotImplemented + if self.__type__ is not None: + return self.__type__ == other.__type__ + return self is other + + +ClassVar = _ClassVar(_root=True) + + +def cast(typ, val): + """Cast a value to a type. + + This returns the value unchanged. To the type checker this + signals that the return value has the designated type, but at + runtime we intentionally don't check anything (we want this + to be as fast as possible). + """ + return val + + +def _get_defaults(func): + """Internal helper to extract the default arguments, by name.""" + try: + code = func.__code__ + except AttributeError: + # Some built-in functions don't have __code__, __defaults__, etc. 
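+        # Editor's note: e.g. len has no __code__ attribute, so such
+        # callables are treated as having no extractable defaults.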
+ return {} + pos_count = code.co_argcount + arg_names = code.co_varnames + arg_names = arg_names[:pos_count] + defaults = func.__defaults__ or () + kwdefaults = func.__kwdefaults__ + res = dict(kwdefaults) if kwdefaults else {} + pos_offset = pos_count - len(defaults) + for name, value in zip(arg_names[pos_offset:], defaults): + assert name not in res + res[name] = value + return res + + +_allowed_types = (types.FunctionType, types.BuiltinFunctionType, + types.MethodType, types.ModuleType, + SlotWrapperType, MethodWrapperType, MethodDescriptorType) + + +def get_type_hints(obj, globalns=None, localns=None): + """Return type hints for an object. + + This is often the same as obj.__annotations__, but it handles + forward references encoded as string literals, and if necessary + adds Optional[t] if a default value equal to None is set. + + The argument may be a module, class, method, or function. The annotations + are returned as a dictionary. For classes, annotations include also + inherited members. + + TypeError is raised if the argument is not of a type that can contain + annotations, and an empty dictionary is returned if no annotations are + present. + + BEWARE -- the behavior of globalns and localns is counterintuitive + (unless you are familiar with how eval() and exec() work). The + search order is locals first, then globals. + + - If no dict arguments are passed, an attempt is made to use the + globals from obj, and these are also used as the locals. If the + object does not appear to have globals, an exception is raised. + + - If one dict argument is passed, it is used for both globals and + locals. + + - If two dict arguments are passed, they specify globals and + locals, respectively. + """ + + if getattr(obj, '__no_type_check__', None): + return {} + if globalns is None: + globalns = getattr(obj, '__globals__', {}) + if localns is None: + localns = globalns + elif localns is None: + localns = globalns + # Classes require a special treatment. + if isinstance(obj, type): + hints = {} + for base in reversed(obj.__mro__): + ann = base.__dict__.get('__annotations__', {}) + for name, value in ann.items(): + if value is None: + value = type(None) + if isinstance(value, str): + value = _ForwardRef(value) + value = _eval_type(value, globalns, localns) + hints[name] = value + return hints + hints = getattr(obj, '__annotations__', None) + if hints is None: + # Return empty annotations for something that _could_ have them. + if isinstance(obj, _allowed_types): + return {} + else: + raise TypeError('{!r} is not a module, class, method, ' + 'or function.'.format(obj)) + defaults = _get_defaults(obj) + hints = dict(hints) + for name, value in hints.items(): + if value is None: + value = type(None) + if isinstance(value, str): + value = _ForwardRef(value) + value = _eval_type(value, globalns, localns) + if name in defaults and defaults[name] is None: + value = Optional[value] + hints[name] = value + return hints + + +def no_type_check(arg): + """Decorator to indicate that annotations are not type hints. + + The argument must be a class or function; if it is a class, it + applies recursively to all methods and classes defined in that class + (but not to methods defined in its superclasses or subclasses). + + This mutates the function(s) or class(es) in place. 
+ """ + if isinstance(arg, type): + arg_attrs = arg.__dict__.copy() + for attr, val in arg.__dict__.items(): + if val in arg.__bases__: + arg_attrs.pop(attr) + for obj in arg_attrs.values(): + if isinstance(obj, types.FunctionType): + obj.__no_type_check__ = True + if isinstance(obj, type): + no_type_check(obj) + try: + arg.__no_type_check__ = True + except TypeError: # built-in classes + pass + return arg + + +def no_type_check_decorator(decorator): + """Decorator to give another decorator the @no_type_check effect. + + This wraps the decorator with something that wraps the decorated + function in @no_type_check. + """ + + @functools.wraps(decorator) + def wrapped_decorator(*args, **kwds): + func = decorator(*args, **kwds) + func = no_type_check(func) + return func + + return wrapped_decorator + + +def _overload_dummy(*args, **kwds): + """Helper for @overload to raise when called.""" + raise NotImplementedError( + "You should not call an overloaded function. " + "A series of @overload-decorated functions " + "outside a stub module should always be followed " + "by an implementation that is not @overload-ed.") + + +def overload(func): + """Decorator for overloaded functions/methods. + + In a stub file, place two or more stub definitions for the same + function in a row, each decorated with @overload. For example: + + @overload + def utf8(value: None) -> None: ... + @overload + def utf8(value: bytes) -> bytes: ... + @overload + def utf8(value: str) -> bytes: ... + + In a non-stub file (i.e. a regular .py file), do the same but + follow it with an implementation. The implementation should *not* + be decorated with @overload. For example: + + @overload + def utf8(value: None) -> None: ... + @overload + def utf8(value: bytes) -> bytes: ... + @overload + def utf8(value: str) -> bytes: ... + def utf8(value): + # implementation goes here + """ + return _overload_dummy + + +class _ProtocolMeta(GenericMeta): + """Internal metaclass for _Protocol. + + This exists so _Protocol classes can be generic without deriving + from Generic. + """ + + def __instancecheck__(self, obj): + if _Protocol not in self.__bases__: + return super().__instancecheck__(obj) + raise TypeError("Protocols cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + if not self._is_protocol: + # No structural checks since this isn't a protocol. + return NotImplemented + + if self is _Protocol: + # Every class is a subclass of the empty protocol. + return True + + # Find all attributes defined in the protocol. + attrs = self._get_protocol_attrs() + + for attr in attrs: + if not any(attr in d.__dict__ for d in cls.__mro__): + return False + return True + + def _get_protocol_attrs(self): + # Get all Protocol base classes. + protocol_bases = [] + for c in self.__mro__: + if getattr(c, '_is_protocol', False) and c.__name__ != '_Protocol': + protocol_bases.append(c) + + # Get attributes included in protocol. + attrs = set() + for base in protocol_bases: + for attr in base.__dict__.keys(): + # Include attributes not defined in any non-protocol bases. 
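+                # Editor's note: the for/else below relies on Python's
+                # loop-else clause -- `attr` survives only when no
+                # non-protocol class in the MRO also defines it, i.e.
+                # when the loop completes without hitting `break`.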
+ for c in self.__mro__: + if (c is not base and attr in c.__dict__ and + not getattr(c, '_is_protocol', False)): + break + else: + if (not attr.startswith('_abc_') and + attr != '__abstractmethods__' and + attr != '__annotations__' and + attr != '__weakref__' and + attr != '_is_protocol' and + attr != '__dict__' and + attr != '__args__' and + attr != '__slots__' and + attr != '_get_protocol_attrs' and + attr != '__next_in_mro__' and + attr != '__parameters__' and + attr != '__origin__' and + attr != '__orig_bases__' and + attr != '__extra__' and + attr != '__tree_hash__' and + attr != '__module__'): + attrs.add(attr) + + return attrs + + +class _Protocol(metaclass=_ProtocolMeta): + """Internal base class for protocol classes. + + This implements a simple-minded structural issubclass check + (similar but more general than the one-offs in collections.abc + such as Hashable). + """ + + __slots__ = () + + _is_protocol = True + + +# Various ABCs mimicking those in collections.abc. +# A few are simply re-exported for completeness. + +Hashable = collections_abc.Hashable # Not generic. + + +if hasattr(collections_abc, 'Awaitable'): + class Awaitable(Generic[T_co], extra=collections_abc.Awaitable): + __slots__ = () + + __all__.append('Awaitable') + + +if hasattr(collections_abc, 'Coroutine'): + class Coroutine(Awaitable[V_co], Generic[T_co, T_contra, V_co], + extra=collections_abc.Coroutine): + __slots__ = () + + __all__.append('Coroutine') + + +if hasattr(collections_abc, 'AsyncIterable'): + + class AsyncIterable(Generic[T_co], extra=collections_abc.AsyncIterable): + __slots__ = () + + class AsyncIterator(AsyncIterable[T_co], + extra=collections_abc.AsyncIterator): + __slots__ = () + + __all__.append('AsyncIterable') + __all__.append('AsyncIterator') + + +class Iterable(Generic[T_co], extra=collections_abc.Iterable): + __slots__ = () + + +class Iterator(Iterable[T_co], extra=collections_abc.Iterator): + __slots__ = () + + +class SupportsInt(_Protocol): + __slots__ = () + + @abstractmethod + def __int__(self) -> int: + pass + + +class SupportsFloat(_Protocol): + __slots__ = () + + @abstractmethod + def __float__(self) -> float: + pass + + +class SupportsComplex(_Protocol): + __slots__ = () + + @abstractmethod + def __complex__(self) -> complex: + pass + + +class SupportsBytes(_Protocol): + __slots__ = () + + @abstractmethod + def __bytes__(self) -> bytes: + pass + + +class SupportsAbs(_Protocol[T_co]): + __slots__ = () + + @abstractmethod + def __abs__(self) -> T_co: + pass + + +class SupportsRound(_Protocol[T_co]): + __slots__ = () + + @abstractmethod + def __round__(self, ndigits: int = 0) -> T_co: + pass + + +if hasattr(collections_abc, 'Reversible'): + class Reversible(Iterable[T_co], extra=collections_abc.Reversible): + __slots__ = () +else: + class Reversible(_Protocol[T_co]): + __slots__ = () + + @abstractmethod + def __reversed__(self) -> 'Iterator[T_co]': + pass + + +Sized = collections_abc.Sized # Not generic. + + +class Container(Generic[T_co], extra=collections_abc.Container): + __slots__ = () + + +if hasattr(collections_abc, 'Collection'): + class Collection(Sized, Iterable[T_co], Container[T_co], + extra=collections_abc.Collection): + __slots__ = () + + __all__.append('Collection') + + +# Callable was defined earlier. 
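+# Editor's note -- an illustrative sketch, not part of the upstream module:
+# the _Protocol machinery above performs purely structural issubclass()
+# checks, so a class that merely defines __int__ satisfies SupportsInt
+# without any registration (Celsius is a hypothetical example class):
+#
+#     class Celsius:
+#         def __init__(self, degrees: float) -> None:
+#             self.degrees = degrees
+#
+#         def __int__(self) -> int:
+#             return int(self.degrees)
+#
+#     assert issubclass(Celsius, SupportsInt)
+#     assert not issubclass(object, SupportsInt)
+#     isinstance(Celsius(21.5), SupportsInt)  # raises TypeError, see above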
+ +if hasattr(collections_abc, 'Collection'): + class AbstractSet(Collection[T_co], + extra=collections_abc.Set): + __slots__ = () +else: + class AbstractSet(Sized, Iterable[T_co], Container[T_co], + extra=collections_abc.Set): + __slots__ = () + + +class MutableSet(AbstractSet[T], extra=collections_abc.MutableSet): + __slots__ = () + + +# NOTE: It is only covariant in the value type. +if hasattr(collections_abc, 'Collection'): + class Mapping(Collection[KT], Generic[KT, VT_co], + extra=collections_abc.Mapping): + __slots__ = () +else: + class Mapping(Sized, Iterable[KT], Container[KT], Generic[KT, VT_co], + extra=collections_abc.Mapping): + __slots__ = () + + +class MutableMapping(Mapping[KT, VT], extra=collections_abc.MutableMapping): + __slots__ = () + + +if hasattr(collections_abc, 'Reversible'): + if hasattr(collections_abc, 'Collection'): + class Sequence(Reversible[T_co], Collection[T_co], + extra=collections_abc.Sequence): + __slots__ = () + else: + class Sequence(Sized, Reversible[T_co], Container[T_co], + extra=collections_abc.Sequence): + __slots__ = () +else: + class Sequence(Sized, Iterable[T_co], Container[T_co], + extra=collections_abc.Sequence): + __slots__ = () + + +class MutableSequence(Sequence[T], extra=collections_abc.MutableSequence): + __slots__ = () + + +class ByteString(Sequence[int], extra=collections_abc.ByteString): + __slots__ = () + + +class List(list, MutableSequence[T], extra=list): + + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, List): + raise TypeError("Type List cannot be instantiated; " + "use list() instead") + return _generic_new(list, cls, *args, **kwds) + + +class Deque(collections.deque, MutableSequence[T], extra=collections.deque): + + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, Deque): + return collections.deque(*args, **kwds) + return _generic_new(collections.deque, cls, *args, **kwds) + + +class Set(set, MutableSet[T], extra=set): + + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, Set): + raise TypeError("Type Set cannot be instantiated; " + "use set() instead") + return _generic_new(set, cls, *args, **kwds) + + +class FrozenSet(frozenset, AbstractSet[T_co], extra=frozenset): + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, FrozenSet): + raise TypeError("Type FrozenSet cannot be instantiated; " + "use frozenset() instead") + return _generic_new(frozenset, cls, *args, **kwds) + + +class MappingView(Sized, Iterable[T_co], extra=collections_abc.MappingView): + __slots__ = () + + +class KeysView(MappingView[KT], AbstractSet[KT], + extra=collections_abc.KeysView): + __slots__ = () + + +class ItemsView(MappingView[Tuple[KT, VT_co]], + AbstractSet[Tuple[KT, VT_co]], + Generic[KT, VT_co], + extra=collections_abc.ItemsView): + __slots__ = () + + +class ValuesView(MappingView[VT_co], extra=collections_abc.ValuesView): + __slots__ = () + + +if hasattr(contextlib, 'AbstractContextManager'): + class ContextManager(Generic[T_co], extra=contextlib.AbstractContextManager): + __slots__ = () + __all__.append('ContextManager') + + +class Dict(dict, MutableMapping[KT, VT], extra=dict): + + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, Dict): + raise TypeError("Type Dict cannot be instantiated; " + "use dict() instead") + return _generic_new(dict, cls, *args, **kwds) + + +class DefaultDict(collections.defaultdict, MutableMapping[KT, VT], + extra=collections.defaultdict): + + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, DefaultDict): 
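+            # Editor's note: unlike List/Set/Dict above, instantiating the
+            # plain generic does not raise here -- it simply returns a
+            # vanilla collections.defaultdict.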
+ return collections.defaultdict(*args, **kwds) + return _generic_new(collections.defaultdict, cls, *args, **kwds) + + +class Counter(collections.Counter, Dict[T, int], extra=collections.Counter): + + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, Counter): + return collections.Counter(*args, **kwds) + return _generic_new(collections.Counter, cls, *args, **kwds) + + +if hasattr(collections, 'ChainMap'): + # ChainMap only exists in 3.3+ + __all__.append('ChainMap') + + class ChainMap(collections.ChainMap, MutableMapping[KT, VT], + extra=collections.ChainMap): + + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, ChainMap): + return collections.ChainMap(*args, **kwds) + return _generic_new(collections.ChainMap, cls, *args, **kwds) + + +# Determine what base class to use for Generator. +if hasattr(collections_abc, 'Generator'): + # Sufficiently recent versions of 3.5 have a Generator ABC. + _G_base = collections_abc.Generator +else: + # Fall back on the exact type. + _G_base = types.GeneratorType + + +class Generator(Iterator[T_co], Generic[T_co, T_contra, V_co], + extra=_G_base): + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, Generator): + raise TypeError("Type Generator cannot be instantiated; " + "create a subclass instead") + return _generic_new(_G_base, cls, *args, **kwds) + + +if hasattr(collections_abc, 'AsyncGenerator'): + class AsyncGenerator(AsyncIterator[T_co], Generic[T_co, T_contra], + extra=collections_abc.AsyncGenerator): + __slots__ = () + + __all__.append('AsyncGenerator') + + +# Internal type variable used for Type[]. +CT_co = TypeVar('CT_co', covariant=True, bound=type) + + +# This is not a real generic class. Don't use outside annotations. +class Type(Generic[CT_co], extra=type): + """A special construct usable to annotate class objects. + + For example, suppose we have the following classes:: + + class User: ... # Abstract base for User classes + class BasicUser(User): ... + class ProUser(User): ... + class TeamUser(User): ... + + And a function that takes a class argument that's a subclass of + User and returns an instance of the corresponding class:: + + U = TypeVar('U', bound=User) + def new_user(user_class: Type[U]) -> U: + user = user_class() + # (Here we could write the user object to a database) + return user + + joe = new_user(BasicUser) + + At this point the type checker knows that joe has type BasicUser. + """ + + __slots__ = () + + +def _make_nmtuple(name, types): + msg = "NamedTuple('Name', [(f0, t0), (f1, t1), ...]); each t must be a type" + types = [(n, _type_check(t, msg)) for n, t in types] + nm_tpl = collections.namedtuple(name, [n for n, t in types]) + # Prior to PEP 526, only _field_types attribute was assigned. + # Now, both __annotations__ and _field_types are used to maintain compatibility. 
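+    # Editor's note: e.g. NamedTuple('Point', [('x', int), ('y', int)])
+    # leaves OrderedDict([('x', int), ('y', int)]) on both attributes.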
+ nm_tpl.__annotations__ = nm_tpl._field_types = collections.OrderedDict(types) + try: + nm_tpl.__module__ = sys._getframe(2).f_globals.get('__name__', '__main__') + except (AttributeError, ValueError): + pass + return nm_tpl + + +_PY36 = sys.version_info[:2] >= (3, 6) + +# attributes prohibited to set in NamedTuple class syntax +_prohibited = ('__new__', '__init__', '__slots__', '__getnewargs__', + '_fields', '_field_defaults', '_field_types', + '_make', '_replace', '_asdict') + +_special = ('__module__', '__name__', '__qualname__', '__annotations__') + + +class NamedTupleMeta(type): + + def __new__(cls, typename, bases, ns): + if ns.get('_root', False): + return super().__new__(cls, typename, bases, ns) + if not _PY36: + raise TypeError("Class syntax for NamedTuple is only supported" + " in Python 3.6+") + types = ns.get('__annotations__', {}) + nm_tpl = _make_nmtuple(typename, types.items()) + defaults = [] + defaults_dict = {} + for field_name in types: + if field_name in ns: + default_value = ns[field_name] + defaults.append(default_value) + defaults_dict[field_name] = default_value + elif defaults: + raise TypeError("Non-default namedtuple field {field_name} cannot " + "follow default field(s) {default_names}" + .format(field_name=field_name, + default_names=', '.join(defaults_dict.keys()))) + nm_tpl.__new__.__defaults__ = tuple(defaults) + nm_tpl._field_defaults = defaults_dict + # update from user namespace without overriding special namedtuple attributes + for key in ns: + if key in _prohibited: + raise AttributeError("Cannot overwrite NamedTuple attribute " + key) + elif key not in _special and key not in nm_tpl._fields: + setattr(nm_tpl, key, ns[key]) + return nm_tpl + + +class NamedTuple(metaclass=NamedTupleMeta): + """Typed version of namedtuple. + + Usage in Python versions >= 3.6:: + + class Employee(NamedTuple): + name: str + id: int + + This is equivalent to:: + + Employee = collections.namedtuple('Employee', ['name', 'id']) + + The resulting class has extra __annotations__ and _field_types + attributes, giving an ordered dict mapping field names to types. + __annotations__ should be preferred, while _field_types + is kept to maintain pre PEP 526 compatibility. (The field names + are in the _fields attribute, which is part of the namedtuple + API.) Alternative equivalent keyword syntax is also accepted:: + + Employee = NamedTuple('Employee', name=str, id=int) + + In Python versions <= 3.5 use:: + + Employee = NamedTuple('Employee', [('name', str), ('id', int)]) + """ + _root = True + + def __new__(self, typename, fields=None, **kwargs): + if kwargs and not _PY36: + raise TypeError("Keyword syntax for NamedTuple is only supported" + " in Python 3.6+") + if fields is None: + fields = kwargs.items() + elif kwargs: + raise TypeError("Either list of fields or keywords" + " can be provided to NamedTuple, not both") + return _make_nmtuple(typename, fields) + + +def NewType(name, tp): + """NewType creates simple unique types with almost zero + runtime overhead. NewType(name, tp) is considered a subtype of tp + by static type checkers. At runtime, NewType(name, tp) returns + a dummy function that simply returns its argument. Usage:: + + UserId = NewType('UserId', int) + + def name_by_id(user_id: UserId) -> str: + ... 
+ + UserId('user') # Fails type check + + name_by_id(42) # Fails type check + name_by_id(UserId(42)) # OK + + num = UserId(5) + 1 # type: int + """ + + def new_type(x): + return x + + new_type.__name__ = name + new_type.__supertype__ = tp + return new_type + + +# Python-version-specific alias (Python 2: unicode; Python 3: str) +Text = str + + +# Constant that's True when type checking, but False here. +TYPE_CHECKING = False + + +class IO(Generic[AnyStr]): + """Generic base class for TextIO and BinaryIO. + + This is an abstract, generic version of the return of open(). + + NOTE: This does not distinguish between the different possible + classes (text vs. binary, read vs. write vs. read/write, + append-only, unbuffered). The TextIO and BinaryIO subclasses + below capture the distinctions between text vs. binary, which is + pervasive in the interface; however we currently do not offer a + way to track the other distinctions in the type system. + """ + + __slots__ = () + + @abstractproperty + def mode(self) -> str: + pass + + @abstractproperty + def name(self) -> str: + pass + + @abstractmethod + def close(self) -> None: + pass + + @abstractmethod + def closed(self) -> bool: + pass + + @abstractmethod + def fileno(self) -> int: + pass + + @abstractmethod + def flush(self) -> None: + pass + + @abstractmethod + def isatty(self) -> bool: + pass + + @abstractmethod + def read(self, n: int = -1) -> AnyStr: + pass + + @abstractmethod + def readable(self) -> bool: + pass + + @abstractmethod + def readline(self, limit: int = -1) -> AnyStr: + pass + + @abstractmethod + def readlines(self, hint: int = -1) -> List[AnyStr]: + pass + + @abstractmethod + def seek(self, offset: int, whence: int = 0) -> int: + pass + + @abstractmethod + def seekable(self) -> bool: + pass + + @abstractmethod + def tell(self) -> int: + pass + + @abstractmethod + def truncate(self, size: int = None) -> int: + pass + + @abstractmethod + def writable(self) -> bool: + pass + + @abstractmethod + def write(self, s: AnyStr) -> int: + pass + + @abstractmethod + def writelines(self, lines: List[AnyStr]) -> None: + pass + + @abstractmethod + def __enter__(self) -> 'IO[AnyStr]': + pass + + @abstractmethod + def __exit__(self, type, value, traceback) -> None: + pass + + +class BinaryIO(IO[bytes]): + """Typed version of the return of open() in binary mode.""" + + __slots__ = () + + @abstractmethod + def write(self, s: Union[bytes, bytearray]) -> int: + pass + + @abstractmethod + def __enter__(self) -> 'BinaryIO': + pass + + +class TextIO(IO[str]): + """Typed version of the return of open() in text mode.""" + + __slots__ = () + + @abstractproperty + def buffer(self) -> BinaryIO: + pass + + @abstractproperty + def encoding(self) -> str: + pass + + @abstractproperty + def errors(self) -> Optional[str]: + pass + + @abstractproperty + def line_buffering(self) -> bool: + pass + + @abstractproperty + def newlines(self) -> Any: + pass + + @abstractmethod + def __enter__(self) -> 'TextIO': + pass + + +class io: + """Wrapper namespace for IO generic classes.""" + + __all__ = ['IO', 'TextIO', 'BinaryIO'] + IO = IO + TextIO = TextIO + BinaryIO = BinaryIO + + +io.__name__ = __name__ + '.io' +sys.modules[io.__name__] = io + + +Pattern = _TypeAlias('Pattern', AnyStr, type(stdlib_re.compile('')), + lambda p: p.pattern) +Match = _TypeAlias('Match', AnyStr, type(stdlib_re.match('', '')), + lambda m: m.re.pattern) + + +class re: + """Wrapper namespace for re type aliases.""" + + __all__ = ['Pattern', 'Match'] + Pattern = Pattern + Match = Match + + 
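+# Editor's note: like the io namespace above, the re wrapper below is
+# registered in sys.modules so it can be imported as a submodule --
+# e.g. `from typing.re import Pattern` works once this module is
+# importable as `typing`.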
+re.__name__ = __name__ + '.re' +sys.modules[re.__name__] = re From ed2e14213b1a25b026ddacab47fd00695b3185ad Mon Sep 17 00:00:00 2001 From: Michael Lee Date: Mon, 26 Jun 2017 17:08:25 -0700 Subject: [PATCH 02/23] Remove unnecessary gitignore --- typing_extensions/.gitignore | 11 ----------- 1 file changed, 11 deletions(-) delete mode 100644 typing_extensions/.gitignore diff --git a/typing_extensions/.gitignore b/typing_extensions/.gitignore deleted file mode 100644 index 46b620c1..00000000 --- a/typing_extensions/.gitignore +++ /dev/null @@ -1,11 +0,0 @@ -MANIFEST -build/ -dist/ -.tox/ -.idea/ -.cache/ -__pycache__/ -.mypy_cache/ -tmp/ -*.swp -*.pyc From eb1e5d76f9652f2b871eb26c2213eab03be68dd4 Mon Sep 17 00:00:00 2001 From: Michael Lee Date: Mon, 26 Jun 2017 22:46:44 -0700 Subject: [PATCH 03/23] Fix flake8 errors; make travis test typing_extensions --- .travis.yml | 4 +- tox.ini | 7 ++- typing_extensions/run_tests.py | 25 ++++---- typing_extensions/setup.py | 3 +- .../src_py2/test_typing_extensions.py | 39 ++++++------ .../src_py2/typing_extensions.py | 19 +++--- .../src_py3/test_typing_extensions.py | 61 ++++++++++--------- .../src_py3/typing_extensions.py | 38 +++++------- 8 files changed, 96 insertions(+), 100 deletions(-) diff --git a/.travis.yml b/.travis.yml index 0dfe9f5e..6e40ec93 100644 --- a/.travis.yml +++ b/.travis.yml @@ -14,5 +14,7 @@ install: script: - export PYTHONPATH=`python -c "import sys; print('python2' if sys.version.startswith('2') else 'src')"`; py.test $PYTHONPATH + - export PYTHONPATH=`python -c "import sys; print('typing_extensions/src_py2' if sys.version.startswith('2') else 'typing_extensions/src_py3')"`; py.test $PYTHONPATH + - if [[ $TRAVIS_PYTHON_VERSION == '3.6' ]]; then cd typing_extensions; python run_tests.py; fi - if [[ $TRAVIS_PYTHON_VERSION == '3.6' ]]; then flake8; fi - - if [[ $TRAVIS_PYTHON_VERSION == '3.6' ]]; then flake8 --config=.flake8-tests src/test_typing.py python2/test_typing.py; fi + - if [[ $TRAVIS_PYTHON_VERSION == '3.6' ]]; then flake8 --config=.flake8-tests src/test_typing.py python2/test_typing.py typing_extensions/src_py2/test_typing_extensions.py typing_extensions/src_py3/test_typing_extensions.py; fi diff --git a/tox.ini b/tox.ini index 90c4f844..4b029a40 100644 --- a/tox.ini +++ b/tox.ini @@ -22,4 +22,9 @@ exclude = # tests have more relaxed formatting rules # and its own specific config in .flake8-tests python2/test_typing.py, - src/test_typing.py + src/test_typing.py, + typing_extensions/src_py2/test_typing_extensions.py, + typing_extensions/src_py3/test_typing_extensions.py, + # test_data should contain an exact snapshot of the standard + # library and so should be exempt from checks + typing_extensions/test_data, diff --git a/typing_extensions/run_tests.py b/typing_extensions/run_tests.py index b051f1ef..a720ea90 100644 --- a/typing_extensions/run_tests.py +++ b/typing_extensions/run_tests.py @@ -11,12 +11,12 @@ import textwrap CORE_FILES_2 = [ - "./src_py2/typing_extensions.py", - "./src_py2/test_typing_extensions.py" + "./src_py2/typing_extensions.py", + "./src_py2/test_typing_extensions.py" ] CORE_FILES_3 = [ - "./src_py3/typing_extensions.py", - "./src_py3/test_typing_extensions.py" + "./src_py3/typing_extensions.py", + "./src_py3/test_typing_extensions.py" ] TEST_DIR = "test_data" @@ -71,11 +71,11 @@ def run_shell(command: str) -> Tuple[bool, str]: env = os.environ.copy() env["PYTHONPATH"] = ":".join([os.getcwd(), env["PYTHONPATH"], env["PATH"]]) out = subprocess.run( - command, - stdout=subprocess.PIPE, - 
stderr=subprocess.STDOUT, - shell=True, - env=env) + command, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + shell=True, + env=env) success = out.returncode == 0 stdout = '' if out.stdout is None else out.stdout.decode('utf-8') return (success, stdout) @@ -91,12 +91,12 @@ def main() -> int: core_files = CORE_FILES_2 if py2 else CORE_FILES_3 python_exe = PYTHON2 if py2 else PYTHON3 - + with temp_copy(core_files, test_dir), change_directory(test_dir): success, output = run_shell("{} {} {}".format( - python_exe, + python_exe, "test_typing_extensions.py", - version_number)) + "PYVERSION.{}".format(version_number))) if success: print(" All tests passed!") else: @@ -107,4 +107,3 @@ def main() -> int: if __name__ == '__main__': sys.exit(main()) - diff --git a/typing_extensions/setup.py b/typing_extensions/setup.py index afd66262..d8fe490e 100644 --- a/typing_extensions/setup.py +++ b/typing_extensions/setup.py @@ -67,5 +67,4 @@ package_dir={'': package_dir}, py_modules=['typing_extensions'], classifiers=classifiers, - install_requires=install_requires, -) + install_requires=install_requires) diff --git a/typing_extensions/src_py2/test_typing_extensions.py b/typing_extensions/src_py2/test_typing_extensions.py index 2b86cb28..8fd3421d 100644 --- a/typing_extensions/src_py2/test_typing_extensions.py +++ b/typing_extensions/src_py2/test_typing_extensions.py @@ -1,24 +1,24 @@ -# Override version info import sys -ORIGINAL_VERSION = sys.version_info -if len(sys.argv) >= 2: - PYTHON_VERSION = tuple(map(int, sys.argv[1].split('.'))) - sys.version_info = PYTHON_VERSION - OVERRIDING_VERSION = True -else: - PYTHON_VERSION = ORIGINAL_VERSION - OVERRIDING_VERSION = False - import os import abc import contextlib import collections -from unittest import TestCase, main, skipUnless, SkipTest +from unittest import TestCase, main, skipUnless -from typing_extensions import NoReturn, ClassVar, Type, NewType +from typing_extensions import NoReturn import typing import typing_extensions +# Override version info +ORIGINAL_VERSION = sys.version_info +if len(sys.argv) >= 2 and sys.argv[-1].startswith("PYVERSION"): + PYTHON_VERSION = tuple(map(int, sys.argv[-1].split('.')[1:])) + sys.version_info = PYTHON_VERSION + OVERRIDING_VERSION = True +else: + PYTHON_VERSION = ORIGINAL_VERSION + OVERRIDING_VERSION = False + class BaseTestCase(TestCase): def assertIsSubclass(self, cls, class_or_tuple, msg=None): @@ -106,7 +106,7 @@ def test_collection(self): self.assertNotIsInstance(42, typing_extensions.Collection) def test_collection_instantiation(self): - class MyAbstractCollection(typing_extensions.Collection[int]): + class MyAbstractCollection(typing_extensions.Collection[int]): pass class MyCollection(typing_extensions.Collection[int]): def __contains__(self, item): pass @@ -114,11 +114,11 @@ def __iter__(self): pass def __len__(self): pass self.assertIsSubclass( - type(MyCollection()), - typing_extensions.Collection) + type(MyCollection()), + typing_extensions.Collection) self.assertIsSubclass( - MyCollection, - typing_extensions.Collection) + MyCollection, + typing_extensions.Collection) with self.assertRaises(TypeError): MyAbstractCollection() @@ -152,8 +152,9 @@ def test_typing_extensions_defers_when_possible(self): for item in typing_extensions.__all__: if item not in exclude and hasattr(typing, item): self.assertIs( - getattr(typing_extensions, item), - getattr(typing, item)) + getattr(typing_extensions, item), + getattr(typing, item)) + if __name__ == '__main__': main(argv=[sys.argv[0]] + sys.argv[2:]) diff --git 
a/typing_extensions/src_py2/typing_extensions.py b/typing_extensions/src_py2/typing_extensions.py index 96dd61c1..3ba9a651 100644 --- a/typing_extensions/src_py2/typing_extensions.py +++ b/typing_extensions/src_py2/typing_extensions.py @@ -1,11 +1,10 @@ import abc -from abc import abstractmethod, abstractproperty import collections import typing from typing import ( - ClassVar, Type, - Counter, DefaultDict, Deque, - NewType, overload, Text, Type, TYPE_CHECKING, + ClassVar, Type, + Counter, DefaultDict, Deque, + NewType, overload, Text, TYPE_CHECKING, ) # Please keep __all__ alphabetized within each category. @@ -41,7 +40,6 @@ def __new__(cls, name, bases, namespace): self = super(NoReturnMeta, cls).__new__(cls, name, bases, namespace) return self - class _NoReturn(typing._FinalTypingBase): """Special type indicating functions that never return. Example:: @@ -60,7 +58,6 @@ def __instancecheck__(self, obj): def __subclasscheck__(self, cls): raise TypeError("NoReturn cannot be used with issubclass().") - NoReturn = _NoReturn(_root=True) @@ -95,17 +92,17 @@ def __subclasshook__(cls, C): # Backport collections.Collection -class _CollectionAbc(collections.Sized, - collections.Iterable, - collections.Container): +class _CollectionAbc(collections.Sized, + collections.Iterable, + collections.Container): pass + _CollectionAbc.register(list) _CollectionAbc.register(tuple) _CollectionAbc.register(set) _CollectionAbc.register(frozenset) _CollectionAbc.register(basestring) -_CollectionAbc.register(buffer) _CollectionAbc.register(dict) @@ -114,5 +111,3 @@ class Collection(typing.Sized, typing.Container[T_co]): __slots__ = () __extra__ = _CollectionAbc - - diff --git a/typing_extensions/src_py3/test_typing_extensions.py b/typing_extensions/src_py3/test_typing_extensions.py index 6292f443..4054f20f 100644 --- a/typing_extensions/src_py3/test_typing_extensions.py +++ b/typing_extensions/src_py3/test_typing_extensions.py @@ -1,27 +1,16 @@ -# Override version info -import sys -ORIGINAL_VERSION = sys.version_info -if len(sys.argv) >= 2: - PYTHON_VERSION = tuple(map(int, sys.argv[1].split('.'))) - sys.version_info = PYTHON_VERSION - OVERRIDING_VERSION = True -else: - PYTHON_VERSION = ORIGINAL_VERSION - OVERRIDING_VERSION = False - import os +import sys import abc import contextlib import collections -from unittest import TestCase, main, skipUnless, SkipTest +from unittest import TestCase, main, skipUnless from typing import TypeVar, Optional from typing import T, KT, VT # Not in __all__. 
-from typing import Tuple, List, MutableMapping +from typing import Tuple, List from typing import Generic from typing import get_type_hints -from typing import no_type_check, no_type_check_decorator -from typing import NamedTuple +from typing import no_type_check from typing_extensions import NoReturn, ClassVar, Type, NewType import typing import typing_extensions @@ -32,6 +21,16 @@ TYPING_V3 = sys.version_info >= (3, 5, 3) TYPING_V4 = sys.version_info >= (3, 6, 1) +# Override version info +ORIGINAL_VERSION = sys.version_info +if len(sys.argv) >= 2 and sys.argv[-1].startswith("PYVERSION"): + PYTHON_VERSION = tuple(map(int, sys.argv[-1].split('.')[1:])) + sys.version_info = PYTHON_VERSION + OVERRIDING_VERSION = True +else: + PYTHON_VERSION = ORIGINAL_VERSION + OVERRIDING_VERSION = False + class BaseTestCase(TestCase): def assertIsSubclass(self, cls, class_or_tuple, msg=None): @@ -248,6 +247,7 @@ async def __aexit__(self, etype, eval, tb): PY36_TESTS = """ from test import ann_module, ann_module2, ann_module3 from typing_extensions import AsyncContextManager +from typing import NamedTuple class A: y: float @@ -394,7 +394,6 @@ def test_async_iterator(self): self.assertNotIsInstance(42, typing_extensions.AsyncIterator) def test_collection(self): - #if hasattr(collections_abc, 'Collection'): self.assertIsInstance(tuple(), typing_extensions.Collection) self.assertIsInstance(frozenset(), typing_extensions.Collection) self.assertIsSubclass(dict, typing_extensions.Collection) @@ -408,11 +407,11 @@ def __iter__(self): ... def __len__(self): ... self.assertIsSubclass( - type(MyCollection()), - typing_extensions.Collection) + type(MyCollection()), + typing_extensions.Collection) self.assertIsSubclass( - MyCollection, - typing_extensions.Collection) + MyCollection, + typing_extensions.Collection) def test_deque(self): self.assertIsSubclass(collections.deque, typing_extensions.Deque) @@ -425,14 +424,14 @@ def test_counter(self): @skipUnless(TYPING_V4, "Behavior added in typing v4") def test_defaultdict_instantiation(self): self.assertIs( - type(typing_extensions.DefaultDict()), - collections.defaultdict) + type(typing_extensions.DefaultDict()), + collections.defaultdict) self.assertIs( - type(typing_extensions.DefaultDict[KT, VT]()), - collections.defaultdict) + type(typing_extensions.DefaultDict[KT, VT]()), + collections.defaultdict) self.assertIs( - type(typing_extensions.DefaultDict[str, int]()), - collections.defaultdict) + type(typing_extensions.DefaultDict[str, int]()), + collections.defaultdict) def test_defaultdict_subclass(self): @@ -510,7 +509,6 @@ def test_no_async_generator_instantiation(self): with self.assertRaises(TypeError): typing_extensions.AsyncGenerator[int, int]() - @skipUnless(PY36, 'Python 3.6 required') def test_subclassing_async_generator(self): class G(typing_extensions.AsyncGenerator[int, int]): @@ -595,7 +593,8 @@ def new_user(user_class: Type[U]) -> U: new_user(BasicUser) - @skipUnless(sys.version_info != (3, 5, 2), 'Python 3.5.2 has a somewhat buggy Type impl') + @skipUnless(sys.version_info != (3, 5, 2), + 'Python 3.5.2 has a somewhat buggy Type impl') def test_type_optional(self): A = Optional[Type[BaseException]] @@ -627,6 +626,7 @@ def test_errors(self): class D(UserName): pass + class AllTests(BaseTestCase): def test_typing_extensions_includes_standard(self): a = typing_extensions.__all__ @@ -659,8 +659,9 @@ def test_typing_extensions_defers_when_possible(self): for item in typing_extensions.__all__: if item not in exclude and hasattr(typing, item): self.assertIs( - 
getattr(typing_extensions, item), - getattr(typing, item)) + getattr(typing_extensions, item), + getattr(typing, item)) + if __name__ == '__main__': main(argv=[sys.argv[0]] + sys.argv[2:]) diff --git a/typing_extensions/src_py3/typing_extensions.py b/typing_extensions/src_py3/typing_extensions.py index 907866f3..ca8cf363 100644 --- a/typing_extensions/src_py3/typing_extensions.py +++ b/typing_extensions/src_py3/typing_extensions.py @@ -1,5 +1,4 @@ import abc -from abc import abstractmethod, abstractproperty import collections import contextlib import sys @@ -16,7 +15,7 @@ def _generic_new(base_cls, cls, *args, **kwargs): return base_cls.__new__(cls, *args, **kwargs) if sys.version_info[:2] >= (3, 6): - import _collections_abc + import _collections_abc _check_methods_in_mro = _collections_abc._check_methods else: def _check_methods_in_mro(C, *methods): @@ -41,13 +40,13 @@ def _check_methods_in_mro(C, *methods): # ABCs (from collections.abc). # The following are added depending on presence # of their non-generic counterparts in stdlib: - #'Awaitable', - #'AsyncIterator', - #'AsyncIterable', - #'Coroutine', - #'AsyncGenerator', - #'AsyncContextManager', - #'ChainMap', + # 'Awaitable', + # 'AsyncIterator', + # 'AsyncIterable', + # 'Coroutine', + # 'AsyncGenerator', + # 'AsyncContextManager', + # 'ChainMap', # Concrete collection types. 'Collection', @@ -68,7 +67,7 @@ def _check_methods_in_mro(C, *methods): if hasattr(typing, 'NoReturn'): NoReturn = typing.NoReturn elif hasattr(typing, '_FinalTypingBase'): - class _NoReturn(typing._FinalTypingBase, _root=True): + class _NoReturn(typing._FinalTypingBase, _root=True): """Special type indicating functions that never return. Example:: @@ -178,14 +177,14 @@ def __init__(self, tp=None, **kwds): def __getitem__(self, item): cls = type(self) if self.__type__ is None: - return cls(_type_check(item, + return cls(typing._type_check(item, '{} accepts only single type.'.format(cls.__name__[1:])), _root=True) raise TypeError('{} cannot be further subscripted' .format(cls.__name__[1:])) def _eval_type(self, globalns, localns): - new_tp = _eval_type(self.__type__, globalns, localns) + new_tp = typing._eval_type(self.__type__, globalns, localns) if new_tp == self.__type__: return self return type(self)(new_tp, _root=True) @@ -206,7 +205,6 @@ def __eq__(self, other): return self.__type__ == other.__type__ return self is other - ClassVar = _ClassVar(_root=True) else: class ClassVarMeta(typing.TypingMeta): @@ -237,7 +235,7 @@ def __getitem__(self, item): dict(self.__dict__), tp=param, _root=True) def _eval_type(self, globalns, localns): - new_tp = _eval_type(self.__type__, globalns, localns) + new_tp = typing._eval_type(self.__type__, globalns, localns) if new_tp == self.__type__: return self return type(self)(self.__name__, self.__bases__, @@ -260,7 +258,6 @@ def __eq__(self, other): return self.__type__ == other.__type__ return self is other - class ClassVar(typing.Final, metaclass=ClassVarMeta, _root=True): """Special type construct to mark class variables. @@ -319,8 +316,6 @@ def utf8(value): return _overload_dummy - - # This is not a real generic class. Don't use outside annotations. 
if hasattr(typing, 'Type'): Type = typing.Type @@ -368,7 +363,7 @@ def _define_guard(type_name): if hasattr(typing, type_name): __all__.append(type_name) globals()[type_name] = getattr(typing, type_name) - return False + return False elif hasattr(collections_abc, type_name): __all__.append(type_name) return True @@ -405,8 +400,8 @@ class AsyncIterator(AsyncIterable[T_co], __all__.append('Collection') # Backport collections.abc.Collections - class _CollectionAbc(collections.Sized, - collections.Iterable, + class _CollectionAbc(collections.Sized, + collections.Iterable, collections.Container): __slots__ = () @@ -414,7 +409,7 @@ class _CollectionAbc(collections.Sized, def __subclasshook__(cls, C): if cls is _CollectionAbc: return _check_methods_in_mro( - C, "__len__", "__iter__", "__contains__") + C, "__len__", "__iter__", "__contains__") return NotImplemented extra = getattr(collections_abc, 'Collection', _CollectionAbc) @@ -615,4 +610,3 @@ def new_type(x): else: # Constant that's True when type checking, but False here. TYPE_CHECKING = False - From bded0155156d562e44f1b691a606d0d516a8ca5a Mon Sep 17 00:00:00 2001 From: Michael Lee Date: Tue, 27 Jun 2017 00:25:29 -0700 Subject: [PATCH 04/23] Fix tests and lint errors; make run_tests.py Python 3 only --- .flake8-tests | 2 + .travis.yml | 9 +- typing_extensions/README.md | 38 +- typing_extensions/run_tests.py | 27 +- .../src_py2/test_typing_extensions.py | 26 - .../src_py3/test_typing_extensions.py | 24 +- .../test_data/python-2.7.13/_abcoll.py | 695 ---------------- .../test_data/python-2.7.13/abc.py | 185 ----- .../test_data/python-2.7.13/collections.py | 742 ------------------ 9 files changed, 55 insertions(+), 1693 deletions(-) delete mode 100644 typing_extensions/test_data/python-2.7.13/_abcoll.py delete mode 100644 typing_extensions/test_data/python-2.7.13/abc.py delete mode 100644 typing_extensions/test_data/python-2.7.13/collections.py diff --git a/.flake8-tests b/.flake8-tests index fedc1cbb..3afdb192 100644 --- a/.flake8-tests +++ b/.flake8-tests @@ -15,6 +15,8 @@ ignore = E701, E704, F811, + # Included to allow sys.version_info hack + E402, # irrelevant plugins B3, DW12 diff --git a/.travis.yml b/.travis.yml index 6e40ec93..0b609057 100644 --- a/.travis.yml +++ b/.travis.yml @@ -14,7 +14,12 @@ install: script: - export PYTHONPATH=`python -c "import sys; print('python2' if sys.version.startswith('2') else 'src')"`; py.test $PYTHONPATH - - export PYTHONPATH=`python -c "import sys; print('typing_extensions/src_py2' if sys.version.startswith('2') else 'typing_extensions/src_py3')"`; py.test $PYTHONPATH - - if [[ $TRAVIS_PYTHON_VERSION == '3.6' ]]; then cd typing_extensions; python run_tests.py; fi + - if [[ $TRAVIS_PYTHON_VERSION == '3.6' ]]; then + cd typing_extensions + python run_tests.py + fi + - pip setup.py install + export PYTHONPATH=`python -c "import sys; print('typing_extensions/src_py2' if sys.version.startswith('2') else 'typing_extensions/src_py3')"` + py.test $PYTHONPATH - if [[ $TRAVIS_PYTHON_VERSION == '3.6' ]]; then flake8; fi - if [[ $TRAVIS_PYTHON_VERSION == '3.6' ]]; then flake8 --config=.flake8-tests src/test_typing.py python2/test_typing.py typing_extensions/src_py2/test_typing_extensions.py typing_extensions/src_py3/test_typing_extensions.py; fi diff --git a/typing_extensions/README.md b/typing_extensions/README.md index 77c937f2..94478a37 100644 --- a/typing_extensions/README.md +++ b/typing_extensions/README.md @@ -59,21 +59,33 @@ issues when mixing the differing implementations of modified classes. 
## Running tests
 
-There are two different ways to test this module. The first is to simply run
-each individual Python interpreter against `test_typing_extensions.py` in the
-`src_py2` and `src_py3` folders.
+### Python 2 tests
 
-However, because multiple versions of Python for each individual release
-can be onerous, you can instead run `run_tests.py` using a single Python
-interpreter. The `run_tests.py` file will essentially "modify" the standard
-library by changing `PYTHONPATH` to point to individual folders in the
-`test_data` repo.
+To test Python 2, you should:
+
+1. Install the latest version of typing
+2. Navigate into `src_py2` and run `test_typing_extensions.py`
+
+### Python 3 tests
+
+Similarly, to test Python 3, you can:
+
+1. Install the latest version of typing (for Python 3.4 or earlier)
+2. Navigate into `src_py3` and run `test_typing_extensions.py`
+
+However, you would need to repeat these steps for each version of Python 3
+released since Python 3.5.0 to get full coverage.
+
+Since installing 7+ different versions of Python 3 is an onerous requirement,
+this module provides a second way of running tests that requires only the
+latest version of Python 3 to be installed on your system:
+
+- Run `py -3 run_tests.py` (for Windows)
+- Run `python3 run_tests.py` (for Linux)
+
+The `run_tests.py` file will essentially "modify" the standard library by
+changing `PYTHONPATH` to point to individual folders in the `test_data` directory.
 Each individual folder contains a snapshot of the source code for the
 `collections`, `typing`, and `abc` modules for that given release, letting us
 test `typing` against those particular implementations.
-
-`run_tests.py` will assume that you have Python 3.6.1 and a reasonably
-modern version of Python 2.7 installed on your system, aliased to
-`py -2.7` and `py -3.6` on Windows, and `python` and `python3` on Linux and
-Mac.
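(For illustration only, not part of the patch: a minimal sketch of the
PYTHONPATH shadowing that `run_tests.py` relies on. The snapshot path below is
an assumed example; the `PYVERSION.<version>` argument matches the format
`run_tests.py` passes to the test file.)

    # Sketch: run the tests against one stdlib snapshot. Entries on PYTHONPATH
    # are searched before the standard library, so the snapshot's pure-Python
    # copies of typing, collections, and abc shadow the real ones in the
    # subprocess.
    import os
    import subprocess
    import sys

    snapshot = os.path.abspath("test_data/python-3.5.0")  # assumed snapshot dir
    env = dict(os.environ)
    env["PYTHONPATH"] = snapshot + os.pathsep + env.get("PYTHONPATH", "")
    subprocess.check_call(
        [sys.executable, "test_typing_extensions.py", "PYVERSION.3.5.0"],
        env=env,
    )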
diff --git a/typing_extensions/run_tests.py b/typing_extensions/run_tests.py index a720ea90..a22cb727 100644 --- a/typing_extensions/run_tests.py +++ b/typing_extensions/run_tests.py @@ -10,23 +10,12 @@ import sys import textwrap -CORE_FILES_2 = [ - "./src_py2/typing_extensions.py", - "./src_py2/test_typing_extensions.py" -] -CORE_FILES_3 = [ +CORE_FILES = [ "./src_py3/typing_extensions.py", "./src_py3/test_typing_extensions.py" ] TEST_DIR = "test_data" -if sys.platform.startswith('win32'): - PYTHON2 = "py -2.7" - PYTHON3 = "py -3.6" -else: - PYTHON2 = "python" - PYTHON3 = "python3" - def get_test_dirs() -> List[str]: """Get all folders to test inside TEST_DIR.""" @@ -86,15 +75,17 @@ def main() -> int: exit_code = 0 for test_dir in test_dirs: _, version_number = test_dir.split('-') - py2 = version_number.startswith("2") - print("Testing Python {}".format(version_number)) + test_version = tuple(map(int, version_number.split('.'))) - core_files = CORE_FILES_2 if py2 else CORE_FILES_3 - python_exe = PYTHON2 if py2 else PYTHON3 + if sys.version_info[0] != test_version[0]: + print("Skipping Python {}".format(version_number)) + continue + else: + print("Testing Python {}".format(version_number)) - with temp_copy(core_files, test_dir), change_directory(test_dir): + with temp_copy(CORE_FILES, test_dir), change_directory(test_dir): success, output = run_shell("{} {} {}".format( - python_exe, + sys.executable, "test_typing_extensions.py", "PYVERSION.{}".format(version_number))) if success: diff --git a/typing_extensions/src_py2/test_typing_extensions.py b/typing_extensions/src_py2/test_typing_extensions.py index 8fd3421d..c9f1be2a 100644 --- a/typing_extensions/src_py2/test_typing_extensions.py +++ b/typing_extensions/src_py2/test_typing_extensions.py @@ -9,16 +9,6 @@ import typing import typing_extensions -# Override version info -ORIGINAL_VERSION = sys.version_info -if len(sys.argv) >= 2 and sys.argv[-1].startswith("PYVERSION"): - PYTHON_VERSION = tuple(map(int, sys.argv[-1].split('.')[1:])) - sys.version_info = PYTHON_VERSION - OVERRIDING_VERSION = True -else: - PYTHON_VERSION = ORIGINAL_VERSION - OVERRIDING_VERSION = False - class BaseTestCase(TestCase): def assertIsSubclass(self, cls, class_or_tuple, msg=None): @@ -40,22 +30,6 @@ def clear_caches(self): f() -class EnvironmentTest(BaseTestCase): - @skipUnless(OVERRIDING_VERSION, "Environment tests apply only when overriding") - def test_environment_is_ok(self): - cwd = os.path.abspath(os.getcwd()) - def correct_dir(module): - return os.path.abspath(module.__file__).startswith(cwd) - - self.assertTrue(correct_dir(abc)) - self.assertTrue(correct_dir(collections)) - self.assertTrue(correct_dir(typing_extensions)) - - def test_python_version_is_ok(self): - self.assertTrue(sys.version_info == PYTHON_VERSION) - self.assertTrue(ORIGINAL_VERSION[0] == 2) - - class Employee: pass diff --git a/typing_extensions/src_py3/test_typing_extensions.py b/typing_extensions/src_py3/test_typing_extensions.py index 4054f20f..c091696a 100644 --- a/typing_extensions/src_py3/test_typing_extensions.py +++ b/typing_extensions/src_py3/test_typing_extensions.py @@ -1,9 +1,19 @@ -import os import sys + +# Override version info +ORIGINAL_VERSION = sys.version_info +if len(sys.argv) >= 2 and sys.argv[-1].startswith("PYVERSION"): + PYTHON_VERSION = tuple(map(int, sys.argv[-1].split('.')[1:])) + sys.version_info = PYTHON_VERSION + OVERRIDING_VERSION = True +else: + PYTHON_VERSION = ORIGINAL_VERSION + OVERRIDING_VERSION = False + +import os import abc import contextlib import 
collections - from unittest import TestCase, main, skipUnless from typing import TypeVar, Optional from typing import T, KT, VT # Not in __all__. @@ -21,16 +31,6 @@ TYPING_V3 = sys.version_info >= (3, 5, 3) TYPING_V4 = sys.version_info >= (3, 6, 1) -# Override version info -ORIGINAL_VERSION = sys.version_info -if len(sys.argv) >= 2 and sys.argv[-1].startswith("PYVERSION"): - PYTHON_VERSION = tuple(map(int, sys.argv[-1].split('.')[1:])) - sys.version_info = PYTHON_VERSION - OVERRIDING_VERSION = True -else: - PYTHON_VERSION = ORIGINAL_VERSION - OVERRIDING_VERSION = False - class BaseTestCase(TestCase): def assertIsSubclass(self, cls, class_or_tuple, msg=None): diff --git a/typing_extensions/test_data/python-2.7.13/_abcoll.py b/typing_extensions/test_data/python-2.7.13/_abcoll.py deleted file mode 100644 index b643692e..00000000 --- a/typing_extensions/test_data/python-2.7.13/_abcoll.py +++ /dev/null @@ -1,695 +0,0 @@ -# Copyright 2007 Google, Inc. All Rights Reserved. -# Licensed to PSF under a Contributor Agreement. - -"""Abstract Base Classes (ABCs) for collections, according to PEP 3119. - -DON'T USE THIS MODULE DIRECTLY! The classes here should be imported -via collections; they are defined here only to alleviate certain -bootstrapping issues. Unit tests are in test_collections. -""" - -from abc import ABCMeta, abstractmethod -import sys - -__all__ = ["Hashable", "Iterable", "Iterator", - "Sized", "Container", "Callable", - "Set", "MutableSet", - "Mapping", "MutableMapping", - "MappingView", "KeysView", "ItemsView", "ValuesView", - "Sequence", "MutableSequence", - ] - -### ONE-TRICK PONIES ### - -def _hasattr(C, attr): - try: - return any(attr in B.__dict__ for B in C.__mro__) - except AttributeError: - # Old-style class - return hasattr(C, attr) - - -class Hashable: - __metaclass__ = ABCMeta - - @abstractmethod - def __hash__(self): - return 0 - - @classmethod - def __subclasshook__(cls, C): - if cls is Hashable: - try: - for B in C.__mro__: - if "__hash__" in B.__dict__: - if B.__dict__["__hash__"]: - return True - break - except AttributeError: - # Old-style class - if getattr(C, "__hash__", None): - return True - return NotImplemented - - -class Iterable: - __metaclass__ = ABCMeta - - @abstractmethod - def __iter__(self): - while False: - yield None - - @classmethod - def __subclasshook__(cls, C): - if cls is Iterable: - if _hasattr(C, "__iter__"): - return True - return NotImplemented - -Iterable.register(str) - - -class Iterator(Iterable): - - @abstractmethod - def next(self): - 'Return the next item from the iterator. 
When exhausted, raise StopIteration' - raise StopIteration - - def __iter__(self): - return self - - @classmethod - def __subclasshook__(cls, C): - if cls is Iterator: - if _hasattr(C, "next") and _hasattr(C, "__iter__"): - return True - return NotImplemented - - -class Sized: - __metaclass__ = ABCMeta - - @abstractmethod - def __len__(self): - return 0 - - @classmethod - def __subclasshook__(cls, C): - if cls is Sized: - if _hasattr(C, "__len__"): - return True - return NotImplemented - - -class Container: - __metaclass__ = ABCMeta - - @abstractmethod - def __contains__(self, x): - return False - - @classmethod - def __subclasshook__(cls, C): - if cls is Container: - if _hasattr(C, "__contains__"): - return True - return NotImplemented - - -class Callable: - __metaclass__ = ABCMeta - - @abstractmethod - def __call__(self, *args, **kwds): - return False - - @classmethod - def __subclasshook__(cls, C): - if cls is Callable: - if _hasattr(C, "__call__"): - return True - return NotImplemented - - -### SETS ### - - -class Set(Sized, Iterable, Container): - """A set is a finite, iterable container. - - This class provides concrete generic implementations of all - methods except for __contains__, __iter__ and __len__. - - To override the comparisons (presumably for speed, as the - semantics are fixed), redefine __le__ and __ge__, - then the other operations will automatically follow suit. - """ - - def __le__(self, other): - if not isinstance(other, Set): - return NotImplemented - if len(self) > len(other): - return False - for elem in self: - if elem not in other: - return False - return True - - def __lt__(self, other): - if not isinstance(other, Set): - return NotImplemented - return len(self) < len(other) and self.__le__(other) - - def __gt__(self, other): - if not isinstance(other, Set): - return NotImplemented - return len(self) > len(other) and self.__ge__(other) - - def __ge__(self, other): - if not isinstance(other, Set): - return NotImplemented - if len(self) < len(other): - return False - for elem in other: - if elem not in self: - return False - return True - - def __eq__(self, other): - if not isinstance(other, Set): - return NotImplemented - return len(self) == len(other) and self.__le__(other) - - def __ne__(self, other): - return not (self == other) - - @classmethod - def _from_iterable(cls, it): - '''Construct an instance of the class from any iterable input. - - Must override this method if the class constructor signature - does not accept an iterable for an input. - ''' - return cls(it) - - def __and__(self, other): - if not isinstance(other, Iterable): - return NotImplemented - return self._from_iterable(value for value in other if value in self) - - __rand__ = __and__ - - def isdisjoint(self, other): - 'Return True if two sets have a null intersection.' 
- for value in other: - if value in self: - return False - return True - - def __or__(self, other): - if not isinstance(other, Iterable): - return NotImplemented - chain = (e for s in (self, other) for e in s) - return self._from_iterable(chain) - - __ror__ = __or__ - - def __sub__(self, other): - if not isinstance(other, Set): - if not isinstance(other, Iterable): - return NotImplemented - other = self._from_iterable(other) - return self._from_iterable(value for value in self - if value not in other) - - def __rsub__(self, other): - if not isinstance(other, Set): - if not isinstance(other, Iterable): - return NotImplemented - other = self._from_iterable(other) - return self._from_iterable(value for value in other - if value not in self) - - def __xor__(self, other): - if not isinstance(other, Set): - if not isinstance(other, Iterable): - return NotImplemented - other = self._from_iterable(other) - return (self - other) | (other - self) - - __rxor__ = __xor__ - - # Sets are not hashable by default, but subclasses can change this - __hash__ = None - - def _hash(self): - """Compute the hash value of a set. - - Note that we don't define __hash__: not all sets are hashable. - But if you define a hashable set type, its __hash__ should - call this function. - - This must be compatible __eq__. - - All sets ought to compare equal if they contain the same - elements, regardless of how they are implemented, and - regardless of the order of the elements; so there's not much - freedom for __eq__ or __hash__. We match the algorithm used - by the built-in frozenset type. - """ - MAX = sys.maxint - MASK = 2 * MAX + 1 - n = len(self) - h = 1927868237 * (n + 1) - h &= MASK - for x in self: - hx = hash(x) - h ^= (hx ^ (hx << 16) ^ 89869747) * 3644798167 - h &= MASK - h = h * 69069 + 907133923 - h &= MASK - if h > MAX: - h -= MASK + 1 - if h == -1: - h = 590923713 - return h - -Set.register(frozenset) - - -class MutableSet(Set): - """A mutable set is a finite, iterable container. - - This class provides concrete generic implementations of all - methods except for __contains__, __iter__, __len__, - add(), and discard(). - - To override the comparisons (presumably for speed, as the - semantics are fixed), all you have to do is redefine __le__ and - then the other operations will automatically follow suit. - """ - - @abstractmethod - def add(self, value): - """Add an element.""" - raise NotImplementedError - - @abstractmethod - def discard(self, value): - """Remove an element. Do not raise an exception if absent.""" - raise NotImplementedError - - def remove(self, value): - """Remove an element. If not a member, raise a KeyError.""" - if value not in self: - raise KeyError(value) - self.discard(value) - - def pop(self): - """Return the popped value. Raise KeyError if empty.""" - it = iter(self) - try: - value = next(it) - except StopIteration: - raise KeyError - self.discard(value) - return value - - def clear(self): - """This is slow (creates N new iterators!) 
but effective.""" - try: - while True: - self.pop() - except KeyError: - pass - - def __ior__(self, it): - for value in it: - self.add(value) - return self - - def __iand__(self, it): - for value in (self - it): - self.discard(value) - return self - - def __ixor__(self, it): - if it is self: - self.clear() - else: - if not isinstance(it, Set): - it = self._from_iterable(it) - for value in it: - if value in self: - self.discard(value) - else: - self.add(value) - return self - - def __isub__(self, it): - if it is self: - self.clear() - else: - for value in it: - self.discard(value) - return self - -MutableSet.register(set) - - -### MAPPINGS ### - - -class Mapping(Sized, Iterable, Container): - - """A Mapping is a generic container for associating key/value - pairs. - - This class provides concrete generic implementations of all - methods except for __getitem__, __iter__, and __len__. - - """ - - @abstractmethod - def __getitem__(self, key): - raise KeyError - - def get(self, key, default=None): - 'D.get(k[,d]) -> D[k] if k in D, else d. d defaults to None.' - try: - return self[key] - except KeyError: - return default - - def __contains__(self, key): - try: - self[key] - except KeyError: - return False - else: - return True - - def iterkeys(self): - 'D.iterkeys() -> an iterator over the keys of D' - return iter(self) - - def itervalues(self): - 'D.itervalues() -> an iterator over the values of D' - for key in self: - yield self[key] - - def iteritems(self): - 'D.iteritems() -> an iterator over the (key, value) items of D' - for key in self: - yield (key, self[key]) - - def keys(self): - "D.keys() -> list of D's keys" - return list(self) - - def items(self): - "D.items() -> list of D's (key, value) pairs, as 2-tuples" - return [(key, self[key]) for key in self] - - def values(self): - "D.values() -> list of D's values" - return [self[key] for key in self] - - # Mappings are not hashable by default, but subclasses can change this - __hash__ = None - - def __eq__(self, other): - if not isinstance(other, Mapping): - return NotImplemented - return dict(self.items()) == dict(other.items()) - - def __ne__(self, other): - return not (self == other) - -class MappingView(Sized): - - def __init__(self, mapping): - self._mapping = mapping - - def __len__(self): - return len(self._mapping) - - def __repr__(self): - return '{0.__class__.__name__}({0._mapping!r})'.format(self) - - -class KeysView(MappingView, Set): - - @classmethod - def _from_iterable(self, it): - return set(it) - - def __contains__(self, key): - return key in self._mapping - - def __iter__(self): - for key in self._mapping: - yield key - -KeysView.register(type({}.viewkeys())) - -class ItemsView(MappingView, Set): - - @classmethod - def _from_iterable(self, it): - return set(it) - - def __contains__(self, item): - key, value = item - try: - v = self._mapping[key] - except KeyError: - return False - else: - return v == value - - def __iter__(self): - for key in self._mapping: - yield (key, self._mapping[key]) - -ItemsView.register(type({}.viewitems())) - -class ValuesView(MappingView): - - def __contains__(self, value): - for key in self._mapping: - if value == self._mapping[key]: - return True - return False - - def __iter__(self): - for key in self._mapping: - yield self._mapping[key] - -ValuesView.register(type({}.viewvalues())) - -class MutableMapping(Mapping): - - """A MutableMapping is a generic container for associating - key/value pairs. 
- - This class provides concrete generic implementations of all - methods except for __getitem__, __setitem__, __delitem__, - __iter__, and __len__. - - """ - - @abstractmethod - def __setitem__(self, key, value): - raise KeyError - - @abstractmethod - def __delitem__(self, key): - raise KeyError - - __marker = object() - - def pop(self, key, default=__marker): - '''D.pop(k[,d]) -> v, remove specified key and return the corresponding value. - If key is not found, d is returned if given, otherwise KeyError is raised. - ''' - try: - value = self[key] - except KeyError: - if default is self.__marker: - raise - return default - else: - del self[key] - return value - - def popitem(self): - '''D.popitem() -> (k, v), remove and return some (key, value) pair - as a 2-tuple; but raise KeyError if D is empty. - ''' - try: - key = next(iter(self)) - except StopIteration: - raise KeyError - value = self[key] - del self[key] - return key, value - - def clear(self): - 'D.clear() -> None. Remove all items from D.' - try: - while True: - self.popitem() - except KeyError: - pass - - def update(*args, **kwds): - ''' D.update([E, ]**F) -> None. Update D from mapping/iterable E and F. - If E present and has a .keys() method, does: for k in E: D[k] = E[k] - If E present and lacks .keys() method, does: for (k, v) in E: D[k] = v - In either case, this is followed by: for k, v in F.items(): D[k] = v - ''' - if not args: - raise TypeError("descriptor 'update' of 'MutableMapping' object " - "needs an argument") - self = args[0] - args = args[1:] - if len(args) > 1: - raise TypeError('update expected at most 1 arguments, got %d' % - len(args)) - if args: - other = args[0] - if isinstance(other, Mapping): - for key in other: - self[key] = other[key] - elif hasattr(other, "keys"): - for key in other.keys(): - self[key] = other[key] - else: - for key, value in other: - self[key] = value - for key, value in kwds.items(): - self[key] = value - - def setdefault(self, key, default=None): - 'D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D' - try: - return self[key] - except KeyError: - self[key] = default - return default - -MutableMapping.register(dict) - - -### SEQUENCES ### - - -class Sequence(Sized, Iterable, Container): - """All the operations on a read-only sequence. - - Concrete subclasses must override __new__ or __init__, - __getitem__, and __len__. - """ - - @abstractmethod - def __getitem__(self, index): - raise IndexError - - def __iter__(self): - i = 0 - try: - while True: - v = self[i] - yield v - i += 1 - except IndexError: - return - - def __contains__(self, value): - for v in self: - if v == value: - return True - return False - - def __reversed__(self): - for i in reversed(range(len(self))): - yield self[i] - - def index(self, value): - '''S.index(value) -> integer -- return first index of value. - Raises ValueError if the value is not present. - ''' - for i, v in enumerate(self): - if v == value: - return i - raise ValueError - - def count(self, value): - 'S.count(value) -> integer -- return number of occurrences of value' - return sum(1 for v in self if v == value) - -Sequence.register(tuple) -Sequence.register(basestring) -Sequence.register(buffer) -Sequence.register(xrange) - - -class MutableSequence(Sequence): - - """All the operations on a read-only sequence. - - Concrete subclasses must provide __new__ or __init__, - __getitem__, __setitem__, __delitem__, __len__, and insert(). 
- - """ - - @abstractmethod - def __setitem__(self, index, value): - raise IndexError - - @abstractmethod - def __delitem__(self, index): - raise IndexError - - @abstractmethod - def insert(self, index, value): - 'S.insert(index, object) -- insert object before index' - raise IndexError - - def append(self, value): - 'S.append(object) -- append object to the end of the sequence' - self.insert(len(self), value) - - def reverse(self): - 'S.reverse() -- reverse *IN PLACE*' - n = len(self) - for i in range(n//2): - self[i], self[n-i-1] = self[n-i-1], self[i] - - def extend(self, values): - 'S.extend(iterable) -- extend sequence by appending elements from the iterable' - for v in values: - self.append(v) - - def pop(self, index=-1): - '''S.pop([index]) -> item -- remove and return item at index (default last). - Raise IndexError if list is empty or index is out of range. - ''' - v = self[index] - del self[index] - return v - - def remove(self, value): - '''S.remove(value) -- remove first occurrence of value. - Raise ValueError if the value is not present. - ''' - del self[self.index(value)] - - def __iadd__(self, values): - self.extend(values) - return self - -MutableSequence.register(list) diff --git a/typing_extensions/test_data/python-2.7.13/abc.py b/typing_extensions/test_data/python-2.7.13/abc.py deleted file mode 100644 index 02e48a1b..00000000 --- a/typing_extensions/test_data/python-2.7.13/abc.py +++ /dev/null @@ -1,185 +0,0 @@ -# Copyright 2007 Google, Inc. All Rights Reserved. -# Licensed to PSF under a Contributor Agreement. - -"""Abstract Base Classes (ABCs) according to PEP 3119.""" - -import types - -from _weakrefset import WeakSet - -# Instance of old-style class -class _C: pass -_InstanceType = type(_C()) - - -def abstractmethod(funcobj): - """A decorator indicating abstract methods. - - Requires that the metaclass is ABCMeta or derived from it. A - class that has a metaclass derived from ABCMeta cannot be - instantiated unless all of its abstract methods are overridden. - The abstract methods can be called using any of the normal - 'super' call mechanisms. - - Usage: - - class C: - __metaclass__ = ABCMeta - @abstractmethod - def my_abstract_method(self, ...): - ... - """ - funcobj.__isabstractmethod__ = True - return funcobj - - -class abstractproperty(property): - """A decorator indicating abstract properties. - - Requires that the metaclass is ABCMeta or derived from it. A - class that has a metaclass derived from ABCMeta cannot be - instantiated unless all of its abstract properties are overridden. - The abstract properties can be called using any of the normal - 'super' call mechanisms. - - Usage: - - class C: - __metaclass__ = ABCMeta - @abstractproperty - def my_abstract_property(self): - ... - - This defines a read-only property; you can also define a read-write - abstract property using the 'long' form of property declaration: - - class C: - __metaclass__ = ABCMeta - def getx(self): ... - def setx(self, value): ... - x = abstractproperty(getx, setx) - """ - __isabstractmethod__ = True - - -class ABCMeta(type): - - """Metaclass for defining Abstract Base Classes (ABCs). - - Use this metaclass to create an ABC. An ABC can be subclassed - directly, and then acts as a mix-in class. 
You can also register - unrelated concrete classes (even built-in classes) and unrelated - ABCs as 'virtual subclasses' -- these and their descendants will - be considered subclasses of the registering ABC by the built-in - issubclass() function, but the registering ABC won't show up in - their MRO (Method Resolution Order) nor will method - implementations defined by the registering ABC be callable (not - even via super()). - - """ - - # A global counter that is incremented each time a class is - # registered as a virtual subclass of anything. It forces the - # negative cache to be cleared before its next use. - _abc_invalidation_counter = 0 - - def __new__(mcls, name, bases, namespace): - cls = super(ABCMeta, mcls).__new__(mcls, name, bases, namespace) - # Compute set of abstract method names - abstracts = set(name - for name, value in namespace.items() - if getattr(value, "__isabstractmethod__", False)) - for base in bases: - for name in getattr(base, "__abstractmethods__", set()): - value = getattr(cls, name, None) - if getattr(value, "__isabstractmethod__", False): - abstracts.add(name) - cls.__abstractmethods__ = frozenset(abstracts) - # Set up inheritance registry - cls._abc_registry = WeakSet() - cls._abc_cache = WeakSet() - cls._abc_negative_cache = WeakSet() - cls._abc_negative_cache_version = ABCMeta._abc_invalidation_counter - return cls - - def register(cls, subclass): - """Register a virtual subclass of an ABC.""" - if not isinstance(subclass, (type, types.ClassType)): - raise TypeError("Can only register classes") - if issubclass(subclass, cls): - return # Already a subclass - # Subtle: test for cycles *after* testing for "already a subclass"; - # this means we allow X.register(X) and interpret it as a no-op. - if issubclass(cls, subclass): - # This would create a cycle, which is bad for the algorithm below - raise RuntimeError("Refusing to create an inheritance cycle") - cls._abc_registry.add(subclass) - ABCMeta._abc_invalidation_counter += 1 # Invalidate negative cache - - def _dump_registry(cls, file=None): - """Debug helper to print the ABC registry.""" - print >> file, "Class: %s.%s" % (cls.__module__, cls.__name__) - print >> file, "Inv.counter: %s" % ABCMeta._abc_invalidation_counter - for name in sorted(cls.__dict__.keys()): - if name.startswith("_abc_"): - value = getattr(cls, name) - print >> file, "%s: %r" % (name, value) - - def __instancecheck__(cls, instance): - """Override for isinstance(instance, cls).""" - # Inline the cache checking when it's simple. - subclass = getattr(instance, '__class__', None) - if subclass is not None and subclass in cls._abc_cache: - return True - subtype = type(instance) - # Old-style instances - if subtype is _InstanceType: - subtype = subclass - if subtype is subclass or subclass is None: - if (cls._abc_negative_cache_version == - ABCMeta._abc_invalidation_counter and - subtype in cls._abc_negative_cache): - return False - # Fall back to the subclass check. 
- return cls.__subclasscheck__(subtype) - return (cls.__subclasscheck__(subclass) or - cls.__subclasscheck__(subtype)) - - def __subclasscheck__(cls, subclass): - """Override for issubclass(subclass, cls).""" - # Check cache - if subclass in cls._abc_cache: - return True - # Check negative cache; may have to invalidate - if cls._abc_negative_cache_version < ABCMeta._abc_invalidation_counter: - # Invalidate the negative cache - cls._abc_negative_cache = WeakSet() - cls._abc_negative_cache_version = ABCMeta._abc_invalidation_counter - elif subclass in cls._abc_negative_cache: - return False - # Check the subclass hook - ok = cls.__subclasshook__(subclass) - if ok is not NotImplemented: - assert isinstance(ok, bool) - if ok: - cls._abc_cache.add(subclass) - else: - cls._abc_negative_cache.add(subclass) - return ok - # Check if it's a direct subclass - if cls in getattr(subclass, '__mro__', ()): - cls._abc_cache.add(subclass) - return True - # Check if it's a subclass of a registered class (recursive) - for rcls in cls._abc_registry: - if issubclass(subclass, rcls): - cls._abc_cache.add(subclass) - return True - # Check if it's a subclass of a subclass (recursive) - for scls in cls.__subclasses__(): - if issubclass(subclass, scls): - cls._abc_cache.add(subclass) - return True - # No dice; update negative cache - cls._abc_negative_cache.add(subclass) - return False diff --git a/typing_extensions/test_data/python-2.7.13/collections.py b/typing_extensions/test_data/python-2.7.13/collections.py deleted file mode 100644 index f2ad9726..00000000 --- a/typing_extensions/test_data/python-2.7.13/collections.py +++ /dev/null @@ -1,742 +0,0 @@ -'''This module implements specialized container datatypes providing -alternatives to Python's general purpose built-in containers, dict, -list, set, and tuple. - -* namedtuple factory function for creating tuple subclasses with named fields -* deque list-like container with fast appends and pops on either end -* Counter dict subclass for counting hashable objects -* OrderedDict dict subclass that remembers the order entries were added -* defaultdict dict subclass that calls a factory function to supply missing values - -''' - -__all__ = ['Counter', 'deque', 'defaultdict', 'namedtuple', 'OrderedDict'] -# For bootstrapping reasons, the collection ABCs are defined in _abcoll.py. -# They should however be considered an integral part of collections.py. -from _abcoll import * -import _abcoll -__all__ += _abcoll.__all__ - -from _collections import deque, defaultdict -from operator import itemgetter as _itemgetter, eq as _eq -from keyword import iskeyword as _iskeyword -import sys as _sys -import heapq as _heapq -from itertools import repeat as _repeat, chain as _chain, starmap as _starmap -from itertools import imap as _imap - -try: - from thread import get_ident as _get_ident -except ImportError: - from dummy_thread import get_ident as _get_ident - - -################################################################################ -### OrderedDict -################################################################################ - -class OrderedDict(dict): - 'Dictionary that remembers insertion order' - # An inherited dict maps keys to values. - # The inherited dict provides __getitem__, __len__, __contains__, and get. - # The remaining methods are order-aware. - # Big-O running times for all methods are the same as regular dictionaries. - - # The internal self.__map dict maps keys to links in a doubly linked list. 
- # The circular doubly linked list starts and ends with a sentinel element. - # The sentinel element never gets deleted (this simplifies the algorithm). - # Each link is stored as a list of length three: [PREV, NEXT, KEY]. - - def __init__(*args, **kwds): - '''Initialize an ordered dictionary. The signature is the same as - regular dictionaries, but keyword arguments are not recommended because - their insertion order is arbitrary. - - ''' - if not args: - raise TypeError("descriptor '__init__' of 'OrderedDict' object " - "needs an argument") - self = args[0] - args = args[1:] - if len(args) > 1: - raise TypeError('expected at most 1 arguments, got %d' % len(args)) - try: - self.__root - except AttributeError: - self.__root = root = [] # sentinel node - root[:] = [root, root, None] - self.__map = {} - self.__update(*args, **kwds) - - def __setitem__(self, key, value, dict_setitem=dict.__setitem__): - 'od.__setitem__(i, y) <==> od[i]=y' - # Setting a new item creates a new link at the end of the linked list, - # and the inherited dictionary is updated with the new key/value pair. - if key not in self: - root = self.__root - last = root[0] - last[1] = root[0] = self.__map[key] = [last, root, key] - return dict_setitem(self, key, value) - - def __delitem__(self, key, dict_delitem=dict.__delitem__): - 'od.__delitem__(y) <==> del od[y]' - # Deleting an existing item uses self.__map to find the link which gets - # removed by updating the links in the predecessor and successor nodes. - dict_delitem(self, key) - link_prev, link_next, _ = self.__map.pop(key) - link_prev[1] = link_next # update link_prev[NEXT] - link_next[0] = link_prev # update link_next[PREV] - - def __iter__(self): - 'od.__iter__() <==> iter(od)' - # Traverse the linked list in order. - root = self.__root - curr = root[1] # start at the first node - while curr is not root: - yield curr[2] # yield the curr[KEY] - curr = curr[1] # move to next node - - def __reversed__(self): - 'od.__reversed__() <==> reversed(od)' - # Traverse the linked list in reverse order. - root = self.__root - curr = root[0] # start at the last node - while curr is not root: - yield curr[2] # yield the curr[KEY] - curr = curr[0] # move to previous node - - def clear(self): - 'od.clear() -> None. Remove all items from od.' - root = self.__root - root[:] = [root, root, None] - self.__map.clear() - dict.clear(self) - - # -- the following methods do not depend on the internal structure -- - - def keys(self): - 'od.keys() -> list of keys in od' - return list(self) - - def values(self): - 'od.values() -> list of values in od' - return [self[key] for key in self] - - def items(self): - 'od.items() -> list of (key, value) pairs in od' - return [(key, self[key]) for key in self] - - def iterkeys(self): - 'od.iterkeys() -> an iterator over the keys in od' - return iter(self) - - def itervalues(self): - 'od.itervalues -> an iterator over the values in od' - for k in self: - yield self[k] - - def iteritems(self): - 'od.iteritems -> an iterator over the (key, value) pairs in od' - for k in self: - yield (k, self[k]) - - update = MutableMapping.update - - __update = update # let subclasses override update without breaking __init__ - - __marker = object() - - def pop(self, key, default=__marker): - '''od.pop(k[,d]) -> v, remove specified key and return the corresponding - value. If key is not found, d is returned if given, otherwise KeyError - is raised. 
- - ''' - if key in self: - result = self[key] - del self[key] - return result - if default is self.__marker: - raise KeyError(key) - return default - - def setdefault(self, key, default=None): - 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' - if key in self: - return self[key] - self[key] = default - return default - - def popitem(self, last=True): - '''od.popitem() -> (k, v), return and remove a (key, value) pair. - Pairs are returned in LIFO order if last is true or FIFO order if false. - - ''' - if not self: - raise KeyError('dictionary is empty') - key = next(reversed(self) if last else iter(self)) - value = self.pop(key) - return key, value - - def __repr__(self, _repr_running={}): - 'od.__repr__() <==> repr(od)' - call_key = id(self), _get_ident() - if call_key in _repr_running: - return '...' - _repr_running[call_key] = 1 - try: - if not self: - return '%s()' % (self.__class__.__name__,) - return '%s(%r)' % (self.__class__.__name__, self.items()) - finally: - del _repr_running[call_key] - - def __reduce__(self): - 'Return state information for pickling' - items = [[k, self[k]] for k in self] - inst_dict = vars(self).copy() - for k in vars(OrderedDict()): - inst_dict.pop(k, None) - if inst_dict: - return (self.__class__, (items,), inst_dict) - return self.__class__, (items,) - - def copy(self): - 'od.copy() -> a shallow copy of od' - return self.__class__(self) - - @classmethod - def fromkeys(cls, iterable, value=None): - '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S. - If not specified, the value defaults to None. - - ''' - self = cls() - for key in iterable: - self[key] = value - return self - - def __eq__(self, other): - '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive - while comparison to a regular mapping is order-insensitive. 
- - ''' - if isinstance(other, OrderedDict): - return dict.__eq__(self, other) and all(_imap(_eq, self, other)) - return dict.__eq__(self, other) - - def __ne__(self, other): - 'od.__ne__(y) <==> od!=y' - return not self == other - - # -- the following methods support python 3.x style dictionary views -- - - def viewkeys(self): - "od.viewkeys() -> a set-like object providing a view on od's keys" - return KeysView(self) - - def viewvalues(self): - "od.viewvalues() -> an object providing a view on od's values" - return ValuesView(self) - - def viewitems(self): - "od.viewitems() -> a set-like object providing a view on od's items" - return ItemsView(self) - - -################################################################################ -### namedtuple -################################################################################ - -_class_template = '''\ -class {typename}(tuple): - '{typename}({arg_list})' - - __slots__ = () - - _fields = {field_names!r} - - def __new__(_cls, {arg_list}): - 'Create new instance of {typename}({arg_list})' - return _tuple.__new__(_cls, ({arg_list})) - - @classmethod - def _make(cls, iterable, new=tuple.__new__, len=len): - 'Make a new {typename} object from a sequence or iterable' - result = new(cls, iterable) - if len(result) != {num_fields:d}: - raise TypeError('Expected {num_fields:d} arguments, got %d' % len(result)) - return result - - def __repr__(self): - 'Return a nicely formatted representation string' - return '{typename}({repr_fmt})' % self - - def _asdict(self): - 'Return a new OrderedDict which maps field names to their values' - return OrderedDict(zip(self._fields, self)) - - def _replace(_self, **kwds): - 'Return a new {typename} object replacing specified fields with new values' - result = _self._make(map(kwds.pop, {field_names!r}, _self)) - if kwds: - raise ValueError('Got unexpected field names: %r' % kwds.keys()) - return result - - def __getnewargs__(self): - 'Return self as a plain tuple. Used by copy and pickle.' - return tuple(self) - - __dict__ = _property(_asdict) - - def __getstate__(self): - 'Exclude the OrderedDict from pickling' - pass - -{field_defs} -''' - -_repr_template = '{name}=%r' - -_field_template = '''\ - {name} = _property(_itemgetter({index:d}), doc='Alias for field number {index:d}') -''' - -def namedtuple(typename, field_names, verbose=False, rename=False): - """Returns a new subclass of tuple with named fields. - - >>> Point = namedtuple('Point', ['x', 'y']) - >>> Point.__doc__ # docstring for the new class - 'Point(x, y)' - >>> p = Point(11, y=22) # instantiate with positional args or keywords - >>> p[0] + p[1] # indexable like a plain tuple - 33 - >>> x, y = p # unpack like a regular tuple - >>> x, y - (11, 22) - >>> p.x + p.y # fields also accessible by name - 33 - >>> d = p._asdict() # convert to a dictionary - >>> d['x'] - 11 - >>> Point(**d) # convert from a dictionary - Point(x=11, y=22) - >>> p._replace(x=100) # _replace() is like str.replace() but targets named fields - Point(x=100, y=22) - - """ - - # Validate the field names. At the user's option, either generate an error - # message or automatically replace the field name with a valid name. 
- if isinstance(field_names, basestring): - field_names = field_names.replace(',', ' ').split() - field_names = map(str, field_names) - typename = str(typename) - if rename: - seen = set() - for index, name in enumerate(field_names): - if (not all(c.isalnum() or c=='_' for c in name) - or _iskeyword(name) - or not name - or name[0].isdigit() - or name.startswith('_') - or name in seen): - field_names[index] = '_%d' % index - seen.add(name) - for name in [typename] + field_names: - if type(name) != str: - raise TypeError('Type names and field names must be strings') - if not all(c.isalnum() or c=='_' for c in name): - raise ValueError('Type names and field names can only contain ' - 'alphanumeric characters and underscores: %r' % name) - if _iskeyword(name): - raise ValueError('Type names and field names cannot be a ' - 'keyword: %r' % name) - if name[0].isdigit(): - raise ValueError('Type names and field names cannot start with ' - 'a number: %r' % name) - seen = set() - for name in field_names: - if name.startswith('_') and not rename: - raise ValueError('Field names cannot start with an underscore: ' - '%r' % name) - if name in seen: - raise ValueError('Encountered duplicate field name: %r' % name) - seen.add(name) - - # Fill-in the class template - class_definition = _class_template.format( - typename = typename, - field_names = tuple(field_names), - num_fields = len(field_names), - arg_list = repr(tuple(field_names)).replace("'", "")[1:-1], - repr_fmt = ', '.join(_repr_template.format(name=name) - for name in field_names), - field_defs = '\n'.join(_field_template.format(index=index, name=name) - for index, name in enumerate(field_names)) - ) - if verbose: - print class_definition - - # Execute the template string in a temporary namespace and support - # tracing utilities by setting a value for frame.f_globals['__name__'] - namespace = dict(_itemgetter=_itemgetter, __name__='namedtuple_%s' % typename, - OrderedDict=OrderedDict, _property=property, _tuple=tuple) - try: - exec class_definition in namespace - except SyntaxError as e: - raise SyntaxError(e.message + ':\n' + class_definition) - result = namespace[typename] - - # For pickling to work, the __module__ variable needs to be set to the frame - # where the named tuple is created. Bypass this step in environments where - # sys._getframe is not defined (Jython for example) or sys._getframe is not - # defined for arguments greater than 0 (IronPython). - try: - result.__module__ = _sys._getframe(1).f_globals.get('__name__', '__main__') - except (AttributeError, ValueError): - pass - - return result - - -######################################################################## -### Counter -######################################################################## - -class Counter(dict): - '''Dict subclass for counting hashable items. Sometimes called a bag - or multiset. Elements are stored as dictionary keys and their counts - are stored as dictionary values. - - >>> c = Counter('abcdeabcdabcaba') # count elements from a string - - >>> c.most_common(3) # three most common elements - [('a', 5), ('b', 4), ('c', 3)] - >>> sorted(c) # list all unique elements - ['a', 'b', 'c', 'd', 'e'] - >>> ''.join(sorted(c.elements())) # list elements with repetitions - 'aaaaabbbbcccdde' - >>> sum(c.values()) # total of all counts - 15 - - >>> c['a'] # count of letter 'a' - 5 - >>> for elem in 'shazam': # update counts from an iterable - ... 
c[elem] += 1 # by adding 1 to each element's count - >>> c['a'] # now there are seven 'a' - 7 - >>> del c['b'] # remove all 'b' - >>> c['b'] # now there are zero 'b' - 0 - - >>> d = Counter('simsalabim') # make another counter - >>> c.update(d) # add in the second counter - >>> c['a'] # now there are nine 'a' - 9 - - >>> c.clear() # empty the counter - >>> c - Counter() - - Note: If a count is set to zero or reduced to zero, it will remain - in the counter until the entry is deleted or the counter is cleared: - - >>> c = Counter('aaabbc') - >>> c['b'] -= 2 # reduce the count of 'b' by two - >>> c.most_common() # 'b' is still in, but its count is zero - [('a', 3), ('c', 1), ('b', 0)] - - ''' - # References: - # http://en.wikipedia.org/wiki/Multiset - # http://www.gnu.org/software/smalltalk/manual-base/html_node/Bag.html - # http://www.demo2s.com/Tutorial/Cpp/0380__set-multiset/Catalog0380__set-multiset.htm - # http://code.activestate.com/recipes/259174/ - # Knuth, TAOCP Vol. II section 4.6.3 - - def __init__(*args, **kwds): - '''Create a new, empty Counter object. And if given, count elements - from an input iterable. Or, initialize the count from another mapping - of elements to their counts. - - >>> c = Counter() # a new, empty counter - >>> c = Counter('gallahad') # a new counter from an iterable - >>> c = Counter({'a': 4, 'b': 2}) # a new counter from a mapping - >>> c = Counter(a=4, b=2) # a new counter from keyword args - - ''' - if not args: - raise TypeError("descriptor '__init__' of 'Counter' object " - "needs an argument") - self = args[0] - args = args[1:] - if len(args) > 1: - raise TypeError('expected at most 1 arguments, got %d' % len(args)) - super(Counter, self).__init__() - self.update(*args, **kwds) - - def __missing__(self, key): - 'The count of elements not in the Counter is zero.' - # Needed so that self[missing_item] does not raise KeyError - return 0 - - def most_common(self, n=None): - '''List the n most common elements and their counts from the most - common to the least. If n is None, then list all element counts. - - >>> Counter('abcdeabcdabcaba').most_common(3) - [('a', 5), ('b', 4), ('c', 3)] - - ''' - # Emulate Bag.sortedByCount from Smalltalk - if n is None: - return sorted(self.iteritems(), key=_itemgetter(1), reverse=True) - return _heapq.nlargest(n, self.iteritems(), key=_itemgetter(1)) - - def elements(self): - '''Iterator over elements repeating each as many times as its count. - - >>> c = Counter('ABCABC') - >>> sorted(c.elements()) - ['A', 'A', 'B', 'B', 'C', 'C'] - - # Knuth's example for prime factors of 1836: 2**2 * 3**3 * 17**1 - >>> prime_factors = Counter({2: 2, 3: 3, 17: 1}) - >>> product = 1 - >>> for factor in prime_factors.elements(): # loop over factors - ... product *= factor # and multiply them - >>> product - 1836 - - Note, if an element's count has been set to zero or is a negative - number, elements() will ignore it. - - ''' - # Emulate Bag.do from Smalltalk and Multiset.begin from C++. - return _chain.from_iterable(_starmap(_repeat, self.iteritems())) - - # Override dict methods where necessary - - @classmethod - def fromkeys(cls, iterable, v=None): - # There is no equivalent method for counters because setting v=1 - # means that no element can have a count greater than one. - raise NotImplementedError( - 'Counter.fromkeys() is undefined. Use Counter(iterable) instead.') - - def update(*args, **kwds): - '''Like dict.update() but add counts instead of replacing them. 
- - Source can be an iterable, a dictionary, or another Counter instance. - - >>> c = Counter('which') - >>> c.update('witch') # add elements from another iterable - >>> d = Counter('watch') - >>> c.update(d) # add elements from another counter - >>> c['h'] # four 'h' in which, witch, and watch - 4 - - ''' - # The regular dict.update() operation makes no sense here because the - # replace behavior results in the some of original untouched counts - # being mixed-in with all of the other counts for a mismash that - # doesn't have a straight-forward interpretation in most counting - # contexts. Instead, we implement straight-addition. Both the inputs - # and outputs are allowed to contain zero and negative counts. - - if not args: - raise TypeError("descriptor 'update' of 'Counter' object " - "needs an argument") - self = args[0] - args = args[1:] - if len(args) > 1: - raise TypeError('expected at most 1 arguments, got %d' % len(args)) - iterable = args[0] if args else None - if iterable is not None: - if isinstance(iterable, Mapping): - if self: - self_get = self.get - for elem, count in iterable.iteritems(): - self[elem] = self_get(elem, 0) + count - else: - super(Counter, self).update(iterable) # fast path when counter is empty - else: - self_get = self.get - for elem in iterable: - self[elem] = self_get(elem, 0) + 1 - if kwds: - self.update(kwds) - - def subtract(*args, **kwds): - '''Like dict.update() but subtracts counts instead of replacing them. - Counts can be reduced below zero. Both the inputs and outputs are - allowed to contain zero and negative counts. - - Source can be an iterable, a dictionary, or another Counter instance. - - >>> c = Counter('which') - >>> c.subtract('witch') # subtract elements from another iterable - >>> c.subtract(Counter('watch')) # subtract elements from another counter - >>> c['h'] # 2 in which, minus 1 in witch, minus 1 in watch - 0 - >>> c['w'] # 1 in which, minus 1 in witch, minus 1 in watch - -1 - - ''' - if not args: - raise TypeError("descriptor 'subtract' of 'Counter' object " - "needs an argument") - self = args[0] - args = args[1:] - if len(args) > 1: - raise TypeError('expected at most 1 arguments, got %d' % len(args)) - iterable = args[0] if args else None - if iterable is not None: - self_get = self.get - if isinstance(iterable, Mapping): - for elem, count in iterable.items(): - self[elem] = self_get(elem, 0) - count - else: - for elem in iterable: - self[elem] = self_get(elem, 0) - 1 - if kwds: - self.subtract(kwds) - - def copy(self): - 'Return a shallow copy.' - return self.__class__(self) - - def __reduce__(self): - return self.__class__, (dict(self),) - - def __delitem__(self, elem): - 'Like dict.__delitem__() but does not raise KeyError for missing values.' - if elem in self: - super(Counter, self).__delitem__(elem) - - def __repr__(self): - if not self: - return '%s()' % self.__class__.__name__ - items = ', '.join(map('%r: %r'.__mod__, self.most_common())) - return '%s({%s})' % (self.__class__.__name__, items) - - # Multiset-style mathematical operations discussed in: - # Knuth TAOCP Volume II section 4.6.3 exercise 19 - # and at http://en.wikipedia.org/wiki/Multiset - # - # Outputs guaranteed to only include positive counts. - # - # To strip negative and zero counts, add-in an empty counter: - # c += Counter() - - def __add__(self, other): - '''Add counts from two counters. 
- - >>> Counter('abbb') + Counter('bcc') - Counter({'b': 4, 'c': 2, 'a': 1}) - - ''' - if not isinstance(other, Counter): - return NotImplemented - result = Counter() - for elem, count in self.items(): - newcount = count + other[elem] - if newcount > 0: - result[elem] = newcount - for elem, count in other.items(): - if elem not in self and count > 0: - result[elem] = count - return result - - def __sub__(self, other): - ''' Subtract count, but keep only results with positive counts. - - >>> Counter('abbbc') - Counter('bccd') - Counter({'b': 2, 'a': 1}) - - ''' - if not isinstance(other, Counter): - return NotImplemented - result = Counter() - for elem, count in self.items(): - newcount = count - other[elem] - if newcount > 0: - result[elem] = newcount - for elem, count in other.items(): - if elem not in self and count < 0: - result[elem] = 0 - count - return result - - def __or__(self, other): - '''Union is the maximum of value in either of the input counters. - - >>> Counter('abbb') | Counter('bcc') - Counter({'b': 3, 'c': 2, 'a': 1}) - - ''' - if not isinstance(other, Counter): - return NotImplemented - result = Counter() - for elem, count in self.items(): - other_count = other[elem] - newcount = other_count if count < other_count else count - if newcount > 0: - result[elem] = newcount - for elem, count in other.items(): - if elem not in self and count > 0: - result[elem] = count - return result - - def __and__(self, other): - ''' Intersection is the minimum of corresponding counts. - - >>> Counter('abbb') & Counter('bcc') - Counter({'b': 1}) - - ''' - if not isinstance(other, Counter): - return NotImplemented - result = Counter() - for elem, count in self.items(): - other_count = other[elem] - newcount = count if count < other_count else other_count - if newcount > 0: - result[elem] = newcount - return result - - -if __name__ == '__main__': - # verify that instances can be pickled - from cPickle import loads, dumps - Point = namedtuple('Point', 'x, y', True) - p = Point(x=10, y=20) - assert p == loads(dumps(p)) - - # test and demonstrate ability to override methods - class Point(namedtuple('Point', 'x y')): - __slots__ = () - @property - def hypot(self): - return (self.x ** 2 + self.y ** 2) ** 0.5 - def __str__(self): - return 'Point: x=%6.3f y=%6.3f hypot=%6.3f' % (self.x, self.y, self.hypot) - - for p in Point(3, 4), Point(14, 5/7.): - print p - - class Point(namedtuple('Point', 'x y')): - 'Point class with optimized _make() and _replace() without error-checking' - __slots__ = () - _make = classmethod(tuple.__new__) - def _replace(self, _map=map, **kwds): - return self._make(_map(kwds.get, ('x', 'y'), self)) - - print Point(11, 22)._replace(x=100) - - Point3D = namedtuple('Point3D', Point._fields + ('z',)) - print Point3D.__doc__ - - import doctest - TestResults = namedtuple('TestResults', 'failed attempted') - print TestResults(*doctest.testmod()) From 449a339b7525716a4ee8f37a2b58213de551e32b Mon Sep 17 00:00:00 2001 From: Michael Lee Date: Tue, 27 Jun 2017 00:31:36 -0700 Subject: [PATCH 05/23] Fix typo in .travis.yml --- .travis.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.travis.yml b/.travis.yml index 0b609057..4ee3daa9 100644 --- a/.travis.yml +++ b/.travis.yml @@ -15,11 +15,11 @@ install: script: - export PYTHONPATH=`python -c "import sys; print('python2' if sys.version.startswith('2') else 'src')"`; py.test $PYTHONPATH - if [[ $TRAVIS_PYTHON_VERSION == '3.6' ]]; then - cd typing_extensions - python run_tests.py + cd typing_extensions; + 
python run_tests.py; fi - - pip setup.py install - export PYTHONPATH=`python -c "import sys; print('typing_extensions/src_py2' if sys.version.startswith('2') else 'typing_extensions/src_py3')"` - py.test $PYTHONPATH + - python setup.py install; + export PYTHONPATH=`python -c "import sys; print('typing_extensions/src_py2' if sys.version.startswith('2') else 'typing_extensions/src_py3')"`; + py.test $PYTHONPATH; - if [[ $TRAVIS_PYTHON_VERSION == '3.6' ]]; then flake8; fi - if [[ $TRAVIS_PYTHON_VERSION == '3.6' ]]; then flake8 --config=.flake8-tests src/test_typing.py python2/test_typing.py typing_extensions/src_py2/test_typing_extensions.py typing_extensions/src_py3/test_typing_extensions.py; fi From baa6ba70b91ee3cde9bcfea91f34dfec224ef28c Mon Sep 17 00:00:00 2001 From: Michael Lee Date: Tue, 27 Jun 2017 01:16:17 -0700 Subject: [PATCH 06/23] Try making travis run on each minor version --- .travis.yml | 12 ++++++------ typing_extensions/src_py3/test_typing_extensions.py | 5 ++++- 2 files changed, 10 insertions(+), 7 deletions(-) diff --git a/.travis.yml b/.travis.yml index 4ee3daa9..aae74db4 100644 --- a/.travis.yml +++ b/.travis.yml @@ -3,8 +3,12 @@ language: python python: - "nightly" - "3.7-dev" - - "3.6" - - "3.5" + - "3.6.1" + - "3.6.0" + - "3.5.3" + - "3.5.2" + - "3.5.1" + - "3.5.0" - "3.4" - "3.3" - "2.7" @@ -14,10 +18,6 @@ install: script: - export PYTHONPATH=`python -c "import sys; print('python2' if sys.version.startswith('2') else 'src')"`; py.test $PYTHONPATH - - if [[ $TRAVIS_PYTHON_VERSION == '3.6' ]]; then - cd typing_extensions; - python run_tests.py; - fi - python setup.py install; export PYTHONPATH=`python -c "import sys; print('typing_extensions/src_py2' if sys.version.startswith('2') else 'typing_extensions/src_py3')"`; py.test $PYTHONPATH; diff --git a/typing_extensions/src_py3/test_typing_extensions.py b/typing_extensions/src_py3/test_typing_extensions.py index c091696a..ebbc7503 100644 --- a/typing_extensions/src_py3/test_typing_extensions.py +++ b/typing_extensions/src_py3/test_typing_extensions.py @@ -24,7 +24,10 @@ from typing_extensions import NoReturn, ClassVar, Type, NewType import typing import typing_extensions -import collections.abc as collections_abc +try: + import collections.abc as collections_abc +except ImportError: + import collections as collections_abc # Fallback for PY3.2. 
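The try/except import in the hunk above is the usual guard for interpreters that predate the collections.abc submodule (added in Python 3.3): after it runs, every ABC is reached through the single collections_abc alias, so call sites stay version-agnostic. A minimal sketch of the pattern:

    try:
        import collections.abc as collections_abc
    except ImportError:                 # pre-3.3 layout
        import collections as collections_abc

    def is_mapping(obj):
        # Works whether the ABCs live in collections or collections.abc.
        return isinstance(obj, collections_abc.Mapping)

    assert is_mapping({}) and not is_mapping([])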
import _collections_abc TYPING_V2 = sys.version_info >= (3, 5, 1) From 5f55de20578dfb1753e1f0cdd33b89f56df2ba40 Mon Sep 17 00:00:00 2001 From: Michael Lee Date: Tue, 27 Jun 2017 01:25:39 -0700 Subject: [PATCH 07/23] Experiment with removing sys.argv hacks --- .../src_py3/test_typing_extensions.py | 43 +++++-------------- 1 file changed, 10 insertions(+), 33 deletions(-) diff --git a/typing_extensions/src_py3/test_typing_extensions.py b/typing_extensions/src_py3/test_typing_extensions.py index ebbc7503..6893a597 100644 --- a/typing_extensions/src_py3/test_typing_extensions.py +++ b/typing_extensions/src_py3/test_typing_extensions.py @@ -1,14 +1,14 @@ import sys # Override version info -ORIGINAL_VERSION = sys.version_info -if len(sys.argv) >= 2 and sys.argv[-1].startswith("PYVERSION"): - PYTHON_VERSION = tuple(map(int, sys.argv[-1].split('.')[1:])) - sys.version_info = PYTHON_VERSION - OVERRIDING_VERSION = True -else: - PYTHON_VERSION = ORIGINAL_VERSION - OVERRIDING_VERSION = False +#ORIGINAL_VERSION = sys.version_info +#if len(sys.argv) >= 2 and sys.argv[-1].startswith("PYVERSION"): +# PYTHON_VERSION = tuple(map(int, sys.argv[-1].split('.')[1:])) +# sys.version_info = PYTHON_VERSION +# OVERRIDING_VERSION = True +#else: +# PYTHON_VERSION = ORIGINAL_VERSION +# OVERRIDING_VERSION = False import os import abc @@ -28,7 +28,6 @@ import collections.abc as collections_abc except ImportError: import collections as collections_abc # Fallback for PY3.2. -import _collections_abc TYPING_V2 = sys.version_info >= (3, 5, 1) TYPING_V3 = sys.version_info >= (3, 5, 3) @@ -55,29 +54,6 @@ def clear_caches(self): f() -class EnvironmentTest(BaseTestCase): - @skipUnless(OVERRIDING_VERSION, "Environment tests apply only when overriding") - def test_environment_is_ok(self): - cwd = os.path.abspath(os.getcwd()) - def correct_dir(module): - return os.path.abspath(module.__file__).startswith(cwd) - - self.assertTrue(correct_dir(abc)) - self.assertTrue(correct_dir(collections)) - self.assertTrue(correct_dir(collections_abc)) - self.assertTrue(correct_dir(_collections_abc)) - self.assertTrue(correct_dir(typing_extensions)) - - # Use system-installed version for other Python versions - if sys.version_info >= (3, 5, 0): - self.assertTrue(correct_dir(typing)) - - def test_python_version_is_ok(self): - self.assertTrue(sys.version_info == PYTHON_VERSION) - self.assertTrue(sys.version_info <= ORIGINAL_VERSION) - self.assertTrue(ORIGINAL_VERSION[0] == 3) - - class Employee: pass @@ -667,4 +643,5 @@ def test_typing_extensions_defers_when_possible(self): if __name__ == '__main__': - main(argv=[sys.argv[0]] + sys.argv[2:]) + # main(argv=[sys.argv[0]] + sys.argv[2:]) + main() From d9d10a5d2ee794f40f675cd6d96673c4cd57a3d4 Mon Sep 17 00:00:00 2001 From: Michael Lee Date: Tue, 27 Jun 2017 02:27:39 -0700 Subject: [PATCH 08/23] Correctly use sys.version_info --- typing_extensions/src_py3/test_typing_extensions.py | 2 +- typing_extensions/src_py3/typing_extensions.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/typing_extensions/src_py3/test_typing_extensions.py b/typing_extensions/src_py3/test_typing_extensions.py index 6893a597..e300409e 100644 --- a/typing_extensions/src_py3/test_typing_extensions.py +++ b/typing_extensions/src_py3/test_typing_extensions.py @@ -572,7 +572,7 @@ def new_user(user_class: Type[U]) -> U: new_user(BasicUser) - @skipUnless(sys.version_info != (3, 5, 2), + @skipUnless(sys.version_info[:3] != (3, 5, 2), 'Python 3.5.2 has a somewhat buggy Type impl') def test_type_optional(self): A = 
Optional[Type[BaseException]] diff --git a/typing_extensions/src_py3/typing_extensions.py b/typing_extensions/src_py3/typing_extensions.py index ca8cf363..32f6fc9f 100644 --- a/typing_extensions/src_py3/typing_extensions.py +++ b/typing_extensions/src_py3/typing_extensions.py @@ -512,7 +512,7 @@ def __new__(cls, *args, **kwds): if hasattr(typing, 'Counter'): Counter = typing.Counter -elif (3, 5, 0) <= sys.version_info <= (3, 5, 1): +elif (3, 5, 0) <= sys.version_info[:3] <= (3, 5, 1): _TInt = typing.TypeVar('_TInt') class CounterMeta(typing.GenericMeta): From 2746faeb9c001b8d3795db71f82341a9fe12100c Mon Sep 17 00:00:00 2001 From: Michael Lee Date: Tue, 27 Jun 2017 02:36:19 -0700 Subject: [PATCH 09/23] Cleanup lingering test changes --- .flake8-tests | 2 - typing_extensions/README.md | 33 +- typing_extensions/run_tests.py | 100 - .../src_py3/test_typing_extensions.py | 11 - .../python-3.4.6/_collections_abc.py | 748 ------ .../test_data/python-3.4.6/abc.py | 248 -- .../python-3.4.6/collections/__init__.py | 1161 -------- .../python-3.4.6/collections/__main__.py | 38 - .../test_data/python-3.4.6/collections/abc.py | 2 - .../python-3.5.0/_collections_abc.py | 939 ------- .../test_data/python-3.5.0/abc.py | 248 -- .../python-3.5.0/collections/__init__.py | 1217 --------- .../python-3.5.0/collections/__main__.py | 38 - .../test_data/python-3.5.0/collections/abc.py | 2 - .../test_data/python-3.5.0/typing.py | 1648 ------------ .../python-3.5.1/_collections_abc.py | 939 ------- .../test_data/python-3.5.1/abc.py | 248 -- .../python-3.5.1/collections/__init__.py | 1222 --------- .../python-3.5.1/collections/__main__.py | 38 - .../test_data/python-3.5.1/collections/abc.py | 2 - .../test_data/python-3.5.1/typing.py | 1656 ------------ .../python-3.5.2/_collections_abc.py | 939 ------- .../test_data/python-3.5.2/abc.py | 248 -- .../python-3.5.2/collections/__init__.py | 1239 --------- .../python-3.5.2/collections/__main__.py | 38 - .../test_data/python-3.5.2/collections/abc.py | 2 - .../test_data/python-3.5.2/typing.py | 1843 ------------- .../python-3.5.3/_collections_abc.py | 941 ------- .../test_data/python-3.5.3/abc.py | 248 -- .../python-3.5.3/collections/__init__.py | 1242 --------- .../python-3.5.3/collections/__main__.py | 38 - .../test_data/python-3.5.3/collections/abc.py | 2 - .../test_data/python-3.5.3/typing.py | 2160 --------------- .../python-3.6.0/_collections_abc.py | 1007 ------- .../test_data/python-3.6.0/abc.py | 248 -- .../python-3.6.0/collections/__init__.py | 1243 --------- .../test_data/python-3.6.0/collections/abc.py | 2 - .../test_data/python-3.6.0/typing.py | 2160 --------------- .../python-3.6.1/_collections_abc.py | 1007 ------- .../test_data/python-3.6.1/abc.py | 248 -- .../python-3.6.1/collections/__init__.py | 1246 --------- .../test_data/python-3.6.1/collections/abc.py | 2 - .../test_data/python-3.6.1/typing.py | 2335 ----------------- 43 files changed, 4 insertions(+), 28974 deletions(-) delete mode 100644 typing_extensions/run_tests.py delete mode 100644 typing_extensions/test_data/python-3.4.6/_collections_abc.py delete mode 100644 typing_extensions/test_data/python-3.4.6/abc.py delete mode 100644 typing_extensions/test_data/python-3.4.6/collections/__init__.py delete mode 100644 typing_extensions/test_data/python-3.4.6/collections/__main__.py delete mode 100644 typing_extensions/test_data/python-3.4.6/collections/abc.py delete mode 100644 typing_extensions/test_data/python-3.5.0/_collections_abc.py delete mode 100644 typing_extensions/test_data/python-3.5.0/abc.py 
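For context on the sys.version_info hunks in the patch above: sys.version_info is a five-field sequence (major, minor, micro, releaselevel, serial), so comparing it whole against a 3-tuple misbehaves. Equality can never hold, and a longer tuple with an equal prefix compares greater than the shorter one, which silently pushed 3.5.1 outside the intended range. Slicing to [:3] restores the intended semantics:

    # Values as they appear on CPython 3.5.1:
    version_info = (3, 5, 1, 'final', 0)

    assert version_info != (3, 5, 2)                     # true on *every* version
    assert not ((3, 5, 0) <= version_info <= (3, 5, 1))  # 3.5.1 itself is excluded!
    assert (3, 5, 0) <= version_info[:3] <= (3, 5, 1)    # the fixed check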
delete mode 100644 typing_extensions/test_data/python-3.5.0/collections/__init__.py delete mode 100644 typing_extensions/test_data/python-3.5.0/collections/__main__.py delete mode 100644 typing_extensions/test_data/python-3.5.0/collections/abc.py delete mode 100644 typing_extensions/test_data/python-3.5.0/typing.py delete mode 100644 typing_extensions/test_data/python-3.5.1/_collections_abc.py delete mode 100644 typing_extensions/test_data/python-3.5.1/abc.py delete mode 100644 typing_extensions/test_data/python-3.5.1/collections/__init__.py delete mode 100644 typing_extensions/test_data/python-3.5.1/collections/__main__.py delete mode 100644 typing_extensions/test_data/python-3.5.1/collections/abc.py delete mode 100644 typing_extensions/test_data/python-3.5.1/typing.py delete mode 100644 typing_extensions/test_data/python-3.5.2/_collections_abc.py delete mode 100644 typing_extensions/test_data/python-3.5.2/abc.py delete mode 100644 typing_extensions/test_data/python-3.5.2/collections/__init__.py delete mode 100644 typing_extensions/test_data/python-3.5.2/collections/__main__.py delete mode 100644 typing_extensions/test_data/python-3.5.2/collections/abc.py delete mode 100644 typing_extensions/test_data/python-3.5.2/typing.py delete mode 100644 typing_extensions/test_data/python-3.5.3/_collections_abc.py delete mode 100644 typing_extensions/test_data/python-3.5.3/abc.py delete mode 100644 typing_extensions/test_data/python-3.5.3/collections/__init__.py delete mode 100644 typing_extensions/test_data/python-3.5.3/collections/__main__.py delete mode 100644 typing_extensions/test_data/python-3.5.3/collections/abc.py delete mode 100644 typing_extensions/test_data/python-3.5.3/typing.py delete mode 100644 typing_extensions/test_data/python-3.6.0/_collections_abc.py delete mode 100644 typing_extensions/test_data/python-3.6.0/abc.py delete mode 100644 typing_extensions/test_data/python-3.6.0/collections/__init__.py delete mode 100644 typing_extensions/test_data/python-3.6.0/collections/abc.py delete mode 100644 typing_extensions/test_data/python-3.6.0/typing.py delete mode 100644 typing_extensions/test_data/python-3.6.1/_collections_abc.py delete mode 100644 typing_extensions/test_data/python-3.6.1/abc.py delete mode 100644 typing_extensions/test_data/python-3.6.1/collections/__init__.py delete mode 100644 typing_extensions/test_data/python-3.6.1/collections/abc.py delete mode 100644 typing_extensions/test_data/python-3.6.1/typing.py diff --git a/.flake8-tests b/.flake8-tests index 3afdb192..fedc1cbb 100644 --- a/.flake8-tests +++ b/.flake8-tests @@ -15,8 +15,6 @@ ignore = E701, E704, F811, - # Included to allow sys.version_info hack - E402, # irrelevant plugins B3, DW12 diff --git a/typing_extensions/README.md b/typing_extensions/README.md index 94478a37..0360fe1a 100644 --- a/typing_extensions/README.md +++ b/typing_extensions/README.md @@ -59,33 +59,8 @@ issues when mixing the differing implementations of modified classes. ## Running tests -### Python 2 tests +To run tests, navigate into the appropriate source directory and run +`test_typing_extensions.py`. You will also need to install the latest +version of `typing` if you are using a version of Python that does not +include `typing` as a part of the standard library. -To test Python 2, you should: - -1. Install the latest version of typing -2. Navigate into `src_py2` and run `test_typing_extensions.py` - -### Python 3 tests - -Similarly, to test Python 3, you can: - -1. Install the latest version of typing (for Python 3.4 or under) -2. 
Navigate into `src_py3` and run `test_typing_extensions.py` - -However, you would need to repeat this test for each version of Python 3 -that has been released since Python 3.5.0 for full coverage. - -Since installing 7+ different versions of Python 3 is an onerous requirement, -this module provides a second way of running tests that requires you to only -have the latest version of Python 3 installed on your system: - -- Run `py -3 run_tests.py` (for Windows) -- Run `python3 run_tests.py` (for Linux) - -The `run_tests.py` file will essentially "modify" the standard library by -changing `PYTHONPATH` to point to individual folders in the `test_data` repo. - -Each individual folder contains a snapshot of the source code for the -`collections`, `typing,` and `abc` modules for that given release, letting us -test `typing` against those particular implementations. diff --git a/typing_extensions/run_tests.py b/typing_extensions/run_tests.py deleted file mode 100644 index a22cb727..00000000 --- a/typing_extensions/run_tests.py +++ /dev/null @@ -1,100 +0,0 @@ -#!/usr/bin/env python - -from typing import List, Iterator, Tuple -from contextlib import contextmanager -import glob -import os -import os.path -import shutil -import subprocess -import sys -import textwrap - -CORE_FILES = [ - "./src_py3/typing_extensions.py", - "./src_py3/test_typing_extensions.py" -] -TEST_DIR = "test_data" - - -def get_test_dirs() -> List[str]: - """Get all folders to test inside TEST_DIR.""" - return list(glob.glob(os.path.join(TEST_DIR, "*"))) - - -@contextmanager -def temp_copy(src_files: List[str], dest_dir: str) -> Iterator[None]: - """ - A context manager that temporarily copies the given files to the - given destination directory, and deletes those temp files upon - exiting. - """ - # Copy - for src_path in src_files: - shutil.copy(src_path, dest_dir) - - yield - - # Delete - for src_path in src_files: - dst_path = os.path.join(dest_dir, os.path.basename(src_path)) - os.remove(dst_path) - - -@contextmanager -def change_directory(dir_path: str) -> Iterator[None]: - """ - A context manager that temporarily changes the working directory - to the specified directory, and changes back to the original - upon exiting. 
- """ - original = os.getcwd() - os.chdir(dir_path) - - yield - - os.chdir(original) - - -def run_shell(command: str) -> Tuple[bool, str]: - env = os.environ.copy() - env["PYTHONPATH"] = ":".join([os.getcwd(), env["PYTHONPATH"], env["PATH"]]) - out = subprocess.run( - command, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - shell=True, - env=env) - success = out.returncode == 0 - stdout = '' if out.stdout is None else out.stdout.decode('utf-8') - return (success, stdout) - - -def main() -> int: - test_dirs = get_test_dirs() - exit_code = 0 - for test_dir in test_dirs: - _, version_number = test_dir.split('-') - test_version = tuple(map(int, version_number.split('.'))) - - if sys.version_info[0] != test_version[0]: - print("Skipping Python {}".format(version_number)) - continue - else: - print("Testing Python {}".format(version_number)) - - with temp_copy(CORE_FILES, test_dir), change_directory(test_dir): - success, output = run_shell("{} {} {}".format( - sys.executable, - "test_typing_extensions.py", - "PYVERSION.{}".format(version_number))) - if success: - print(" All tests passed!") - else: - print(textwrap.indent(output, " ")) - exit_code = 1 - return exit_code - - -if __name__ == '__main__': - sys.exit(main()) diff --git a/typing_extensions/src_py3/test_typing_extensions.py b/typing_extensions/src_py3/test_typing_extensions.py index e300409e..f0588fe9 100644 --- a/typing_extensions/src_py3/test_typing_extensions.py +++ b/typing_extensions/src_py3/test_typing_extensions.py @@ -1,15 +1,4 @@ import sys - -# Override version info -#ORIGINAL_VERSION = sys.version_info -#if len(sys.argv) >= 2 and sys.argv[-1].startswith("PYVERSION"): -# PYTHON_VERSION = tuple(map(int, sys.argv[-1].split('.')[1:])) -# sys.version_info = PYTHON_VERSION -# OVERRIDING_VERSION = True -#else: -# PYTHON_VERSION = ORIGINAL_VERSION -# OVERRIDING_VERSION = False - import os import abc import contextlib diff --git a/typing_extensions/test_data/python-3.4.6/_collections_abc.py b/typing_extensions/test_data/python-3.4.6/_collections_abc.py deleted file mode 100644 index 33b59aba..00000000 --- a/typing_extensions/test_data/python-3.4.6/_collections_abc.py +++ /dev/null @@ -1,748 +0,0 @@ -# Copyright 2007 Google, Inc. All Rights Reserved. -# Licensed to PSF under a Contributor Agreement. - -"""Abstract Base Classes (ABCs) for collections, according to PEP 3119. - -Unit tests are in test_collections. -""" - -from abc import ABCMeta, abstractmethod -import sys - -__all__ = ["Hashable", "Iterable", "Iterator", - "Sized", "Container", "Callable", - "Set", "MutableSet", - "Mapping", "MutableMapping", - "MappingView", "KeysView", "ItemsView", "ValuesView", - "Sequence", "MutableSequence", - "ByteString", - ] - -# This module has been renamed from collections.abc to _collections_abc to -# speed up interpreter startup. Some of the types such as MutableMapping are -# required early but collections module imports a lot of other modules. -# See issue #19218 -__name__ = "collections.abc" - -# Private list of types that we want to register with the various ABCs -# so that they will pass tests like: -# it = iter(somebytearray) -# assert isinstance(it, Iterable) -# Note: in other implementations, these types many not be distinct -# and they make have their own implementation specific types that -# are not included on this list. -bytes_iterator = type(iter(b'')) -bytearray_iterator = type(iter(bytearray())) -#callable_iterator = ??? 
-dict_keyiterator = type(iter({}.keys())) -dict_valueiterator = type(iter({}.values())) -dict_itemiterator = type(iter({}.items())) -list_iterator = type(iter([])) -list_reverseiterator = type(iter(reversed([]))) -range_iterator = type(iter(range(0))) -set_iterator = type(iter(set())) -str_iterator = type(iter("")) -tuple_iterator = type(iter(())) -zip_iterator = type(iter(zip())) -## views ## -dict_keys = type({}.keys()) -dict_values = type({}.values()) -dict_items = type({}.items()) -## misc ## -mappingproxy = type(type.__dict__) - - -### ONE-TRICK PONIES ### - -class Hashable(metaclass=ABCMeta): - - __slots__ = () - - @abstractmethod - def __hash__(self): - return 0 - - @classmethod - def __subclasshook__(cls, C): - if cls is Hashable: - for B in C.__mro__: - if "__hash__" in B.__dict__: - if B.__dict__["__hash__"]: - return True - break - return NotImplemented - - -class Iterable(metaclass=ABCMeta): - - __slots__ = () - - @abstractmethod - def __iter__(self): - while False: - yield None - - @classmethod - def __subclasshook__(cls, C): - if cls is Iterable: - if any("__iter__" in B.__dict__ for B in C.__mro__): - return True - return NotImplemented - - -class Iterator(Iterable): - - __slots__ = () - - @abstractmethod - def __next__(self): - 'Return the next item from the iterator. When exhausted, raise StopIteration' - raise StopIteration - - def __iter__(self): - return self - - @classmethod - def __subclasshook__(cls, C): - if cls is Iterator: - if (any("__next__" in B.__dict__ for B in C.__mro__) and - any("__iter__" in B.__dict__ for B in C.__mro__)): - return True - return NotImplemented - -Iterator.register(bytes_iterator) -Iterator.register(bytearray_iterator) -#Iterator.register(callable_iterator) -Iterator.register(dict_keyiterator) -Iterator.register(dict_valueiterator) -Iterator.register(dict_itemiterator) -Iterator.register(list_iterator) -Iterator.register(list_reverseiterator) -Iterator.register(range_iterator) -Iterator.register(set_iterator) -Iterator.register(str_iterator) -Iterator.register(tuple_iterator) -Iterator.register(zip_iterator) - -class Sized(metaclass=ABCMeta): - - __slots__ = () - - @abstractmethod - def __len__(self): - return 0 - - @classmethod - def __subclasshook__(cls, C): - if cls is Sized: - if any("__len__" in B.__dict__ for B in C.__mro__): - return True - return NotImplemented - - -class Container(metaclass=ABCMeta): - - __slots__ = () - - @abstractmethod - def __contains__(self, x): - return False - - @classmethod - def __subclasshook__(cls, C): - if cls is Container: - if any("__contains__" in B.__dict__ for B in C.__mro__): - return True - return NotImplemented - - -class Callable(metaclass=ABCMeta): - - __slots__ = () - - @abstractmethod - def __call__(self, *args, **kwds): - return False - - @classmethod - def __subclasshook__(cls, C): - if cls is Callable: - if any("__call__" in B.__dict__ for B in C.__mro__): - return True - return NotImplemented - - -### SETS ### - - -class Set(Sized, Iterable, Container): - - """A set is a finite, iterable container. - - This class provides concrete generic implementations of all - methods except for __contains__, __iter__ and __len__. - - To override the comparisons (presumably for speed, as the - semantics are fixed), redefine __le__ and __ge__, - then the other operations will automatically follow suit. 
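The __subclasshook__ definitions above are what let these ABCs recognize structural ("duck-typed") subclasses: any class whose MRO supplies the right special method passes isinstance/issubclass with no inheritance and no register() call. For illustration:

    from collections.abc import Iterable, Sized

    class Bag:
        # No base classes and no registration -- just the methods.
        def __len__(self):
            return 0
        def __iter__(self):
            return iter(())

    assert issubclass(Bag, Sized) and issubclass(Bag, Iterable)
    assert isinstance(Bag(), Sized)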
- """ - - __slots__ = () - - def __le__(self, other): - if not isinstance(other, Set): - return NotImplemented - if len(self) > len(other): - return False - for elem in self: - if elem not in other: - return False - return True - - def __lt__(self, other): - if not isinstance(other, Set): - return NotImplemented - return len(self) < len(other) and self.__le__(other) - - def __gt__(self, other): - if not isinstance(other, Set): - return NotImplemented - return len(self) > len(other) and self.__ge__(other) - - def __ge__(self, other): - if not isinstance(other, Set): - return NotImplemented - if len(self) < len(other): - return False - for elem in other: - if elem not in self: - return False - return True - - def __eq__(self, other): - if not isinstance(other, Set): - return NotImplemented - return len(self) == len(other) and self.__le__(other) - - @classmethod - def _from_iterable(cls, it): - '''Construct an instance of the class from any iterable input. - - Must override this method if the class constructor signature - does not accept an iterable for an input. - ''' - return cls(it) - - def __and__(self, other): - if not isinstance(other, Iterable): - return NotImplemented - return self._from_iterable(value for value in other if value in self) - - __rand__ = __and__ - - def isdisjoint(self, other): - 'Return True if two sets have a null intersection.' - for value in other: - if value in self: - return False - return True - - def __or__(self, other): - if not isinstance(other, Iterable): - return NotImplemented - chain = (e for s in (self, other) for e in s) - return self._from_iterable(chain) - - __ror__ = __or__ - - def __sub__(self, other): - if not isinstance(other, Set): - if not isinstance(other, Iterable): - return NotImplemented - other = self._from_iterable(other) - return self._from_iterable(value for value in self - if value not in other) - - def __rsub__(self, other): - if not isinstance(other, Set): - if not isinstance(other, Iterable): - return NotImplemented - other = self._from_iterable(other) - return self._from_iterable(value for value in other - if value not in self) - - def __xor__(self, other): - if not isinstance(other, Set): - if not isinstance(other, Iterable): - return NotImplemented - other = self._from_iterable(other) - return (self - other) | (other - self) - - __rxor__ = __xor__ - - def _hash(self): - """Compute the hash value of a set. - - Note that we don't define __hash__: not all sets are hashable. - But if you define a hashable set type, its __hash__ should - call this function. - - This must be compatible __eq__. - - All sets ought to compare equal if they contain the same - elements, regardless of how they are implemented, and - regardless of the order of the elements; so there's not much - freedom for __eq__ or __hash__. We match the algorithm used - by the built-in frozenset type. - """ - MAX = sys.maxsize - MASK = 2 * MAX + 1 - n = len(self) - h = 1927868237 * (n + 1) - h &= MASK - for x in self: - hx = hash(x) - h ^= (hx ^ (hx << 16) ^ 89869747) * 3644798167 - h &= MASK - h = h * 69069 + 907133923 - h &= MASK - if h > MAX: - h -= MASK + 1 - if h == -1: - h = 590923713 - return h - -Set.register(frozenset) - - -class MutableSet(Set): - """A mutable set is a finite, iterable container. - - This class provides concrete generic implementations of all - methods except for __contains__, __iter__, __len__, - add(), and discard(). 
- - To override the comparisons (presumably for speed, as the - semantics are fixed), all you have to do is redefine __le__ and - then the other operations will automatically follow suit. - """ - - __slots__ = () - - @abstractmethod - def add(self, value): - """Add an element.""" - raise NotImplementedError - - @abstractmethod - def discard(self, value): - """Remove an element. Do not raise an exception if absent.""" - raise NotImplementedError - - def remove(self, value): - """Remove an element. If not a member, raise a KeyError.""" - if value not in self: - raise KeyError(value) - self.discard(value) - - def pop(self): - """Return the popped value. Raise KeyError if empty.""" - it = iter(self) - try: - value = next(it) - except StopIteration: - raise KeyError - self.discard(value) - return value - - def clear(self): - """This is slow (creates N new iterators!) but effective.""" - try: - while True: - self.pop() - except KeyError: - pass - - def __ior__(self, it): - for value in it: - self.add(value) - return self - - def __iand__(self, it): - for value in (self - it): - self.discard(value) - return self - - def __ixor__(self, it): - if it is self: - self.clear() - else: - if not isinstance(it, Set): - it = self._from_iterable(it) - for value in it: - if value in self: - self.discard(value) - else: - self.add(value) - return self - - def __isub__(self, it): - if it is self: - self.clear() - else: - for value in it: - self.discard(value) - return self - -MutableSet.register(set) - - -### MAPPINGS ### - - -class Mapping(Sized, Iterable, Container): - - __slots__ = () - - """A Mapping is a generic container for associating key/value - pairs. - - This class provides concrete generic implementations of all - methods except for __getitem__, __iter__, and __len__. - - """ - - @abstractmethod - def __getitem__(self, key): - raise KeyError - - def get(self, key, default=None): - 'D.get(k[,d]) -> D[k] if k in D, else d. d defaults to None.' 
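MutableSet, defined above, needs only five primitives from a subclass -- __contains__, __iter__, __len__, add() and discard() -- and derives remove(), pop(), clear() and the in-place operators from them. A toy concrete subclass (ListBackedSet is an invented name):

    from collections.abc import MutableSet

    class ListBackedSet(MutableSet):
        """A set stored in a list; only the five primitives are written."""
        def __init__(self, iterable=()):
            self._items = []
            for x in iterable:
                self.add(x)
        def __contains__(self, x):
            return x in self._items
        def __iter__(self):
            return iter(self._items)
        def __len__(self):
            return len(self._items)
        def add(self, x):
            if x not in self._items:
                self._items.append(x)
        def discard(self, x):
            if x in self._items:
                self._items.remove(x)

    s = ListBackedSet('ab')
    s |= ListBackedSet('bc')        # __ior__ comes from the mixin
    assert sorted(s) == ['a', 'b', 'c']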
- try: - return self[key] - except KeyError: - return default - - def __contains__(self, key): - try: - self[key] - except KeyError: - return False - else: - return True - - def keys(self): - "D.keys() -> a set-like object providing a view on D's keys" - return KeysView(self) - - def items(self): - "D.items() -> a set-like object providing a view on D's items" - return ItemsView(self) - - def values(self): - "D.values() -> an object providing a view on D's values" - return ValuesView(self) - - def __eq__(self, other): - if not isinstance(other, Mapping): - return NotImplemented - return dict(self.items()) == dict(other.items()) - -Mapping.register(mappingproxy) - - -class MappingView(Sized): - - def __init__(self, mapping): - self._mapping = mapping - - def __len__(self): - return len(self._mapping) - - def __repr__(self): - return '{0.__class__.__name__}({0._mapping!r})'.format(self) - - -class KeysView(MappingView, Set): - - @classmethod - def _from_iterable(self, it): - return set(it) - - def __contains__(self, key): - return key in self._mapping - - def __iter__(self): - yield from self._mapping - -KeysView.register(dict_keys) - - -class ItemsView(MappingView, Set): - - @classmethod - def _from_iterable(self, it): - return set(it) - - def __contains__(self, item): - key, value = item - try: - v = self._mapping[key] - except KeyError: - return False - else: - return v == value - - def __iter__(self): - for key in self._mapping: - yield (key, self._mapping[key]) - -ItemsView.register(dict_items) - - -class ValuesView(MappingView): - - def __contains__(self, value): - for key in self._mapping: - if value == self._mapping[key]: - return True - return False - - def __iter__(self): - for key in self._mapping: - yield self._mapping[key] - -ValuesView.register(dict_values) - - -class MutableMapping(Mapping): - - __slots__ = () - - """A MutableMapping is a generic container for associating - key/value pairs. - - This class provides concrete generic implementations of all - methods except for __getitem__, __setitem__, __delitem__, - __iter__, and __len__. - - """ - - @abstractmethod - def __setitem__(self, key, value): - raise KeyError - - @abstractmethod - def __delitem__(self, key): - raise KeyError - - __marker = object() - - def pop(self, key, default=__marker): - '''D.pop(k[,d]) -> v, remove specified key and return the corresponding value. - If key is not found, d is returned if given, otherwise KeyError is raised. - ''' - try: - value = self[key] - except KeyError: - if default is self.__marker: - raise - return default - else: - del self[key] - return value - - def popitem(self): - '''D.popitem() -> (k, v), remove and return some (key, value) pair - as a 2-tuple; but raise KeyError if D is empty. - ''' - try: - key = next(iter(self)) - except StopIteration: - raise KeyError - value = self[key] - del self[key] - return key, value - - def clear(self): - 'D.clear() -> None. Remove all items from D.' - try: - while True: - self.popitem() - except KeyError: - pass - - def update(*args, **kwds): - ''' D.update([E, ]**F) -> None. Update D from mapping/iterable E and F. 
- If E present and has a .keys() method, does: for k in E: D[k] = E[k] - If E present and lacks .keys() method, does: for (k, v) in E: D[k] = v - In either case, this is followed by: for k, v in F.items(): D[k] = v - ''' - if not args: - raise TypeError("descriptor 'update' of 'MutableMapping' object " - "needs an argument") - self, *args = args - if len(args) > 1: - raise TypeError('update expected at most 1 arguments, got %d' % - len(args)) - if args: - other = args[0] - if isinstance(other, Mapping): - for key in other: - self[key] = other[key] - elif hasattr(other, "keys"): - for key in other.keys(): - self[key] = other[key] - else: - for key, value in other: - self[key] = value - for key, value in kwds.items(): - self[key] = value - - def setdefault(self, key, default=None): - 'D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D' - try: - return self[key] - except KeyError: - self[key] = default - return default - -MutableMapping.register(dict) - - -### SEQUENCES ### - - -class Sequence(Sized, Iterable, Container): - - """All the operations on a read-only sequence. - - Concrete subclasses must override __new__ or __init__, - __getitem__, and __len__. - """ - - __slots__ = () - - @abstractmethod - def __getitem__(self, index): - raise IndexError - - def __iter__(self): - i = 0 - try: - while True: - v = self[i] - yield v - i += 1 - except IndexError: - return - - def __contains__(self, value): - for v in self: - if v == value: - return True - return False - - def __reversed__(self): - for i in reversed(range(len(self))): - yield self[i] - - def index(self, value): - '''S.index(value) -> integer -- return first index of value. - Raises ValueError if the value is not present. - ''' - for i, v in enumerate(self): - if v == value: - return i - raise ValueError - - def count(self, value): - 'S.count(value) -> integer -- return number of occurrences of value' - return sum(1 for v in self if v == value) - -Sequence.register(tuple) -Sequence.register(str) -Sequence.register(range) -Sequence.register(memoryview) - - -class ByteString(Sequence): - - """This unifies bytes and bytearray. - - XXX Should add all their methods. - """ - - __slots__ = () - -ByteString.register(bytes) -ByteString.register(bytearray) - - -class MutableSequence(Sequence): - - __slots__ = () - - """All the operations on a read-write sequence. - - Concrete subclasses must provide __new__ or __init__, - __getitem__, __setitem__, __delitem__, __len__, and insert(). - - """ - - @abstractmethod - def __setitem__(self, index, value): - raise IndexError - - @abstractmethod - def __delitem__(self, index): - raise IndexError - - @abstractmethod - def insert(self, index, value): - 'S.insert(index, value) -- insert value before index' - raise IndexError - - def append(self, value): - 'S.append(value) -- append value to the end of the sequence' - self.insert(len(self), value) - - def clear(self): - 'S.clear() -> None -- remove all items from S' - try: - while True: - self.pop() - except IndexError: - pass - - def reverse(self): - 'S.reverse() -- reverse *IN PLACE*' - n = len(self) - for i in range(n//2): - self[i], self[n-i-1] = self[n-i-1], self[i] - - def extend(self, values): - 'S.extend(iterable) -- extend sequence by appending elements from the iterable' - for v in values: - self.append(v) - - def pop(self, index=-1): - '''S.pop([index]) -> item -- remove and return item at index (default last). - Raise IndexError if list is empty or index is out of range. 
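The same mixin pattern applies to MutableMapping above: given the five abstract methods, update() (with its mapping / .keys() / pair-iterable / keyword cascade), pop(), popitem(), clear() and setdefault() all come for free. A compact sketch (PairStore is an invented name):

    from collections.abc import MutableMapping

    class PairStore(MutableMapping):
        """Toy mapping over a list of (key, value) pairs."""
        def __init__(self):
            self._pairs = []
        def __getitem__(self, key):
            for k, v in self._pairs:
                if k == key:
                    return v
            raise KeyError(key)
        def __setitem__(self, key, value):
            if key in self:              # __contains__ is free from the mixin
                del self[key]
            self._pairs.append((key, value))
        def __delitem__(self, key):
            for i, (k, _) in enumerate(self._pairs):
                if k == key:
                    del self._pairs[i]
                    return
            raise KeyError(key)
        def __iter__(self):
            return (k for k, _ in self._pairs)
        def __len__(self):
            return len(self._pairs)

    d = PairStore()
    d.update({'a': 1}, b=2)              # the cascade handles both arguments
    assert d.setdefault('c', 3) == 3 and dict(d) == {'a': 1, 'b': 2, 'c': 3}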
- ''' - v = self[index] - del self[index] - return v - - def remove(self, value): - '''S.remove(value) -- remove first occurrence of value. - Raise ValueError if the value is not present. - ''' - del self[self.index(value)] - - def __iadd__(self, values): - self.extend(values) - return self - -MutableSequence.register(list) -MutableSequence.register(bytearray) # Multiply inheriting, see ByteString diff --git a/typing_extensions/test_data/python-3.4.6/abc.py b/typing_extensions/test_data/python-3.4.6/abc.py deleted file mode 100644 index 0358a469..00000000 --- a/typing_extensions/test_data/python-3.4.6/abc.py +++ /dev/null @@ -1,248 +0,0 @@ -# Copyright 2007 Google, Inc. All Rights Reserved. -# Licensed to PSF under a Contributor Agreement. - -"""Abstract Base Classes (ABCs) according to PEP 3119.""" - -from _weakrefset import WeakSet - - -def abstractmethod(funcobj): - """A decorator indicating abstract methods. - - Requires that the metaclass is ABCMeta or derived from it. A - class that has a metaclass derived from ABCMeta cannot be - instantiated unless all of its abstract methods are overridden. - The abstract methods can be called using any of the normal - 'super' call mechanisms. - - Usage: - - class C(metaclass=ABCMeta): - @abstractmethod - def my_abstract_method(self, ...): - ... - """ - funcobj.__isabstractmethod__ = True - return funcobj - - -class abstractclassmethod(classmethod): - """ - A decorator indicating abstract classmethods. - - Similar to abstractmethod. - - Usage: - - class C(metaclass=ABCMeta): - @abstractclassmethod - def my_abstract_classmethod(cls, ...): - ... - - 'abstractclassmethod' is deprecated. Use 'classmethod' with - 'abstractmethod' instead. - """ - - __isabstractmethod__ = True - - def __init__(self, callable): - callable.__isabstractmethod__ = True - super().__init__(callable) - - -class abstractstaticmethod(staticmethod): - """ - A decorator indicating abstract staticmethods. - - Similar to abstractmethod. - - Usage: - - class C(metaclass=ABCMeta): - @abstractstaticmethod - def my_abstract_staticmethod(...): - ... - - 'abstractstaticmethod' is deprecated. Use 'staticmethod' with - 'abstractmethod' instead. - """ - - __isabstractmethod__ = True - - def __init__(self, callable): - callable.__isabstractmethod__ = True - super().__init__(callable) - - -class abstractproperty(property): - """ - A decorator indicating abstract properties. - - Requires that the metaclass is ABCMeta or derived from it. A - class that has a metaclass derived from ABCMeta cannot be - instantiated unless all of its abstract properties are overridden. - The abstract properties can be called using any of the normal - 'super' call mechanisms. - - Usage: - - class C(metaclass=ABCMeta): - @abstractproperty - def my_abstract_property(self): - ... - - This defines a read-only property; you can also define a read-write - abstract property using the 'long' form of property declaration: - - class C(metaclass=ABCMeta): - def getx(self): ... - def setx(self, value): ... - x = abstractproperty(getx, setx) - - 'abstractproperty' is deprecated. Use 'property' with 'abstractmethod' - instead. - """ - - __isabstractmethod__ = True - - -class ABCMeta(type): - - """Metaclass for defining Abstract Base Classes (ABCs). - - Use this metaclass to create an ABC. An ABC can be subclassed - directly, and then acts as a mix-in class. 
You can also register - unrelated concrete classes (even built-in classes) and unrelated - ABCs as 'virtual subclasses' -- these and their descendants will - be considered subclasses of the registering ABC by the built-in - issubclass() function, but the registering ABC won't show up in - their MRO (Method Resolution Order) nor will method - implementations defined by the registering ABC be callable (not - even via super()). - - """ - - # A global counter that is incremented each time a class is - # registered as a virtual subclass of anything. It forces the - # negative cache to be cleared before its next use. - # Note: this counter is private. Use `abc.get_cache_token()` for - # external code. - _abc_invalidation_counter = 0 - - def __new__(mcls, name, bases, namespace): - cls = super().__new__(mcls, name, bases, namespace) - # Compute set of abstract method names - abstracts = {name - for name, value in namespace.items() - if getattr(value, "__isabstractmethod__", False)} - for base in bases: - for name in getattr(base, "__abstractmethods__", set()): - value = getattr(cls, name, None) - if getattr(value, "__isabstractmethod__", False): - abstracts.add(name) - cls.__abstractmethods__ = frozenset(abstracts) - # Set up inheritance registry - cls._abc_registry = WeakSet() - cls._abc_cache = WeakSet() - cls._abc_negative_cache = WeakSet() - cls._abc_negative_cache_version = ABCMeta._abc_invalidation_counter - return cls - - def register(cls, subclass): - """Register a virtual subclass of an ABC. - - Returns the subclass, to allow usage as a class decorator. - """ - if not isinstance(subclass, type): - raise TypeError("Can only register classes") - if issubclass(subclass, cls): - return subclass # Already a subclass - # Subtle: test for cycles *after* testing for "already a subclass"; - # this means we allow X.register(X) and interpret it as a no-op. - if issubclass(cls, subclass): - # This would create a cycle, which is bad for the algorithm below - raise RuntimeError("Refusing to create an inheritance cycle") - cls._abc_registry.add(subclass) - ABCMeta._abc_invalidation_counter += 1 # Invalidate negative cache - return subclass - - def _dump_registry(cls, file=None): - """Debug helper to print the ABC registry.""" - print("Class: %s.%s" % (cls.__module__, cls.__name__), file=file) - print("Inv.counter: %s" % ABCMeta._abc_invalidation_counter, file=file) - for name in sorted(cls.__dict__.keys()): - if name.startswith("_abc_"): - value = getattr(cls, name) - print("%s: %r" % (name, value), file=file) - - def __instancecheck__(cls, instance): - """Override for isinstance(instance, cls).""" - # Inline the cache checking - subclass = instance.__class__ - if subclass in cls._abc_cache: - return True - subtype = type(instance) - if subtype is subclass: - if (cls._abc_negative_cache_version == - ABCMeta._abc_invalidation_counter and - subclass in cls._abc_negative_cache): - return False - # Fall back to the subclass check. 
- return cls.__subclasscheck__(subclass) - return any(cls.__subclasscheck__(c) for c in {subclass, subtype}) - - def __subclasscheck__(cls, subclass): - """Override for issubclass(subclass, cls).""" - # Check cache - if subclass in cls._abc_cache: - return True - # Check negative cache; may have to invalidate - if cls._abc_negative_cache_version < ABCMeta._abc_invalidation_counter: - # Invalidate the negative cache - cls._abc_negative_cache = WeakSet() - cls._abc_negative_cache_version = ABCMeta._abc_invalidation_counter - elif subclass in cls._abc_negative_cache: - return False - # Check the subclass hook - ok = cls.__subclasshook__(subclass) - if ok is not NotImplemented: - assert isinstance(ok, bool) - if ok: - cls._abc_cache.add(subclass) - else: - cls._abc_negative_cache.add(subclass) - return ok - # Check if it's a direct subclass - if cls in getattr(subclass, '__mro__', ()): - cls._abc_cache.add(subclass) - return True - # Check if it's a subclass of a registered class (recursive) - for rcls in cls._abc_registry: - if issubclass(subclass, rcls): - cls._abc_cache.add(subclass) - return True - # Check if it's a subclass of a subclass (recursive) - for scls in cls.__subclasses__(): - if issubclass(subclass, scls): - cls._abc_cache.add(subclass) - return True - # No dice; update negative cache - cls._abc_negative_cache.add(subclass) - return False - - -class ABC(metaclass=ABCMeta): - """Helper class that provides a standard way to create an ABC using - inheritance. - """ - pass - - -def get_cache_token(): - """Returns the current ABC cache token. - - The token is an opaque object (supporting equality testing) identifying the - current version of the ABC cache for virtual subclasses. The token changes - with every call to ``register()`` on any ABC. - """ - return ABCMeta._abc_invalidation_counter diff --git a/typing_extensions/test_data/python-3.4.6/collections/__init__.py b/typing_extensions/test_data/python-3.4.6/collections/__init__.py deleted file mode 100644 index f94210ef..00000000 --- a/typing_extensions/test_data/python-3.4.6/collections/__init__.py +++ /dev/null @@ -1,1161 +0,0 @@ -__all__ = ['deque', 'defaultdict', 'namedtuple', 'UserDict', 'UserList', - 'UserString', 'Counter', 'OrderedDict', 'ChainMap'] - -# For backwards compatibility, continue to make the collections ABCs -# available through the collections module. -from _collections_abc import * -import _collections_abc -__all__ += _collections_abc.__all__ - -from _collections import deque, defaultdict -from operator import itemgetter as _itemgetter, eq as _eq -from keyword import iskeyword as _iskeyword -import sys as _sys -import heapq as _heapq -from _weakref import proxy as _proxy -from itertools import repeat as _repeat, chain as _chain, starmap as _starmap -from reprlib import recursive_repr as _recursive_repr - -################################################################################ -### OrderedDict -################################################################################ - -class _Link(object): - __slots__ = 'prev', 'next', 'key', '__weakref__' - -class OrderedDict(dict): - 'Dictionary that remembers insertion order' - # An inherited dict maps keys to values. - # The inherited dict provides __getitem__, __len__, __contains__, and get. - # The remaining methods are order-aware. - # Big-O running times for all methods are the same as regular dictionaries. - - # The internal self.__map dict maps keys to links in a doubly linked list. 
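The registry and the two caches above are the machinery behind virtual subclasses: register() records the class (and returns it, so it doubles as a decorator), __subclasscheck__ consults the registry on a cache miss, and the invalidation counter keeps the negative cache honest. In practice:

    from abc import ABCMeta

    class Serializer(metaclass=ABCMeta):
        pass

    @Serializer.register        # register() returns the class, so it works as a decorator
    class JsonThing:
        pass

    assert issubclass(JsonThing, Serializer)      # answered via _abc_registry
    assert Serializer not in JsonThing.__mro__    # not real inheritance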
- # The circular doubly linked list starts and ends with a sentinel element. - # The sentinel element never gets deleted (this simplifies the algorithm). - # The sentinel is in self.__hardroot with a weakref proxy in self.__root. - # The prev links are weakref proxies (to prevent circular references). - # Individual links are kept alive by the hard reference in self.__map. - # Those hard references disappear when a key is deleted from an OrderedDict. - - def __init__(*args, **kwds): - '''Initialize an ordered dictionary. The signature is the same as - regular dictionaries, but keyword arguments are not recommended because - their insertion order is arbitrary. - - ''' - if not args: - raise TypeError("descriptor '__init__' of 'OrderedDict' object " - "needs an argument") - self, *args = args - if len(args) > 1: - raise TypeError('expected at most 1 arguments, got %d' % len(args)) - try: - self.__root - except AttributeError: - self.__hardroot = _Link() - self.__root = root = _proxy(self.__hardroot) - root.prev = root.next = root - self.__map = {} - self.__update(*args, **kwds) - - def __setitem__(self, key, value, - dict_setitem=dict.__setitem__, proxy=_proxy, Link=_Link): - 'od.__setitem__(i, y) <==> od[i]=y' - # Setting a new item creates a new link at the end of the linked list, - # and the inherited dictionary is updated with the new key/value pair. - if key not in self: - self.__map[key] = link = Link() - root = self.__root - last = root.prev - link.prev, link.next, link.key = last, root, key - last.next = link - root.prev = proxy(link) - dict_setitem(self, key, value) - - def __delitem__(self, key, dict_delitem=dict.__delitem__): - 'od.__delitem__(y) <==> del od[y]' - # Deleting an existing item uses self.__map to find the link which gets - # removed by updating the links in the predecessor and successor nodes. - dict_delitem(self, key) - link = self.__map.pop(key) - link_prev = link.prev - link_next = link.next - link_prev.next = link_next - link_next.prev = link_prev - - def __iter__(self): - 'od.__iter__() <==> iter(od)' - # Traverse the linked list in order. - root = self.__root - curr = root.next - while curr is not root: - yield curr.key - curr = curr.next - - def __reversed__(self): - 'od.__reversed__() <==> reversed(od)' - # Traverse the linked list in reverse order. - root = self.__root - curr = root.prev - while curr is not root: - yield curr.key - curr = curr.prev - - def clear(self): - 'od.clear() -> None. Remove all items from od.' - root = self.__root - root.prev = root.next = root - self.__map.clear() - dict.clear(self) - - def popitem(self, last=True): - '''od.popitem() -> (k, v), return and remove a (key, value) pair. - Pairs are returned in LIFO order if last is true or FIFO order if false. - - ''' - if not self: - raise KeyError('dictionary is empty') - root = self.__root - if last: - link = root.prev - link_prev = link.prev - link_prev.next = root - root.prev = link_prev - else: - link = root.next - link_next = link.next - root.next = link_next - link_next.prev = root - key = link.key - del self.__map[key] - value = dict.pop(self, key) - return key, value - - def move_to_end(self, key, last=True): - '''Move an existing element to the end (or beginning if last==False). - - Raises KeyError if the element does not exist. - When last=True, acts like a fast version of self[key]=self.pop(key). 
- - ''' - link = self.__map[key] - link_prev = link.prev - link_next = link.next - link_prev.next = link_next - link_next.prev = link_prev - root = self.__root - if last: - last = root.prev - link.prev = last - link.next = root - last.next = root.prev = link - else: - first = root.next - link.prev = root - link.next = first - root.next = first.prev = link - - def __sizeof__(self): - sizeof = _sys.getsizeof - n = len(self) + 1 # number of links including root - size = sizeof(self.__dict__) # instance dictionary - size += sizeof(self.__map) * 2 # internal dict and inherited dict - size += sizeof(self.__hardroot) * n # link objects - size += sizeof(self.__root) * n # proxy objects - return size - - update = __update = MutableMapping.update - keys = MutableMapping.keys - values = MutableMapping.values - items = MutableMapping.items - __ne__ = MutableMapping.__ne__ - - __marker = object() - - def pop(self, key, default=__marker): - '''od.pop(k[,d]) -> v, remove specified key and return the corresponding - value. If key is not found, d is returned if given, otherwise KeyError - is raised. - - ''' - if key in self: - result = self[key] - del self[key] - return result - if default is self.__marker: - raise KeyError(key) - return default - - def setdefault(self, key, default=None): - 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' - if key in self: - return self[key] - self[key] = default - return default - - @_recursive_repr() - def __repr__(self): - 'od.__repr__() <==> repr(od)' - if not self: - return '%s()' % (self.__class__.__name__,) - return '%s(%r)' % (self.__class__.__name__, list(self.items())) - - def __reduce__(self): - 'Return state information for pickling' - inst_dict = vars(self).copy() - for k in vars(OrderedDict()): - inst_dict.pop(k, None) - return self.__class__, (), inst_dict or None, None, iter(self.items()) - - def copy(self): - 'od.copy() -> a shallow copy of od' - return self.__class__(self) - - @classmethod - def fromkeys(cls, iterable, value=None): - '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S. - If not specified, the value defaults to None. - - ''' - self = cls() - for key in iterable: - self[key] = value - return self - - def __eq__(self, other): - '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive - while comparison to a regular mapping is order-insensitive. 
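Because of the sentinel-rooted doubly linked list described above, move_to_end() and popitem(last=False) are O(1) relink operations, which is what makes OrderedDict a natural LRU building block. A minimal sketch (lru_get is an invented helper):

    from collections import OrderedDict

    cache = OrderedDict()

    def lru_get(key, compute, maxsize=128):
        """Tiny LRU built on move_to_end() and popitem(last=False)."""
        if key in cache:
            cache.move_to_end(key)          # O(1): relink within the linked list
            return cache[key]
        value = compute(key)
        cache[key] = value
        if len(cache) > maxsize:
            cache.popitem(last=False)       # evict the least-recently-used entry
        return value

    assert lru_get(2, lambda k: k * k) == 4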
- - ''' - if isinstance(other, OrderedDict): - return dict.__eq__(self, other) and all(map(_eq, self, other)) - return dict.__eq__(self, other) - - -################################################################################ -### namedtuple -################################################################################ - -_class_template = """\ -from builtins import property as _property, tuple as _tuple -from operator import itemgetter as _itemgetter -from collections import OrderedDict - -class {typename}(tuple): - '{typename}({arg_list})' - - __slots__ = () - - _fields = {field_names!r} - - def __new__(_cls, {arg_list}): - 'Create new instance of {typename}({arg_list})' - return _tuple.__new__(_cls, ({arg_list})) - - @classmethod - def _make(cls, iterable, new=tuple.__new__, len=len): - 'Make a new {typename} object from a sequence or iterable' - result = new(cls, iterable) - if len(result) != {num_fields:d}: - raise TypeError('Expected {num_fields:d} arguments, got %d' % len(result)) - return result - - def _replace(_self, **kwds): - 'Return a new {typename} object replacing specified fields with new values' - result = _self._make(map(kwds.pop, {field_names!r}, _self)) - if kwds: - raise ValueError('Got unexpected field names: %r' % list(kwds)) - return result - - def __repr__(self): - 'Return a nicely formatted representation string' - return self.__class__.__name__ + '({repr_fmt})' % self - - def _asdict(self): - 'Return a new OrderedDict which maps field names to their values.' - return OrderedDict(zip(self._fields, self)) - - def __getnewargs__(self): - 'Return self as a plain tuple. Used by copy and pickle.' - return tuple(self) - -{field_defs} -""" - -_repr_template = '{name}=%r' - -_field_template = '''\ - {name} = _property(_itemgetter({index:d}), doc='Alias for field number {index:d}') -''' - -def namedtuple(typename, field_names, verbose=False, rename=False): - """Returns a new subclass of tuple with named fields. - - >>> Point = namedtuple('Point', ['x', 'y']) - >>> Point.__doc__ # docstring for the new class - 'Point(x, y)' - >>> p = Point(11, y=22) # instantiate with positional args or keywords - >>> p[0] + p[1] # indexable like a plain tuple - 33 - >>> x, y = p # unpack like a regular tuple - >>> x, y - (11, 22) - >>> p.x + p.y # fields also accessable by name - 33 - >>> d = p._asdict() # convert to a dictionary - >>> d['x'] - 11 - >>> Point(**d) # convert from a dictionary - Point(x=11, y=22) - >>> p._replace(x=100) # _replace() is like str.replace() but targets named fields - Point(x=100, y=22) - - """ - - # Validate the field names. At the user's option, either generate an error - # message or automatically replace the field name with a valid name. 
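The rename option described above is easiest to see in action: with rename=True, keywords, duplicates, and otherwise invalid field names are silently replaced by positional _N names instead of raising:

    from collections import namedtuple

    # 'class' is a keyword and the second 'id' is a duplicate, so both
    # are replaced with their index-based fallback names.
    Row = namedtuple('Row', ['id', 'class', 'id'], rename=True)
    assert Row._fields == ('id', '_1', '_2')

    r = Row(1, 'A', 3)
    assert r.id == 1 and r._1 == 'A'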
- if isinstance(field_names, str): - field_names = field_names.replace(',', ' ').split() - field_names = list(map(str, field_names)) - typename = str(typename) - if rename: - seen = set() - for index, name in enumerate(field_names): - if (not name.isidentifier() - or _iskeyword(name) - or name.startswith('_') - or name in seen): - field_names[index] = '_%d' % index - seen.add(name) - for name in [typename] + field_names: - if type(name) != str: - raise TypeError('Type names and field names must be strings') - if not name.isidentifier(): - raise ValueError('Type names and field names must be valid ' - 'identifiers: %r' % name) - if _iskeyword(name): - raise ValueError('Type names and field names cannot be a ' - 'keyword: %r' % name) - seen = set() - for name in field_names: - if name.startswith('_') and not rename: - raise ValueError('Field names cannot start with an underscore: ' - '%r' % name) - if name in seen: - raise ValueError('Encountered duplicate field name: %r' % name) - seen.add(name) - - # Fill-in the class template - class_definition = _class_template.format( - typename = typename, - field_names = tuple(field_names), - num_fields = len(field_names), - arg_list = repr(tuple(field_names)).replace("'", "")[1:-1], - repr_fmt = ', '.join(_repr_template.format(name=name) - for name in field_names), - field_defs = '\n'.join(_field_template.format(index=index, name=name) - for index, name in enumerate(field_names)) - ) - - # Execute the template string in a temporary namespace and support - # tracing utilities by setting a value for frame.f_globals['__name__'] - namespace = dict(__name__='namedtuple_%s' % typename) - exec(class_definition, namespace) - result = namespace[typename] - result._source = class_definition - if verbose: - print(result._source) - - # For pickling to work, the __module__ variable needs to be set to the frame - # where the named tuple is created. Bypass this step in environments where - # sys._getframe is not defined (Jython for example) or sys._getframe is not - # defined for arguments greater than 0 (IronPython). - try: - result.__module__ = _sys._getframe(1).f_globals.get('__name__', '__main__') - except (AttributeError, ValueError): - pass - - return result - - -######################################################################## -### Counter -######################################################################## - -def _count_elements(mapping, iterable): - 'Tally elements from the iterable.' - mapping_get = mapping.get - for elem in iterable: - mapping[elem] = mapping_get(elem, 0) + 1 - -try: # Load C helper function if available - from _collections import _count_elements -except ImportError: - pass - -class Counter(dict): - '''Dict subclass for counting hashable items. Sometimes called a bag - or multiset. Elements are stored as dictionary keys and their counts - are stored as dictionary values. - - >>> c = Counter('abcdeabcdabcaba') # count elements from a string - - >>> c.most_common(3) # three most common elements - [('a', 5), ('b', 4), ('c', 3)] - >>> sorted(c) # list all unique elements - ['a', 'b', 'c', 'd', 'e'] - >>> ''.join(sorted(c.elements())) # list elements with repetitions - 'aaaaabbbbcccdde' - >>> sum(c.values()) # total of all counts - 15 - - >>> c['a'] # count of letter 'a' - 5 - >>> for elem in 'shazam': # update counts from an iterable - ... 
c[elem] += 1 # by adding 1 to each element's count - >>> c['a'] # now there are seven 'a' - 7 - >>> del c['b'] # remove all 'b' - >>> c['b'] # now there are zero 'b' - 0 - - >>> d = Counter('simsalabim') # make another counter - >>> c.update(d) # add in the second counter - >>> c['a'] # now there are nine 'a' - 9 - - >>> c.clear() # empty the counter - >>> c - Counter() - - Note: If a count is set to zero or reduced to zero, it will remain - in the counter until the entry is deleted or the counter is cleared: - - >>> c = Counter('aaabbc') - >>> c['b'] -= 2 # reduce the count of 'b' by two - >>> c.most_common() # 'b' is still in, but its count is zero - [('a', 3), ('c', 1), ('b', 0)] - - ''' - # References: - # http://en.wikipedia.org/wiki/Multiset - # http://www.gnu.org/software/smalltalk/manual-base/html_node/Bag.html - # http://www.demo2s.com/Tutorial/Cpp/0380__set-multiset/Catalog0380__set-multiset.htm - # http://code.activestate.com/recipes/259174/ - # Knuth, TAOCP Vol. II section 4.6.3 - - def __init__(*args, **kwds): - '''Create a new, empty Counter object. And if given, count elements - from an input iterable. Or, initialize the count from another mapping - of elements to their counts. - - >>> c = Counter() # a new, empty counter - >>> c = Counter('gallahad') # a new counter from an iterable - >>> c = Counter({'a': 4, 'b': 2}) # a new counter from a mapping - >>> c = Counter(a=4, b=2) # a new counter from keyword args - - ''' - if not args: - raise TypeError("descriptor '__init__' of 'Counter' object " - "needs an argument") - self, *args = args - if len(args) > 1: - raise TypeError('expected at most 1 arguments, got %d' % len(args)) - super(Counter, self).__init__() - self.update(*args, **kwds) - - def __missing__(self, key): - 'The count of elements not in the Counter is zero.' - # Needed so that self[missing_item] does not raise KeyError - return 0 - - def most_common(self, n=None): - '''List the n most common elements and their counts from the most - common to the least. If n is None, then list all element counts. - - >>> Counter('abcdeabcdabcaba').most_common(3) - [('a', 5), ('b', 4), ('c', 3)] - - ''' - # Emulate Bag.sortedByCount from Smalltalk - if n is None: - return sorted(self.items(), key=_itemgetter(1), reverse=True) - return _heapq.nlargest(n, self.items(), key=_itemgetter(1)) - - def elements(self): - '''Iterator over elements repeating each as many times as its count. - - >>> c = Counter('ABCABC') - >>> sorted(c.elements()) - ['A', 'A', 'B', 'B', 'C', 'C'] - - # Knuth's example for prime factors of 1836: 2**2 * 3**3 * 17**1 - >>> prime_factors = Counter({2: 2, 3: 3, 17: 1}) - >>> product = 1 - >>> for factor in prime_factors.elements(): # loop over factors - ... product *= factor # and multiply them - >>> product - 1836 - - Note, if an element's count has been set to zero or is a negative - number, elements() will ignore it. - - ''' - # Emulate Bag.do from Smalltalk and Multiset.begin from C++. - return _chain.from_iterable(_starmap(_repeat, self.items())) - - # Override dict methods where necessary - - @classmethod - def fromkeys(cls, iterable, v=None): - # There is no equivalent method for counters because setting v=1 - # means that no element can have a count greater than one. - raise NotImplementedError( - 'Counter.fromkeys() is undefined. Use Counter(iterable) instead.') - - def update(*args, **kwds): - '''Like dict.update() but add counts instead of replacing them. - - Source can be an iterable, a dictionary, or another Counter instance. 
-
-        >>> c = Counter('which')
-        >>> c.update('witch')           # add elements from another iterable
-        >>> d = Counter('watch')
-        >>> c.update(d)                 # add elements from another counter
-        >>> c['h']                      # four 'h' in which, witch, and watch
-        4
-
-        '''
-        # The regular dict.update() operation makes no sense here because the
-        # replace behavior results in some of the original untouched counts
-        # being mixed in with all of the other counts for a mishmash that
-        # doesn't have a straightforward interpretation in most counting
-        # contexts.  Instead, we implement straight-addition.  Both the inputs
-        # and outputs are allowed to contain zero and negative counts.
-
-        if not args:
-            raise TypeError("descriptor 'update' of 'Counter' object "
-                            "needs an argument")
-        self, *args = args
-        if len(args) > 1:
-            raise TypeError('expected at most 1 arguments, got %d' % len(args))
-        iterable = args[0] if args else None
-        if iterable is not None:
-            if isinstance(iterable, Mapping):
-                if self:
-                    self_get = self.get
-                    for elem, count in iterable.items():
-                        self[elem] = count + self_get(elem, 0)
-                else:
-                    super(Counter, self).update(iterable) # fast path when counter is empty
-            else:
-                _count_elements(self, iterable)
-        if kwds:
-            self.update(kwds)
-
-    def subtract(*args, **kwds):
-        '''Like dict.update() but subtracts counts instead of replacing them.
-        Counts can be reduced below zero.  Both the inputs and outputs are
-        allowed to contain zero and negative counts.
-
-        Source can be an iterable, a dictionary, or another Counter instance.
-
-        >>> c = Counter('which')
-        >>> c.subtract('witch')              # subtract elements from another iterable
-        >>> c.subtract(Counter('watch'))     # subtract elements from another counter
-        >>> c['h']                           # 2 in which, minus 1 in witch, minus 1 in watch
-        0
-        >>> c['w']                           # 1 in which, minus 1 in witch, minus 1 in watch
-        -1
-
-        '''
-        if not args:
-            raise TypeError("descriptor 'subtract' of 'Counter' object "
-                            "needs an argument")
-        self, *args = args
-        if len(args) > 1:
-            raise TypeError('expected at most 1 arguments, got %d' % len(args))
-        iterable = args[0] if args else None
-        if iterable is not None:
-            self_get = self.get
-            if isinstance(iterable, Mapping):
-                for elem, count in iterable.items():
-                    self[elem] = self_get(elem, 0) - count
-            else:
-                for elem in iterable:
-                    self[elem] = self_get(elem, 0) - 1
-        if kwds:
-            self.subtract(kwds)
-
-    def copy(self):
-        'Return a shallow copy.'
-        return self.__class__(self)
-
-    def __reduce__(self):
-        return self.__class__, (dict(self),)
-
-    def __delitem__(self, elem):
-        'Like dict.__delitem__() but does not raise KeyError for missing values.'
-        if elem in self:
-            super().__delitem__(elem)
-
-    def __repr__(self):
-        if not self:
-            return '%s()' % self.__class__.__name__
-        try:
-            items = ', '.join(map('%r: %r'.__mod__, self.most_common()))
-            return '%s({%s})' % (self.__class__.__name__, items)
-        except TypeError:
-            # handle case where values are not orderable
-            return '{0}({1!r})'.format(self.__class__.__name__, dict(self))
-
-    # Multiset-style mathematical operations discussed in:
-    #       Knuth TAOCP Volume II section 4.6.3 exercise 19
-    #       and at http://en.wikipedia.org/wiki/Multiset
-    #
-    # Outputs guaranteed to only include positive counts.
-    #
-    # To strip negative and zero counts, add in an empty counter:
-    #       c += Counter()
-
-    def __add__(self, other):
-        '''Add counts from two counters.
- - >>> Counter('abbb') + Counter('bcc') - Counter({'b': 4, 'c': 2, 'a': 1}) - - ''' - if not isinstance(other, Counter): - return NotImplemented - result = Counter() - for elem, count in self.items(): - newcount = count + other[elem] - if newcount > 0: - result[elem] = newcount - for elem, count in other.items(): - if elem not in self and count > 0: - result[elem] = count - return result - - def __sub__(self, other): - ''' Subtract count, but keep only results with positive counts. - - >>> Counter('abbbc') - Counter('bccd') - Counter({'b': 2, 'a': 1}) - - ''' - if not isinstance(other, Counter): - return NotImplemented - result = Counter() - for elem, count in self.items(): - newcount = count - other[elem] - if newcount > 0: - result[elem] = newcount - for elem, count in other.items(): - if elem not in self and count < 0: - result[elem] = 0 - count - return result - - def __or__(self, other): - '''Union is the maximum of value in either of the input counters. - - >>> Counter('abbb') | Counter('bcc') - Counter({'b': 3, 'c': 2, 'a': 1}) - - ''' - if not isinstance(other, Counter): - return NotImplemented - result = Counter() - for elem, count in self.items(): - other_count = other[elem] - newcount = other_count if count < other_count else count - if newcount > 0: - result[elem] = newcount - for elem, count in other.items(): - if elem not in self and count > 0: - result[elem] = count - return result - - def __and__(self, other): - ''' Intersection is the minimum of corresponding counts. - - >>> Counter('abbb') & Counter('bcc') - Counter({'b': 1}) - - ''' - if not isinstance(other, Counter): - return NotImplemented - result = Counter() - for elem, count in self.items(): - other_count = other[elem] - newcount = count if count < other_count else other_count - if newcount > 0: - result[elem] = newcount - return result - - def __pos__(self): - 'Adds an empty counter, effectively stripping negative and zero counts' - return self + Counter() - - def __neg__(self): - '''Subtracts from an empty counter. Strips positive and zero counts, - and flips the sign on negative counts. - - ''' - return Counter() - self - - def _keep_positive(self): - '''Internal method to strip elements with a negative or zero count''' - nonpositive = [elem for elem, count in self.items() if not count > 0] - for elem in nonpositive: - del self[elem] - return self - - def __iadd__(self, other): - '''Inplace add from another counter, keeping only positive counts. - - >>> c = Counter('abbb') - >>> c += Counter('bcc') - >>> c - Counter({'b': 4, 'c': 2, 'a': 1}) - - ''' - for elem, count in other.items(): - self[elem] += count - return self._keep_positive() - - def __isub__(self, other): - '''Inplace subtract counter, but keep only results with positive counts. - - >>> c = Counter('abbbc') - >>> c -= Counter('bccd') - >>> c - Counter({'b': 2, 'a': 1}) - - ''' - for elem, count in other.items(): - self[elem] -= count - return self._keep_positive() - - def __ior__(self, other): - '''Inplace union is the maximum of value from either counter. - - >>> c = Counter('abbb') - >>> c |= Counter('bcc') - >>> c - Counter({'b': 3, 'c': 2, 'a': 1}) - - ''' - for elem, other_count in other.items(): - count = self[elem] - if other_count > count: - self[elem] = other_count - return self._keep_positive() - - def __iand__(self, other): - '''Inplace intersection is the minimum of corresponding counts. 
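The binary operators above implement the multiset algebra referenced in the comment block: every result keeps only positive counts. A small sketch restating the doctests, plus the unary forms:

from collections import Counter

a = Counter('abbb')                                  # {'b': 3, 'a': 1}
b = Counter('bcc')                                   # {'c': 2, 'b': 1}

assert a + b == Counter({'b': 4, 'c': 2, 'a': 1})    # addition
assert a - b == Counter({'b': 2, 'a': 1})            # saturating subtraction
assert (a | b) == Counter({'b': 3, 'c': 2, 'a': 1})  # union: max of counts
assert (a & b) == Counter({'b': 1})                  # intersection: min of counts

# The unary operators strip non-positive counts, as documented above
c = Counter(a=2, b=-4)
assert +c == Counter(a=2)
assert -c == Counter(b=4)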
-
-        >>> c = Counter('abbb')
-        >>> c &= Counter('bcc')
-        >>> c
-        Counter({'b': 1})
-
-        '''
-        for elem, count in self.items():
-            other_count = other[elem]
-            if other_count < count:
-                self[elem] = other_count
-        return self._keep_positive()
-
-
-########################################################################
-###  ChainMap (helper for configparser and string.Template)
-########################################################################
-
-class ChainMap(MutableMapping):
-    ''' A ChainMap groups multiple dicts (or other mappings) together
-    to create a single, updateable view.
-
-    The underlying mappings are stored in a list.  That list is public and can
-    be accessed or updated using the *maps* attribute.  There is no other state.
-
-    Lookups search the underlying mappings successively until a key is found.
-    In contrast, writes, updates, and deletions only operate on the first
-    mapping.
-
-    '''
-
-    def __init__(self, *maps):
-        '''Initialize a ChainMap by setting *maps* to the given mappings.
-        If no mappings are provided, a single empty dictionary is used.
-
-        '''
-        self.maps = list(maps) or [{}]          # always at least one map
-
-    def __missing__(self, key):
-        raise KeyError(key)
-
-    def __getitem__(self, key):
-        for mapping in self.maps:
-            try:
-                return mapping[key]             # can't use 'key in mapping' with defaultdict
-            except KeyError:
-                pass
-        return self.__missing__(key)            # support subclasses that define __missing__
-
-    def get(self, key, default=None):
-        return self[key] if key in self else default
-
-    def __len__(self):
-        return len(set().union(*self.maps))     # reuses stored hash values if possible
-
-    def __iter__(self):
-        return iter(set().union(*self.maps))
-
-    def __contains__(self, key):
-        return any(key in m for m in self.maps)
-
-    def __bool__(self):
-        return any(self.maps)
-
-    @_recursive_repr()
-    def __repr__(self):
-        return '{0.__class__.__name__}({1})'.format(
-            self, ', '.join(map(repr, self.maps)))
-
-    @classmethod
-    def fromkeys(cls, iterable, *args):
-        'Create a ChainMap with a single dict created from the iterable.'
-        return cls(dict.fromkeys(iterable, *args))
-
-    def copy(self):
-        'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]'
-        return self.__class__(self.maps[0].copy(), *self.maps[1:])
-
-    __copy__ = copy
-
-    def new_child(self, m=None):                # like Django's Context.push()
-        '''New ChainMap with a new map followed by all previous maps.
-        If no map is provided, an empty dict is used.
-        '''
-        if m is None:
-            m = {}
-        return self.__class__(m, *self.maps)
-
-    @property
-    def parents(self):                          # like Django's Context.pop()
-        'New ChainMap from maps[1:].'
-        return self.__class__(*self.maps[1:])
-
-    def __setitem__(self, key, value):
-        self.maps[0][key] = value
-
-    def __delitem__(self, key):
-        try:
-            del self.maps[0][key]
-        except KeyError:
-            raise KeyError('Key not found in the first mapping: {!r}'.format(key))
-
-    def popitem(self):
-        'Remove and return an item pair from maps[0]. Raise KeyError if maps[0] is empty.'
-        try:
-            return self.maps[0].popitem()
-        except KeyError:
-            raise KeyError('No keys found in the first mapping.')
-
-    def pop(self, key, *args):
-        'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].'
-        try:
-            return self.maps[0].pop(key, *args)
-        except KeyError:
-            raise KeyError('Key not found in the first mapping: {!r}'.format(key))
-
-    def clear(self):
-        'Clear maps[0], leaving maps[1:] intact.'
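Because ChainMap reads through the whole chain but writes only to maps[0], a compact usage sketch may be useful here (the dictionaries are illustrative):

from collections import ChainMap

defaults = {'color': 'red', 'user': 'guest'}
overrides = {'user': 'admin'}
combined = ChainMap(overrides, defaults)

assert combined['user'] == 'admin'      # found in the first mapping
assert combined['color'] == 'red'       # falls through to the second

combined['color'] = 'blue'              # writes touch only maps[0]
assert overrides['color'] == 'blue' and defaults['color'] == 'red'

child = combined.new_child({'user': 'temp'})  # like Django's Context.push()
assert child['user'] == 'temp'
assert child.parents['user'] == 'admin'       # like Context.pop()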
- self.maps[0].clear() - - -################################################################################ -### UserDict -################################################################################ - -class UserDict(MutableMapping): - - # Start by filling-out the abstract methods - def __init__(*args, **kwargs): - if not args: - raise TypeError("descriptor '__init__' of 'UserDict' object " - "needs an argument") - self, *args = args - if len(args) > 1: - raise TypeError('expected at most 1 arguments, got %d' % len(args)) - if args: - dict = args[0] - elif 'dict' in kwargs: - dict = kwargs.pop('dict') - import warnings - warnings.warn("Passing 'dict' as keyword argument is deprecated", - PendingDeprecationWarning, stacklevel=2) - else: - dict = None - self.data = {} - if dict is not None: - self.update(dict) - if len(kwargs): - self.update(kwargs) - def __len__(self): return len(self.data) - def __getitem__(self, key): - if key in self.data: - return self.data[key] - if hasattr(self.__class__, "__missing__"): - return self.__class__.__missing__(self, key) - raise KeyError(key) - def __setitem__(self, key, item): self.data[key] = item - def __delitem__(self, key): del self.data[key] - def __iter__(self): - return iter(self.data) - - # Modify __contains__ to work correctly when __missing__ is present - def __contains__(self, key): - return key in self.data - - # Now, add the methods in dicts but not in MutableMapping - def __repr__(self): return repr(self.data) - def copy(self): - if self.__class__ is UserDict: - return UserDict(self.data.copy()) - import copy - data = self.data - try: - self.data = {} - c = copy.copy(self) - finally: - self.data = data - c.update(self) - return c - @classmethod - def fromkeys(cls, iterable, value=None): - d = cls() - for key in iterable: - d[key] = value - return d - - - -################################################################################ -### UserList -################################################################################ - -class UserList(MutableSequence): - """A more or less complete user-defined wrapper around list objects.""" - def __init__(self, initlist=None): - self.data = [] - if initlist is not None: - # XXX should this accept an arbitrary sequence? 
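The design point worth calling out in UserDict above: all storage goes through self.data, so a subclass that overrides __setitem__ or __getitem__ sees every access, whereas a plain dict subclass can be bypassed by the C-level fast paths in dict.__init__() and dict.update(). A minimal sketch (LowerDict is a hypothetical name used only for illustration):

from collections import UserDict

class LowerDict(UserDict):
    'A mapping whose string keys are stored case-insensitively.'
    def __setitem__(self, key, item):
        self.data[key.lower()] = item
    def __getitem__(self, key):
        return self.data[key.lower()]

d = LowerDict(FOO=1)        # __init__ routes through __setitem__
assert d['foo'] == d['Foo'] == 1
d['BAR'] = 2
assert sorted(d) == ['bar', 'foo']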
- if type(initlist) == type(self.data): - self.data[:] = initlist - elif isinstance(initlist, UserList): - self.data[:] = initlist.data[:] - else: - self.data = list(initlist) - def __repr__(self): return repr(self.data) - def __lt__(self, other): return self.data < self.__cast(other) - def __le__(self, other): return self.data <= self.__cast(other) - def __eq__(self, other): return self.data == self.__cast(other) - def __ne__(self, other): return self.data != self.__cast(other) - def __gt__(self, other): return self.data > self.__cast(other) - def __ge__(self, other): return self.data >= self.__cast(other) - def __cast(self, other): - return other.data if isinstance(other, UserList) else other - def __contains__(self, item): return item in self.data - def __len__(self): return len(self.data) - def __getitem__(self, i): return self.data[i] - def __setitem__(self, i, item): self.data[i] = item - def __delitem__(self, i): del self.data[i] - def __add__(self, other): - if isinstance(other, UserList): - return self.__class__(self.data + other.data) - elif isinstance(other, type(self.data)): - return self.__class__(self.data + other) - return self.__class__(self.data + list(other)) - def __radd__(self, other): - if isinstance(other, UserList): - return self.__class__(other.data + self.data) - elif isinstance(other, type(self.data)): - return self.__class__(other + self.data) - return self.__class__(list(other) + self.data) - def __iadd__(self, other): - if isinstance(other, UserList): - self.data += other.data - elif isinstance(other, type(self.data)): - self.data += other - else: - self.data += list(other) - return self - def __mul__(self, n): - return self.__class__(self.data*n) - __rmul__ = __mul__ - def __imul__(self, n): - self.data *= n - return self - def append(self, item): self.data.append(item) - def insert(self, i, item): self.data.insert(i, item) - def pop(self, i=-1): return self.data.pop(i) - def remove(self, item): self.data.remove(item) - def clear(self): self.data.clear() - def copy(self): return self.__class__(self) - def count(self, item): return self.data.count(item) - def index(self, item, *args): return self.data.index(item, *args) - def reverse(self): self.data.reverse() - def sort(self, *args, **kwds): self.data.sort(*args, **kwds) - def extend(self, other): - if isinstance(other, UserList): - self.data.extend(other.data) - else: - self.data.extend(other) - - - -################################################################################ -### UserString -################################################################################ - -class UserString(Sequence): - def __init__(self, seq): - if isinstance(seq, str): - self.data = seq - elif isinstance(seq, UserString): - self.data = seq.data[:] - else: - self.data = str(seq) - def __str__(self): return str(self.data) - def __repr__(self): return repr(self.data) - def __int__(self): return int(self.data) - def __float__(self): return float(self.data) - def __complex__(self): return complex(self.data) - def __hash__(self): return hash(self.data) - - def __eq__(self, string): - if isinstance(string, UserString): - return self.data == string.data - return self.data == string - def __ne__(self, string): - if isinstance(string, UserString): - return self.data != string.data - return self.data != string - def __lt__(self, string): - if isinstance(string, UserString): - return self.data < string.data - return self.data < string - def __le__(self, string): - if isinstance(string, UserString): - return self.data <= 
string.data - return self.data <= string - def __gt__(self, string): - if isinstance(string, UserString): - return self.data > string.data - return self.data > string - def __ge__(self, string): - if isinstance(string, UserString): - return self.data >= string.data - return self.data >= string - - def __contains__(self, char): - if isinstance(char, UserString): - char = char.data - return char in self.data - - def __len__(self): return len(self.data) - def __getitem__(self, index): return self.__class__(self.data[index]) - def __add__(self, other): - if isinstance(other, UserString): - return self.__class__(self.data + other.data) - elif isinstance(other, str): - return self.__class__(self.data + other) - return self.__class__(self.data + str(other)) - def __radd__(self, other): - if isinstance(other, str): - return self.__class__(other + self.data) - return self.__class__(str(other) + self.data) - def __mul__(self, n): - return self.__class__(self.data*n) - __rmul__ = __mul__ - def __mod__(self, args): - return self.__class__(self.data % args) - - # the following methods are defined in alphabetical order: - def capitalize(self): return self.__class__(self.data.capitalize()) - def center(self, width, *args): - return self.__class__(self.data.center(width, *args)) - def count(self, sub, start=0, end=_sys.maxsize): - if isinstance(sub, UserString): - sub = sub.data - return self.data.count(sub, start, end) - def encode(self, encoding=None, errors=None): # XXX improve this? - if encoding: - if errors: - return self.__class__(self.data.encode(encoding, errors)) - return self.__class__(self.data.encode(encoding)) - return self.__class__(self.data.encode()) - def endswith(self, suffix, start=0, end=_sys.maxsize): - return self.data.endswith(suffix, start, end) - def expandtabs(self, tabsize=8): - return self.__class__(self.data.expandtabs(tabsize)) - def find(self, sub, start=0, end=_sys.maxsize): - if isinstance(sub, UserString): - sub = sub.data - return self.data.find(sub, start, end) - def format(self, *args, **kwds): - return self.data.format(*args, **kwds) - def index(self, sub, start=0, end=_sys.maxsize): - return self.data.index(sub, start, end) - def isalpha(self): return self.data.isalpha() - def isalnum(self): return self.data.isalnum() - def isdecimal(self): return self.data.isdecimal() - def isdigit(self): return self.data.isdigit() - def isidentifier(self): return self.data.isidentifier() - def islower(self): return self.data.islower() - def isnumeric(self): return self.data.isnumeric() - def isspace(self): return self.data.isspace() - def istitle(self): return self.data.istitle() - def isupper(self): return self.data.isupper() - def join(self, seq): return self.data.join(seq) - def ljust(self, width, *args): - return self.__class__(self.data.ljust(width, *args)) - def lower(self): return self.__class__(self.data.lower()) - def lstrip(self, chars=None): return self.__class__(self.data.lstrip(chars)) - def partition(self, sep): - return self.data.partition(sep) - def replace(self, old, new, maxsplit=-1): - if isinstance(old, UserString): - old = old.data - if isinstance(new, UserString): - new = new.data - return self.__class__(self.data.replace(old, new, maxsplit)) - def rfind(self, sub, start=0, end=_sys.maxsize): - if isinstance(sub, UserString): - sub = sub.data - return self.data.rfind(sub, start, end) - def rindex(self, sub, start=0, end=_sys.maxsize): - return self.data.rindex(sub, start, end) - def rjust(self, width, *args): - return self.__class__(self.data.rjust(width, 
*args)) - def rpartition(self, sep): - return self.data.rpartition(sep) - def rstrip(self, chars=None): - return self.__class__(self.data.rstrip(chars)) - def split(self, sep=None, maxsplit=-1): - return self.data.split(sep, maxsplit) - def rsplit(self, sep=None, maxsplit=-1): - return self.data.rsplit(sep, maxsplit) - def splitlines(self, keepends=False): return self.data.splitlines(keepends) - def startswith(self, prefix, start=0, end=_sys.maxsize): - return self.data.startswith(prefix, start, end) - def strip(self, chars=None): return self.__class__(self.data.strip(chars)) - def swapcase(self): return self.__class__(self.data.swapcase()) - def title(self): return self.__class__(self.data.title()) - def translate(self, *args): - return self.__class__(self.data.translate(*args)) - def upper(self): return self.__class__(self.data.upper()) - def zfill(self, width): return self.__class__(self.data.zfill(width)) diff --git a/typing_extensions/test_data/python-3.4.6/collections/__main__.py b/typing_extensions/test_data/python-3.4.6/collections/__main__.py deleted file mode 100644 index 763e38e0..00000000 --- a/typing_extensions/test_data/python-3.4.6/collections/__main__.py +++ /dev/null @@ -1,38 +0,0 @@ -################################################################################ -### Simple tests -################################################################################ - -# verify that instances can be pickled -from collections import namedtuple -from pickle import loads, dumps -Point = namedtuple('Point', 'x, y', True) -p = Point(x=10, y=20) -assert p == loads(dumps(p)) - -# test and demonstrate ability to override methods -class Point(namedtuple('Point', 'x y')): - __slots__ = () - @property - def hypot(self): - return (self.x ** 2 + self.y ** 2) ** 0.5 - def __str__(self): - return 'Point: x=%6.3f y=%6.3f hypot=%6.3f' % (self.x, self.y, self.hypot) - -for p in Point(3, 4), Point(14, 5/7.): - print (p) - -class Point(namedtuple('Point', 'x y')): - 'Point class with optimized _make() and _replace() without error-checking' - __slots__ = () - _make = classmethod(tuple.__new__) - def _replace(self, _map=map, **kwds): - return self._make(_map(kwds.get, ('x', 'y'), self)) - -print(Point(11, 22)._replace(x=100)) - -Point3D = namedtuple('Point3D', Point._fields + ('z',)) -print(Point3D.__doc__) - -import doctest, collections -TestResults = namedtuple('TestResults', 'failed attempted') -print(TestResults(*doctest.testmod(collections))) diff --git a/typing_extensions/test_data/python-3.4.6/collections/abc.py b/typing_extensions/test_data/python-3.4.6/collections/abc.py deleted file mode 100644 index 891600d1..00000000 --- a/typing_extensions/test_data/python-3.4.6/collections/abc.py +++ /dev/null @@ -1,2 +0,0 @@ -from _collections_abc import * -from _collections_abc import __all__ diff --git a/typing_extensions/test_data/python-3.5.0/_collections_abc.py b/typing_extensions/test_data/python-3.5.0/_collections_abc.py deleted file mode 100644 index f89bb6f0..00000000 --- a/typing_extensions/test_data/python-3.5.0/_collections_abc.py +++ /dev/null @@ -1,939 +0,0 @@ -# Copyright 2007 Google, Inc. All Rights Reserved. -# Licensed to PSF under a Contributor Agreement. - -"""Abstract Base Classes (ABCs) for collections, according to PEP 3119. - -Unit tests are in test_collections. 
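Like UserList, the UserString wrapper above returns self.__class__ from most transforming methods, so subclasses survive case changes and slicing. A tiny sketch (Redacted is illustrative only):

from collections import UserString

class Redacted(UserString):
    'A string that masks digits when displayed.'
    def __str__(self):
        return ''.join('#' if ch.isdigit() else ch for ch in self.data)

s = Redacted('agent 007')
assert str(s) == 'agent ###'
assert isinstance(s.upper(), Redacted)   # transforms keep the subclass
assert s.upper().data == 'AGENT 007'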
-""" - -from abc import ABCMeta, abstractmethod -import sys - -__all__ = ["Awaitable", "Coroutine", "AsyncIterable", "AsyncIterator", - "Hashable", "Iterable", "Iterator", "Generator", - "Sized", "Container", "Callable", - "Set", "MutableSet", - "Mapping", "MutableMapping", - "MappingView", "KeysView", "ItemsView", "ValuesView", - "Sequence", "MutableSequence", - "ByteString", - ] - -# This module has been renamed from collections.abc to _collections_abc to -# speed up interpreter startup. Some of the types such as MutableMapping are -# required early but collections module imports a lot of other modules. -# See issue #19218 -__name__ = "collections.abc" - -# Private list of types that we want to register with the various ABCs -# so that they will pass tests like: -# it = iter(somebytearray) -# assert isinstance(it, Iterable) -# Note: in other implementations, these types many not be distinct -# and they make have their own implementation specific types that -# are not included on this list. -bytes_iterator = type(iter(b'')) -bytearray_iterator = type(iter(bytearray())) -#callable_iterator = ??? -dict_keyiterator = type(iter({}.keys())) -dict_valueiterator = type(iter({}.values())) -dict_itemiterator = type(iter({}.items())) -list_iterator = type(iter([])) -list_reverseiterator = type(iter(reversed([]))) -range_iterator = type(iter(range(0))) -set_iterator = type(iter(set())) -str_iterator = type(iter("")) -tuple_iterator = type(iter(())) -zip_iterator = type(iter(zip())) -## views ## -dict_keys = type({}.keys()) -dict_values = type({}.values()) -dict_items = type({}.items()) -## misc ## -mappingproxy = type(type.__dict__) -generator = type((lambda: (yield))()) -## coroutine ## -async def _coro(): pass -_coro = _coro() -coroutine = type(_coro) -_coro.close() # Prevent ResourceWarning -del _coro - - -### ONE-TRICK PONIES ### - -class Hashable(metaclass=ABCMeta): - - __slots__ = () - - @abstractmethod - def __hash__(self): - return 0 - - @classmethod - def __subclasshook__(cls, C): - if cls is Hashable: - for B in C.__mro__: - if "__hash__" in B.__dict__: - if B.__dict__["__hash__"]: - return True - break - return NotImplemented - - -class Awaitable(metaclass=ABCMeta): - - __slots__ = () - - @abstractmethod - def __await__(self): - yield - - @classmethod - def __subclasshook__(cls, C): - if cls is Awaitable: - for B in C.__mro__: - if "__await__" in B.__dict__: - if B.__dict__["__await__"]: - return True - break - return NotImplemented - - -class Coroutine(Awaitable): - - __slots__ = () - - @abstractmethod - def send(self, value): - """Send a value into the coroutine. - Return next yielded value or raise StopIteration. - """ - raise StopIteration - - @abstractmethod - def throw(self, typ, val=None, tb=None): - """Raise an exception in the coroutine. - Return next yielded value or raise StopIteration. - """ - if val is None: - if tb is None: - raise typ - val = typ() - if tb is not None: - val = val.with_traceback(tb) - raise val - - def close(self): - """Raise GeneratorExit inside coroutine. 
- """ - try: - self.throw(GeneratorExit) - except (GeneratorExit, StopIteration): - pass - else: - raise RuntimeError("coroutine ignored GeneratorExit") - - @classmethod - def __subclasshook__(cls, C): - if cls is Coroutine: - mro = C.__mro__ - for method in ('__await__', 'send', 'throw', 'close'): - for base in mro: - if method in base.__dict__: - break - else: - return NotImplemented - return True - return NotImplemented - - -Coroutine.register(coroutine) - - -class AsyncIterable(metaclass=ABCMeta): - - __slots__ = () - - @abstractmethod - async def __aiter__(self): - return AsyncIterator() - - @classmethod - def __subclasshook__(cls, C): - if cls is AsyncIterable: - if any("__aiter__" in B.__dict__ for B in C.__mro__): - return True - return NotImplemented - - -class AsyncIterator(AsyncIterable): - - __slots__ = () - - @abstractmethod - async def __anext__(self): - """Return the next item or raise StopAsyncIteration when exhausted.""" - raise StopAsyncIteration - - async def __aiter__(self): - return self - - @classmethod - def __subclasshook__(cls, C): - if cls is AsyncIterator: - if (any("__anext__" in B.__dict__ for B in C.__mro__) and - any("__aiter__" in B.__dict__ for B in C.__mro__)): - return True - return NotImplemented - - -class Iterable(metaclass=ABCMeta): - - __slots__ = () - - @abstractmethod - def __iter__(self): - while False: - yield None - - @classmethod - def __subclasshook__(cls, C): - if cls is Iterable: - if any("__iter__" in B.__dict__ for B in C.__mro__): - return True - return NotImplemented - - -class Iterator(Iterable): - - __slots__ = () - - @abstractmethod - def __next__(self): - 'Return the next item from the iterator. When exhausted, raise StopIteration' - raise StopIteration - - def __iter__(self): - return self - - @classmethod - def __subclasshook__(cls, C): - if cls is Iterator: - if (any("__next__" in B.__dict__ for B in C.__mro__) and - any("__iter__" in B.__dict__ for B in C.__mro__)): - return True - return NotImplemented - -Iterator.register(bytes_iterator) -Iterator.register(bytearray_iterator) -#Iterator.register(callable_iterator) -Iterator.register(dict_keyiterator) -Iterator.register(dict_valueiterator) -Iterator.register(dict_itemiterator) -Iterator.register(list_iterator) -Iterator.register(list_reverseiterator) -Iterator.register(range_iterator) -Iterator.register(set_iterator) -Iterator.register(str_iterator) -Iterator.register(tuple_iterator) -Iterator.register(zip_iterator) - - -class Generator(Iterator): - - __slots__ = () - - def __next__(self): - """Return the next item from the generator. - When exhausted, raise StopIteration. - """ - return self.send(None) - - @abstractmethod - def send(self, value): - """Send a value into the generator. - Return next yielded value or raise StopIteration. - """ - raise StopIteration - - @abstractmethod - def throw(self, typ, val=None, tb=None): - """Raise an exception in the generator. - Return next yielded value or raise StopIteration. - """ - if val is None: - if tb is None: - raise typ - val = typ() - if tb is not None: - val = val.with_traceback(tb) - raise val - - def close(self): - """Raise GeneratorExit inside generator. 
- """ - try: - self.throw(GeneratorExit) - except (GeneratorExit, StopIteration): - pass - else: - raise RuntimeError("generator ignored GeneratorExit") - - @classmethod - def __subclasshook__(cls, C): - if cls is Generator: - mro = C.__mro__ - for method in ('__iter__', '__next__', 'send', 'throw', 'close'): - for base in mro: - if method in base.__dict__: - break - else: - return NotImplemented - return True - return NotImplemented - - -Generator.register(generator) - - -class Sized(metaclass=ABCMeta): - - __slots__ = () - - @abstractmethod - def __len__(self): - return 0 - - @classmethod - def __subclasshook__(cls, C): - if cls is Sized: - if any("__len__" in B.__dict__ for B in C.__mro__): - return True - return NotImplemented - - -class Container(metaclass=ABCMeta): - - __slots__ = () - - @abstractmethod - def __contains__(self, x): - return False - - @classmethod - def __subclasshook__(cls, C): - if cls is Container: - if any("__contains__" in B.__dict__ for B in C.__mro__): - return True - return NotImplemented - - -class Callable(metaclass=ABCMeta): - - __slots__ = () - - @abstractmethod - def __call__(self, *args, **kwds): - return False - - @classmethod - def __subclasshook__(cls, C): - if cls is Callable: - if any("__call__" in B.__dict__ for B in C.__mro__): - return True - return NotImplemented - - -### SETS ### - - -class Set(Sized, Iterable, Container): - - """A set is a finite, iterable container. - - This class provides concrete generic implementations of all - methods except for __contains__, __iter__ and __len__. - - To override the comparisons (presumably for speed, as the - semantics are fixed), redefine __le__ and __ge__, - then the other operations will automatically follow suit. - """ - - __slots__ = () - - def __le__(self, other): - if not isinstance(other, Set): - return NotImplemented - if len(self) > len(other): - return False - for elem in self: - if elem not in other: - return False - return True - - def __lt__(self, other): - if not isinstance(other, Set): - return NotImplemented - return len(self) < len(other) and self.__le__(other) - - def __gt__(self, other): - if not isinstance(other, Set): - return NotImplemented - return len(self) > len(other) and self.__ge__(other) - - def __ge__(self, other): - if not isinstance(other, Set): - return NotImplemented - if len(self) < len(other): - return False - for elem in other: - if elem not in self: - return False - return True - - def __eq__(self, other): - if not isinstance(other, Set): - return NotImplemented - return len(self) == len(other) and self.__le__(other) - - @classmethod - def _from_iterable(cls, it): - '''Construct an instance of the class from any iterable input. - - Must override this method if the class constructor signature - does not accept an iterable for an input. - ''' - return cls(it) - - def __and__(self, other): - if not isinstance(other, Iterable): - return NotImplemented - return self._from_iterable(value for value in other if value in self) - - __rand__ = __and__ - - def isdisjoint(self, other): - 'Return True if two sets have a null intersection.' 
-        for value in other:
-            if value in self:
-                return False
-        return True
-
-    def __or__(self, other):
-        if not isinstance(other, Iterable):
-            return NotImplemented
-        chain = (e for s in (self, other) for e in s)
-        return self._from_iterable(chain)
-
-    __ror__ = __or__
-
-    def __sub__(self, other):
-        if not isinstance(other, Set):
-            if not isinstance(other, Iterable):
-                return NotImplemented
-            other = self._from_iterable(other)
-        return self._from_iterable(value for value in self
-                                   if value not in other)
-
-    def __rsub__(self, other):
-        if not isinstance(other, Set):
-            if not isinstance(other, Iterable):
-                return NotImplemented
-            other = self._from_iterable(other)
-        return self._from_iterable(value for value in other
-                                   if value not in self)
-
-    def __xor__(self, other):
-        if not isinstance(other, Set):
-            if not isinstance(other, Iterable):
-                return NotImplemented
-            other = self._from_iterable(other)
-        return (self - other) | (other - self)
-
-    __rxor__ = __xor__
-
-    def _hash(self):
-        """Compute the hash value of a set.
-
-        Note that we don't define __hash__: not all sets are hashable.
-        But if you define a hashable set type, its __hash__ should
-        call this function.
-
-        This must be compatible with __eq__.
-
-        All sets ought to compare equal if they contain the same
-        elements, regardless of how they are implemented, and
-        regardless of the order of the elements; so there's not much
-        freedom for __eq__ or __hash__.  We match the algorithm used
-        by the built-in frozenset type.
-        """
-        MAX = sys.maxsize
-        MASK = 2 * MAX + 1
-        n = len(self)
-        h = 1927868237 * (n + 1)
-        h &= MASK
-        for x in self:
-            hx = hash(x)
-            h ^= (hx ^ (hx << 16) ^ 89869747) * 3644798167
-            h &= MASK
-        h = h * 69069 + 907133923
-        h &= MASK
-        if h > MAX:
-            h -= MASK + 1
-        if h == -1:
-            h = 590923713
-        return h
-
-Set.register(frozenset)
-
-
-class MutableSet(Set):
-    """A mutable set is a finite, iterable container.
-
-    This class provides concrete generic implementations of all
-    methods except for __contains__, __iter__, __len__,
-    add(), and discard().
-
-    To override the comparisons (presumably for speed, as the
-    semantics are fixed), all you have to do is redefine __le__ and
-    then the other operations will automatically follow suit.
-    """
-
-    __slots__ = ()
-
-    @abstractmethod
-    def add(self, value):
-        """Add an element."""
-        raise NotImplementedError
-
-    @abstractmethod
-    def discard(self, value):
-        """Remove an element.  Do not raise an exception if absent."""
-        raise NotImplementedError
-
-    def remove(self, value):
-        """Remove an element. If not a member, raise a KeyError."""
-        if value not in self:
-            raise KeyError(value)
-        self.discard(value)
-
-    def pop(self):
-        """Return the popped value.  Raise KeyError if empty."""
-        it = iter(self)
-        try:
-            value = next(it)
-        except StopIteration:
-            raise KeyError
-        self.discard(value)
-        return value
-
-    def clear(self):
-        """This is slow (creates N new iterators!)
but effective.""" - try: - while True: - self.pop() - except KeyError: - pass - - def __ior__(self, it): - for value in it: - self.add(value) - return self - - def __iand__(self, it): - for value in (self - it): - self.discard(value) - return self - - def __ixor__(self, it): - if it is self: - self.clear() - else: - if not isinstance(it, Set): - it = self._from_iterable(it) - for value in it: - if value in self: - self.discard(value) - else: - self.add(value) - return self - - def __isub__(self, it): - if it is self: - self.clear() - else: - for value in it: - self.discard(value) - return self - -MutableSet.register(set) - - -### MAPPINGS ### - - -class Mapping(Sized, Iterable, Container): - - __slots__ = () - - """A Mapping is a generic container for associating key/value - pairs. - - This class provides concrete generic implementations of all - methods except for __getitem__, __iter__, and __len__. - - """ - - @abstractmethod - def __getitem__(self, key): - raise KeyError - - def get(self, key, default=None): - 'D.get(k[,d]) -> D[k] if k in D, else d. d defaults to None.' - try: - return self[key] - except KeyError: - return default - - def __contains__(self, key): - try: - self[key] - except KeyError: - return False - else: - return True - - def keys(self): - "D.keys() -> a set-like object providing a view on D's keys" - return KeysView(self) - - def items(self): - "D.items() -> a set-like object providing a view on D's items" - return ItemsView(self) - - def values(self): - "D.values() -> an object providing a view on D's values" - return ValuesView(self) - - def __eq__(self, other): - if not isinstance(other, Mapping): - return NotImplemented - return dict(self.items()) == dict(other.items()) - -Mapping.register(mappingproxy) - - -class MappingView(Sized): - - __slots__ = '_mapping', - - def __init__(self, mapping): - self._mapping = mapping - - def __len__(self): - return len(self._mapping) - - def __repr__(self): - return '{0.__class__.__name__}({0._mapping!r})'.format(self) - - -class KeysView(MappingView, Set): - - __slots__ = () - - @classmethod - def _from_iterable(self, it): - return set(it) - - def __contains__(self, key): - return key in self._mapping - - def __iter__(self): - yield from self._mapping - -KeysView.register(dict_keys) - - -class ItemsView(MappingView, Set): - - __slots__ = () - - @classmethod - def _from_iterable(self, it): - return set(it) - - def __contains__(self, item): - key, value = item - try: - v = self._mapping[key] - except KeyError: - return False - else: - return v == value - - def __iter__(self): - for key in self._mapping: - yield (key, self._mapping[key]) - -ItemsView.register(dict_items) - - -class ValuesView(MappingView): - - __slots__ = () - - def __contains__(self, value): - for key in self._mapping: - if value == self._mapping[key]: - return True - return False - - def __iter__(self): - for key in self._mapping: - yield self._mapping[key] - -ValuesView.register(dict_values) - - -class MutableMapping(Mapping): - - __slots__ = () - - """A MutableMapping is a generic container for associating - key/value pairs. - - This class provides concrete generic implementations of all - methods except for __getitem__, __setitem__, __delitem__, - __iter__, and __len__. 
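As with the other ABCs, the five abstract methods named in this docstring are all a concrete mapping needs; the pop(), popitem(), clear(), update(), and setdefault() mixins defined below then come for free. A minimal sketch (AttrMap is a hypothetical name):

from collections.abc import MutableMapping

class AttrMap(MutableMapping):
    'A mapping view over an ordinary instance __dict__.'
    def __init__(self, **kwargs):
        self.__dict__.update(kwargs)
    def __getitem__(self, key):
        return self.__dict__[key]
    def __setitem__(self, key, value):
        self.__dict__[key] = value
    def __delitem__(self, key):
        del self.__dict__[key]
    def __iter__(self):
        return iter(self.__dict__)
    def __len__(self):
        return len(self.__dict__)

m = AttrMap(a=1)
m.update(b=2)                     # mixin
assert m.pop('a') == 1            # mixin
assert m.setdefault('c', 3) == 3  # mixin
assert dict(m) == {'b': 2, 'c': 3}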
- - """ - - @abstractmethod - def __setitem__(self, key, value): - raise KeyError - - @abstractmethod - def __delitem__(self, key): - raise KeyError - - __marker = object() - - def pop(self, key, default=__marker): - '''D.pop(k[,d]) -> v, remove specified key and return the corresponding value. - If key is not found, d is returned if given, otherwise KeyError is raised. - ''' - try: - value = self[key] - except KeyError: - if default is self.__marker: - raise - return default - else: - del self[key] - return value - - def popitem(self): - '''D.popitem() -> (k, v), remove and return some (key, value) pair - as a 2-tuple; but raise KeyError if D is empty. - ''' - try: - key = next(iter(self)) - except StopIteration: - raise KeyError - value = self[key] - del self[key] - return key, value - - def clear(self): - 'D.clear() -> None. Remove all items from D.' - try: - while True: - self.popitem() - except KeyError: - pass - - def update(*args, **kwds): - ''' D.update([E, ]**F) -> None. Update D from mapping/iterable E and F. - If E present and has a .keys() method, does: for k in E: D[k] = E[k] - If E present and lacks .keys() method, does: for (k, v) in E: D[k] = v - In either case, this is followed by: for k, v in F.items(): D[k] = v - ''' - if not args: - raise TypeError("descriptor 'update' of 'MutableMapping' object " - "needs an argument") - self, *args = args - if len(args) > 1: - raise TypeError('update expected at most 1 arguments, got %d' % - len(args)) - if args: - other = args[0] - if isinstance(other, Mapping): - for key in other: - self[key] = other[key] - elif hasattr(other, "keys"): - for key in other.keys(): - self[key] = other[key] - else: - for key, value in other: - self[key] = value - for key, value in kwds.items(): - self[key] = value - - def setdefault(self, key, default=None): - 'D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D' - try: - return self[key] - except KeyError: - self[key] = default - return default - -MutableMapping.register(dict) - - -### SEQUENCES ### - - -class Sequence(Sized, Iterable, Container): - - """All the operations on a read-only sequence. - - Concrete subclasses must override __new__ or __init__, - __getitem__, and __len__. - """ - - __slots__ = () - - @abstractmethod - def __getitem__(self, index): - raise IndexError - - def __iter__(self): - i = 0 - try: - while True: - v = self[i] - yield v - i += 1 - except IndexError: - return - - def __contains__(self, value): - for v in self: - if v == value: - return True - return False - - def __reversed__(self): - for i in reversed(range(len(self))): - yield self[i] - - def index(self, value, start=0, stop=None): - '''S.index(value, [start, [stop]]) -> integer -- return first index of value. - Raises ValueError if the value is not present. - ''' - if start is not None and start < 0: - start = max(len(self) + start, 0) - if stop is not None and stop < 0: - stop += len(self) - - i = start - while stop is None or i < stop: - try: - if self[i] == value: - return i - except IndexError: - break - i += 1 - raise ValueError - - def count(self, value): - 'S.count(value) -> integer -- return number of occurrences of value' - return sum(1 for v in self if v == value) - -Sequence.register(tuple) -Sequence.register(str) -Sequence.register(range) -Sequence.register(memoryview) - - -class ByteString(Sequence): - - """This unifies bytes and bytearray. - - XXX Should add all their methods. 
- """ - - __slots__ = () - -ByteString.register(bytes) -ByteString.register(bytearray) - - -class MutableSequence(Sequence): - - __slots__ = () - - """All the operations on a read-write sequence. - - Concrete subclasses must provide __new__ or __init__, - __getitem__, __setitem__, __delitem__, __len__, and insert(). - - """ - - @abstractmethod - def __setitem__(self, index, value): - raise IndexError - - @abstractmethod - def __delitem__(self, index): - raise IndexError - - @abstractmethod - def insert(self, index, value): - 'S.insert(index, value) -- insert value before index' - raise IndexError - - def append(self, value): - 'S.append(value) -- append value to the end of the sequence' - self.insert(len(self), value) - - def clear(self): - 'S.clear() -> None -- remove all items from S' - try: - while True: - self.pop() - except IndexError: - pass - - def reverse(self): - 'S.reverse() -- reverse *IN PLACE*' - n = len(self) - for i in range(n//2): - self[i], self[n-i-1] = self[n-i-1], self[i] - - def extend(self, values): - 'S.extend(iterable) -- extend sequence by appending elements from the iterable' - for v in values: - self.append(v) - - def pop(self, index=-1): - '''S.pop([index]) -> item -- remove and return item at index (default last). - Raise IndexError if list is empty or index is out of range. - ''' - v = self[index] - del self[index] - return v - - def remove(self, value): - '''S.remove(value) -- remove first occurrence of value. - Raise ValueError if the value is not present. - ''' - del self[self.index(value)] - - def __iadd__(self, values): - self.extend(values) - return self - -MutableSequence.register(list) -MutableSequence.register(bytearray) # Multiply inheriting, see ByteString diff --git a/typing_extensions/test_data/python-3.5.0/abc.py b/typing_extensions/test_data/python-3.5.0/abc.py deleted file mode 100644 index 1cbf96a6..00000000 --- a/typing_extensions/test_data/python-3.5.0/abc.py +++ /dev/null @@ -1,248 +0,0 @@ -# Copyright 2007 Google, Inc. All Rights Reserved. -# Licensed to PSF under a Contributor Agreement. - -"""Abstract Base Classes (ABCs) according to PEP 3119.""" - -from _weakrefset import WeakSet - - -def abstractmethod(funcobj): - """A decorator indicating abstract methods. - - Requires that the metaclass is ABCMeta or derived from it. A - class that has a metaclass derived from ABCMeta cannot be - instantiated unless all of its abstract methods are overridden. - The abstract methods can be called using any of the normal - 'super' call mechanisms. - - Usage: - - class C(metaclass=ABCMeta): - @abstractmethod - def my_abstract_method(self, ...): - ... - """ - funcobj.__isabstractmethod__ = True - return funcobj - - -class abstractclassmethod(classmethod): - """ - A decorator indicating abstract classmethods. - - Similar to abstractmethod. - - Usage: - - class C(metaclass=ABCMeta): - @abstractclassmethod - def my_abstract_classmethod(cls, ...): - ... - - 'abstractclassmethod' is deprecated. Use 'classmethod' with - 'abstractmethod' instead. - """ - - __isabstractmethod__ = True - - def __init__(self, callable): - callable.__isabstractmethod__ = True - super().__init__(callable) - - -class abstractstaticmethod(staticmethod): - """ - A decorator indicating abstract staticmethods. - - Similar to abstractmethod. - - Usage: - - class C(metaclass=ABCMeta): - @abstractstaticmethod - def my_abstract_staticmethod(...): - ... - - 'abstractstaticmethod' is deprecated. Use 'staticmethod' with - 'abstractmethod' instead. 
- """ - - __isabstractmethod__ = True - - def __init__(self, callable): - callable.__isabstractmethod__ = True - super().__init__(callable) - - -class abstractproperty(property): - """ - A decorator indicating abstract properties. - - Requires that the metaclass is ABCMeta or derived from it. A - class that has a metaclass derived from ABCMeta cannot be - instantiated unless all of its abstract properties are overridden. - The abstract properties can be called using any of the normal - 'super' call mechanisms. - - Usage: - - class C(metaclass=ABCMeta): - @abstractproperty - def my_abstract_property(self): - ... - - This defines a read-only property; you can also define a read-write - abstract property using the 'long' form of property declaration: - - class C(metaclass=ABCMeta): - def getx(self): ... - def setx(self, value): ... - x = abstractproperty(getx, setx) - - 'abstractproperty' is deprecated. Use 'property' with 'abstractmethod' - instead. - """ - - __isabstractmethod__ = True - - -class ABCMeta(type): - - """Metaclass for defining Abstract Base Classes (ABCs). - - Use this metaclass to create an ABC. An ABC can be subclassed - directly, and then acts as a mix-in class. You can also register - unrelated concrete classes (even built-in classes) and unrelated - ABCs as 'virtual subclasses' -- these and their descendants will - be considered subclasses of the registering ABC by the built-in - issubclass() function, but the registering ABC won't show up in - their MRO (Method Resolution Order) nor will method - implementations defined by the registering ABC be callable (not - even via super()). - - """ - - # A global counter that is incremented each time a class is - # registered as a virtual subclass of anything. It forces the - # negative cache to be cleared before its next use. - # Note: this counter is private. Use `abc.get_cache_token()` for - # external code. - _abc_invalidation_counter = 0 - - def __new__(mcls, name, bases, namespace): - cls = super().__new__(mcls, name, bases, namespace) - # Compute set of abstract method names - abstracts = {name - for name, value in namespace.items() - if getattr(value, "__isabstractmethod__", False)} - for base in bases: - for name in getattr(base, "__abstractmethods__", set()): - value = getattr(cls, name, None) - if getattr(value, "__isabstractmethod__", False): - abstracts.add(name) - cls.__abstractmethods__ = frozenset(abstracts) - # Set up inheritance registry - cls._abc_registry = WeakSet() - cls._abc_cache = WeakSet() - cls._abc_negative_cache = WeakSet() - cls._abc_negative_cache_version = ABCMeta._abc_invalidation_counter - return cls - - def register(cls, subclass): - """Register a virtual subclass of an ABC. - - Returns the subclass, to allow usage as a class decorator. - """ - if not isinstance(subclass, type): - raise TypeError("Can only register classes") - if issubclass(subclass, cls): - return subclass # Already a subclass - # Subtle: test for cycles *after* testing for "already a subclass"; - # this means we allow X.register(X) and interpret it as a no-op. 
- if issubclass(cls, subclass): - # This would create a cycle, which is bad for the algorithm below - raise RuntimeError("Refusing to create an inheritance cycle") - cls._abc_registry.add(subclass) - ABCMeta._abc_invalidation_counter += 1 # Invalidate negative cache - return subclass - - def _dump_registry(cls, file=None): - """Debug helper to print the ABC registry.""" - print("Class: %s.%s" % (cls.__module__, cls.__qualname__), file=file) - print("Inv.counter: %s" % ABCMeta._abc_invalidation_counter, file=file) - for name in sorted(cls.__dict__.keys()): - if name.startswith("_abc_"): - value = getattr(cls, name) - print("%s: %r" % (name, value), file=file) - - def __instancecheck__(cls, instance): - """Override for isinstance(instance, cls).""" - # Inline the cache checking - subclass = instance.__class__ - if subclass in cls._abc_cache: - return True - subtype = type(instance) - if subtype is subclass: - if (cls._abc_negative_cache_version == - ABCMeta._abc_invalidation_counter and - subclass in cls._abc_negative_cache): - return False - # Fall back to the subclass check. - return cls.__subclasscheck__(subclass) - return any(cls.__subclasscheck__(c) for c in {subclass, subtype}) - - def __subclasscheck__(cls, subclass): - """Override for issubclass(subclass, cls).""" - # Check cache - if subclass in cls._abc_cache: - return True - # Check negative cache; may have to invalidate - if cls._abc_negative_cache_version < ABCMeta._abc_invalidation_counter: - # Invalidate the negative cache - cls._abc_negative_cache = WeakSet() - cls._abc_negative_cache_version = ABCMeta._abc_invalidation_counter - elif subclass in cls._abc_negative_cache: - return False - # Check the subclass hook - ok = cls.__subclasshook__(subclass) - if ok is not NotImplemented: - assert isinstance(ok, bool) - if ok: - cls._abc_cache.add(subclass) - else: - cls._abc_negative_cache.add(subclass) - return ok - # Check if it's a direct subclass - if cls in getattr(subclass, '__mro__', ()): - cls._abc_cache.add(subclass) - return True - # Check if it's a subclass of a registered class (recursive) - for rcls in cls._abc_registry: - if issubclass(subclass, rcls): - cls._abc_cache.add(subclass) - return True - # Check if it's a subclass of a subclass (recursive) - for scls in cls.__subclasses__(): - if issubclass(subclass, scls): - cls._abc_cache.add(subclass) - return True - # No dice; update negative cache - cls._abc_negative_cache.add(subclass) - return False - - -class ABC(metaclass=ABCMeta): - """Helper class that provides a standard way to create an ABC using - inheritance. - """ - pass - - -def get_cache_token(): - """Returns the current ABC cache token. - - The token is an opaque object (supporting equality testing) identifying the - current version of the ABC cache for virtual subclasses. The token changes - with every call to ``register()`` on any ABC. - """ - return ABCMeta._abc_invalidation_counter diff --git a/typing_extensions/test_data/python-3.5.0/collections/__init__.py b/typing_extensions/test_data/python-3.5.0/collections/__init__.py deleted file mode 100644 index 80dc4f6d..00000000 --- a/typing_extensions/test_data/python-3.5.0/collections/__init__.py +++ /dev/null @@ -1,1217 +0,0 @@ -__all__ = ['deque', 'defaultdict', 'namedtuple', 'UserDict', 'UserList', - 'UserString', 'Counter', 'OrderedDict', 'ChainMap'] - -# For backwards compatibility, continue to make the collections ABCs -# available through the collections module. 
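A concrete illustration of the register() API defined in abc.py above may help before the collections internals resume (Serializer and JSONThing are hypothetical names):

from abc import ABC

class Serializer(ABC):
    pass

class JSONThing:            # an unrelated concrete class
    def dumps(self):
        return '{}'

# register() makes JSONThing a *virtual* subclass: isinstance() and
# issubclass() succeed, yet Serializer never appears in JSONThing.__mro__,
# so none of its method implementations are inherited
Serializer.register(JSONThing)
assert issubclass(JSONThing, Serializer)
assert isinstance(JSONThing(), Serializer)
assert Serializer not in JSONThing.__mro__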
-from _collections_abc import * -import _collections_abc -__all__ += _collections_abc.__all__ - -from operator import itemgetter as _itemgetter, eq as _eq -from keyword import iskeyword as _iskeyword -import sys as _sys -import heapq as _heapq -from _weakref import proxy as _proxy -from itertools import repeat as _repeat, chain as _chain, starmap as _starmap -from reprlib import recursive_repr as _recursive_repr - -try: - from _collections import deque -except ImportError: - pass -else: - MutableSequence.register(deque) - -try: - from _collections import defaultdict -except ImportError: - pass - - -################################################################################ -### OrderedDict -################################################################################ - -class _OrderedDictKeysView(KeysView): - - def __reversed__(self): - yield from reversed(self._mapping) - -class _OrderedDictItemsView(ItemsView): - - def __reversed__(self): - for key in reversed(self._mapping): - yield (key, self._mapping[key]) - -class _OrderedDictValuesView(ValuesView): - - def __reversed__(self): - for key in reversed(self._mapping): - yield self._mapping[key] - -class _Link(object): - __slots__ = 'prev', 'next', 'key', '__weakref__' - -class OrderedDict(dict): - 'Dictionary that remembers insertion order' - # An inherited dict maps keys to values. - # The inherited dict provides __getitem__, __len__, __contains__, and get. - # The remaining methods are order-aware. - # Big-O running times for all methods are the same as regular dictionaries. - - # The internal self.__map dict maps keys to links in a doubly linked list. - # The circular doubly linked list starts and ends with a sentinel element. - # The sentinel element never gets deleted (this simplifies the algorithm). - # The sentinel is in self.__hardroot with a weakref proxy in self.__root. - # The prev links are weakref proxies (to prevent circular references). - # Individual links are kept alive by the hard reference in self.__map. - # Those hard references disappear when a key is deleted from an OrderedDict. - - def __init__(*args, **kwds): - '''Initialize an ordered dictionary. The signature is the same as - regular dictionaries, but keyword arguments are not recommended because - their insertion order is arbitrary. - - ''' - if not args: - raise TypeError("descriptor '__init__' of 'OrderedDict' object " - "needs an argument") - self, *args = args - if len(args) > 1: - raise TypeError('expected at most 1 arguments, got %d' % len(args)) - try: - self.__root - except AttributeError: - self.__hardroot = _Link() - self.__root = root = _proxy(self.__hardroot) - root.prev = root.next = root - self.__map = {} - self.__update(*args, **kwds) - - def __setitem__(self, key, value, - dict_setitem=dict.__setitem__, proxy=_proxy, Link=_Link): - 'od.__setitem__(i, y) <==> od[i]=y' - # Setting a new item creates a new link at the end of the linked list, - # and the inherited dictionary is updated with the new key/value pair. - if key not in self: - self.__map[key] = link = Link() - root = self.__root - last = root.prev - link.prev, link.next, link.key = last, root, key - last.next = link - root.prev = proxy(link) - dict_setitem(self, key, value) - - def __delitem__(self, key, dict_delitem=dict.__delitem__): - 'od.__delitem__(y) <==> del od[y]' - # Deleting an existing item uses self.__map to find the link which gets - # removed by updating the links in the predecessor and successor nodes. 
- dict_delitem(self, key) - link = self.__map.pop(key) - link_prev = link.prev - link_next = link.next - link_prev.next = link_next - link_next.prev = link_prev - link.prev = None - link.next = None - - def __iter__(self): - 'od.__iter__() <==> iter(od)' - # Traverse the linked list in order. - root = self.__root - curr = root.next - while curr is not root: - yield curr.key - curr = curr.next - - def __reversed__(self): - 'od.__reversed__() <==> reversed(od)' - # Traverse the linked list in reverse order. - root = self.__root - curr = root.prev - while curr is not root: - yield curr.key - curr = curr.prev - - def clear(self): - 'od.clear() -> None. Remove all items from od.' - root = self.__root - root.prev = root.next = root - self.__map.clear() - dict.clear(self) - - def popitem(self, last=True): - '''od.popitem() -> (k, v), return and remove a (key, value) pair. - Pairs are returned in LIFO order if last is true or FIFO order if false. - - ''' - if not self: - raise KeyError('dictionary is empty') - root = self.__root - if last: - link = root.prev - link_prev = link.prev - link_prev.next = root - root.prev = link_prev - else: - link = root.next - link_next = link.next - root.next = link_next - link_next.prev = root - key = link.key - del self.__map[key] - value = dict.pop(self, key) - return key, value - - def move_to_end(self, key, last=True): - '''Move an existing element to the end (or beginning if last==False). - - Raises KeyError if the element does not exist. - When last=True, acts like a fast version of self[key]=self.pop(key). - - ''' - link = self.__map[key] - link_prev = link.prev - link_next = link.next - link_prev.next = link_next - link_next.prev = link_prev - root = self.__root - if last: - last = root.prev - link.prev = last - link.next = root - last.next = root.prev = link - else: - first = root.next - link.prev = root - link.next = first - root.next = first.prev = link - - def __sizeof__(self): - sizeof = _sys.getsizeof - n = len(self) + 1 # number of links including root - size = sizeof(self.__dict__) # instance dictionary - size += sizeof(self.__map) * 2 # internal dict and inherited dict - size += sizeof(self.__hardroot) * n # link objects - size += sizeof(self.__root) * n # proxy objects - return size - - update = __update = MutableMapping.update - - def keys(self): - "D.keys() -> a set-like object providing a view on D's keys" - return _OrderedDictKeysView(self) - - def items(self): - "D.items() -> a set-like object providing a view on D's items" - return _OrderedDictItemsView(self) - - def values(self): - "D.values() -> an object providing a view on D's values" - return _OrderedDictValuesView(self) - - __ne__ = MutableMapping.__ne__ - - __marker = object() - - def pop(self, key, default=__marker): - '''od.pop(k[,d]) -> v, remove specified key and return the corresponding - value. If key is not found, d is returned if given, otherwise KeyError - is raised. 
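The linked-list representation pays off in the order-aware methods: move_to_end() and popitem() above just relink nodes in O(1). A quick sketch of the user-visible behavior:

from collections import OrderedDict

od = OrderedDict.fromkeys('abcde')
od.move_to_end('b')                    # relink at the tail, O(1)
assert ''.join(od) == 'acdeb'
od.move_to_end('b', last=False)        # relink at the head
assert ''.join(od) == 'bacde'

assert od.popitem() == ('e', None)            # LIFO by default
assert od.popitem(last=False) == ('b', None)  # FIFO with last=False

# __eq__ below is order-sensitive only against another OrderedDict
assert OrderedDict([('a', 1), ('b', 2)]) != OrderedDict([('b', 2), ('a', 1)])
assert OrderedDict([('a', 1), ('b', 2)]) == {'b': 2, 'a': 1}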
- - ''' - if key in self: - result = self[key] - del self[key] - return result - if default is self.__marker: - raise KeyError(key) - return default - - def setdefault(self, key, default=None): - 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' - if key in self: - return self[key] - self[key] = default - return default - - @_recursive_repr() - def __repr__(self): - 'od.__repr__() <==> repr(od)' - if not self: - return '%s()' % (self.__class__.__name__,) - return '%s(%r)' % (self.__class__.__name__, list(self.items())) - - def __reduce__(self): - 'Return state information for pickling' - inst_dict = vars(self).copy() - for k in vars(OrderedDict()): - inst_dict.pop(k, None) - return self.__class__, (), inst_dict or None, None, iter(self.items()) - - def copy(self): - 'od.copy() -> a shallow copy of od' - return self.__class__(self) - - @classmethod - def fromkeys(cls, iterable, value=None): - '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S. - If not specified, the value defaults to None. - - ''' - self = cls() - for key in iterable: - self[key] = value - return self - - def __eq__(self, other): - '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive - while comparison to a regular mapping is order-insensitive. - - ''' - if isinstance(other, OrderedDict): - return dict.__eq__(self, other) and all(map(_eq, self, other)) - return dict.__eq__(self, other) - - -try: - from _collections import OrderedDict -except ImportError: - # Leave the pure Python version in place. - pass - - -################################################################################ -### namedtuple -################################################################################ - -_class_template = """\ -from builtins import property as _property, tuple as _tuple -from operator import itemgetter as _itemgetter -from collections import OrderedDict - -class {typename}(tuple): - '{typename}({arg_list})' - - __slots__ = () - - _fields = {field_names!r} - - def __new__(_cls, {arg_list}): - 'Create new instance of {typename}({arg_list})' - return _tuple.__new__(_cls, ({arg_list})) - - @classmethod - def _make(cls, iterable, new=tuple.__new__, len=len): - 'Make a new {typename} object from a sequence or iterable' - result = new(cls, iterable) - if len(result) != {num_fields:d}: - raise TypeError('Expected {num_fields:d} arguments, got %d' % len(result)) - return result - - def _replace(_self, **kwds): - 'Return a new {typename} object replacing specified fields with new values' - result = _self._make(map(kwds.pop, {field_names!r}, _self)) - if kwds: - raise ValueError('Got unexpected field names: %r' % list(kwds)) - return result - - def __repr__(self): - 'Return a nicely formatted representation string' - return self.__class__.__name__ + '({repr_fmt})' % self - - @property - def __dict__(self): - 'A new OrderedDict mapping field names to their values' - return OrderedDict(zip(self._fields, self)) - - def _asdict(self): - 'Return a new OrderedDict which maps field names to their values.' - return self.__dict__ - - def __getnewargs__(self): - 'Return self as a plain tuple. Used by copy and pickle.' 
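The __eq__ above makes comparison between two OrderedDicts order-sensitive, while comparison against a plain mapping stays order-insensitive, e.g.:

    from collections import OrderedDict

    x = OrderedDict([('a', 1), ('b', 2)])
    y = OrderedDict([('b', 2), ('a', 1)])
    print(x == y)          # False: same items, different order
    print(x == dict(y))    # True: plain-dict comparison ignores order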
-        return tuple(self)
-
-    def __getstate__(self):
-        'Exclude the OrderedDict from pickling'
-        return None
-
-{field_defs}
-"""
-
-_repr_template = '{name}=%r'
-
-_field_template = '''\
-    {name} = _property(_itemgetter({index:d}), doc='Alias for field number {index:d}')
-'''
-
-def namedtuple(typename, field_names, verbose=False, rename=False):
-    """Returns a new subclass of tuple with named fields.
-
-    >>> Point = namedtuple('Point', ['x', 'y'])
-    >>> Point.__doc__                   # docstring for the new class
-    'Point(x, y)'
-    >>> p = Point(11, y=22)             # instantiate with positional args or keywords
-    >>> p[0] + p[1]                     # indexable like a plain tuple
-    33
-    >>> x, y = p                        # unpack like a regular tuple
-    >>> x, y
-    (11, 22)
-    >>> p.x + p.y                       # fields also accessible by name
-    33
-    >>> d = p._asdict()                 # convert to a dictionary
-    >>> d['x']
-    11
-    >>> Point(**d)                      # convert from a dictionary
-    Point(x=11, y=22)
-    >>> p._replace(x=100)               # _replace() is like str.replace() but targets named fields
-    Point(x=100, y=22)
-
-    """
-
-    # Validate the field names.  At the user's option, either generate an error
-    # message or automatically replace the field name with a valid name.
-    if isinstance(field_names, str):
-        field_names = field_names.replace(',', ' ').split()
-    field_names = list(map(str, field_names))
-    typename = str(typename)
-    if rename:
-        seen = set()
-        for index, name in enumerate(field_names):
-            if (not name.isidentifier()
-                or _iskeyword(name)
-                or name.startswith('_')
-                or name in seen):
-                field_names[index] = '_%d' % index
-            seen.add(name)
-    for name in [typename] + field_names:
-        if type(name) != str:
-            raise TypeError('Type names and field names must be strings')
-        if not name.isidentifier():
-            raise ValueError('Type names and field names must be valid '
-                             'identifiers: %r' % name)
-        if _iskeyword(name):
-            raise ValueError('Type names and field names cannot be a '
-                             'keyword: %r' % name)
-    seen = set()
-    for name in field_names:
-        if name.startswith('_') and not rename:
-            raise ValueError('Field names cannot start with an underscore: '
-                             '%r' % name)
-        if name in seen:
-            raise ValueError('Encountered duplicate field name: %r' % name)
-        seen.add(name)
-
-    # Fill-in the class template
-    class_definition = _class_template.format(
-        typename = typename,
-        field_names = tuple(field_names),
-        num_fields = len(field_names),
-        arg_list = repr(tuple(field_names)).replace("'", "")[1:-1],
-        repr_fmt = ', '.join(_repr_template.format(name=name)
-                             for name in field_names),
-        field_defs = '\n'.join(_field_template.format(index=index, name=name)
-                               for index, name in enumerate(field_names))
-    )
-
-    # Execute the template string in a temporary namespace and support
-    # tracing utilities by setting a value for frame.f_globals['__name__']
-    namespace = dict(__name__='namedtuple_%s' % typename)
-    exec(class_definition, namespace)
-    result = namespace[typename]
-    result._source = class_definition
-    if verbose:
-        print(result._source)
-
-    # For pickling to work, the __module__ variable needs to be set to the frame
-    # where the named tuple is created.  Bypass this step in environments where
-    # sys._getframe is not defined (Jython for example) or sys._getframe is not
-    # defined for arguments greater than 0 (IronPython).
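The validation loop above either raises on an invalid field name or, when rename=True, swaps it for a positional placeholder; for example:

    from collections import namedtuple

    # 'def' is a keyword and the second 'abc' is a duplicate, so both are renamed
    Row = namedtuple('Row', ['abc', 'def', 'abc'], rename=True)
    print(Row._fields)     # ('abc', '_1', '_2')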
- try: - result.__module__ = _sys._getframe(1).f_globals.get('__name__', '__main__') - except (AttributeError, ValueError): - pass - - return result - - -######################################################################## -### Counter -######################################################################## - -def _count_elements(mapping, iterable): - 'Tally elements from the iterable.' - mapping_get = mapping.get - for elem in iterable: - mapping[elem] = mapping_get(elem, 0) + 1 - -try: # Load C helper function if available - from _collections import _count_elements -except ImportError: - pass - -class Counter(dict): - '''Dict subclass for counting hashable items. Sometimes called a bag - or multiset. Elements are stored as dictionary keys and their counts - are stored as dictionary values. - - >>> c = Counter('abcdeabcdabcaba') # count elements from a string - - >>> c.most_common(3) # three most common elements - [('a', 5), ('b', 4), ('c', 3)] - >>> sorted(c) # list all unique elements - ['a', 'b', 'c', 'd', 'e'] - >>> ''.join(sorted(c.elements())) # list elements with repetitions - 'aaaaabbbbcccdde' - >>> sum(c.values()) # total of all counts - 15 - - >>> c['a'] # count of letter 'a' - 5 - >>> for elem in 'shazam': # update counts from an iterable - ... c[elem] += 1 # by adding 1 to each element's count - >>> c['a'] # now there are seven 'a' - 7 - >>> del c['b'] # remove all 'b' - >>> c['b'] # now there are zero 'b' - 0 - - >>> d = Counter('simsalabim') # make another counter - >>> c.update(d) # add in the second counter - >>> c['a'] # now there are nine 'a' - 9 - - >>> c.clear() # empty the counter - >>> c - Counter() - - Note: If a count is set to zero or reduced to zero, it will remain - in the counter until the entry is deleted or the counter is cleared: - - >>> c = Counter('aaabbc') - >>> c['b'] -= 2 # reduce the count of 'b' by two - >>> c.most_common() # 'b' is still in, but its count is zero - [('a', 3), ('c', 1), ('b', 0)] - - ''' - # References: - # http://en.wikipedia.org/wiki/Multiset - # http://www.gnu.org/software/smalltalk/manual-base/html_node/Bag.html - # http://www.demo2s.com/Tutorial/Cpp/0380__set-multiset/Catalog0380__set-multiset.htm - # http://code.activestate.com/recipes/259174/ - # Knuth, TAOCP Vol. II section 4.6.3 - - def __init__(*args, **kwds): - '''Create a new, empty Counter object. And if given, count elements - from an input iterable. Or, initialize the count from another mapping - of elements to their counts. - - >>> c = Counter() # a new, empty counter - >>> c = Counter('gallahad') # a new counter from an iterable - >>> c = Counter({'a': 4, 'b': 2}) # a new counter from a mapping - >>> c = Counter(a=4, b=2) # a new counter from keyword args - - ''' - if not args: - raise TypeError("descriptor '__init__' of 'Counter' object " - "needs an argument") - self, *args = args - if len(args) > 1: - raise TypeError('expected at most 1 arguments, got %d' % len(args)) - super(Counter, self).__init__() - self.update(*args, **kwds) - - def __missing__(self, key): - 'The count of elements not in the Counter is zero.' - # Needed so that self[missing_item] does not raise KeyError - return 0 - - def most_common(self, n=None): - '''List the n most common elements and their counts from the most - common to the least. If n is None, then list all element counts. 
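A quick usage sketch for the Counter behavior documented above (the sample sentence is invented for illustration):

    from collections import Counter

    words = 'the quick brown fox jumps over the lazy dog the end'.split()
    counts = Counter(words)
    print(counts['the'])             # 3
    print(counts.most_common(1))     # [('the', 3)]
    print(counts['missing'])         # 0: __missing__ returns zero instead of raising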
-
-        >>> Counter('abcdeabcdabcaba').most_common(3)
-        [('a', 5), ('b', 4), ('c', 3)]
-
-        '''
-        # Emulate Bag.sortedByCount from Smalltalk
-        if n is None:
-            return sorted(self.items(), key=_itemgetter(1), reverse=True)
-        return _heapq.nlargest(n, self.items(), key=_itemgetter(1))
-
-    def elements(self):
-        '''Iterator over elements repeating each as many times as its count.
-
-        >>> c = Counter('ABCABC')
-        >>> sorted(c.elements())
-        ['A', 'A', 'B', 'B', 'C', 'C']
-
-        # Knuth's example for prime factors of 1836: 2**2 * 3**3 * 17**1
-        >>> prime_factors = Counter({2: 2, 3: 3, 17: 1})
-        >>> product = 1
-        >>> for factor in prime_factors.elements():  # loop over factors
-        ...     product *= factor                    # and multiply them
-        >>> product
-        1836
-
-        Note, if an element's count has been set to zero or is a negative
-        number, elements() will ignore it.
-
-        '''
-        # Emulate Bag.do from Smalltalk and Multiset.begin from C++.
-        return _chain.from_iterable(_starmap(_repeat, self.items()))
-
-    # Override dict methods where necessary
-
-    @classmethod
-    def fromkeys(cls, iterable, v=None):
-        # There is no equivalent method for counters because setting v=1
-        # means that no element can have a count greater than one.
-        raise NotImplementedError(
-            'Counter.fromkeys() is undefined. Use Counter(iterable) instead.')
-
-    def update(*args, **kwds):
-        '''Like dict.update() but add counts instead of replacing them.
-
-        Source can be an iterable, a dictionary, or another Counter instance.
-
-        >>> c = Counter('which')
-        >>> c.update('witch')    # add elements from another iterable
-        >>> d = Counter('watch')
-        >>> c.update(d)          # add elements from another counter
-        >>> c['h']               # four 'h' in which, witch, and watch
-        4
-
-        '''
-        # The regular dict.update() operation makes no sense here because the
-        # replace behavior results in some of the original untouched counts
-        # being mixed-in with all of the other counts for a mishmash that
-        # doesn't have a straightforward interpretation in most counting
-        # contexts. Instead, we implement straight-addition. Both the inputs
-        # and outputs are allowed to contain zero and negative counts.
-
-        if not args:
-            raise TypeError("descriptor 'update' of 'Counter' object "
-                            "needs an argument")
-        self, *args = args
-        if len(args) > 1:
-            raise TypeError('expected at most 1 arguments, got %d' % len(args))
-        iterable = args[0] if args else None
-        if iterable is not None:
-            if isinstance(iterable, Mapping):
-                if self:
-                    self_get = self.get
-                    for elem, count in iterable.items():
-                        self[elem] = count + self_get(elem, 0)
-                else:
-                    super(Counter, self).update(iterable) # fast path when counter is empty
-            else:
-                _count_elements(self, iterable)
-        if kwds:
-            self.update(kwds)
-
-    def subtract(*args, **kwds):
-        '''Like dict.update() but subtracts counts instead of replacing them.
-        Counts can be reduced below zero. Both the inputs and outputs are
-        allowed to contain zero and negative counts.
-
-        Source can be an iterable, a dictionary, or another Counter instance.
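The contrast with dict.update() drawn in the comment above, in two lines of each:

    from collections import Counter

    c = Counter(a=1)
    c.update(a=2)
    print(c['a'])    # 3: Counter.update() adds counts

    d = dict(a=1)
    d.update(a=2)
    print(d['a'])    # 2: dict.update() replaces values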
- - >>> c = Counter('which') - >>> c.subtract('witch') # subtract elements from another iterable - >>> c.subtract(Counter('watch')) # subtract elements from another counter - >>> c['h'] # 2 in which, minus 1 in witch, minus 1 in watch - 0 - >>> c['w'] # 1 in which, minus 1 in witch, minus 1 in watch - -1 - - ''' - if not args: - raise TypeError("descriptor 'subtract' of 'Counter' object " - "needs an argument") - self, *args = args - if len(args) > 1: - raise TypeError('expected at most 1 arguments, got %d' % len(args)) - iterable = args[0] if args else None - if iterable is not None: - self_get = self.get - if isinstance(iterable, Mapping): - for elem, count in iterable.items(): - self[elem] = self_get(elem, 0) - count - else: - for elem in iterable: - self[elem] = self_get(elem, 0) - 1 - if kwds: - self.subtract(kwds) - - def copy(self): - 'Return a shallow copy.' - return self.__class__(self) - - def __reduce__(self): - return self.__class__, (dict(self),) - - def __delitem__(self, elem): - 'Like dict.__delitem__() but does not raise KeyError for missing values.' - if elem in self: - super().__delitem__(elem) - - def __repr__(self): - if not self: - return '%s()' % self.__class__.__name__ - try: - items = ', '.join(map('%r: %r'.__mod__, self.most_common())) - return '%s({%s})' % (self.__class__.__name__, items) - except TypeError: - # handle case where values are not orderable - return '{0}({1!r})'.format(self.__class__.__name__, dict(self)) - - # Multiset-style mathematical operations discussed in: - # Knuth TAOCP Volume II section 4.6.3 exercise 19 - # and at http://en.wikipedia.org/wiki/Multiset - # - # Outputs guaranteed to only include positive counts. - # - # To strip negative and zero counts, add-in an empty counter: - # c += Counter() - - def __add__(self, other): - '''Add counts from two counters. - - >>> Counter('abbb') + Counter('bcc') - Counter({'b': 4, 'c': 2, 'a': 1}) - - ''' - if not isinstance(other, Counter): - return NotImplemented - result = Counter() - for elem, count in self.items(): - newcount = count + other[elem] - if newcount > 0: - result[elem] = newcount - for elem, count in other.items(): - if elem not in self and count > 0: - result[elem] = count - return result - - def __sub__(self, other): - ''' Subtract count, but keep only results with positive counts. - - >>> Counter('abbbc') - Counter('bccd') - Counter({'b': 2, 'a': 1}) - - ''' - if not isinstance(other, Counter): - return NotImplemented - result = Counter() - for elem, count in self.items(): - newcount = count - other[elem] - if newcount > 0: - result[elem] = newcount - for elem, count in other.items(): - if elem not in self and count < 0: - result[elem] = 0 - count - return result - - def __or__(self, other): - '''Union is the maximum of value in either of the input counters. - - >>> Counter('abbb') | Counter('bcc') - Counter({'b': 3, 'c': 2, 'a': 1}) - - ''' - if not isinstance(other, Counter): - return NotImplemented - result = Counter() - for elem, count in self.items(): - other_count = other[elem] - newcount = other_count if count < other_count else count - if newcount > 0: - result[elem] = newcount - for elem, count in other.items(): - if elem not in self and count > 0: - result[elem] = count - return result - - def __and__(self, other): - ''' Intersection is the minimum of corresponding counts. 
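As the operators above show, the binary multiset operations keep only positive counts in their output; a small check:

    from collections import Counter

    print(Counter(a=3, b=1) + Counter(a=1, b=-2))   # Counter({'a': 4}); 'b' falls to -1 and is dropped
    print(Counter(a=1) - Counter(a=3))              # Counter(); negative results are dropped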
-
-        >>> Counter('abbb') & Counter('bcc')
-        Counter({'b': 1})
-
-        '''
-        if not isinstance(other, Counter):
-            return NotImplemented
-        result = Counter()
-        for elem, count in self.items():
-            other_count = other[elem]
-            newcount = count if count < other_count else other_count
-            if newcount > 0:
-                result[elem] = newcount
-        return result
-
-    def __pos__(self):
-        'Adds an empty counter, effectively stripping negative and zero counts'
-        result = Counter()
-        for elem, count in self.items():
-            if count > 0:
-                result[elem] = count
-        return result
-
-    def __neg__(self):
-        '''Subtracts from an empty counter. Strips positive and zero counts,
-        and flips the sign on negative counts.
-
-        '''
-        result = Counter()
-        for elem, count in self.items():
-            if count < 0:
-                result[elem] = 0 - count
-        return result
-
-    def _keep_positive(self):
-        '''Internal method to strip elements with a negative or zero count'''
-        nonpositive = [elem for elem, count in self.items() if not count > 0]
-        for elem in nonpositive:
-            del self[elem]
-        return self
-
-    def __iadd__(self, other):
-        '''Inplace add from another counter, keeping only positive counts.
-
-        >>> c = Counter('abbb')
-        >>> c += Counter('bcc')
-        >>> c
-        Counter({'b': 4, 'c': 2, 'a': 1})
-
-        '''
-        for elem, count in other.items():
-            self[elem] += count
-        return self._keep_positive()
-
-    def __isub__(self, other):
-        '''Inplace subtract counter, but keep only results with positive counts.
-
-        >>> c = Counter('abbbc')
-        >>> c -= Counter('bccd')
-        >>> c
-        Counter({'b': 2, 'a': 1})
-
-        '''
-        for elem, count in other.items():
-            self[elem] -= count
-        return self._keep_positive()
-
-    def __ior__(self, other):
-        '''Inplace union is the maximum of value from either counter.
-
-        >>> c = Counter('abbb')
-        >>> c |= Counter('bcc')
-        >>> c
-        Counter({'b': 3, 'c': 2, 'a': 1})
-
-        '''
-        for elem, other_count in other.items():
-            count = self[elem]
-            if other_count > count:
-                self[elem] = other_count
-        return self._keep_positive()
-
-    def __iand__(self, other):
-        '''Inplace intersection is the minimum of corresponding counts.
-
-        >>> c = Counter('abbb')
-        >>> c &= Counter('bcc')
-        >>> c
-        Counter({'b': 1})
-
-        '''
-        for elem, count in self.items():
-            other_count = other[elem]
-            if other_count < count:
-                self[elem] = other_count
-        return self._keep_positive()
-
-
-########################################################################
-### ChainMap (helper for configparser and string.Template)
-########################################################################
-
-class ChainMap(MutableMapping):
-    ''' A ChainMap groups multiple dicts (or other mappings) together
-    to create a single, updateable view.
-
-    The underlying mappings are stored in a list. That list is public and can
-    be accessed or updated using the *maps* attribute. There is no other state.
-
-    Lookups search the underlying mappings successively until a key is found.
-    In contrast, writes, updates, and deletions only operate on the first
-    mapping.
-
-    '''
-
-    def __init__(self, *maps):
-        '''Initialize a ChainMap by setting *maps* to the given mappings.
-        If no mappings are provided, a single empty dictionary is used.
-
-        '''
-        self.maps = list(maps) or [{}]  # always at least one map
-
-    def __missing__(self, key):
-        raise KeyError(key)
-
-    def __getitem__(self, key):
-        for mapping in self.maps:
-            try:
-                return mapping[key]  # can't use 'key in mapping' with defaultdict
-            except KeyError:
-                pass
-        return self.__missing__(key)  # support subclasses that define __missing__
-
-    def get(self, key, default=None):
-        return self[key] if key in self else default
-
-    def __len__(self):
-        return len(set().union(*self.maps))  # reuses stored hash values if possible
-
-    def __iter__(self):
-        return iter(set().union(*self.maps))
-
-    def __contains__(self, key):
-        return any(key in m for m in self.maps)
-
-    def __bool__(self):
-        return any(self.maps)
-
-    @_recursive_repr()
-    def __repr__(self):
-        return '{0.__class__.__name__}({1})'.format(
-            self, ', '.join(map(repr, self.maps)))
-
-    @classmethod
-    def fromkeys(cls, iterable, *args):
-        'Create a ChainMap with a single dict created from the iterable.'
-        return cls(dict.fromkeys(iterable, *args))
-
-    def copy(self):
-        'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]'
-        return self.__class__(self.maps[0].copy(), *self.maps[1:])
-
-    __copy__ = copy
-
-    def new_child(self, m=None):  # like Django's Context.push()
-        '''
-        New ChainMap with a new map followed by all previous maps. If no
-        map is provided, an empty dict is used.
-        '''
-        if m is None:
-            m = {}
-        return self.__class__(m, *self.maps)
-
-    @property
-    def parents(self):  # like Django's Context.pop()
-        'New ChainMap from maps[1:].'
-        return self.__class__(*self.maps[1:])
-
-    def __setitem__(self, key, value):
-        self.maps[0][key] = value
-
-    def __delitem__(self, key):
-        try:
-            del self.maps[0][key]
-        except KeyError:
-            raise KeyError('Key not found in the first mapping: {!r}'.format(key))
-
-    def popitem(self):
-        'Remove and return an item pair from maps[0]. Raise KeyError if maps[0] is empty.'
-        try:
-            return self.maps[0].popitem()
-        except KeyError:
-            raise KeyError('No keys found in the first mapping.')
-
-    def pop(self, key, *args):
-        'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].'
-        try:
-            return self.maps[0].pop(key, *args)
-        except KeyError:
-            raise KeyError('Key not found in the first mapping: {!r}'.format(key))
-
-    def clear(self):
-        'Clear maps[0], leaving maps[1:] intact.'
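A compact sketch of the lookup-versus-write asymmetry described in the ChainMap docstring above (the mapping names are invented):

    from collections import ChainMap

    defaults = {'color': 'red', 'user': 'guest'}
    overrides = {'user': 'admin'}
    cm = ChainMap(overrides, defaults)
    print(cm['user'], cm['color'])    # admin red: lookups search maps left to right
    cm['lang'] = 'en'                 # writes always land in the first mapping
    print(overrides['lang'])          # en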
- self.maps[0].clear() - - -################################################################################ -### UserDict -################################################################################ - -class UserDict(MutableMapping): - - # Start by filling-out the abstract methods - def __init__(self, dict=None, **kwargs): - self.data = {} - if dict is not None: - self.update(dict) - if len(kwargs): - self.update(kwargs) - def __len__(self): return len(self.data) - def __getitem__(self, key): - if key in self.data: - return self.data[key] - if hasattr(self.__class__, "__missing__"): - return self.__class__.__missing__(self, key) - raise KeyError(key) - def __setitem__(self, key, item): self.data[key] = item - def __delitem__(self, key): del self.data[key] - def __iter__(self): - return iter(self.data) - - # Modify __contains__ to work correctly when __missing__ is present - def __contains__(self, key): - return key in self.data - - # Now, add the methods in dicts but not in MutableMapping - def __repr__(self): return repr(self.data) - def copy(self): - if self.__class__ is UserDict: - return UserDict(self.data.copy()) - import copy - data = self.data - try: - self.data = {} - c = copy.copy(self) - finally: - self.data = data - c.update(self) - return c - @classmethod - def fromkeys(cls, iterable, value=None): - d = cls() - for key in iterable: - d[key] = value - return d - - - -################################################################################ -### UserList -################################################################################ - -class UserList(MutableSequence): - """A more or less complete user-defined wrapper around list objects.""" - def __init__(self, initlist=None): - self.data = [] - if initlist is not None: - # XXX should this accept an arbitrary sequence? 
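UserDict above exists so that subclasses can hook the mapping protocol reliably (plain dict does not route its own internals through __setitem__). A minimal illustrative subclass; the LowerDict name is invented here:

    from collections import UserDict

    class LowerDict(UserDict):
        'Normalizes all keys to lowercase.'
        def __setitem__(self, key, item):
            self.data[key.lower()] = item
        def __getitem__(self, key):
            return self.data[key.lower()]

    d = LowerDict({'Name': 'Ada'})    # __init__ routes through __setitem__
    print(d['NAME'])                  # Ada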
- if type(initlist) == type(self.data): - self.data[:] = initlist - elif isinstance(initlist, UserList): - self.data[:] = initlist.data[:] - else: - self.data = list(initlist) - def __repr__(self): return repr(self.data) - def __lt__(self, other): return self.data < self.__cast(other) - def __le__(self, other): return self.data <= self.__cast(other) - def __eq__(self, other): return self.data == self.__cast(other) - def __gt__(self, other): return self.data > self.__cast(other) - def __ge__(self, other): return self.data >= self.__cast(other) - def __cast(self, other): - return other.data if isinstance(other, UserList) else other - def __contains__(self, item): return item in self.data - def __len__(self): return len(self.data) - def __getitem__(self, i): return self.data[i] - def __setitem__(self, i, item): self.data[i] = item - def __delitem__(self, i): del self.data[i] - def __add__(self, other): - if isinstance(other, UserList): - return self.__class__(self.data + other.data) - elif isinstance(other, type(self.data)): - return self.__class__(self.data + other) - return self.__class__(self.data + list(other)) - def __radd__(self, other): - if isinstance(other, UserList): - return self.__class__(other.data + self.data) - elif isinstance(other, type(self.data)): - return self.__class__(other + self.data) - return self.__class__(list(other) + self.data) - def __iadd__(self, other): - if isinstance(other, UserList): - self.data += other.data - elif isinstance(other, type(self.data)): - self.data += other - else: - self.data += list(other) - return self - def __mul__(self, n): - return self.__class__(self.data*n) - __rmul__ = __mul__ - def __imul__(self, n): - self.data *= n - return self - def append(self, item): self.data.append(item) - def insert(self, i, item): self.data.insert(i, item) - def pop(self, i=-1): return self.data.pop(i) - def remove(self, item): self.data.remove(item) - def clear(self): self.data.clear() - def copy(self): return self.__class__(self) - def count(self, item): return self.data.count(item) - def index(self, item, *args): return self.data.index(item, *args) - def reverse(self): self.data.reverse() - def sort(self, *args, **kwds): self.data.sort(*args, **kwds) - def extend(self, other): - if isinstance(other, UserList): - self.data.extend(other.data) - else: - self.data.extend(other) - - - -################################################################################ -### UserString -################################################################################ - -class UserString(Sequence): - def __init__(self, seq): - if isinstance(seq, str): - self.data = seq - elif isinstance(seq, UserString): - self.data = seq.data[:] - else: - self.data = str(seq) - def __str__(self): return str(self.data) - def __repr__(self): return repr(self.data) - def __int__(self): return int(self.data) - def __float__(self): return float(self.data) - def __complex__(self): return complex(self.data) - def __hash__(self): return hash(self.data) - def __getnewargs__(self): - return (self.data[:],) - - def __eq__(self, string): - if isinstance(string, UserString): - return self.data == string.data - return self.data == string - def __lt__(self, string): - if isinstance(string, UserString): - return self.data < string.data - return self.data < string - def __le__(self, string): - if isinstance(string, UserString): - return self.data <= string.data - return self.data <= string - def __gt__(self, string): - if isinstance(string, UserString): - return self.data > string.data - return 
self.data > string
-    def __ge__(self, string):
-        if isinstance(string, UserString):
-            return self.data >= string.data
-        return self.data >= string
-
-    def __contains__(self, char):
-        if isinstance(char, UserString):
-            char = char.data
-        return char in self.data
-
-    def __len__(self): return len(self.data)
-    def __getitem__(self, index): return self.__class__(self.data[index])
-    def __add__(self, other):
-        if isinstance(other, UserString):
-            return self.__class__(self.data + other.data)
-        elif isinstance(other, str):
-            return self.__class__(self.data + other)
-        return self.__class__(self.data + str(other))
-    def __radd__(self, other):
-        if isinstance(other, str):
-            return self.__class__(other + self.data)
-        return self.__class__(str(other) + self.data)
-    def __mul__(self, n):
-        return self.__class__(self.data*n)
-    __rmul__ = __mul__
-    def __mod__(self, args):
-        return self.__class__(self.data % args)
-    def __rmod__(self, format):
-        return self.__class__(format % self.data)
-
-    # the following methods are defined in alphabetical order:
-    def capitalize(self): return self.__class__(self.data.capitalize())
-    def casefold(self):
-        return self.__class__(self.data.casefold())
-    def center(self, width, *args):
-        return self.__class__(self.data.center(width, *args))
-    def count(self, sub, start=0, end=_sys.maxsize):
-        if isinstance(sub, UserString):
-            sub = sub.data
-        return self.data.count(sub, start, end)
-    def encode(self, encoding=None, errors=None): # XXX improve this?
-        if encoding:
-            if errors:
-                return self.__class__(self.data.encode(encoding, errors))
-            return self.__class__(self.data.encode(encoding))
-        return self.__class__(self.data.encode())
-    def endswith(self, suffix, start=0, end=_sys.maxsize):
-        return self.data.endswith(suffix, start, end)
-    def expandtabs(self, tabsize=8):
-        return self.__class__(self.data.expandtabs(tabsize))
-    def find(self, sub, start=0, end=_sys.maxsize):
-        if isinstance(sub, UserString):
-            sub = sub.data
-        return self.data.find(sub, start, end)
-    def format(self, *args, **kwds):
-        return self.data.format(*args, **kwds)
-    def format_map(self, mapping):
-        return self.data.format_map(mapping)
-    def index(self, sub, start=0, end=_sys.maxsize):
-        return self.data.index(sub, start, end)
-    def isalpha(self): return self.data.isalpha()
-    def isalnum(self): return self.data.isalnum()
-    def isdecimal(self): return self.data.isdecimal()
-    def isdigit(self): return self.data.isdigit()
-    def isidentifier(self): return self.data.isidentifier()
-    def islower(self): return self.data.islower()
-    def isnumeric(self): return self.data.isnumeric()
-    def isprintable(self): return self.data.isprintable()
-    def isspace(self): return self.data.isspace()
-    def istitle(self): return self.data.istitle()
-    def isupper(self): return self.data.isupper()
-    def join(self, seq): return self.data.join(seq)
-    def ljust(self, width, *args):
-        return self.__class__(self.data.ljust(width, *args))
-    def lower(self): return self.__class__(self.data.lower())
-    def lstrip(self, chars=None): return self.__class__(self.data.lstrip(chars))
-    maketrans = str.maketrans
-    def partition(self, sep):
-        return self.data.partition(sep)
-    def replace(self, old, new, maxsplit=-1):
-        if isinstance(old, UserString):
-            old = old.data
-        if isinstance(new, UserString):
-            new = new.data
-        return self.__class__(self.data.replace(old, new, maxsplit))
-    def rfind(self, sub, start=0, end=_sys.maxsize):
-        if isinstance(sub, UserString):
-            sub = sub.data
-        return self.data.rfind(sub, start, end)
-    def rindex(self, sub, start=0,
end=_sys.maxsize): - return self.data.rindex(sub, start, end) - def rjust(self, width, *args): - return self.__class__(self.data.rjust(width, *args)) - def rpartition(self, sep): - return self.data.rpartition(sep) - def rstrip(self, chars=None): - return self.__class__(self.data.rstrip(chars)) - def split(self, sep=None, maxsplit=-1): - return self.data.split(sep, maxsplit) - def rsplit(self, sep=None, maxsplit=-1): - return self.data.rsplit(sep, maxsplit) - def splitlines(self, keepends=False): return self.data.splitlines(keepends) - def startswith(self, prefix, start=0, end=_sys.maxsize): - return self.data.startswith(prefix, start, end) - def strip(self, chars=None): return self.__class__(self.data.strip(chars)) - def swapcase(self): return self.__class__(self.data.swapcase()) - def title(self): return self.__class__(self.data.title()) - def translate(self, *args): - return self.__class__(self.data.translate(*args)) - def upper(self): return self.__class__(self.data.upper()) - def zfill(self, width): return self.__class__(self.data.zfill(width)) diff --git a/typing_extensions/test_data/python-3.5.0/collections/__main__.py b/typing_extensions/test_data/python-3.5.0/collections/__main__.py deleted file mode 100644 index 763e38e0..00000000 --- a/typing_extensions/test_data/python-3.5.0/collections/__main__.py +++ /dev/null @@ -1,38 +0,0 @@ -################################################################################ -### Simple tests -################################################################################ - -# verify that instances can be pickled -from collections import namedtuple -from pickle import loads, dumps -Point = namedtuple('Point', 'x, y', True) -p = Point(x=10, y=20) -assert p == loads(dumps(p)) - -# test and demonstrate ability to override methods -class Point(namedtuple('Point', 'x y')): - __slots__ = () - @property - def hypot(self): - return (self.x ** 2 + self.y ** 2) ** 0.5 - def __str__(self): - return 'Point: x=%6.3f y=%6.3f hypot=%6.3f' % (self.x, self.y, self.hypot) - -for p in Point(3, 4), Point(14, 5/7.): - print (p) - -class Point(namedtuple('Point', 'x y')): - 'Point class with optimized _make() and _replace() without error-checking' - __slots__ = () - _make = classmethod(tuple.__new__) - def _replace(self, _map=map, **kwds): - return self._make(_map(kwds.get, ('x', 'y'), self)) - -print(Point(11, 22)._replace(x=100)) - -Point3D = namedtuple('Point3D', Point._fields + ('z',)) -print(Point3D.__doc__) - -import doctest, collections -TestResults = namedtuple('TestResults', 'failed attempted') -print(TestResults(*doctest.testmod(collections))) diff --git a/typing_extensions/test_data/python-3.5.0/collections/abc.py b/typing_extensions/test_data/python-3.5.0/collections/abc.py deleted file mode 100644 index 891600d1..00000000 --- a/typing_extensions/test_data/python-3.5.0/collections/abc.py +++ /dev/null @@ -1,2 +0,0 @@ -from _collections_abc import * -from _collections_abc import __all__ diff --git a/typing_extensions/test_data/python-3.5.0/typing.py b/typing_extensions/test_data/python-3.5.0/typing.py deleted file mode 100644 index 1a4982ea..00000000 --- a/typing_extensions/test_data/python-3.5.0/typing.py +++ /dev/null @@ -1,1648 +0,0 @@ -# TODO nits: -# Get rid of asserts that are the caller's fault. -# Docstrings (e.g. ABCs). - -import abc -from abc import abstractmethod, abstractproperty -import collections -import functools -import re as stdlib_re # Avoid confusion with the re we export. 
-import sys -import types -try: - import collections.abc as collections_abc -except ImportError: - import collections as collections_abc # Fallback for PY3.2. - - -# Please keep __all__ alphabetized within each category. -__all__ = [ - # Super-special typing primitives. - 'Any', - 'Callable', - 'Generic', - 'Optional', - 'TypeVar', - 'Union', - 'Tuple', - - # ABCs (from collections.abc). - 'AbstractSet', # collections.abc.Set. - 'ByteString', - 'Container', - 'Hashable', - 'ItemsView', - 'Iterable', - 'Iterator', - 'KeysView', - 'Mapping', - 'MappingView', - 'MutableMapping', - 'MutableSequence', - 'MutableSet', - 'Sequence', - 'Sized', - 'ValuesView', - - # Structural checks, a.k.a. protocols. - 'Reversible', - 'SupportsAbs', - 'SupportsFloat', - 'SupportsInt', - 'SupportsRound', - - # Concrete collection types. - 'Dict', - 'List', - 'Set', - 'NamedTuple', # Not really a type. - 'Generator', - - # One-off things. - 'AnyStr', - 'cast', - 'get_type_hints', - 'no_type_check', - 'no_type_check_decorator', - 'overload', - - # Submodules. - 'io', - 're', -] - - -def _qualname(x): - if sys.version_info[:2] >= (3, 3): - return x.__qualname__ - else: - # Fall back to just name. - return x.__name__ - - -class TypingMeta(type): - """Metaclass for every type defined below. - - This overrides __new__() to require an extra keyword parameter - '_root', which serves as a guard against naive subclassing of the - typing classes. Any legitimate class defined using a metaclass - derived from TypingMeta (including internal subclasses created by - e.g. Union[X, Y]) must pass _root=True. - - This also defines a dummy constructor (all the work is done in - __new__) and a nicer repr(). - """ - - _is_protocol = False - - def __new__(cls, name, bases, namespace, *, _root=False): - if not _root: - raise TypeError("Cannot subclass %s" % - (', '.join(map(_type_repr, bases)) or '()')) - return super().__new__(cls, name, bases, namespace) - - def __init__(self, *args, **kwds): - pass - - def _eval_type(self, globalns, localns): - """Override this in subclasses to interpret forward references. - - For example, Union['C'] is internally stored as - Union[_ForwardRef('C')], which should evaluate to _Union[C], - where C is an object found in globalns or localns (searching - localns first, of course). 
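The _root guard in TypingMeta above is what makes these special forms reject naive subclassing at class-creation time; with the typing.py vendored here (and, with a different message, in later versions) the failure looks like:

    import typing

    try:
        class Bad(typing.Union):    # _root defaults to False, so __new__ refuses
            pass
    except TypeError as exc:
        print(exc)                  # e.g. "Cannot subclass typing.Union"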
- """ - return self - - def _has_type_var(self): - return False - - def __repr__(self): - return '%s.%s' % (self.__module__, _qualname(self)) - - -class Final: - """Mix-in class to prevent instantiation.""" - - __slots__ = () - - def __new__(self, *args, **kwds): - raise TypeError("Cannot instantiate %r" % self.__class__) - - -class _ForwardRef(TypingMeta): - """Wrapper to hold a forward reference.""" - - def __new__(cls, arg): - if not isinstance(arg, str): - raise TypeError('ForwardRef must be a string -- got %r' % (arg,)) - try: - code = compile(arg, '', 'eval') - except SyntaxError: - raise SyntaxError('ForwardRef must be an expression -- got %r' % - (arg,)) - self = super().__new__(cls, arg, (), {}, _root=True) - self.__forward_arg__ = arg - self.__forward_code__ = code - self.__forward_evaluated__ = False - self.__forward_value__ = None - typing_globals = globals() - frame = sys._getframe(1) - while frame is not None and frame.f_globals is typing_globals: - frame = frame.f_back - assert frame is not None - self.__forward_frame__ = frame - return self - - def _eval_type(self, globalns, localns): - if not isinstance(localns, dict): - raise TypeError('ForwardRef localns must be a dict -- got %r' % - (localns,)) - if not isinstance(globalns, dict): - raise TypeError('ForwardRef globalns must be a dict -- got %r' % - (globalns,)) - if not self.__forward_evaluated__: - if globalns is None and localns is None: - globalns = localns = {} - elif globalns is None: - globalns = localns - elif localns is None: - localns = globalns - self.__forward_value__ = _type_check( - eval(self.__forward_code__, globalns, localns), - "Forward references must evaluate to types.") - self.__forward_evaluated__ = True - return self.__forward_value__ - - def __instancecheck__(self, obj): - raise TypeError("Forward references cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - if not self.__forward_evaluated__: - globalns = self.__forward_frame__.f_globals - localns = self.__forward_frame__.f_locals - try: - self._eval_type(globalns, localns) - except NameError: - return False # Too early. - return issubclass(cls, self.__forward_value__) - - def __repr__(self): - return '_ForwardRef(%r)' % (self.__forward_arg__,) - - -class _TypeAlias: - """Internal helper class for defining generic variants of concrete types. - - Note that this is not a type; let's call it a pseudo-type. It can - be used in instance and subclass checks, e.g. isinstance(m, Match) - or issubclass(type(m), Match). However, it cannot be itself the - target of an issubclass() call; e.g. issubclass(Match, C) (for - some arbitrary class C) raises TypeError rather than returning - False. - """ - - __slots__ = ('name', 'type_var', 'impl_type', 'type_checker') - - def __new__(cls, *args, **kwds): - """Constructor. - - This only exists to give a better error message in case - someone tries to subclass a type alias (not a good idea). - """ - if (len(args) == 3 and - isinstance(args[0], str) and - isinstance(args[1], tuple)): - # Close enough. - raise TypeError("A type alias cannot be subclassed") - return object.__new__(cls) - - def __init__(self, name, type_var, impl_type, type_checker): - """Initializer. - - Args: - name: The name, e.g. 'Pattern'. - type_var: The type parameter, e.g. AnyStr, or the - specific type, e.g. str. - impl_type: The implementation type. - type_checker: Function that takes an impl_type instance. - and returns a value that should be a type_var instance. 
- """ - assert isinstance(name, str), repr(name) - assert isinstance(type_var, type), repr(type_var) - assert isinstance(impl_type, type), repr(impl_type) - assert not isinstance(impl_type, TypingMeta), repr(impl_type) - self.name = name - self.type_var = type_var - self.impl_type = impl_type - self.type_checker = type_checker - - def __repr__(self): - return "%s[%s]" % (self.name, _type_repr(self.type_var)) - - def __getitem__(self, parameter): - assert isinstance(parameter, type), repr(parameter) - if not isinstance(self.type_var, TypeVar): - raise TypeError("%s cannot be further parameterized." % self) - if self.type_var.__constraints__: - if not issubclass(parameter, Union[self.type_var.__constraints__]): - raise TypeError("%s is not a valid substitution for %s." % - (parameter, self.type_var)) - return self.__class__(self.name, parameter, - self.impl_type, self.type_checker) - - def __instancecheck__(self, obj): - raise TypeError("Type aliases cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - if cls is Any: - return True - if isinstance(cls, _TypeAlias): - # Covariance. For now, we compare by name. - return (cls.name == self.name and - issubclass(cls.type_var, self.type_var)) - else: - # Note that this is too lenient, because the - # implementation type doesn't carry information about - # whether it is about bytes or str (for example). - return issubclass(cls, self.impl_type) - - -def _has_type_var(t): - return t is not None and isinstance(t, TypingMeta) and t._has_type_var() - - -def _eval_type(t, globalns, localns): - if isinstance(t, TypingMeta): - return t._eval_type(globalns, localns) - else: - return t - - -def _type_check(arg, msg): - """Check that the argument is a type, and return it. - - As a special case, accept None and return type(None) instead. - Also, _TypeAlias instances (e.g. Match, Pattern) are acceptable. - - The msg argument is a human-readable error message, e.g. - - "Union[arg, ...]: arg should be a type." - - We append the repr() of the actual value (truncated to 100 chars). - """ - if arg is None: - return type(None) - if isinstance(arg, str): - arg = _ForwardRef(arg) - if not isinstance(arg, (type, _TypeAlias)): - raise TypeError(msg + " Got %.100r." % (arg,)) - return arg - - -def _type_repr(obj): - """Return the repr() of an object, special-casing types. - - If obj is a type, we return a shorter version than the default - type.__repr__, based on the module and qualified name, which is - typically enough to uniquely identify a type. For everything - else, we fall back on repr(obj). - """ - if isinstance(obj, type) and not isinstance(obj, TypingMeta): - if obj.__module__ == 'builtins': - return _qualname(obj) - else: - return '%s.%s' % (obj.__module__, _qualname(obj)) - else: - return repr(obj) - - -class AnyMeta(TypingMeta): - """Metaclass for Any.""" - - def __new__(cls, name, bases, namespace, _root=False): - self = super().__new__(cls, name, bases, namespace, _root=_root) - return self - - def __instancecheck__(self, obj): - raise TypeError("Any cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - if not isinstance(cls, type): - return super().__subclasscheck__(cls) # To TypeError. - return True - - -class Any(Final, metaclass=AnyMeta, _root=True): - """Special type indicating an unconstrained type. - - - Any object is an instance of Any. - - Any class is a subclass of Any. - - As a special case, Any and object are subclasses of each other. 
- """ - - __slots__ = () - - -class TypeVar(TypingMeta, metaclass=TypingMeta, _root=True): - """Type variable. - - Usage:: - - T = TypeVar('T') # Can be anything - A = TypeVar('A', str, bytes) # Must be str or bytes - - Type variables exist primarily for the benefit of static type - checkers. They serve as the parameters for generic types as well - as for generic function definitions. See class Generic for more - information on generic types. Generic functions work as follows: - - def repeat(x: T, n: int) -> Sequence[T]: - '''Return a list containing n references to x.''' - return [x]*n - - def longest(x: A, y: A) -> A: - '''Return the longest of two strings.''' - return x if len(x) >= len(y) else y - - The latter example's signature is essentially the overloading - of (str, str) -> str and (bytes, bytes) -> bytes. Also note - that if the arguments are instances of some subclass of str, - the return type is still plain str. - - At runtime, isinstance(x, T) will raise TypeError. However, - issubclass(C, T) is true for any class C, and issubclass(str, A) - and issubclass(bytes, A) are true, and issubclass(int, A) is - false. - - Type variables may be marked covariant or contravariant by passing - covariant=True or contravariant=True. See PEP 484 for more - details. By default type variables are invariant. - - Type variables can be introspected. e.g.: - - T.__name__ == 'T' - T.__constraints__ == () - T.__covariant__ == False - T.__contravariant__ = False - A.__constraints__ == (str, bytes) - """ - - def __new__(cls, name, *constraints, bound=None, - covariant=False, contravariant=False): - self = super().__new__(cls, name, (Final,), {}, _root=True) - if covariant and contravariant: - raise ValueError("Bivariant type variables are not supported.") - self.__covariant__ = bool(covariant) - self.__contravariant__ = bool(contravariant) - if constraints and bound is not None: - raise TypeError("Constraints cannot be combined with bound=...") - if constraints and len(constraints) == 1: - raise TypeError("A single constraint is not allowed") - msg = "TypeVar(name, constraint, ...): constraints must be types." - self.__constraints__ = tuple(_type_check(t, msg) for t in constraints) - if bound: - self.__bound__ = _type_check(bound, "Bound must be a type.") - else: - self.__bound__ = None - return self - - def _has_type_var(self): - return True - - def __repr__(self): - if self.__covariant__: - prefix = '+' - elif self.__contravariant__: - prefix = '-' - else: - prefix = '~' - return prefix + self.__name__ - - def __instancecheck__(self, instance): - raise TypeError("Type variables cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - # TODO: Make this raise TypeError too? - if cls is self: - return True - if cls is Any: - return True - if self.__bound__ is not None: - return issubclass(cls, self.__bound__) - if self.__constraints__: - return any(issubclass(cls, c) for c in self.__constraints__) - return True - - -# Some unconstrained type variables. These are used by the container types. -T = TypeVar('T') # Any type. -KT = TypeVar('KT') # Key type. -VT = TypeVar('VT') # Value type. -T_co = TypeVar('T_co', covariant=True) # Any type covariant containers. -V_co = TypeVar('V_co', covariant=True) # Any type covariant containers. -VT_co = TypeVar('VT_co', covariant=True) # Value type covariant containers. -T_contra = TypeVar('T_contra', contravariant=True) # Ditto contravariant. - -# A useful type variable with constraints. This represents string types. 
-# TODO: What about bytearray, memoryview?
-AnyStr = TypeVar('AnyStr', bytes, str)
-
-
-class UnionMeta(TypingMeta):
-    """Metaclass for Union."""
-
-    def __new__(cls, name, bases, namespace, parameters=None, _root=False):
-        if parameters is None:
-            return super().__new__(cls, name, bases, namespace, _root=_root)
-        if not isinstance(parameters, tuple):
-            raise TypeError("Expected parameters=<tuple>")
-        # Flatten out Union[Union[...], ...] and type-check non-Union args.
-        params = []
-        msg = "Union[arg, ...]: each arg must be a type."
-        for p in parameters:
-            if isinstance(p, UnionMeta):
-                params.extend(p.__union_params__)
-            else:
-                params.append(_type_check(p, msg))
-        # Weed out strict duplicates, preserving the first of each occurrence.
-        all_params = set(params)
-        if len(all_params) < len(params):
-            new_params = []
-            for t in params:
-                if t in all_params:
-                    new_params.append(t)
-                    all_params.remove(t)
-            params = new_params
-            assert not all_params, all_params
-        # Weed out subclasses.
-        # E.g. Union[int, Employee, Manager] == Union[int, Employee].
-        # If Any or object is present it will be the sole survivor.
-        # If both Any and object are present, Any wins.
-        # Never discard type variables, except against Any.
-        # (In particular, Union[str, AnyStr] != AnyStr.)
-        all_params = set(params)
-        for t1 in params:
-            if t1 is Any:
-                return Any
-            if isinstance(t1, TypeVar):
-                continue
-            if any(issubclass(t1, t2)
-                   for t2 in all_params - {t1} if not isinstance(t2, TypeVar)):
-                all_params.remove(t1)
-        # It's not a union if there's only one type left.
-        if len(all_params) == 1:
-            return all_params.pop()
-        # Create a new class with these params.
-        self = super().__new__(cls, name, bases, {}, _root=True)
-        self.__union_params__ = tuple(t for t in params if t in all_params)
-        self.__union_set_params__ = frozenset(self.__union_params__)
-        return self
-
-    def _eval_type(self, globalns, localns):
-        p = tuple(_eval_type(t, globalns, localns)
-                  for t in self.__union_params__)
-        if p == self.__union_params__:
-            return self
-        else:
-            return self.__class__(self.__name__, self.__bases__, {},
-                                  p, _root=True)
-
-    def _has_type_var(self):
-        if self.__union_params__:
-            for t in self.__union_params__:
-                if _has_type_var(t):
-                    return True
-        return False
-
-    def __repr__(self):
-        r = super().__repr__()
-        if self.__union_params__:
-            r += '[%s]' % (', '.join(_type_repr(t)
-                                     for t in self.__union_params__))
-        return r
-
-    def __getitem__(self, parameters):
-        if self.__union_params__ is not None:
-            raise TypeError(
-                "Cannot subscript an existing Union. 
Use Union[u, t] instead.") - if parameters == (): - raise TypeError("Cannot take a Union of no types.") - if not isinstance(parameters, tuple): - parameters = (parameters,) - return self.__class__(self.__name__, self.__bases__, - dict(self.__dict__), parameters, _root=True) - - def __eq__(self, other): - if not isinstance(other, UnionMeta): - return NotImplemented - return self.__union_set_params__ == other.__union_set_params__ - - def __hash__(self): - return hash(self.__union_set_params__) - - def __instancecheck__(self, obj): - raise TypeError("Unions cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - if cls is Any: - return True - if self.__union_params__ is None: - return isinstance(cls, UnionMeta) - elif isinstance(cls, UnionMeta): - if cls.__union_params__ is None: - return False - return all(issubclass(c, self) for c in (cls.__union_params__)) - elif isinstance(cls, TypeVar): - if cls in self.__union_params__: - return True - if cls.__constraints__: - return issubclass(Union[cls.__constraints__], self) - return False - else: - return any(issubclass(cls, t) for t in self.__union_params__) - - -class Union(Final, metaclass=UnionMeta, _root=True): - """Union type; Union[X, Y] means either X or Y. - - To define a union, use e.g. Union[int, str]. Details: - - - The arguments must be types and there must be at least one. - - - None as an argument is a special case and is replaced by - type(None). - - - Unions of unions are flattened, e.g.:: - - Union[Union[int, str], float] == Union[int, str, float] - - - Unions of a single argument vanish, e.g.:: - - Union[int] == int # The constructor actually returns int - - - Redundant arguments are skipped, e.g.:: - - Union[int, str, int] == Union[int, str] - - - When comparing unions, the argument order is ignored, e.g.:: - - Union[int, str] == Union[str, int] - - - When two arguments have a subclass relationship, the least - derived argument is kept, e.g.:: - - class Employee: pass - class Manager(Employee): pass - Union[int, Employee, Manager] == Union[int, Employee] - Union[Manager, int, Employee] == Union[int, Employee] - Union[Employee, Manager] == Employee - - - Corollary: if Any is present it is the sole survivor, e.g.:: - - Union[int, Any] == Any - - - Similar for object:: - - Union[int, object] == object - - - To cut a tie: Union[object, Any] == Union[Any, object] == Any. - - - You cannot subclass or instantiate a union. - - - You cannot write Union[X][Y] (what would it mean?). - - - You can use Optional[X] as a shorthand for Union[X, None]. - """ - - # Unsubscripted Union type has params set to None. - __union_params__ = None - __union_set_params__ = None - - -class OptionalMeta(TypingMeta): - """Metaclass for Optional.""" - - def __new__(cls, name, bases, namespace, _root=False): - return super().__new__(cls, name, bases, namespace, _root=_root) - - def __getitem__(self, arg): - arg = _type_check(arg, "Optional[t] requires a single type.") - return Union[arg, type(None)] - - -class Optional(Final, metaclass=OptionalMeta, _root=True): - """Optional type. - - Optional[X] is equivalent to Union[X, type(None)]. 
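The Union identities listed in the docstring above are all checkable at runtime:

    from typing import Optional, Union

    assert Union[int] is int                         # a single argument collapses
    assert Union[int, str, int] == Union[int, str]   # duplicates are skipped
    assert Union[int, str] == Union[str, int]        # argument order is ignored
    assert Optional[str] == Union[str, None]         # Optional is sugar for Union with None
    print('union identities hold')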
- """ - - __slots__ = () - - -class TupleMeta(TypingMeta): - """Metaclass for Tuple.""" - - def __new__(cls, name, bases, namespace, parameters=None, - use_ellipsis=False, _root=False): - self = super().__new__(cls, name, bases, namespace, _root=_root) - self.__tuple_params__ = parameters - self.__tuple_use_ellipsis__ = use_ellipsis - return self - - def _has_type_var(self): - if self.__tuple_params__: - for t in self.__tuple_params__: - if _has_type_var(t): - return True - return False - - def _eval_type(self, globalns, localns): - tp = self.__tuple_params__ - if tp is None: - return self - p = tuple(_eval_type(t, globalns, localns) for t in tp) - if p == self.__tuple_params__: - return self - else: - return self.__class__(self.__name__, self.__bases__, {}, - p, _root=True) - - def __repr__(self): - r = super().__repr__() - if self.__tuple_params__ is not None: - params = [_type_repr(p) for p in self.__tuple_params__] - if self.__tuple_use_ellipsis__: - params.append('...') - r += '[%s]' % ( - ', '.join(params)) - return r - - def __getitem__(self, parameters): - if self.__tuple_params__ is not None: - raise TypeError("Cannot re-parameterize %r" % (self,)) - if not isinstance(parameters, tuple): - parameters = (parameters,) - if len(parameters) == 2 and parameters[1] == Ellipsis: - parameters = parameters[:1] - use_ellipsis = True - msg = "Tuple[t, ...]: t must be a type." - else: - use_ellipsis = False - msg = "Tuple[t0, t1, ...]: each t must be a type." - parameters = tuple(_type_check(p, msg) for p in parameters) - return self.__class__(self.__name__, self.__bases__, - dict(self.__dict__), parameters, - use_ellipsis=use_ellipsis, _root=True) - - def __eq__(self, other): - if not isinstance(other, TupleMeta): - return NotImplemented - return self.__tuple_params__ == other.__tuple_params__ - - def __hash__(self): - return hash(self.__tuple_params__) - - def __instancecheck__(self, obj): - raise TypeError("Tuples cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - if cls is Any: - return True - if not isinstance(cls, type): - return super().__subclasscheck__(cls) # To TypeError. - if issubclass(cls, tuple): - return True # Special case. - if not isinstance(cls, TupleMeta): - return super().__subclasscheck__(cls) # False. - if self.__tuple_params__ is None: - return True - if cls.__tuple_params__ is None: - return False # ??? - if cls.__tuple_use_ellipsis__ != self.__tuple_use_ellipsis__: - return False - # Covariance. - return (len(self.__tuple_params__) == len(cls.__tuple_params__) and - all(issubclass(x, p) - for x, p in zip(cls.__tuple_params__, - self.__tuple_params__))) - - -class Tuple(Final, metaclass=TupleMeta, _root=True): - """Tuple type; Tuple[X, Y] is the cross-product type of X and Y. - - Example: Tuple[T1, T2] is a tuple of two elements corresponding - to type variables T1 and T2. Tuple[int, float, str] is a tuple - of an int, a float and a string. - - To specify a variable-length tuple of homogeneous type, use Sequence[T]. - """ - - __slots__ = () - - -class CallableMeta(TypingMeta): - """Metaclass for Callable.""" - - def __new__(cls, name, bases, namespace, _root=False, - args=None, result=None): - if args is None and result is None: - pass # Must be 'class Callable'. - else: - if args is not Ellipsis: - if not isinstance(args, list): - raise TypeError("Callable[args, result]: " - "args must be a list." - " Got %.100r." % (args,)) - msg = "Callable[[arg, ...], result]: each arg must be a type." 
- args = tuple(_type_check(arg, msg) for arg in args) - msg = "Callable[args, result]: result must be a type." - result = _type_check(result, msg) - self = super().__new__(cls, name, bases, namespace, _root=_root) - self.__args__ = args - self.__result__ = result - return self - - def _has_type_var(self): - if self.__args__: - for t in self.__args__: - if _has_type_var(t): - return True - return _has_type_var(self.__result__) - - def _eval_type(self, globalns, localns): - if self.__args__ is None and self.__result__ is None: - return self - if self.__args__ is Ellipsis: - args = self.__args__ - else: - args = [_eval_type(t, globalns, localns) for t in self.__args__] - result = _eval_type(self.__result__, globalns, localns) - if args == self.__args__ and result == self.__result__: - return self - else: - return self.__class__(self.__name__, self.__bases__, {}, - args=args, result=result, _root=True) - - def __repr__(self): - r = super().__repr__() - if self.__args__ is not None or self.__result__ is not None: - if self.__args__ is Ellipsis: - args_r = '...' - else: - args_r = '[%s]' % ', '.join(_type_repr(t) - for t in self.__args__) - r += '[%s, %s]' % (args_r, _type_repr(self.__result__)) - return r - - def __getitem__(self, parameters): - if self.__args__ is not None or self.__result__ is not None: - raise TypeError("This Callable type is already parameterized.") - if not isinstance(parameters, tuple) or len(parameters) != 2: - raise TypeError( - "Callable must be used as Callable[[arg, ...], result].") - args, result = parameters - return self.__class__(self.__name__, self.__bases__, - dict(self.__dict__), _root=True, - args=args, result=result) - - def __eq__(self, other): - if not isinstance(other, CallableMeta): - return NotImplemented - return (self.__args__ == other.__args__ and - self.__result__ == other.__result__) - - def __hash__(self): - return hash(self.__args__) ^ hash(self.__result__) - - def __instancecheck__(self, obj): - # For unparametrized Callable we allow this, because - # typing.Callable should be equivalent to - # collections.abc.Callable. - if self.__args__ is None and self.__result__ is None: - return isinstance(obj, collections_abc.Callable) - else: - raise TypeError("Callable[] cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - if cls is Any: - return True - if not isinstance(cls, CallableMeta): - return super().__subclasscheck__(cls) - if self.__args__ is None and self.__result__ is None: - return True - # We're not doing covariance or contravariance -- this is *invariance*. - return self == cls - - -class Callable(Final, metaclass=CallableMeta, _root=True): - """Callable type; Callable[[int], str] is a function of (int) -> str. - - The subscription syntax must always be used with exactly two - values: the argument list and the return type. The argument list - must be a list of types; the return type must be a single type. - - There is no syntax to indicate optional or keyword arguments, - such function types are rarely used as callback types. - """ - - __slots__ = () - - -def _gorg(a): - """Return the farthest origin of a generic class.""" - assert isinstance(a, GenericMeta) - while a.__origin__ is not None: - a = a.__origin__ - return a - - -def _geqv(a, b): - """Return whether two generic classes are equivalent. - - The intention is to consider generic class X and any of its - parameterized forms (X[T], X[int], etc.) as equivalent. - - However, X is not equivalent to a subclass of X. 
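A small sketch of the Callable annotation form described above; any (int) -> str callable satisfies it:

    from typing import Callable

    def apply(f: Callable[[int], str], x: int) -> str:
        return f(x)

    print(apply(str, 42))    # '42'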
- - The relation is reflexive, symmetric and transitive. - """ - assert isinstance(a, GenericMeta) and isinstance(b, GenericMeta) - # Reduce each to its origin. - return _gorg(a) is _gorg(b) - - -class GenericMeta(TypingMeta, abc.ABCMeta): - """Metaclass for generic types.""" - - # TODO: Constrain more how Generic is used; only a few - # standard patterns should be allowed. - - # TODO: Use a more precise rule than matching __name__ to decide - # whether two classes are the same. Also, save the formal - # parameters. (These things are related! A solution lies in - # using origin.) - - __extra__ = None - - def __new__(cls, name, bases, namespace, - parameters=None, origin=None, extra=None): - if parameters is None: - # Extract parameters from direct base classes. Only - # direct bases are considered and only those that are - # themselves generic, and parameterized with type - # variables. Don't use bases like Any, Union, Tuple, - # Callable or type variables. - params = None - for base in bases: - if isinstance(base, TypingMeta): - if not isinstance(base, GenericMeta): - raise TypeError( - "You cannot inherit from magic class %s" % - repr(base)) - if base.__parameters__ is None: - continue # The base is unparameterized. - for bp in base.__parameters__: - if _has_type_var(bp) and not isinstance(bp, TypeVar): - raise TypeError( - "Cannot inherit from a generic class " - "parameterized with " - "non-type-variable %s" % bp) - if params is None: - params = [] - if bp not in params: - params.append(bp) - if params is not None: - parameters = tuple(params) - self = super().__new__(cls, name, bases, namespace, _root=True) - self.__parameters__ = parameters - if extra is not None: - self.__extra__ = extra - # Else __extra__ is inherited, eventually from the - # (meta-)class default above. - self.__origin__ = origin - return self - - def _has_type_var(self): - if self.__parameters__: - for t in self.__parameters__: - if _has_type_var(t): - return True - return False - - def __repr__(self): - r = super().__repr__() - if self.__parameters__ is not None: - r += '[%s]' % ( - ', '.join(_type_repr(p) for p in self.__parameters__)) - return r - - def __eq__(self, other): - if not isinstance(other, GenericMeta): - return NotImplemented - return (_geqv(self, other) and - self.__parameters__ == other.__parameters__) - - def __hash__(self): - return hash((self.__name__, self.__parameters__)) - - def __getitem__(self, params): - if not isinstance(params, tuple): - params = (params,) - if not params: - raise TypeError("Cannot have empty parameter list") - msg = "Parameters to generic types must be types." - params = tuple(_type_check(p, msg) for p in params) - if self.__parameters__ is None: - for p in params: - if not isinstance(p, TypeVar): - raise TypeError("Initial parameters must be " - "type variables; got %s" % p) - if len(set(params)) != len(params): - raise TypeError( - "All type variables in Generic[...] must be distinct.") - else: - if len(params) != len(self.__parameters__): - raise TypeError("Cannot change parameter count from %d to %d" % - (len(self.__parameters__), len(params))) - for new, old in zip(params, self.__parameters__): - if isinstance(old, TypeVar): - if not old.__constraints__: - # Substituting for an unconstrained TypeVar is OK. - continue - if issubclass(new, Union[old.__constraints__]): - # Specializing a constrained type variable is OK. 
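The constraint check above admits a substitution only when it falls under one of the type variable's declared constraints. With a constrained variable in the style of AnyStr (the name AnyStrT below is ours), the same test can be phrased directly:

    from typing import TypeVar

    AnyStrT = TypeVar('AnyStrT', str, bytes)   # constrained, like AnyStr

    assert any(issubclass(str, c) for c in AnyStrT.__constraints__)
    assert not any(issubclass(int, c) for c in AnyStrT.__constraints__)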
- continue - if not issubclass(new, old): - raise TypeError( - "Cannot substitute %s for %s in %s" % - (_type_repr(new), _type_repr(old), self)) - - return self.__class__(self.__name__, self.__bases__, - dict(self.__dict__), - parameters=params, - origin=self, - extra=self.__extra__) - - def __instancecheck__(self, instance): - # Since we extend ABC.__subclasscheck__ and - # ABC.__instancecheck__ inlines the cache checking done by the - # latter, we must extend __instancecheck__ too. For simplicity - # we just skip the cache check -- instance checks for generic - # classes are supposed to be rare anyways. - return self.__subclasscheck__(instance.__class__) - - def __subclasscheck__(self, cls): - if cls is Any: - return True - if isinstance(cls, GenericMeta): - # For a class C(Generic[T]) where T is co-variant, - # C[X] is a subclass of C[Y] iff X is a subclass of Y. - origin = self.__origin__ - if origin is not None and origin is cls.__origin__: - assert len(self.__parameters__) == len(origin.__parameters__) - assert len(cls.__parameters__) == len(origin.__parameters__) - for p_self, p_cls, p_origin in zip(self.__parameters__, - cls.__parameters__, - origin.__parameters__): - if isinstance(p_origin, TypeVar): - if p_origin.__covariant__: - # Covariant -- p_cls must be a subclass of p_self. - if not issubclass(p_cls, p_self): - break - elif p_origin.__contravariant__: - # Contravariant. I think it's the opposite. :-) - if not issubclass(p_self, p_cls): - break - else: - # Invariant -- p_cls and p_self must equal. - if p_self != p_cls: - break - else: - # If the origin's parameter is not a typevar, - # insist on invariance. - if p_self != p_cls: - break - else: - return True - # If we break out of the loop, the superclass gets a chance. - if super().__subclasscheck__(cls): - return True - if self.__extra__ is None or isinstance(cls, GenericMeta): - return False - return issubclass(cls, self.__extra__) - - -class Generic(metaclass=GenericMeta): - """Abstract base class for generic types. - - A generic type is typically declared by inheriting from an - instantiation of this class with one or more type variables. - For example, a generic mapping type might be defined as:: - - class Mapping(Generic[KT, VT]): - def __getitem__(self, key: KT) -> VT: - ... - # Etc. - - This class can then be used as follows:: - - def lookup_name(mapping: Mapping, key: KT, default: VT) -> VT: - try: - return mapping[key] - except KeyError: - return default - - For clarity the type variables may be redefined, e.g.:: - - X = TypeVar('X') - Y = TypeVar('Y') - def lookup_name(mapping: Mapping[X, Y], key: X, default: Y) -> Y: - # Same body as above. - """ - - __slots__ = () - - def __new__(cls, *args, **kwds): - next_in_mro = object - # Look for the last occurrence of Generic or Generic[...]. - for i, c in enumerate(cls.__mro__[:-1]): - if isinstance(c, GenericMeta) and _gorg(c) is Generic: - next_in_mro = cls.__mro__[i+1] - return next_in_mro.__new__(_gorg(cls)) - - -def cast(typ, val): - """Cast a value to a type. - - This returns the value unchanged. To the type checker this - signals that the return value has the designated type, but at - runtime we intentionally don't check anything (we want this - to be as fast as possible). 
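The per-parameter dispatch above reduces to three cases, keyed on the origin type variable's variance flags. A hypothetical helper _param_ok spells them out:

    def _param_ok(variance, p_self, p_cls):
        # 'variance' stands in for the __covariant__/__contravariant__ flags.
        if variance == 'covariant':
            return issubclass(p_cls, p_self)     # narrower parameter is fine
        if variance == 'contravariant':
            return issubclass(p_self, p_cls)     # the opposite direction
        return p_self == p_cls                   # invariant: must be equal

    assert _param_ok('covariant', int, bool)      # C[bool] <: C[int]
    assert _param_ok('contravariant', bool, int)  # C[int] <: C[bool]
    assert not _param_ok('invariant', int, bool)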
- """ - return val - - -def _get_defaults(func): - """Internal helper to extract the default arguments, by name.""" - code = func.__code__ - pos_count = code.co_argcount - kw_count = code.co_kwonlyargcount - arg_names = code.co_varnames - kwarg_names = arg_names[pos_count:pos_count + kw_count] - arg_names = arg_names[:pos_count] - defaults = func.__defaults__ or () - kwdefaults = func.__kwdefaults__ - res = dict(kwdefaults) if kwdefaults else {} - pos_offset = pos_count - len(defaults) - for name, value in zip(arg_names[pos_offset:], defaults): - assert name not in res - res[name] = value - return res - - -def get_type_hints(obj, globalns=None, localns=None): - """Return type hints for a function or method object. - - This is often the same as obj.__annotations__, but it handles - forward references encoded as string literals, and if necessary - adds Optional[t] if a default value equal to None is set. - - BEWARE -- the behavior of globalns and localns is counterintuitive - (unless you are familiar with how eval() and exec() work). The - search order is locals first, then globals. - - - If no dict arguments are passed, an attempt is made to use the - globals from obj, and these are also used as the locals. If the - object does not appear to have globals, an exception is raised. - - - If one dict argument is passed, it is used for both globals and - locals. - - - If two dict arguments are passed, they specify globals and - locals, respectively. - """ - if getattr(obj, '__no_type_check__', None): - return {} - if globalns is None: - globalns = getattr(obj, '__globals__', {}) - if localns is None: - localns = globalns - elif localns is None: - localns = globalns - defaults = _get_defaults(obj) - hints = dict(obj.__annotations__) - for name, value in hints.items(): - if isinstance(value, str): - value = _ForwardRef(value) - value = _eval_type(value, globalns, localns) - if name in defaults and defaults[name] is None: - value = Optional[value] - hints[name] = value - return hints - - -# TODO: Also support this as a class decorator. -def no_type_check(arg): - """Decorator to indicate that annotations are not type hints. - - The argument must be a class or function; if it is a class, it - applies recursively to all methods defined in that class (but not - to methods defined in its superclasses or subclasses). - - This mutates the function(s) in place. - """ - if isinstance(arg, type): - for obj in arg.__dict__.values(): - if isinstance(obj, types.FunctionType): - obj.__no_type_check__ = True - else: - arg.__no_type_check__ = True - return arg - - -def no_type_check_decorator(decorator): - """Decorator to give another decorator the @no_type_check effect. - - This wraps the decorator with something that wraps the decorated - function in @no_type_check. - """ - - @functools.wraps(decorator) - def wrapped_decorator(*args, **kwds): - func = decorator(*args, **kwds) - func = no_type_check(func) - return func - - return wrapped_decorator - - -def overload(func): - raise RuntimeError("Overloading is only supported in library stubs") - - -class _ProtocolMeta(GenericMeta): - """Internal metaclass for _Protocol. - - This exists so _Protocol classes can be generic without deriving - from Generic. - """ - - def __instancecheck__(self, obj): - raise TypeError("Protocols cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - if not self._is_protocol: - # No structural checks since this isn't a protocol. 
- return NotImplemented - - if self is _Protocol: - # Every class is a subclass of the empty protocol. - return True - - # Find all attributes defined in the protocol. - attrs = self._get_protocol_attrs() - - for attr in attrs: - if not any(attr in d.__dict__ for d in cls.__mro__): - return False - return True - - def _get_protocol_attrs(self): - # Get all Protocol base classes. - protocol_bases = [] - for c in self.__mro__: - if getattr(c, '_is_protocol', False) and c.__name__ != '_Protocol': - protocol_bases.append(c) - - # Get attributes included in protocol. - attrs = set() - for base in protocol_bases: - for attr in base.__dict__.keys(): - # Include attributes not defined in any non-protocol bases. - for c in self.__mro__: - if (c is not base and attr in c.__dict__ and - not getattr(c, '_is_protocol', False)): - break - else: - if (not attr.startswith('_abc_') and - attr != '__abstractmethods__' and - attr != '_is_protocol' and - attr != '__dict__' and - attr != '__slots__' and - attr != '_get_protocol_attrs' and - attr != '__parameters__' and - attr != '__origin__' and - attr != '__module__'): - attrs.add(attr) - - return attrs - - -class _Protocol(metaclass=_ProtocolMeta): - """Internal base class for protocol classes. - - This implements a simple-minded structural isinstance check - (similar but more general than the one-offs in collections.abc - such as Hashable). - """ - - __slots__ = () - - _is_protocol = True - - -# Various ABCs mimicking those in collections.abc. -# A few are simply re-exported for completeness. - -Hashable = collections_abc.Hashable # Not generic. - - -class Iterable(Generic[T_co], extra=collections_abc.Iterable): - __slots__ = () - - -class Iterator(Iterable[T_co], extra=collections_abc.Iterator): - __slots__ = () - - -class SupportsInt(_Protocol): - __slots__ = () - - @abstractmethod - def __int__(self) -> int: - pass - - -class SupportsFloat(_Protocol): - __slots__ = () - - @abstractmethod - def __float__(self) -> float: - pass - - -class SupportsComplex(_Protocol): - __slots__ = () - - @abstractmethod - def __complex__(self) -> complex: - pass - - -class SupportsBytes(_Protocol): - __slots__ = () - - @abstractmethod - def __bytes__(self) -> bytes: - pass - - -class SupportsAbs(_Protocol[T_co]): - __slots__ = () - - @abstractmethod - def __abs__(self) -> T_co: - pass - - -class SupportsRound(_Protocol[T_co]): - __slots__ = () - - @abstractmethod - def __round__(self, ndigits: int = 0) -> T_co: - pass - - -class Reversible(_Protocol[T_co]): - __slots__ = () - - @abstractmethod - def __reversed__(self) -> 'Iterator[T_co]': - pass - - -Sized = collections_abc.Sized # Not generic. - - -class Container(Generic[T_co], extra=collections_abc.Container): - __slots__ = () - - -# Callable was defined earlier. - - -class AbstractSet(Sized, Iterable[T_co], Container[T_co], - extra=collections_abc.Set): - pass - - -class MutableSet(AbstractSet[T], extra=collections_abc.MutableSet): - pass - - -# NOTE: Only the value type is covariant. 
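The protocol machinery above is structural: issubclass asks only whether every protocol attribute appears somewhere in the candidate's MRO. A compact sketch of that core test (the helper _implements is ours):

    def _implements(cls, attrs):
        return all(any(a in c.__dict__ for c in cls.__mro__) for a in attrs)

    class Celsius:
        def __init__(self, degrees):
            self.degrees = degrees
        def __int__(self):
            return int(self.degrees)

    assert _implements(Celsius, {'__int__'})        # SupportsInt-style
    assert not _implements(Celsius, {'__float__'})  # but not SupportsFloat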
-class Mapping(Sized, Iterable[KT], Container[KT], Generic[VT_co], - extra=collections_abc.Mapping): - pass - - -class MutableMapping(Mapping[KT, VT], extra=collections_abc.MutableMapping): - pass - - -class Sequence(Sized, Iterable[T_co], Container[T_co], - extra=collections_abc.Sequence): - pass - - -class MutableSequence(Sequence[T], extra=collections_abc.MutableSequence): - pass - - -class ByteString(Sequence[int], extra=collections_abc.ByteString): - pass - - -ByteString.register(type(memoryview(b''))) - - -class List(list, MutableSequence[T]): - - def __new__(cls, *args, **kwds): - if _geqv(cls, List): - raise TypeError("Type List cannot be instantiated; " - "use list() instead") - return list.__new__(cls, *args, **kwds) - - -class Set(set, MutableSet[T]): - - def __new__(cls, *args, **kwds): - if _geqv(cls, Set): - raise TypeError("Type Set cannot be instantiated; " - "use set() instead") - return set.__new__(cls, *args, **kwds) - - -class _FrozenSetMeta(GenericMeta): - """This metaclass ensures set is not a subclass of FrozenSet. - - Without this metaclass, set would be considered a subclass of - FrozenSet, because FrozenSet.__extra__ is collections.abc.Set, and - set is a subclass of that. - """ - - def __subclasscheck__(self, cls): - if issubclass(cls, Set): - return False - return super().__subclasscheck__(cls) - - -class FrozenSet(frozenset, AbstractSet[T_co], metaclass=_FrozenSetMeta): - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, FrozenSet): - raise TypeError("Type FrozenSet cannot be instantiated; " - "use frozenset() instead") - return frozenset.__new__(cls, *args, **kwds) - - -class MappingView(Sized, Iterable[T_co], extra=collections_abc.MappingView): - pass - - -class KeysView(MappingView[KT], AbstractSet[KT], - extra=collections_abc.KeysView): - pass - - -# TODO: Enable Set[Tuple[KT, VT_co]] instead of Generic[KT, VT_co]. -class ItemsView(MappingView, Generic[KT, VT_co], - extra=collections_abc.ItemsView): - pass - - -class ValuesView(MappingView[VT_co], extra=collections_abc.ValuesView): - pass - - -class Dict(dict, MutableMapping[KT, VT]): - - def __new__(cls, *args, **kwds): - if _geqv(cls, Dict): - raise TypeError("Type Dict cannot be instantiated; " - "use dict() instead") - return dict.__new__(cls, *args, **kwds) - - -# Determine what base class to use for Generator. -if hasattr(collections_abc, 'Generator'): - # Sufficiently recent versions of 3.5 have a Generator ABC. - _G_base = collections_abc.Generator -else: - # Fall back on the exact type. - _G_base = types.GeneratorType - - -class Generator(Iterator[T_co], Generic[T_co, T_contra, V_co], - extra=_G_base): - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, Generator): - raise TypeError("Type Generator cannot be instantiated; " - "create a subclass instead") - return super().__new__(cls, *args, **kwds) - - -def NamedTuple(typename, fields): - """Typed version of namedtuple. - - Usage:: - - Employee = typing.NamedTuple('Employee', [('name', str), 'id', int)]) - - This is equivalent to:: - - Employee = collections.namedtuple('Employee', ['name', 'id']) - - The resulting class has one extra attribute: _field_types, - giving a dict mapping field names to types. (The field names - are in the _fields attribute, which is part of the namedtuple - API.) 
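Note that the usage line in the docstring above drops a parenthesis; the call takes a list of (name, type) pairs. Corrected and exercised:

    import typing

    Employee = typing.NamedTuple('Employee', [('name', str), ('id', int)])
    e = Employee(name='Guido', id=1)
    assert e._fields == ('name', 'id') and e.id == 1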
- """ - fields = [(n, t) for n, t in fields] - cls = collections.namedtuple(typename, [n for n, t in fields]) - cls._field_types = dict(fields) - return cls - - -class IO(Generic[AnyStr]): - """Generic base class for TextIO and BinaryIO. - - This is an abstract, generic version of the return of open(). - - NOTE: This does not distinguish between the different possible - classes (text vs. binary, read vs. write vs. read/write, - append-only, unbuffered). The TextIO and BinaryIO subclasses - below capture the distinctions between text vs. binary, which is - pervasive in the interface; however we currently do not offer a - way to track the other distinctions in the type system. - """ - - __slots__ = () - - @abstractproperty - def mode(self) -> str: - pass - - @abstractproperty - def name(self) -> str: - pass - - @abstractmethod - def close(self) -> None: - pass - - @abstractmethod - def closed(self) -> bool: - pass - - @abstractmethod - def fileno(self) -> int: - pass - - @abstractmethod - def flush(self) -> None: - pass - - @abstractmethod - def isatty(self) -> bool: - pass - - @abstractmethod - def read(self, n: int = -1) -> AnyStr: - pass - - @abstractmethod - def readable(self) -> bool: - pass - - @abstractmethod - def readline(self, limit: int = -1) -> AnyStr: - pass - - @abstractmethod - def readlines(self, hint: int = -1) -> List[AnyStr]: - pass - - @abstractmethod - def seek(self, offset: int, whence: int = 0) -> int: - pass - - @abstractmethod - def seekable(self) -> bool: - pass - - @abstractmethod - def tell(self) -> int: - pass - - @abstractmethod - def truncate(self, size: int = None) -> int: - pass - - @abstractmethod - def writable(self) -> bool: - pass - - @abstractmethod - def write(self, s: AnyStr) -> int: - pass - - @abstractmethod - def writelines(self, lines: List[AnyStr]) -> None: - pass - - @abstractmethod - def __enter__(self) -> 'IO[AnyStr]': - pass - - @abstractmethod - def __exit__(self, type, value, traceback) -> None: - pass - - -class BinaryIO(IO[bytes]): - """Typed version of the return of open() in binary mode.""" - - __slots__ = () - - @abstractmethod - def write(self, s: Union[bytes, bytearray]) -> int: - pass - - @abstractmethod - def __enter__(self) -> 'BinaryIO': - pass - - -class TextIO(IO[str]): - """Typed version of the return of open() in text mode.""" - - __slots__ = () - - @abstractproperty - def buffer(self) -> BinaryIO: - pass - - @abstractproperty - def encoding(self) -> str: - pass - - @abstractproperty - def errors(self) -> str: - pass - - @abstractproperty - def line_buffering(self) -> bool: - pass - - @abstractproperty - def newlines(self) -> Any: - pass - - @abstractmethod - def __enter__(self) -> 'TextIO': - pass - - -class io: - """Wrapper namespace for IO generic classes.""" - - __all__ = ['IO', 'TextIO', 'BinaryIO'] - IO = IO - TextIO = TextIO - BinaryIO = BinaryIO - -io.__name__ = __name__ + '.io' -sys.modules[io.__name__] = io - - -Pattern = _TypeAlias('Pattern', AnyStr, type(stdlib_re.compile('')), - lambda p: p.pattern) -Match = _TypeAlias('Match', AnyStr, type(stdlib_re.match('', '')), - lambda m: m.re.pattern) - - -class re: - """Wrapper namespace for re type aliases.""" - - __all__ = ['Pattern', 'Match'] - Pattern = Pattern - Match = Match - -re.__name__ = __name__ + '.re' -sys.modules[re.__name__] = re diff --git a/typing_extensions/test_data/python-3.5.1/_collections_abc.py b/typing_extensions/test_data/python-3.5.1/_collections_abc.py deleted file mode 100644 index f89bb6f0..00000000 --- 
a/typing_extensions/test_data/python-3.5.1/_collections_abc.py +++ /dev/null @@ -1,939 +0,0 @@ -# Copyright 2007 Google, Inc. All Rights Reserved. -# Licensed to PSF under a Contributor Agreement. - -"""Abstract Base Classes (ABCs) for collections, according to PEP 3119. - -Unit tests are in test_collections. -""" - -from abc import ABCMeta, abstractmethod -import sys - -__all__ = ["Awaitable", "Coroutine", "AsyncIterable", "AsyncIterator", - "Hashable", "Iterable", "Iterator", "Generator", - "Sized", "Container", "Callable", - "Set", "MutableSet", - "Mapping", "MutableMapping", - "MappingView", "KeysView", "ItemsView", "ValuesView", - "Sequence", "MutableSequence", - "ByteString", - ] - -# This module has been renamed from collections.abc to _collections_abc to -# speed up interpreter startup. Some of the types such as MutableMapping are -# required early but collections module imports a lot of other modules. -# See issue #19218 -__name__ = "collections.abc" - -# Private list of types that we want to register with the various ABCs -# so that they will pass tests like: -# it = iter(somebytearray) -# assert isinstance(it, Iterable) -# Note: in other implementations, these types many not be distinct -# and they make have their own implementation specific types that -# are not included on this list. -bytes_iterator = type(iter(b'')) -bytearray_iterator = type(iter(bytearray())) -#callable_iterator = ??? -dict_keyiterator = type(iter({}.keys())) -dict_valueiterator = type(iter({}.values())) -dict_itemiterator = type(iter({}.items())) -list_iterator = type(iter([])) -list_reverseiterator = type(iter(reversed([]))) -range_iterator = type(iter(range(0))) -set_iterator = type(iter(set())) -str_iterator = type(iter("")) -tuple_iterator = type(iter(())) -zip_iterator = type(iter(zip())) -## views ## -dict_keys = type({}.keys()) -dict_values = type({}.values()) -dict_items = type({}.items()) -## misc ## -mappingproxy = type(type.__dict__) -generator = type((lambda: (yield))()) -## coroutine ## -async def _coro(): pass -_coro = _coro() -coroutine = type(_coro) -_coro.close() # Prevent ResourceWarning -del _coro - - -### ONE-TRICK PONIES ### - -class Hashable(metaclass=ABCMeta): - - __slots__ = () - - @abstractmethod - def __hash__(self): - return 0 - - @classmethod - def __subclasshook__(cls, C): - if cls is Hashable: - for B in C.__mro__: - if "__hash__" in B.__dict__: - if B.__dict__["__hash__"]: - return True - break - return NotImplemented - - -class Awaitable(metaclass=ABCMeta): - - __slots__ = () - - @abstractmethod - def __await__(self): - yield - - @classmethod - def __subclasshook__(cls, C): - if cls is Awaitable: - for B in C.__mro__: - if "__await__" in B.__dict__: - if B.__dict__["__await__"]: - return True - break - return NotImplemented - - -class Coroutine(Awaitable): - - __slots__ = () - - @abstractmethod - def send(self, value): - """Send a value into the coroutine. - Return next yielded value or raise StopIteration. - """ - raise StopIteration - - @abstractmethod - def throw(self, typ, val=None, tb=None): - """Raise an exception in the coroutine. - Return next yielded value or raise StopIteration. - """ - if val is None: - if tb is None: - raise typ - val = typ() - if tb is not None: - val = val.with_traceback(tb) - raise val - - def close(self): - """Raise GeneratorExit inside coroutine. 
- """ - try: - self.throw(GeneratorExit) - except (GeneratorExit, StopIteration): - pass - else: - raise RuntimeError("coroutine ignored GeneratorExit") - - @classmethod - def __subclasshook__(cls, C): - if cls is Coroutine: - mro = C.__mro__ - for method in ('__await__', 'send', 'throw', 'close'): - for base in mro: - if method in base.__dict__: - break - else: - return NotImplemented - return True - return NotImplemented - - -Coroutine.register(coroutine) - - -class AsyncIterable(metaclass=ABCMeta): - - __slots__ = () - - @abstractmethod - async def __aiter__(self): - return AsyncIterator() - - @classmethod - def __subclasshook__(cls, C): - if cls is AsyncIterable: - if any("__aiter__" in B.__dict__ for B in C.__mro__): - return True - return NotImplemented - - -class AsyncIterator(AsyncIterable): - - __slots__ = () - - @abstractmethod - async def __anext__(self): - """Return the next item or raise StopAsyncIteration when exhausted.""" - raise StopAsyncIteration - - async def __aiter__(self): - return self - - @classmethod - def __subclasshook__(cls, C): - if cls is AsyncIterator: - if (any("__anext__" in B.__dict__ for B in C.__mro__) and - any("__aiter__" in B.__dict__ for B in C.__mro__)): - return True - return NotImplemented - - -class Iterable(metaclass=ABCMeta): - - __slots__ = () - - @abstractmethod - def __iter__(self): - while False: - yield None - - @classmethod - def __subclasshook__(cls, C): - if cls is Iterable: - if any("__iter__" in B.__dict__ for B in C.__mro__): - return True - return NotImplemented - - -class Iterator(Iterable): - - __slots__ = () - - @abstractmethod - def __next__(self): - 'Return the next item from the iterator. When exhausted, raise StopIteration' - raise StopIteration - - def __iter__(self): - return self - - @classmethod - def __subclasshook__(cls, C): - if cls is Iterator: - if (any("__next__" in B.__dict__ for B in C.__mro__) and - any("__iter__" in B.__dict__ for B in C.__mro__)): - return True - return NotImplemented - -Iterator.register(bytes_iterator) -Iterator.register(bytearray_iterator) -#Iterator.register(callable_iterator) -Iterator.register(dict_keyiterator) -Iterator.register(dict_valueiterator) -Iterator.register(dict_itemiterator) -Iterator.register(list_iterator) -Iterator.register(list_reverseiterator) -Iterator.register(range_iterator) -Iterator.register(set_iterator) -Iterator.register(str_iterator) -Iterator.register(tuple_iterator) -Iterator.register(zip_iterator) - - -class Generator(Iterator): - - __slots__ = () - - def __next__(self): - """Return the next item from the generator. - When exhausted, raise StopIteration. - """ - return self.send(None) - - @abstractmethod - def send(self, value): - """Send a value into the generator. - Return next yielded value or raise StopIteration. - """ - raise StopIteration - - @abstractmethod - def throw(self, typ, val=None, tb=None): - """Raise an exception in the generator. - Return next yielded value or raise StopIteration. - """ - if val is None: - if tb is None: - raise typ - val = typ() - if tb is not None: - val = val.with_traceback(tb) - raise val - - def close(self): - """Raise GeneratorExit inside generator. 
- """ - try: - self.throw(GeneratorExit) - except (GeneratorExit, StopIteration): - pass - else: - raise RuntimeError("generator ignored GeneratorExit") - - @classmethod - def __subclasshook__(cls, C): - if cls is Generator: - mro = C.__mro__ - for method in ('__iter__', '__next__', 'send', 'throw', 'close'): - for base in mro: - if method in base.__dict__: - break - else: - return NotImplemented - return True - return NotImplemented - - -Generator.register(generator) - - -class Sized(metaclass=ABCMeta): - - __slots__ = () - - @abstractmethod - def __len__(self): - return 0 - - @classmethod - def __subclasshook__(cls, C): - if cls is Sized: - if any("__len__" in B.__dict__ for B in C.__mro__): - return True - return NotImplemented - - -class Container(metaclass=ABCMeta): - - __slots__ = () - - @abstractmethod - def __contains__(self, x): - return False - - @classmethod - def __subclasshook__(cls, C): - if cls is Container: - if any("__contains__" in B.__dict__ for B in C.__mro__): - return True - return NotImplemented - - -class Callable(metaclass=ABCMeta): - - __slots__ = () - - @abstractmethod - def __call__(self, *args, **kwds): - return False - - @classmethod - def __subclasshook__(cls, C): - if cls is Callable: - if any("__call__" in B.__dict__ for B in C.__mro__): - return True - return NotImplemented - - -### SETS ### - - -class Set(Sized, Iterable, Container): - - """A set is a finite, iterable container. - - This class provides concrete generic implementations of all - methods except for __contains__, __iter__ and __len__. - - To override the comparisons (presumably for speed, as the - semantics are fixed), redefine __le__ and __ge__, - then the other operations will automatically follow suit. - """ - - __slots__ = () - - def __le__(self, other): - if not isinstance(other, Set): - return NotImplemented - if len(self) > len(other): - return False - for elem in self: - if elem not in other: - return False - return True - - def __lt__(self, other): - if not isinstance(other, Set): - return NotImplemented - return len(self) < len(other) and self.__le__(other) - - def __gt__(self, other): - if not isinstance(other, Set): - return NotImplemented - return len(self) > len(other) and self.__ge__(other) - - def __ge__(self, other): - if not isinstance(other, Set): - return NotImplemented - if len(self) < len(other): - return False - for elem in other: - if elem not in self: - return False - return True - - def __eq__(self, other): - if not isinstance(other, Set): - return NotImplemented - return len(self) == len(other) and self.__le__(other) - - @classmethod - def _from_iterable(cls, it): - '''Construct an instance of the class from any iterable input. - - Must override this method if the class constructor signature - does not accept an iterable for an input. - ''' - return cls(it) - - def __and__(self, other): - if not isinstance(other, Iterable): - return NotImplemented - return self._from_iterable(value for value in other if value in self) - - __rand__ = __and__ - - def isdisjoint(self, other): - 'Return True if two sets have a null intersection.' 
- for value in other: - if value in self: - return False - return True - - def __or__(self, other): - if not isinstance(other, Iterable): - return NotImplemented - chain = (e for s in (self, other) for e in s) - return self._from_iterable(chain) - - __ror__ = __or__ - - def __sub__(self, other): - if not isinstance(other, Set): - if not isinstance(other, Iterable): - return NotImplemented - other = self._from_iterable(other) - return self._from_iterable(value for value in self - if value not in other) - - def __rsub__(self, other): - if not isinstance(other, Set): - if not isinstance(other, Iterable): - return NotImplemented - other = self._from_iterable(other) - return self._from_iterable(value for value in other - if value not in self) - - def __xor__(self, other): - if not isinstance(other, Set): - if not isinstance(other, Iterable): - return NotImplemented - other = self._from_iterable(other) - return (self - other) | (other - self) - - __rxor__ = __xor__ - - def _hash(self): - """Compute the hash value of a set. - - Note that we don't define __hash__: not all sets are hashable. - But if you define a hashable set type, its __hash__ should - call this function. - - This must be compatible __eq__. - - All sets ought to compare equal if they contain the same - elements, regardless of how they are implemented, and - regardless of the order of the elements; so there's not much - freedom for __eq__ or __hash__. We match the algorithm used - by the built-in frozenset type. - """ - MAX = sys.maxsize - MASK = 2 * MAX + 1 - n = len(self) - h = 1927868237 * (n + 1) - h &= MASK - for x in self: - hx = hash(x) - h ^= (hx ^ (hx << 16) ^ 89869747) * 3644798167 - h &= MASK - h = h * 69069 + 907133923 - h &= MASK - if h > MAX: - h -= MASK + 1 - if h == -1: - h = 590923713 - return h - -Set.register(frozenset) - - -class MutableSet(Set): - """A mutable set is a finite, iterable container. - - This class provides concrete generic implementations of all - methods except for __contains__, __iter__, __len__, - add(), and discard(). - - To override the comparisons (presumably for speed, as the - semantics are fixed), all you have to do is redefine __le__ and - then the other operations will automatically follow suit. - """ - - __slots__ = () - - @abstractmethod - def add(self, value): - """Add an element.""" - raise NotImplementedError - - @abstractmethod - def discard(self, value): - """Remove an element. Do not raise an exception if absent.""" - raise NotImplementedError - - def remove(self, value): - """Remove an element. If not a member, raise a KeyError.""" - if value not in self: - raise KeyError(value) - self.discard(value) - - def pop(self): - """Return the popped value. Raise KeyError if empty.""" - it = iter(self) - try: - value = next(it) - except StopIteration: - raise KeyError - self.discard(value) - return value - - def clear(self): - """This is slow (creates N new iterators!) 
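A mutable counterpart needs just add() and discard() on top of that; remove(), pop(), clear() and the in-place operators defined next are all derived (an illustrative OrderedSet):

    from collections.abc import MutableSet

    class OrderedSet(MutableSet):
        def __init__(self, items=()):
            self._d = dict.fromkeys(items)
        def __contains__(self, x):
            return x in self._d
        def __iter__(self):
            return iter(self._d)
        def __len__(self):
            return len(self._d)
        def add(self, x):
            self._d[x] = None
        def discard(self, x):
            self._d.pop(x, None)

    s = OrderedSet('abc')
    s |= 'cd'          # __ior__ repeatedly calls add()
    s -= 'ab'          # __isub__ repeatedly calls discard()
    assert ''.join(s) == 'cd'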
but effective.""" - try: - while True: - self.pop() - except KeyError: - pass - - def __ior__(self, it): - for value in it: - self.add(value) - return self - - def __iand__(self, it): - for value in (self - it): - self.discard(value) - return self - - def __ixor__(self, it): - if it is self: - self.clear() - else: - if not isinstance(it, Set): - it = self._from_iterable(it) - for value in it: - if value in self: - self.discard(value) - else: - self.add(value) - return self - - def __isub__(self, it): - if it is self: - self.clear() - else: - for value in it: - self.discard(value) - return self - -MutableSet.register(set) - - -### MAPPINGS ### - - -class Mapping(Sized, Iterable, Container): - - __slots__ = () - - """A Mapping is a generic container for associating key/value - pairs. - - This class provides concrete generic implementations of all - methods except for __getitem__, __iter__, and __len__. - - """ - - @abstractmethod - def __getitem__(self, key): - raise KeyError - - def get(self, key, default=None): - 'D.get(k[,d]) -> D[k] if k in D, else d. d defaults to None.' - try: - return self[key] - except KeyError: - return default - - def __contains__(self, key): - try: - self[key] - except KeyError: - return False - else: - return True - - def keys(self): - "D.keys() -> a set-like object providing a view on D's keys" - return KeysView(self) - - def items(self): - "D.items() -> a set-like object providing a view on D's items" - return ItemsView(self) - - def values(self): - "D.values() -> an object providing a view on D's values" - return ValuesView(self) - - def __eq__(self, other): - if not isinstance(other, Mapping): - return NotImplemented - return dict(self.items()) == dict(other.items()) - -Mapping.register(mappingproxy) - - -class MappingView(Sized): - - __slots__ = '_mapping', - - def __init__(self, mapping): - self._mapping = mapping - - def __len__(self): - return len(self._mapping) - - def __repr__(self): - return '{0.__class__.__name__}({0._mapping!r})'.format(self) - - -class KeysView(MappingView, Set): - - __slots__ = () - - @classmethod - def _from_iterable(self, it): - return set(it) - - def __contains__(self, key): - return key in self._mapping - - def __iter__(self): - yield from self._mapping - -KeysView.register(dict_keys) - - -class ItemsView(MappingView, Set): - - __slots__ = () - - @classmethod - def _from_iterable(self, it): - return set(it) - - def __contains__(self, item): - key, value = item - try: - v = self._mapping[key] - except KeyError: - return False - else: - return v == value - - def __iter__(self): - for key in self._mapping: - yield (key, self._mapping[key]) - -ItemsView.register(dict_items) - - -class ValuesView(MappingView): - - __slots__ = () - - def __contains__(self, value): - for key in self._mapping: - if value == self._mapping[key]: - return True - return False - - def __iter__(self): - for key in self._mapping: - yield self._mapping[key] - -ValuesView.register(dict_values) - - -class MutableMapping(Mapping): - - __slots__ = () - - """A MutableMapping is a generic container for associating - key/value pairs. - - This class provides concrete generic implementations of all - methods except for __getitem__, __setitem__, __delitem__, - __iter__, and __len__. 
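The same pattern holds for read-only mappings: supply the three abstract methods and get(), the views, __contains__ and __eq__ all come from the Mapping mixins above (a hypothetical FrozenMap):

    from collections.abc import Mapping

    class FrozenMap(Mapping):
        def __init__(self, data):
            self._data = dict(data)
        def __getitem__(self, key):
            return self._data[key]
        def __iter__(self):
            return iter(self._data)
        def __len__(self):
            return len(self._data)

    m = FrozenMap({'a': 1})
    assert m.get('b', 0) == 0          # get() from the mixin
    assert 'a' in m and list(m.keys()) == ['a']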
- - """ - - @abstractmethod - def __setitem__(self, key, value): - raise KeyError - - @abstractmethod - def __delitem__(self, key): - raise KeyError - - __marker = object() - - def pop(self, key, default=__marker): - '''D.pop(k[,d]) -> v, remove specified key and return the corresponding value. - If key is not found, d is returned if given, otherwise KeyError is raised. - ''' - try: - value = self[key] - except KeyError: - if default is self.__marker: - raise - return default - else: - del self[key] - return value - - def popitem(self): - '''D.popitem() -> (k, v), remove and return some (key, value) pair - as a 2-tuple; but raise KeyError if D is empty. - ''' - try: - key = next(iter(self)) - except StopIteration: - raise KeyError - value = self[key] - del self[key] - return key, value - - def clear(self): - 'D.clear() -> None. Remove all items from D.' - try: - while True: - self.popitem() - except KeyError: - pass - - def update(*args, **kwds): - ''' D.update([E, ]**F) -> None. Update D from mapping/iterable E and F. - If E present and has a .keys() method, does: for k in E: D[k] = E[k] - If E present and lacks .keys() method, does: for (k, v) in E: D[k] = v - In either case, this is followed by: for k, v in F.items(): D[k] = v - ''' - if not args: - raise TypeError("descriptor 'update' of 'MutableMapping' object " - "needs an argument") - self, *args = args - if len(args) > 1: - raise TypeError('update expected at most 1 arguments, got %d' % - len(args)) - if args: - other = args[0] - if isinstance(other, Mapping): - for key in other: - self[key] = other[key] - elif hasattr(other, "keys"): - for key in other.keys(): - self[key] = other[key] - else: - for key, value in other: - self[key] = value - for key, value in kwds.items(): - self[key] = value - - def setdefault(self, key, default=None): - 'D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D' - try: - return self[key] - except KeyError: - self[key] = default - return default - -MutableMapping.register(dict) - - -### SEQUENCES ### - - -class Sequence(Sized, Iterable, Container): - - """All the operations on a read-only sequence. - - Concrete subclasses must override __new__ or __init__, - __getitem__, and __len__. - """ - - __slots__ = () - - @abstractmethod - def __getitem__(self, index): - raise IndexError - - def __iter__(self): - i = 0 - try: - while True: - v = self[i] - yield v - i += 1 - except IndexError: - return - - def __contains__(self, value): - for v in self: - if v == value: - return True - return False - - def __reversed__(self): - for i in reversed(range(len(self))): - yield self[i] - - def index(self, value, start=0, stop=None): - '''S.index(value, [start, [stop]]) -> integer -- return first index of value. - Raises ValueError if the value is not present. - ''' - if start is not None and start < 0: - start = max(len(self) + start, 0) - if stop is not None and stop < 0: - stop += len(self) - - i = start - while stop is None or i < stop: - try: - if self[i] == value: - return i - except IndexError: - break - i += 1 - raise ValueError - - def count(self, value): - 'S.count(value) -> integer -- return number of occurrences of value' - return sum(1 for v in self if v == value) - -Sequence.register(tuple) -Sequence.register(str) -Sequence.register(range) -Sequence.register(memoryview) - - -class ByteString(Sequence): - - """This unifies bytes and bytearray. - - XXX Should add all their methods. 
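Sequence likewise derives iteration, membership, reversal, index() and count() from __getitem__ and __len__ alone (a sketch with a hypothetical Ring class):

    from collections.abc import Sequence

    class Ring(Sequence):
        def __init__(self, data):
            self._data = list(data)
        def __getitem__(self, i):
            return self._data[i]
        def __len__(self):
            return len(self._data)

    r = Ring('abcab')
    assert r.index('b') == 1 and r.count('a') == 2   # both from the mixin
    assert list(reversed(r)) == list('bacba')        # __reversed__ likewise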
- """ - - __slots__ = () - -ByteString.register(bytes) -ByteString.register(bytearray) - - -class MutableSequence(Sequence): - - __slots__ = () - - """All the operations on a read-write sequence. - - Concrete subclasses must provide __new__ or __init__, - __getitem__, __setitem__, __delitem__, __len__, and insert(). - - """ - - @abstractmethod - def __setitem__(self, index, value): - raise IndexError - - @abstractmethod - def __delitem__(self, index): - raise IndexError - - @abstractmethod - def insert(self, index, value): - 'S.insert(index, value) -- insert value before index' - raise IndexError - - def append(self, value): - 'S.append(value) -- append value to the end of the sequence' - self.insert(len(self), value) - - def clear(self): - 'S.clear() -> None -- remove all items from S' - try: - while True: - self.pop() - except IndexError: - pass - - def reverse(self): - 'S.reverse() -- reverse *IN PLACE*' - n = len(self) - for i in range(n//2): - self[i], self[n-i-1] = self[n-i-1], self[i] - - def extend(self, values): - 'S.extend(iterable) -- extend sequence by appending elements from the iterable' - for v in values: - self.append(v) - - def pop(self, index=-1): - '''S.pop([index]) -> item -- remove and return item at index (default last). - Raise IndexError if list is empty or index is out of range. - ''' - v = self[index] - del self[index] - return v - - def remove(self, value): - '''S.remove(value) -- remove first occurrence of value. - Raise ValueError if the value is not present. - ''' - del self[self.index(value)] - - def __iadd__(self, values): - self.extend(values) - return self - -MutableSequence.register(list) -MutableSequence.register(bytearray) # Multiply inheriting, see ByteString diff --git a/typing_extensions/test_data/python-3.5.1/abc.py b/typing_extensions/test_data/python-3.5.1/abc.py deleted file mode 100644 index 1cbf96a6..00000000 --- a/typing_extensions/test_data/python-3.5.1/abc.py +++ /dev/null @@ -1,248 +0,0 @@ -# Copyright 2007 Google, Inc. All Rights Reserved. -# Licensed to PSF under a Contributor Agreement. - -"""Abstract Base Classes (ABCs) according to PEP 3119.""" - -from _weakrefset import WeakSet - - -def abstractmethod(funcobj): - """A decorator indicating abstract methods. - - Requires that the metaclass is ABCMeta or derived from it. A - class that has a metaclass derived from ABCMeta cannot be - instantiated unless all of its abstract methods are overridden. - The abstract methods can be called using any of the normal - 'super' call mechanisms. - - Usage: - - class C(metaclass=ABCMeta): - @abstractmethod - def my_abstract_method(self, ...): - ... - """ - funcobj.__isabstractmethod__ = True - return funcobj - - -class abstractclassmethod(classmethod): - """ - A decorator indicating abstract classmethods. - - Similar to abstractmethod. - - Usage: - - class C(metaclass=ABCMeta): - @abstractclassmethod - def my_abstract_classmethod(cls, ...): - ... - - 'abstractclassmethod' is deprecated. Use 'classmethod' with - 'abstractmethod' instead. - """ - - __isabstractmethod__ = True - - def __init__(self, callable): - callable.__isabstractmethod__ = True - super().__init__(callable) - - -class abstractstaticmethod(staticmethod): - """ - A decorator indicating abstract staticmethods. - - Similar to abstractmethod. - - Usage: - - class C(metaclass=ABCMeta): - @abstractstaticmethod - def my_abstract_staticmethod(...): - ... - - 'abstractstaticmethod' is deprecated. Use 'staticmethod' with - 'abstractmethod' instead. 
- """ - - __isabstractmethod__ = True - - def __init__(self, callable): - callable.__isabstractmethod__ = True - super().__init__(callable) - - -class abstractproperty(property): - """ - A decorator indicating abstract properties. - - Requires that the metaclass is ABCMeta or derived from it. A - class that has a metaclass derived from ABCMeta cannot be - instantiated unless all of its abstract properties are overridden. - The abstract properties can be called using any of the normal - 'super' call mechanisms. - - Usage: - - class C(metaclass=ABCMeta): - @abstractproperty - def my_abstract_property(self): - ... - - This defines a read-only property; you can also define a read-write - abstract property using the 'long' form of property declaration: - - class C(metaclass=ABCMeta): - def getx(self): ... - def setx(self, value): ... - x = abstractproperty(getx, setx) - - 'abstractproperty' is deprecated. Use 'property' with 'abstractmethod' - instead. - """ - - __isabstractmethod__ = True - - -class ABCMeta(type): - - """Metaclass for defining Abstract Base Classes (ABCs). - - Use this metaclass to create an ABC. An ABC can be subclassed - directly, and then acts as a mix-in class. You can also register - unrelated concrete classes (even built-in classes) and unrelated - ABCs as 'virtual subclasses' -- these and their descendants will - be considered subclasses of the registering ABC by the built-in - issubclass() function, but the registering ABC won't show up in - their MRO (Method Resolution Order) nor will method - implementations defined by the registering ABC be callable (not - even via super()). - - """ - - # A global counter that is incremented each time a class is - # registered as a virtual subclass of anything. It forces the - # negative cache to be cleared before its next use. - # Note: this counter is private. Use `abc.get_cache_token()` for - # external code. - _abc_invalidation_counter = 0 - - def __new__(mcls, name, bases, namespace): - cls = super().__new__(mcls, name, bases, namespace) - # Compute set of abstract method names - abstracts = {name - for name, value in namespace.items() - if getattr(value, "__isabstractmethod__", False)} - for base in bases: - for name in getattr(base, "__abstractmethods__", set()): - value = getattr(cls, name, None) - if getattr(value, "__isabstractmethod__", False): - abstracts.add(name) - cls.__abstractmethods__ = frozenset(abstracts) - # Set up inheritance registry - cls._abc_registry = WeakSet() - cls._abc_cache = WeakSet() - cls._abc_negative_cache = WeakSet() - cls._abc_negative_cache_version = ABCMeta._abc_invalidation_counter - return cls - - def register(cls, subclass): - """Register a virtual subclass of an ABC. - - Returns the subclass, to allow usage as a class decorator. - """ - if not isinstance(subclass, type): - raise TypeError("Can only register classes") - if issubclass(subclass, cls): - return subclass # Already a subclass - # Subtle: test for cycles *after* testing for "already a subclass"; - # this means we allow X.register(X) and interpret it as a no-op. 
- if issubclass(cls, subclass): - # This would create a cycle, which is bad for the algorithm below - raise RuntimeError("Refusing to create an inheritance cycle") - cls._abc_registry.add(subclass) - ABCMeta._abc_invalidation_counter += 1 # Invalidate negative cache - return subclass - - def _dump_registry(cls, file=None): - """Debug helper to print the ABC registry.""" - print("Class: %s.%s" % (cls.__module__, cls.__qualname__), file=file) - print("Inv.counter: %s" % ABCMeta._abc_invalidation_counter, file=file) - for name in sorted(cls.__dict__.keys()): - if name.startswith("_abc_"): - value = getattr(cls, name) - print("%s: %r" % (name, value), file=file) - - def __instancecheck__(cls, instance): - """Override for isinstance(instance, cls).""" - # Inline the cache checking - subclass = instance.__class__ - if subclass in cls._abc_cache: - return True - subtype = type(instance) - if subtype is subclass: - if (cls._abc_negative_cache_version == - ABCMeta._abc_invalidation_counter and - subclass in cls._abc_negative_cache): - return False - # Fall back to the subclass check. - return cls.__subclasscheck__(subclass) - return any(cls.__subclasscheck__(c) for c in {subclass, subtype}) - - def __subclasscheck__(cls, subclass): - """Override for issubclass(subclass, cls).""" - # Check cache - if subclass in cls._abc_cache: - return True - # Check negative cache; may have to invalidate - if cls._abc_negative_cache_version < ABCMeta._abc_invalidation_counter: - # Invalidate the negative cache - cls._abc_negative_cache = WeakSet() - cls._abc_negative_cache_version = ABCMeta._abc_invalidation_counter - elif subclass in cls._abc_negative_cache: - return False - # Check the subclass hook - ok = cls.__subclasshook__(subclass) - if ok is not NotImplemented: - assert isinstance(ok, bool) - if ok: - cls._abc_cache.add(subclass) - else: - cls._abc_negative_cache.add(subclass) - return ok - # Check if it's a direct subclass - if cls in getattr(subclass, '__mro__', ()): - cls._abc_cache.add(subclass) - return True - # Check if it's a subclass of a registered class (recursive) - for rcls in cls._abc_registry: - if issubclass(subclass, rcls): - cls._abc_cache.add(subclass) - return True - # Check if it's a subclass of a subclass (recursive) - for scls in cls.__subclasses__(): - if issubclass(subclass, scls): - cls._abc_cache.add(subclass) - return True - # No dice; update negative cache - cls._abc_negative_cache.add(subclass) - return False - - -class ABC(metaclass=ABCMeta): - """Helper class that provides a standard way to create an ABC using - inheritance. - """ - pass - - -def get_cache_token(): - """Returns the current ABC cache token. - - The token is an opaque object (supporting equality testing) identifying the - current version of the ABC cache for virtual subclasses. The token changes - with every call to ``register()`` on any ABC. - """ - return ABCMeta._abc_invalidation_counter diff --git a/typing_extensions/test_data/python-3.5.1/collections/__init__.py b/typing_extensions/test_data/python-3.5.1/collections/__init__.py deleted file mode 100644 index e8312a90..00000000 --- a/typing_extensions/test_data/python-3.5.1/collections/__init__.py +++ /dev/null @@ -1,1222 +0,0 @@ -__all__ = ['deque', 'defaultdict', 'namedtuple', 'UserDict', 'UserList', - 'UserString', 'Counter', 'OrderedDict', 'ChainMap'] - -# For backwards compatibility, continue to make the collections ABCs -# available through the collections module. 
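register() and get_cache_token() in action: a virtual subclass passes issubclass() without appearing in the MRO, and every successful registration bumps the token:

    import abc

    class Serializer(abc.ABC): ...
    class LegacySerializer: ...        # an unrelated concrete class

    token = abc.get_cache_token()
    Serializer.register(LegacySerializer)
    assert issubclass(LegacySerializer, Serializer)     # virtual subclass
    assert Serializer not in LegacySerializer.__mro__   # yet not in the MRO
    assert abc.get_cache_token() != token               # token was bumped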
-from _collections_abc import * -import _collections_abc -__all__ += _collections_abc.__all__ - -from operator import itemgetter as _itemgetter, eq as _eq -from keyword import iskeyword as _iskeyword -import sys as _sys -import heapq as _heapq -from _weakref import proxy as _proxy -from itertools import repeat as _repeat, chain as _chain, starmap as _starmap -from reprlib import recursive_repr as _recursive_repr - -try: - from _collections import deque -except ImportError: - pass -else: - MutableSequence.register(deque) - -try: - from _collections import defaultdict -except ImportError: - pass - - -################################################################################ -### OrderedDict -################################################################################ - -class _OrderedDictKeysView(KeysView): - - def __reversed__(self): - yield from reversed(self._mapping) - -class _OrderedDictItemsView(ItemsView): - - def __reversed__(self): - for key in reversed(self._mapping): - yield (key, self._mapping[key]) - -class _OrderedDictValuesView(ValuesView): - - def __reversed__(self): - for key in reversed(self._mapping): - yield self._mapping[key] - -class _Link(object): - __slots__ = 'prev', 'next', 'key', '__weakref__' - -class OrderedDict(dict): - 'Dictionary that remembers insertion order' - # An inherited dict maps keys to values. - # The inherited dict provides __getitem__, __len__, __contains__, and get. - # The remaining methods are order-aware. - # Big-O running times for all methods are the same as regular dictionaries. - - # The internal self.__map dict maps keys to links in a doubly linked list. - # The circular doubly linked list starts and ends with a sentinel element. - # The sentinel element never gets deleted (this simplifies the algorithm). - # The sentinel is in self.__hardroot with a weakref proxy in self.__root. - # The prev links are weakref proxies (to prevent circular references). - # Individual links are kept alive by the hard reference in self.__map. - # Those hard references disappear when a key is deleted from an OrderedDict. - - def __init__(*args, **kwds): - '''Initialize an ordered dictionary. The signature is the same as - regular dictionaries, but keyword arguments are not recommended because - their insertion order is arbitrary. - - ''' - if not args: - raise TypeError("descriptor '__init__' of 'OrderedDict' object " - "needs an argument") - self, *args = args - if len(args) > 1: - raise TypeError('expected at most 1 arguments, got %d' % len(args)) - try: - self.__root - except AttributeError: - self.__hardroot = _Link() - self.__root = root = _proxy(self.__hardroot) - root.prev = root.next = root - self.__map = {} - self.__update(*args, **kwds) - - def __setitem__(self, key, value, - dict_setitem=dict.__setitem__, proxy=_proxy, Link=_Link): - 'od.__setitem__(i, y) <==> od[i]=y' - # Setting a new item creates a new link at the end of the linked list, - # and the inherited dictionary is updated with the new key/value pair. - if key not in self: - self.__map[key] = link = Link() - root = self.__root - last = root.prev - link.prev, link.next, link.key = last, root, key - last.next = link - root.prev = proxy(link) - dict_setitem(self, key, value) - - def __delitem__(self, key, dict_delitem=dict.__delitem__): - 'od.__delitem__(y) <==> del od[y]' - # Deleting an existing item uses self.__map to find the link which gets - # removed by updating the links in the predecessor and successor nodes. 
- dict_delitem(self, key) - link = self.__map.pop(key) - link_prev = link.prev - link_next = link.next - link_prev.next = link_next - link_next.prev = link_prev - link.prev = None - link.next = None - - def __iter__(self): - 'od.__iter__() <==> iter(od)' - # Traverse the linked list in order. - root = self.__root - curr = root.next - while curr is not root: - yield curr.key - curr = curr.next - - def __reversed__(self): - 'od.__reversed__() <==> reversed(od)' - # Traverse the linked list in reverse order. - root = self.__root - curr = root.prev - while curr is not root: - yield curr.key - curr = curr.prev - - def clear(self): - 'od.clear() -> None. Remove all items from od.' - root = self.__root - root.prev = root.next = root - self.__map.clear() - dict.clear(self) - - def popitem(self, last=True): - '''od.popitem() -> (k, v), return and remove a (key, value) pair. - Pairs are returned in LIFO order if last is true or FIFO order if false. - - ''' - if not self: - raise KeyError('dictionary is empty') - root = self.__root - if last: - link = root.prev - link_prev = link.prev - link_prev.next = root - root.prev = link_prev - else: - link = root.next - link_next = link.next - root.next = link_next - link_next.prev = root - key = link.key - del self.__map[key] - value = dict.pop(self, key) - return key, value - - def move_to_end(self, key, last=True): - '''Move an existing element to the end (or beginning if last==False). - - Raises KeyError if the element does not exist. - When last=True, acts like a fast version of self[key]=self.pop(key). - - ''' - link = self.__map[key] - link_prev = link.prev - link_next = link.next - link_prev.next = link_next - link_next.prev = link_prev - root = self.__root - if last: - last = root.prev - link.prev = last - link.next = root - last.next = root.prev = link - else: - first = root.next - link.prev = root - link.next = first - root.next = first.prev = link - - def __sizeof__(self): - sizeof = _sys.getsizeof - n = len(self) + 1 # number of links including root - size = sizeof(self.__dict__) # instance dictionary - size += sizeof(self.__map) * 2 # internal dict and inherited dict - size += sizeof(self.__hardroot) * n # link objects - size += sizeof(self.__root) * n # proxy objects - return size - - update = __update = MutableMapping.update - - def keys(self): - "D.keys() -> a set-like object providing a view on D's keys" - return _OrderedDictKeysView(self) - - def items(self): - "D.items() -> a set-like object providing a view on D's items" - return _OrderedDictItemsView(self) - - def values(self): - "D.values() -> an object providing a view on D's values" - return _OrderedDictValuesView(self) - - __ne__ = MutableMapping.__ne__ - - __marker = object() - - def pop(self, key, default=__marker): - '''od.pop(k[,d]) -> v, remove specified key and return the corresponding - value. If key is not found, d is returned if given, otherwise KeyError - is raised. 
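Because move_to_end above only relinks nodes of the doubly linked list, reordering is cheap:

    from collections import OrderedDict

    od = OrderedDict.fromkeys('abcde')
    od.move_to_end('b')                # relink 'b' at the tail
    assert ''.join(od) == 'acdeb'
    od.move_to_end('b', last=False)    # or at the head
    assert ''.join(od) == 'bacde'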
- - ''' - if key in self: - result = self[key] - del self[key] - return result - if default is self.__marker: - raise KeyError(key) - return default - - def setdefault(self, key, default=None): - 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' - if key in self: - return self[key] - self[key] = default - return default - - @_recursive_repr() - def __repr__(self): - 'od.__repr__() <==> repr(od)' - if not self: - return '%s()' % (self.__class__.__name__,) - return '%s(%r)' % (self.__class__.__name__, list(self.items())) - - def __reduce__(self): - 'Return state information for pickling' - inst_dict = vars(self).copy() - for k in vars(OrderedDict()): - inst_dict.pop(k, None) - return self.__class__, (), inst_dict or None, None, iter(self.items()) - - def copy(self): - 'od.copy() -> a shallow copy of od' - return self.__class__(self) - - @classmethod - def fromkeys(cls, iterable, value=None): - '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S. - If not specified, the value defaults to None. - - ''' - self = cls() - for key in iterable: - self[key] = value - return self - - def __eq__(self, other): - '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive - while comparison to a regular mapping is order-insensitive. - - ''' - if isinstance(other, OrderedDict): - return dict.__eq__(self, other) and all(map(_eq, self, other)) - return dict.__eq__(self, other) - - -try: - from _collections import OrderedDict -except ImportError: - # Leave the pure Python version in place. - pass - - -################################################################################ -### namedtuple -################################################################################ - -_class_template = """\ -from builtins import property as _property, tuple as _tuple -from operator import itemgetter as _itemgetter -from collections import OrderedDict - -class {typename}(tuple): - '{typename}({arg_list})' - - __slots__ = () - - _fields = {field_names!r} - - def __new__(_cls, {arg_list}): - 'Create new instance of {typename}({arg_list})' - return _tuple.__new__(_cls, ({arg_list})) - - @classmethod - def _make(cls, iterable, new=tuple.__new__, len=len): - 'Make a new {typename} object from a sequence or iterable' - result = new(cls, iterable) - if len(result) != {num_fields:d}: - raise TypeError('Expected {num_fields:d} arguments, got %d' % len(result)) - return result - - def _replace(_self, **kwds): - 'Return a new {typename} object replacing specified fields with new values' - result = _self._make(map(kwds.pop, {field_names!r}, _self)) - if kwds: - raise ValueError('Got unexpected field names: %r' % list(kwds)) - return result - - def __repr__(self): - 'Return a nicely formatted representation string' - return self.__class__.__name__ + '({repr_fmt})' % self - - def _asdict(self): - 'Return a new OrderedDict which maps field names to their values.' - return OrderedDict(zip(self._fields, self)) - - def __getnewargs__(self): - 'Return self as a plain tuple. Used by copy and pickle.' - return tuple(self) - -{field_defs} -""" - -_repr_template = '{name}=%r' - -_field_template = '''\ - {name} = _property(_itemgetter({index:d}), doc='Alias for field number {index:d}') -''' - -def namedtuple(typename, field_names, verbose=False, rename=False): - """Returns a new subclass of tuple with named fields. 
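The __eq__ above makes ordering significant only when both sides are OrderedDicts:

    from collections import OrderedDict

    a = OrderedDict([('x', 1), ('y', 2)])
    b = OrderedDict([('y', 2), ('x', 1)])
    assert a != b            # OrderedDict vs OrderedDict: order matters
    assert a == dict(b)      # vs a regular mapping: order is ignored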
-
-    >>> Point = namedtuple('Point', ['x', 'y'])
-    >>> Point.__doc__                   # docstring for the new class
-    'Point(x, y)'
-    >>> p = Point(11, y=22)             # instantiate with positional args or keywords
-    >>> p[0] + p[1]                     # indexable like a plain tuple
-    33
-    >>> x, y = p                        # unpack like a regular tuple
-    >>> x, y
-    (11, 22)
-    >>> p.x + p.y                       # fields also accessible by name
-    33
-    >>> d = p._asdict()                 # convert to a dictionary
-    >>> d['x']
-    11
-    >>> Point(**d)                      # convert from a dictionary
-    Point(x=11, y=22)
-    >>> p._replace(x=100)               # _replace() is like str.replace() but targets named fields
-    Point(x=100, y=22)
-
-    """
-
-    # Validate the field names.  At the user's option, either generate an error
-    # message or automatically replace the field name with a valid name.
-    if isinstance(field_names, str):
-        field_names = field_names.replace(',', ' ').split()
-    field_names = list(map(str, field_names))
-    typename = str(typename)
-    if rename:
-        seen = set()
-        for index, name in enumerate(field_names):
-            if (not name.isidentifier()
-                or _iskeyword(name)
-                or name.startswith('_')
-                or name in seen):
-                field_names[index] = '_%d' % index
-            seen.add(name)
-    for name in [typename] + field_names:
-        if type(name) != str:
-            raise TypeError('Type names and field names must be strings')
-        if not name.isidentifier():
-            raise ValueError('Type names and field names must be valid '
-                             'identifiers: %r' % name)
-        if _iskeyword(name):
-            raise ValueError('Type names and field names cannot be a '
-                             'keyword: %r' % name)
-    seen = set()
-    for name in field_names:
-        if name.startswith('_') and not rename:
-            raise ValueError('Field names cannot start with an underscore: '
-                             '%r' % name)
-        if name in seen:
-            raise ValueError('Encountered duplicate field name: %r' % name)
-        seen.add(name)
-
-    # Fill-in the class template
-    class_definition = _class_template.format(
-        typename = typename,
-        field_names = tuple(field_names),
-        num_fields = len(field_names),
-        arg_list = repr(tuple(field_names)).replace("'", "")[1:-1],
-        repr_fmt = ', '.join(_repr_template.format(name=name)
-                             for name in field_names),
-        field_defs = '\n'.join(_field_template.format(index=index, name=name)
-                               for index, name in enumerate(field_names))
-    )
-
-    # Execute the template string in a temporary namespace and support
-    # tracing utilities by setting a value for frame.f_globals['__name__']
-    namespace = dict(__name__='namedtuple_%s' % typename)
-    exec(class_definition, namespace)
-    result = namespace[typename]
-    result._source = class_definition
-    if verbose:
-        print(result._source)
-
-    # For pickling to work, the __module__ variable needs to be set to the frame
-    # where the named tuple is created.  Bypass this step in environments where
-    # sys._getframe is not defined (Jython for example) or sys._getframe is not
-    # defined for arguments greater than 0 (IronPython).
-    try:
-        result.__module__ = _sys._getframe(1).f_globals.get('__name__', '__main__')
-    except (AttributeError, ValueError):
-        pass
-
-    return result
-
-
-########################################################################
-###  Counter
-########################################################################
-
-def _count_elements(mapping, iterable):
-    'Tally elements from the iterable.'
-    mapping_get = mapping.get
-    for elem in iterable:
-        mapping[elem] = mapping_get(elem, 0) + 1
-
-try:                                    # Load C helper function if available
-    from _collections import _count_elements
-except ImportError:
-    pass
-
-class Counter(dict):
-    '''Dict subclass for counting hashable items.  Sometimes called a bag
-    or multiset. 
Elements are stored as dictionary keys and their counts - are stored as dictionary values. - - >>> c = Counter('abcdeabcdabcaba') # count elements from a string - - >>> c.most_common(3) # three most common elements - [('a', 5), ('b', 4), ('c', 3)] - >>> sorted(c) # list all unique elements - ['a', 'b', 'c', 'd', 'e'] - >>> ''.join(sorted(c.elements())) # list elements with repetitions - 'aaaaabbbbcccdde' - >>> sum(c.values()) # total of all counts - 15 - - >>> c['a'] # count of letter 'a' - 5 - >>> for elem in 'shazam': # update counts from an iterable - ... c[elem] += 1 # by adding 1 to each element's count - >>> c['a'] # now there are seven 'a' - 7 - >>> del c['b'] # remove all 'b' - >>> c['b'] # now there are zero 'b' - 0 - - >>> d = Counter('simsalabim') # make another counter - >>> c.update(d) # add in the second counter - >>> c['a'] # now there are nine 'a' - 9 - - >>> c.clear() # empty the counter - >>> c - Counter() - - Note: If a count is set to zero or reduced to zero, it will remain - in the counter until the entry is deleted or the counter is cleared: - - >>> c = Counter('aaabbc') - >>> c['b'] -= 2 # reduce the count of 'b' by two - >>> c.most_common() # 'b' is still in, but its count is zero - [('a', 3), ('c', 1), ('b', 0)] - - ''' - # References: - # http://en.wikipedia.org/wiki/Multiset - # http://www.gnu.org/software/smalltalk/manual-base/html_node/Bag.html - # http://www.demo2s.com/Tutorial/Cpp/0380__set-multiset/Catalog0380__set-multiset.htm - # http://code.activestate.com/recipes/259174/ - # Knuth, TAOCP Vol. II section 4.6.3 - - def __init__(*args, **kwds): - '''Create a new, empty Counter object. And if given, count elements - from an input iterable. Or, initialize the count from another mapping - of elements to their counts. - - >>> c = Counter() # a new, empty counter - >>> c = Counter('gallahad') # a new counter from an iterable - >>> c = Counter({'a': 4, 'b': 2}) # a new counter from a mapping - >>> c = Counter(a=4, b=2) # a new counter from keyword args - - ''' - if not args: - raise TypeError("descriptor '__init__' of 'Counter' object " - "needs an argument") - self, *args = args - if len(args) > 1: - raise TypeError('expected at most 1 arguments, got %d' % len(args)) - super(Counter, self).__init__() - self.update(*args, **kwds) - - def __missing__(self, key): - 'The count of elements not in the Counter is zero.' - # Needed so that self[missing_item] does not raise KeyError - return 0 - - def most_common(self, n=None): - '''List the n most common elements and their counts from the most - common to the least. If n is None, then list all element counts. - - >>> Counter('abcdeabcdabcaba').most_common(3) - [('a', 5), ('b', 4), ('c', 3)] - - ''' - # Emulate Bag.sortedByCount from Smalltalk - if n is None: - return sorted(self.items(), key=_itemgetter(1), reverse=True) - return _heapq.nlargest(n, self.items(), key=_itemgetter(1)) - - def elements(self): - '''Iterator over elements repeating each as many times as its count. - - >>> c = Counter('ABCABC') - >>> sorted(c.elements()) - ['A', 'A', 'B', 'B', 'C', 'C'] - - # Knuth's example for prime factors of 1836: 2**2 * 3**3 * 17**1 - >>> prime_factors = Counter({2: 2, 3: 3, 17: 1}) - >>> product = 1 - >>> for factor in prime_factors.elements(): # loop over factors - ... product *= factor # and multiply them - >>> product - 1836 - - Note, if an element's count has been set to zero or is a negative - number, elements() will ignore it. - - ''' - # Emulate Bag.do from Smalltalk and Multiset.begin from C++. 
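-        # An illustrative trace of what the one-liner below computes,
-        # assuming a hypothetical Counter({'a': 2, 'b': 1}):
-        #
-        #   _starmap(_repeat, self.items())  ->  repeat('a', 2), repeat('b', 1)
-        #   _chain.from_iterable(...)        ->  'a', 'a', 'b'
-        #
-        # A zero or negative count produces an empty repeat() iterator,
-        # which is how such elements end up being skipped.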
-        return _chain.from_iterable(_starmap(_repeat, self.items()))
-
-    # Override dict methods where necessary
-
-    @classmethod
-    def fromkeys(cls, iterable, v=None):
-        # There is no equivalent method for counters because setting v=1
-        # means that no element can have a count greater than one.
-        raise NotImplementedError(
-            'Counter.fromkeys() is undefined.  Use Counter(iterable) instead.')
-
-    def update(*args, **kwds):
-        '''Like dict.update() but add counts instead of replacing them.
-
-        Source can be an iterable, a dictionary, or another Counter instance.
-
-        >>> c = Counter('which')
-        >>> c.update('witch')           # add elements from another iterable
-        >>> d = Counter('watch')
-        >>> c.update(d)                 # add elements from another counter
-        >>> c['h']                      # four 'h' in which, witch, and watch
-        4
-
-        '''
-        # The regular dict.update() operation makes no sense here because the
-        # replace behavior results in some of the original untouched counts
-        # being mixed-in with all of the other counts for a mishmash that
-        # doesn't have a straightforward interpretation in most counting
-        # contexts.  Instead, we implement straight-addition.  Both the inputs
-        # and outputs are allowed to contain zero and negative counts.
-
-        if not args:
-            raise TypeError("descriptor 'update' of 'Counter' object "
-                            "needs an argument")
-        self, *args = args
-        if len(args) > 1:
-            raise TypeError('expected at most 1 arguments, got %d' % len(args))
-        iterable = args[0] if args else None
-        if iterable is not None:
-            if isinstance(iterable, Mapping):
-                if self:
-                    self_get = self.get
-                    for elem, count in iterable.items():
-                        self[elem] = count + self_get(elem, 0)
-                else:
-                    super(Counter, self).update(iterable) # fast path when counter is empty
-            else:
-                _count_elements(self, iterable)
-        if kwds:
-            self.update(kwds)
-
-    def subtract(*args, **kwds):
-        '''Like dict.update() but subtracts counts instead of replacing them.
-        Counts can be reduced below zero.  Both the inputs and outputs are
-        allowed to contain zero and negative counts.
-
-        Source can be an iterable, a dictionary, or another Counter instance.
-
-        >>> c = Counter('which')
-        >>> c.subtract('witch')             # subtract elements from another iterable
-        >>> c.subtract(Counter('watch'))    # subtract elements from another counter
-        >>> c['h']                          # 2 in which, minus 1 in witch, minus 1 in watch
-        0
-        >>> c['w']                          # 1 in which, minus 1 in witch, minus 1 in watch
-        -1
-
-        '''
-        if not args:
-            raise TypeError("descriptor 'subtract' of 'Counter' object "
-                            "needs an argument")
-        self, *args = args
-        if len(args) > 1:
-            raise TypeError('expected at most 1 arguments, got %d' % len(args))
-        iterable = args[0] if args else None
-        if iterable is not None:
-            self_get = self.get
-            if isinstance(iterable, Mapping):
-                for elem, count in iterable.items():
-                    self[elem] = self_get(elem, 0) - count
-            else:
-                for elem in iterable:
-                    self[elem] = self_get(elem, 0) - 1
-        if kwds:
-            self.subtract(kwds)
-
-    def copy(self):
-        'Return a shallow copy.'
-        return self.__class__(self)
-
-    def __reduce__(self):
-        return self.__class__, (dict(self),)
-
-    def __delitem__(self, elem):
-        'Like dict.__delitem__() but does not raise KeyError for missing values.'
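-        # For example, with c = Counter('aab'), del c['z'] is a silent no-op,
-        # whereas deleting a missing key from a plain dict raises KeyError.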
- if elem in self: - super().__delitem__(elem) - - def __repr__(self): - if not self: - return '%s()' % self.__class__.__name__ - try: - items = ', '.join(map('%r: %r'.__mod__, self.most_common())) - return '%s({%s})' % (self.__class__.__name__, items) - except TypeError: - # handle case where values are not orderable - return '{0}({1!r})'.format(self.__class__.__name__, dict(self)) - - # Multiset-style mathematical operations discussed in: - # Knuth TAOCP Volume II section 4.6.3 exercise 19 - # and at http://en.wikipedia.org/wiki/Multiset - # - # Outputs guaranteed to only include positive counts. - # - # To strip negative and zero counts, add-in an empty counter: - # c += Counter() - - def __add__(self, other): - '''Add counts from two counters. - - >>> Counter('abbb') + Counter('bcc') - Counter({'b': 4, 'c': 2, 'a': 1}) - - ''' - if not isinstance(other, Counter): - return NotImplemented - result = Counter() - for elem, count in self.items(): - newcount = count + other[elem] - if newcount > 0: - result[elem] = newcount - for elem, count in other.items(): - if elem not in self and count > 0: - result[elem] = count - return result - - def __sub__(self, other): - ''' Subtract count, but keep only results with positive counts. - - >>> Counter('abbbc') - Counter('bccd') - Counter({'b': 2, 'a': 1}) - - ''' - if not isinstance(other, Counter): - return NotImplemented - result = Counter() - for elem, count in self.items(): - newcount = count - other[elem] - if newcount > 0: - result[elem] = newcount - for elem, count in other.items(): - if elem not in self and count < 0: - result[elem] = 0 - count - return result - - def __or__(self, other): - '''Union is the maximum of value in either of the input counters. - - >>> Counter('abbb') | Counter('bcc') - Counter({'b': 3, 'c': 2, 'a': 1}) - - ''' - if not isinstance(other, Counter): - return NotImplemented - result = Counter() - for elem, count in self.items(): - other_count = other[elem] - newcount = other_count if count < other_count else count - if newcount > 0: - result[elem] = newcount - for elem, count in other.items(): - if elem not in self and count > 0: - result[elem] = count - return result - - def __and__(self, other): - ''' Intersection is the minimum of corresponding counts. - - >>> Counter('abbb') & Counter('bcc') - Counter({'b': 1}) - - ''' - if not isinstance(other, Counter): - return NotImplemented - result = Counter() - for elem, count in self.items(): - other_count = other[elem] - newcount = count if count < other_count else other_count - if newcount > 0: - result[elem] = newcount - return result - - def __pos__(self): - 'Adds an empty counter, effectively stripping negative and zero counts' - result = Counter() - for elem, count in self.items(): - if count > 0: - result[elem] = count - return result - - def __neg__(self): - '''Subtracts from an empty counter. Strips positive and zero counts, - and flips the sign on negative counts. - - ''' - result = Counter() - for elem, count in self.items(): - if count < 0: - result[elem] = 0 - count - return result - - def _keep_positive(self): - '''Internal method to strip elements with a negative or zero count''' - nonpositive = [elem for elem, count in self.items() if not count > 0] - for elem in nonpositive: - del self[elem] - return self - - def __iadd__(self, other): - '''Inplace add from another counter, keeping only positive counts. 
-
-        >>> c = Counter('abbb')
-        >>> c += Counter('bcc')
-        >>> c
-        Counter({'b': 4, 'c': 2, 'a': 1})
-
-        '''
-        for elem, count in other.items():
-            self[elem] += count
-        return self._keep_positive()
-
-    def __isub__(self, other):
-        '''Inplace subtract counter, but keep only results with positive counts.
-
-        >>> c = Counter('abbbc')
-        >>> c -= Counter('bccd')
-        >>> c
-        Counter({'b': 2, 'a': 1})
-
-        '''
-        for elem, count in other.items():
-            self[elem] -= count
-        return self._keep_positive()
-
-    def __ior__(self, other):
-        '''Inplace union is the maximum of the values from either counter.
-
-        >>> c = Counter('abbb')
-        >>> c |= Counter('bcc')
-        >>> c
-        Counter({'b': 3, 'c': 2, 'a': 1})
-
-        '''
-        for elem, other_count in other.items():
-            count = self[elem]
-            if other_count > count:
-                self[elem] = other_count
-        return self._keep_positive()
-
-    def __iand__(self, other):
-        '''Inplace intersection is the minimum of corresponding counts.
-
-        >>> c = Counter('abbb')
-        >>> c &= Counter('bcc')
-        >>> c
-        Counter({'b': 1})
-
-        '''
-        for elem, count in self.items():
-            other_count = other[elem]
-            if other_count < count:
-                self[elem] = other_count
-        return self._keep_positive()
-
-
-########################################################################
-###  ChainMap (helper for configparser and string.Template)
-########################################################################
-
-class ChainMap(MutableMapping):
-    ''' A ChainMap groups multiple dicts (or other mappings) together
-    to create a single, updateable view.
-
-    The underlying mappings are stored in a list.  That list is public and
-    can be accessed or updated using the *maps* attribute.  There is no
-    other state.
-
-    Lookups search the underlying mappings successively until a key is found.
-    In contrast, writes, updates, and deletions only operate on the first
-    mapping.
-
-    '''
-
-    def __init__(self, *maps):
-        '''Initialize a ChainMap by setting *maps* to the given mappings.
-        If no mappings are provided, a single empty dictionary is used.
-
-        '''
-        self.maps = list(maps) or [{}]          # always at least one map
-
-    def __missing__(self, key):
-        raise KeyError(key)
-
-    def __getitem__(self, key):
-        for mapping in self.maps:
-            try:
-                return mapping[key]             # can't use 'key in mapping' with defaultdict
-            except KeyError:
-                pass
-        return self.__missing__(key)            # support subclasses that define __missing__
-
-    def get(self, key, default=None):
-        return self[key] if key in self else default
-
-    def __len__(self):
-        return len(set().union(*self.maps))     # reuses stored hash values if possible
-
-    def __iter__(self):
-        return iter(set().union(*self.maps))
-
-    def __contains__(self, key):
-        return any(key in m for m in self.maps)
-
-    def __bool__(self):
-        return any(self.maps)
-
-    @_recursive_repr()
-    def __repr__(self):
-        return '{0.__class__.__name__}({1})'.format(
-            self, ', '.join(map(repr, self.maps)))
-
-    @classmethod
-    def fromkeys(cls, iterable, *args):
-        'Create a ChainMap with a single dict created from the iterable.'
-        return cls(dict.fromkeys(iterable, *args))
-
-    def copy(self):
-        'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]'
-        return self.__class__(self.maps[0].copy(), *self.maps[1:])
-
-    __copy__ = copy
-
-    def new_child(self, m=None):                # like Django's Context.push()
-        '''New ChainMap with a new map followed by all previous maps.
-        If no map is provided, an empty dict is used.
-        '''
-        if m is None:
-            m = {}
-        return self.__class__(m, *self.maps)
-
-    @property
-    def parents(self):                          # like Django's Context.pop()
-        'New ChainMap from maps[1:].'
-        return self.__class__(*self.maps[1:])
-
-    def __setitem__(self, key, value):
-        self.maps[0][key] = value
-
-    def __delitem__(self, key):
-        try:
-            del self.maps[0][key]
-        except KeyError:
-            raise KeyError('Key not found in the first mapping: {!r}'.format(key))
-
-    def popitem(self):
-        'Remove and return an item pair from maps[0]. Raise KeyError if maps[0] is empty.'
-        try:
-            return self.maps[0].popitem()
-        except KeyError:
-            raise KeyError('No keys found in the first mapping.')
-
-    def pop(self, key, *args):
-        'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].'
-        try:
-            return self.maps[0].pop(key, *args)
-        except KeyError:
-            raise KeyError('Key not found in the first mapping: {!r}'.format(key))
-
-    def clear(self):
-        'Clear maps[0], leaving maps[1:] intact.'
-        self.maps[0].clear()
-
-
-################################################################################
-### UserDict
-################################################################################
-
-class UserDict(MutableMapping):
-
-    # Start by filling-out the abstract methods
-    def __init__(*args, **kwargs):
-        if not args:
-            raise TypeError("descriptor '__init__' of 'UserDict' object "
-                            "needs an argument")
-        self, *args = args
-        if len(args) > 1:
-            raise TypeError('expected at most 1 arguments, got %d' % len(args))
-        if args:
-            dict = args[0]
-        elif 'dict' in kwargs:
-            dict = kwargs.pop('dict')
-            import warnings
-            warnings.warn("Passing 'dict' as keyword argument is deprecated",
-                          PendingDeprecationWarning, stacklevel=2)
-        else:
-            dict = None
-        self.data = {}
-        if dict is not None:
-            self.update(dict)
-        if len(kwargs):
-            self.update(kwargs)
-    def __len__(self): return len(self.data)
-    def __getitem__(self, key):
-        if key in self.data:
-            return self.data[key]
-        if hasattr(self.__class__, "__missing__"):
-            return self.__class__.__missing__(self, key)
-        raise KeyError(key)
-    def __setitem__(self, key, item): self.data[key] = item
-    def __delitem__(self, key): del self.data[key]
-    def __iter__(self):
-        return iter(self.data)
-
-    # Modify __contains__ to work correctly when __missing__ is present
-    def __contains__(self, key):
-        return key in self.data
-
-    # Now, add the methods in dicts but not in MutableMapping
-    def __repr__(self): return repr(self.data)
-    def copy(self):
-        if self.__class__ is UserDict:
-            return UserDict(self.data.copy())
-        import copy
-        data = self.data
-        try:
-            self.data = {}
-            c = copy.copy(self)
-        finally:
-            self.data = data
-        c.update(self)
-        return c
-    @classmethod
-    def fromkeys(cls, iterable, value=None):
-        d = cls()
-        for key in iterable:
-            d[key] = value
-        return d
-
-
-
-################################################################################
-### UserList
-################################################################################
-
-class UserList(MutableSequence):
-    """A more or less complete user-defined wrapper around list objects."""
-    def __init__(self, initlist=None):
-        self.data = []
-        if initlist is not None:
-            # XXX should this accept an arbitrary sequence?
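-            # In practice the branches below accept any iterable: an exact
-            # list is slice-assigned, another UserList is copied through its
-            # .data, and anything else (e.g. UserList(range(3)) or
-            # UserList('abc')) is materialized with list(initlist).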
- if type(initlist) == type(self.data): - self.data[:] = initlist - elif isinstance(initlist, UserList): - self.data[:] = initlist.data[:] - else: - self.data = list(initlist) - def __repr__(self): return repr(self.data) - def __lt__(self, other): return self.data < self.__cast(other) - def __le__(self, other): return self.data <= self.__cast(other) - def __eq__(self, other): return self.data == self.__cast(other) - def __gt__(self, other): return self.data > self.__cast(other) - def __ge__(self, other): return self.data >= self.__cast(other) - def __cast(self, other): - return other.data if isinstance(other, UserList) else other - def __contains__(self, item): return item in self.data - def __len__(self): return len(self.data) - def __getitem__(self, i): return self.data[i] - def __setitem__(self, i, item): self.data[i] = item - def __delitem__(self, i): del self.data[i] - def __add__(self, other): - if isinstance(other, UserList): - return self.__class__(self.data + other.data) - elif isinstance(other, type(self.data)): - return self.__class__(self.data + other) - return self.__class__(self.data + list(other)) - def __radd__(self, other): - if isinstance(other, UserList): - return self.__class__(other.data + self.data) - elif isinstance(other, type(self.data)): - return self.__class__(other + self.data) - return self.__class__(list(other) + self.data) - def __iadd__(self, other): - if isinstance(other, UserList): - self.data += other.data - elif isinstance(other, type(self.data)): - self.data += other - else: - self.data += list(other) - return self - def __mul__(self, n): - return self.__class__(self.data*n) - __rmul__ = __mul__ - def __imul__(self, n): - self.data *= n - return self - def append(self, item): self.data.append(item) - def insert(self, i, item): self.data.insert(i, item) - def pop(self, i=-1): return self.data.pop(i) - def remove(self, item): self.data.remove(item) - def clear(self): self.data.clear() - def copy(self): return self.__class__(self) - def count(self, item): return self.data.count(item) - def index(self, item, *args): return self.data.index(item, *args) - def reverse(self): self.data.reverse() - def sort(self, *args, **kwds): self.data.sort(*args, **kwds) - def extend(self, other): - if isinstance(other, UserList): - self.data.extend(other.data) - else: - self.data.extend(other) - - - -################################################################################ -### UserString -################################################################################ - -class UserString(Sequence): - def __init__(self, seq): - if isinstance(seq, str): - self.data = seq - elif isinstance(seq, UserString): - self.data = seq.data[:] - else: - self.data = str(seq) - def __str__(self): return str(self.data) - def __repr__(self): return repr(self.data) - def __int__(self): return int(self.data) - def __float__(self): return float(self.data) - def __complex__(self): return complex(self.data) - def __hash__(self): return hash(self.data) - def __getnewargs__(self): - return (self.data[:],) - - def __eq__(self, string): - if isinstance(string, UserString): - return self.data == string.data - return self.data == string - def __lt__(self, string): - if isinstance(string, UserString): - return self.data < string.data - return self.data < string - def __le__(self, string): - if isinstance(string, UserString): - return self.data <= string.data - return self.data <= string - def __gt__(self, string): - if isinstance(string, UserString): - return self.data > string.data - return 
self.data > string
-    def __ge__(self, string):
-        if isinstance(string, UserString):
-            return self.data >= string.data
-        return self.data >= string
-
-    def __contains__(self, char):
-        if isinstance(char, UserString):
-            char = char.data
-        return char in self.data
-
-    def __len__(self): return len(self.data)
-    def __getitem__(self, index): return self.__class__(self.data[index])
-    def __add__(self, other):
-        if isinstance(other, UserString):
-            return self.__class__(self.data + other.data)
-        elif isinstance(other, str):
-            return self.__class__(self.data + other)
-        return self.__class__(self.data + str(other))
-    def __radd__(self, other):
-        if isinstance(other, str):
-            return self.__class__(other + self.data)
-        return self.__class__(str(other) + self.data)
-    def __mul__(self, n):
-        return self.__class__(self.data*n)
-    __rmul__ = __mul__
-    def __mod__(self, args):
-        return self.__class__(self.data % args)
-    def __rmod__(self, format):
-        return self.__class__(format % self.data)
-
-    # the following methods are defined in alphabetical order:
-    def capitalize(self): return self.__class__(self.data.capitalize())
-    def casefold(self):
-        return self.__class__(self.data.casefold())
-    def center(self, width, *args):
-        return self.__class__(self.data.center(width, *args))
-    def count(self, sub, start=0, end=_sys.maxsize):
-        if isinstance(sub, UserString):
-            sub = sub.data
-        return self.data.count(sub, start, end)
-    def encode(self, encoding=None, errors=None): # XXX improve this?
-        if encoding:
-            if errors:
-                return self.__class__(self.data.encode(encoding, errors))
-            return self.__class__(self.data.encode(encoding))
-        return self.__class__(self.data.encode())
-    def endswith(self, suffix, start=0, end=_sys.maxsize):
-        return self.data.endswith(suffix, start, end)
-    def expandtabs(self, tabsize=8):
-        return self.__class__(self.data.expandtabs(tabsize))
-    def find(self, sub, start=0, end=_sys.maxsize):
-        if isinstance(sub, UserString):
-            sub = sub.data
-        return self.data.find(sub, start, end)
-    def format(self, *args, **kwds):
-        return self.data.format(*args, **kwds)
-    def format_map(self, mapping):
-        return self.data.format_map(mapping)
-    def index(self, sub, start=0, end=_sys.maxsize):
-        return self.data.index(sub, start, end)
-    def isalpha(self): return self.data.isalpha()
-    def isalnum(self): return self.data.isalnum()
-    def isdecimal(self): return self.data.isdecimal()
-    def isdigit(self): return self.data.isdigit()
-    def isidentifier(self): return self.data.isidentifier()
-    def islower(self): return self.data.islower()
-    def isnumeric(self): return self.data.isnumeric()
-    def isprintable(self): return self.data.isprintable()
-    def isspace(self): return self.data.isspace()
-    def istitle(self): return self.data.istitle()
-    def isupper(self): return self.data.isupper()
-    def join(self, seq): return self.data.join(seq)
-    def ljust(self, width, *args):
-        return self.__class__(self.data.ljust(width, *args))
-    def lower(self): return self.__class__(self.data.lower())
-    def lstrip(self, chars=None): return self.__class__(self.data.lstrip(chars))
-    maketrans = str.maketrans
-    def partition(self, sep):
-        return self.data.partition(sep)
-    def replace(self, old, new, maxsplit=-1):
-        if isinstance(old, UserString):
-            old = old.data
-        if isinstance(new, UserString):
-            new = new.data
-        return self.__class__(self.data.replace(old, new, maxsplit))
-    def rfind(self, sub, start=0, end=_sys.maxsize):
-        if isinstance(sub, UserString):
-            sub = sub.data
-        return self.data.rfind(sub, start, end)
-    def rindex(self, sub, start=0,
end=_sys.maxsize): - return self.data.rindex(sub, start, end) - def rjust(self, width, *args): - return self.__class__(self.data.rjust(width, *args)) - def rpartition(self, sep): - return self.data.rpartition(sep) - def rstrip(self, chars=None): - return self.__class__(self.data.rstrip(chars)) - def split(self, sep=None, maxsplit=-1): - return self.data.split(sep, maxsplit) - def rsplit(self, sep=None, maxsplit=-1): - return self.data.rsplit(sep, maxsplit) - def splitlines(self, keepends=False): return self.data.splitlines(keepends) - def startswith(self, prefix, start=0, end=_sys.maxsize): - return self.data.startswith(prefix, start, end) - def strip(self, chars=None): return self.__class__(self.data.strip(chars)) - def swapcase(self): return self.__class__(self.data.swapcase()) - def title(self): return self.__class__(self.data.title()) - def translate(self, *args): - return self.__class__(self.data.translate(*args)) - def upper(self): return self.__class__(self.data.upper()) - def zfill(self, width): return self.__class__(self.data.zfill(width)) diff --git a/typing_extensions/test_data/python-3.5.1/collections/__main__.py b/typing_extensions/test_data/python-3.5.1/collections/__main__.py deleted file mode 100644 index 763e38e0..00000000 --- a/typing_extensions/test_data/python-3.5.1/collections/__main__.py +++ /dev/null @@ -1,38 +0,0 @@ -################################################################################ -### Simple tests -################################################################################ - -# verify that instances can be pickled -from collections import namedtuple -from pickle import loads, dumps -Point = namedtuple('Point', 'x, y', True) -p = Point(x=10, y=20) -assert p == loads(dumps(p)) - -# test and demonstrate ability to override methods -class Point(namedtuple('Point', 'x y')): - __slots__ = () - @property - def hypot(self): - return (self.x ** 2 + self.y ** 2) ** 0.5 - def __str__(self): - return 'Point: x=%6.3f y=%6.3f hypot=%6.3f' % (self.x, self.y, self.hypot) - -for p in Point(3, 4), Point(14, 5/7.): - print (p) - -class Point(namedtuple('Point', 'x y')): - 'Point class with optimized _make() and _replace() without error-checking' - __slots__ = () - _make = classmethod(tuple.__new__) - def _replace(self, _map=map, **kwds): - return self._make(_map(kwds.get, ('x', 'y'), self)) - -print(Point(11, 22)._replace(x=100)) - -Point3D = namedtuple('Point3D', Point._fields + ('z',)) -print(Point3D.__doc__) - -import doctest, collections -TestResults = namedtuple('TestResults', 'failed attempted') -print(TestResults(*doctest.testmod(collections))) diff --git a/typing_extensions/test_data/python-3.5.1/collections/abc.py b/typing_extensions/test_data/python-3.5.1/collections/abc.py deleted file mode 100644 index 891600d1..00000000 --- a/typing_extensions/test_data/python-3.5.1/collections/abc.py +++ /dev/null @@ -1,2 +0,0 @@ -from _collections_abc import * -from _collections_abc import __all__ diff --git a/typing_extensions/test_data/python-3.5.1/typing.py b/typing_extensions/test_data/python-3.5.1/typing.py deleted file mode 100644 index 1757f138..00000000 --- a/typing_extensions/test_data/python-3.5.1/typing.py +++ /dev/null @@ -1,1656 +0,0 @@ -# TODO nits: -# Get rid of asserts that are the caller's fault. -# Docstrings (e.g. ABCs). - -import abc -from abc import abstractmethod, abstractproperty -import collections -import functools -import re as stdlib_re # Avoid confusion with the re we export. 
-import sys -import types -try: - import collections.abc as collections_abc -except ImportError: - import collections as collections_abc # Fallback for PY3.2. - - -# Please keep __all__ alphabetized within each category. -__all__ = [ - # Super-special typing primitives. - 'Any', - 'Callable', - 'Generic', - 'Optional', - 'TypeVar', - 'Union', - 'Tuple', - - # ABCs (from collections.abc). - 'AbstractSet', # collections.abc.Set. - 'ByteString', - 'Container', - 'Hashable', - 'ItemsView', - 'Iterable', - 'Iterator', - 'KeysView', - 'Mapping', - 'MappingView', - 'MutableMapping', - 'MutableSequence', - 'MutableSet', - 'Sequence', - 'Sized', - 'ValuesView', - - # Structural checks, a.k.a. protocols. - 'Reversible', - 'SupportsAbs', - 'SupportsFloat', - 'SupportsInt', - 'SupportsRound', - - # Concrete collection types. - 'Dict', - 'List', - 'Set', - 'NamedTuple', # Not really a type. - 'Generator', - - # One-off things. - 'AnyStr', - 'cast', - 'get_type_hints', - 'no_type_check', - 'no_type_check_decorator', - 'overload', - - # Submodules. - 'io', - 're', -] - - -def _qualname(x): - if sys.version_info[:2] >= (3, 3): - return x.__qualname__ - else: - # Fall back to just name. - return x.__name__ - - -class TypingMeta(type): - """Metaclass for every type defined below. - - This overrides __new__() to require an extra keyword parameter - '_root', which serves as a guard against naive subclassing of the - typing classes. Any legitimate class defined using a metaclass - derived from TypingMeta (including internal subclasses created by - e.g. Union[X, Y]) must pass _root=True. - - This also defines a dummy constructor (all the work is done in - __new__) and a nicer repr(). - """ - - _is_protocol = False - - def __new__(cls, name, bases, namespace, *, _root=False): - if not _root: - raise TypeError("Cannot subclass %s" % - (', '.join(map(_type_repr, bases)) or '()')) - return super().__new__(cls, name, bases, namespace) - - def __init__(self, *args, **kwds): - pass - - def _eval_type(self, globalns, localns): - """Override this in subclasses to interpret forward references. - - For example, Union['C'] is internally stored as - Union[_ForwardRef('C')], which should evaluate to _Union[C], - where C is an object found in globalns or localns (searching - localns first, of course). 
- """ - return self - - def _has_type_var(self): - return False - - def __repr__(self): - return '%s.%s' % (self.__module__, _qualname(self)) - - -class Final: - """Mix-in class to prevent instantiation.""" - - __slots__ = () - - def __new__(self, *args, **kwds): - raise TypeError("Cannot instantiate %r" % self.__class__) - - -class _ForwardRef(TypingMeta): - """Wrapper to hold a forward reference.""" - - def __new__(cls, arg): - if not isinstance(arg, str): - raise TypeError('ForwardRef must be a string -- got %r' % (arg,)) - try: - code = compile(arg, '', 'eval') - except SyntaxError: - raise SyntaxError('ForwardRef must be an expression -- got %r' % - (arg,)) - self = super().__new__(cls, arg, (), {}, _root=True) - self.__forward_arg__ = arg - self.__forward_code__ = code - self.__forward_evaluated__ = False - self.__forward_value__ = None - typing_globals = globals() - frame = sys._getframe(1) - while frame is not None and frame.f_globals is typing_globals: - frame = frame.f_back - assert frame is not None - self.__forward_frame__ = frame - return self - - def _eval_type(self, globalns, localns): - if not isinstance(localns, dict): - raise TypeError('ForwardRef localns must be a dict -- got %r' % - (localns,)) - if not isinstance(globalns, dict): - raise TypeError('ForwardRef globalns must be a dict -- got %r' % - (globalns,)) - if not self.__forward_evaluated__: - if globalns is None and localns is None: - globalns = localns = {} - elif globalns is None: - globalns = localns - elif localns is None: - localns = globalns - self.__forward_value__ = _type_check( - eval(self.__forward_code__, globalns, localns), - "Forward references must evaluate to types.") - self.__forward_evaluated__ = True - return self.__forward_value__ - - def __instancecheck__(self, obj): - raise TypeError("Forward references cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - if not self.__forward_evaluated__: - globalns = self.__forward_frame__.f_globals - localns = self.__forward_frame__.f_locals - try: - self._eval_type(globalns, localns) - except NameError: - return False # Too early. - return issubclass(cls, self.__forward_value__) - - def __repr__(self): - return '_ForwardRef(%r)' % (self.__forward_arg__,) - - -class _TypeAlias: - """Internal helper class for defining generic variants of concrete types. - - Note that this is not a type; let's call it a pseudo-type. It can - be used in instance and subclass checks, e.g. isinstance(m, Match) - or issubclass(type(m), Match). However, it cannot be itself the - target of an issubclass() call; e.g. issubclass(Match, C) (for - some arbitrary class C) raises TypeError rather than returning - False. - """ - - __slots__ = ('name', 'type_var', 'impl_type', 'type_checker') - - def __new__(cls, *args, **kwds): - """Constructor. - - This only exists to give a better error message in case - someone tries to subclass a type alias (not a good idea). - """ - if (len(args) == 3 and - isinstance(args[0], str) and - isinstance(args[1], tuple)): - # Close enough. - raise TypeError("A type alias cannot be subclassed") - return object.__new__(cls) - - def __init__(self, name, type_var, impl_type, type_checker): - """Initializer. - - Args: - name: The name, e.g. 'Pattern'. - type_var: The type parameter, e.g. AnyStr, or the - specific type, e.g. str. - impl_type: The implementation type. - type_checker: Function that takes an impl_type instance. - and returns a value that should be a type_var instance. 
- """ - assert isinstance(name, str), repr(name) - assert isinstance(type_var, type), repr(type_var) - assert isinstance(impl_type, type), repr(impl_type) - assert not isinstance(impl_type, TypingMeta), repr(impl_type) - self.name = name - self.type_var = type_var - self.impl_type = impl_type - self.type_checker = type_checker - - def __repr__(self): - return "%s[%s]" % (self.name, _type_repr(self.type_var)) - - def __getitem__(self, parameter): - assert isinstance(parameter, type), repr(parameter) - if not isinstance(self.type_var, TypeVar): - raise TypeError("%s cannot be further parameterized." % self) - if self.type_var.__constraints__: - if not issubclass(parameter, Union[self.type_var.__constraints__]): - raise TypeError("%s is not a valid substitution for %s." % - (parameter, self.type_var)) - return self.__class__(self.name, parameter, - self.impl_type, self.type_checker) - - def __instancecheck__(self, obj): - raise TypeError("Type aliases cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - if cls is Any: - return True - if isinstance(cls, _TypeAlias): - # Covariance. For now, we compare by name. - return (cls.name == self.name and - issubclass(cls.type_var, self.type_var)) - else: - # Note that this is too lenient, because the - # implementation type doesn't carry information about - # whether it is about bytes or str (for example). - return issubclass(cls, self.impl_type) - - -def _has_type_var(t): - return t is not None and isinstance(t, TypingMeta) and t._has_type_var() - - -def _eval_type(t, globalns, localns): - if isinstance(t, TypingMeta): - return t._eval_type(globalns, localns) - else: - return t - - -def _type_check(arg, msg): - """Check that the argument is a type, and return it. - - As a special case, accept None and return type(None) instead. - Also, _TypeAlias instances (e.g. Match, Pattern) are acceptable. - - The msg argument is a human-readable error message, e.g. - - "Union[arg, ...]: arg should be a type." - - We append the repr() of the actual value (truncated to 100 chars). - """ - if arg is None: - return type(None) - if isinstance(arg, str): - arg = _ForwardRef(arg) - if not isinstance(arg, (type, _TypeAlias)): - raise TypeError(msg + " Got %.100r." % (arg,)) - return arg - - -def _type_repr(obj): - """Return the repr() of an object, special-casing types. - - If obj is a type, we return a shorter version than the default - type.__repr__, based on the module and qualified name, which is - typically enough to uniquely identify a type. For everything - else, we fall back on repr(obj). - """ - if isinstance(obj, type) and not isinstance(obj, TypingMeta): - if obj.__module__ == 'builtins': - return _qualname(obj) - else: - return '%s.%s' % (obj.__module__, _qualname(obj)) - else: - return repr(obj) - - -class AnyMeta(TypingMeta): - """Metaclass for Any.""" - - def __new__(cls, name, bases, namespace, _root=False): - self = super().__new__(cls, name, bases, namespace, _root=_root) - return self - - def __instancecheck__(self, obj): - raise TypeError("Any cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - if not isinstance(cls, type): - return super().__subclasscheck__(cls) # To TypeError. - return True - - -class Any(Final, metaclass=AnyMeta, _root=True): - """Special type indicating an unconstrained type. - - - Any object is an instance of Any. - - Any class is a subclass of Any. - - As a special case, Any and object are subclasses of each other. 
- """ - - __slots__ = () - - -class TypeVar(TypingMeta, metaclass=TypingMeta, _root=True): - """Type variable. - - Usage:: - - T = TypeVar('T') # Can be anything - A = TypeVar('A', str, bytes) # Must be str or bytes - - Type variables exist primarily for the benefit of static type - checkers. They serve as the parameters for generic types as well - as for generic function definitions. See class Generic for more - information on generic types. Generic functions work as follows: - - def repeat(x: T, n: int) -> Sequence[T]: - '''Return a list containing n references to x.''' - return [x]*n - - def longest(x: A, y: A) -> A: - '''Return the longest of two strings.''' - return x if len(x) >= len(y) else y - - The latter example's signature is essentially the overloading - of (str, str) -> str and (bytes, bytes) -> bytes. Also note - that if the arguments are instances of some subclass of str, - the return type is still plain str. - - At runtime, isinstance(x, T) will raise TypeError. However, - issubclass(C, T) is true for any class C, and issubclass(str, A) - and issubclass(bytes, A) are true, and issubclass(int, A) is - false. - - Type variables may be marked covariant or contravariant by passing - covariant=True or contravariant=True. See PEP 484 for more - details. By default type variables are invariant. - - Type variables can be introspected. e.g.: - - T.__name__ == 'T' - T.__constraints__ == () - T.__covariant__ == False - T.__contravariant__ = False - A.__constraints__ == (str, bytes) - """ - - def __new__(cls, name, *constraints, bound=None, - covariant=False, contravariant=False): - self = super().__new__(cls, name, (Final,), {}, _root=True) - if covariant and contravariant: - raise ValueError("Bivariant type variables are not supported.") - self.__covariant__ = bool(covariant) - self.__contravariant__ = bool(contravariant) - if constraints and bound is not None: - raise TypeError("Constraints cannot be combined with bound=...") - if constraints and len(constraints) == 1: - raise TypeError("A single constraint is not allowed") - msg = "TypeVar(name, constraint, ...): constraints must be types." - self.__constraints__ = tuple(_type_check(t, msg) for t in constraints) - if bound: - self.__bound__ = _type_check(bound, "Bound must be a type.") - else: - self.__bound__ = None - return self - - def _has_type_var(self): - return True - - def __repr__(self): - if self.__covariant__: - prefix = '+' - elif self.__contravariant__: - prefix = '-' - else: - prefix = '~' - return prefix + self.__name__ - - def __instancecheck__(self, instance): - raise TypeError("Type variables cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - # TODO: Make this raise TypeError too? - if cls is self: - return True - if cls is Any: - return True - if self.__bound__ is not None: - return issubclass(cls, self.__bound__) - if self.__constraints__: - return any(issubclass(cls, c) for c in self.__constraints__) - return True - - -# Some unconstrained type variables. These are used by the container types. -T = TypeVar('T') # Any type. -KT = TypeVar('KT') # Key type. -VT = TypeVar('VT') # Value type. -T_co = TypeVar('T_co', covariant=True) # Any type covariant containers. -V_co = TypeVar('V_co', covariant=True) # Any type covariant containers. -VT_co = TypeVar('VT_co', covariant=True) # Value type covariant containers. -T_contra = TypeVar('T_contra', contravariant=True) # Ditto contravariant. - -# A useful type variable with constraints. This represents string types. 
-# TODO: What about bytearray, memoryview? -AnyStr = TypeVar('AnyStr', bytes, str) - - -class UnionMeta(TypingMeta): - """Metaclass for Union.""" - - def __new__(cls, name, bases, namespace, parameters=None, _root=False): - if parameters is None: - return super().__new__(cls, name, bases, namespace, _root=_root) - if not isinstance(parameters, tuple): - raise TypeError("Expected parameters=") - # Flatten out Union[Union[...], ...] and type-check non-Union args. - params = [] - msg = "Union[arg, ...]: each arg must be a type." - for p in parameters: - if isinstance(p, UnionMeta): - params.extend(p.__union_params__) - else: - params.append(_type_check(p, msg)) - # Weed out strict duplicates, preserving the first of each occurrence. - all_params = set(params) - if len(all_params) < len(params): - new_params = [] - for t in params: - if t in all_params: - new_params.append(t) - all_params.remove(t) - params = new_params - assert not all_params, all_params - # Weed out subclasses. - # E.g. Union[int, Employee, Manager] == Union[int, Employee]. - # If Any or object is present it will be the sole survivor. - # If both Any and object are present, Any wins. - # Never discard type variables, except against Any. - # (In particular, Union[str, AnyStr] != AnyStr.) - all_params = set(params) - for t1 in params: - if t1 is Any: - return Any - if isinstance(t1, TypeVar): - continue - if isinstance(t1, _TypeAlias): - # _TypeAlias is not a real class. - continue - if any(issubclass(t1, t2) - for t2 in all_params - {t1} if not isinstance(t2, TypeVar)): - all_params.remove(t1) - # It's not a union if there's only one type left. - if len(all_params) == 1: - return all_params.pop() - # Create a new class with these params. - self = super().__new__(cls, name, bases, {}, _root=True) - self.__union_params__ = tuple(t for t in params if t in all_params) - self.__union_set_params__ = frozenset(self.__union_params__) - return self - - def _eval_type(self, globalns, localns): - p = tuple(_eval_type(t, globalns, localns) - for t in self.__union_params__) - if p == self.__union_params__: - return self - else: - return self.__class__(self.__name__, self.__bases__, {}, - p, _root=True) - - def _has_type_var(self): - if self.__union_params__: - for t in self.__union_params__: - if _has_type_var(t): - return True - return False - - def __repr__(self): - r = super().__repr__() - if self.__union_params__: - r += '[%s]' % (', '.join(_type_repr(t) - for t in self.__union_params__)) - return r - - def __getitem__(self, parameters): - if self.__union_params__ is not None: - raise TypeError( - "Cannot subscript an existing Union. 
Use Union[u, t] instead.") - if parameters == (): - raise TypeError("Cannot take a Union of no types.") - if not isinstance(parameters, tuple): - parameters = (parameters,) - return self.__class__(self.__name__, self.__bases__, - dict(self.__dict__), parameters, _root=True) - - def __eq__(self, other): - if not isinstance(other, UnionMeta): - return NotImplemented - return self.__union_set_params__ == other.__union_set_params__ - - def __hash__(self): - return hash(self.__union_set_params__) - - def __instancecheck__(self, obj): - raise TypeError("Unions cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - if cls is Any: - return True - if self.__union_params__ is None: - return isinstance(cls, UnionMeta) - elif isinstance(cls, UnionMeta): - if cls.__union_params__ is None: - return False - return all(issubclass(c, self) for c in (cls.__union_params__)) - elif isinstance(cls, TypeVar): - if cls in self.__union_params__: - return True - if cls.__constraints__: - return issubclass(Union[cls.__constraints__], self) - return False - else: - return any(issubclass(cls, t) for t in self.__union_params__) - - -class Union(Final, metaclass=UnionMeta, _root=True): - """Union type; Union[X, Y] means either X or Y. - - To define a union, use e.g. Union[int, str]. Details: - - - The arguments must be types and there must be at least one. - - - None as an argument is a special case and is replaced by - type(None). - - - Unions of unions are flattened, e.g.:: - - Union[Union[int, str], float] == Union[int, str, float] - - - Unions of a single argument vanish, e.g.:: - - Union[int] == int # The constructor actually returns int - - - Redundant arguments are skipped, e.g.:: - - Union[int, str, int] == Union[int, str] - - - When comparing unions, the argument order is ignored, e.g.:: - - Union[int, str] == Union[str, int] - - - When two arguments have a subclass relationship, the least - derived argument is kept, e.g.:: - - class Employee: pass - class Manager(Employee): pass - Union[int, Employee, Manager] == Union[int, Employee] - Union[Manager, int, Employee] == Union[int, Employee] - Union[Employee, Manager] == Employee - - - Corollary: if Any is present it is the sole survivor, e.g.:: - - Union[int, Any] == Any - - - Similar for object:: - - Union[int, object] == object - - - To cut a tie: Union[object, Any] == Union[Any, object] == Any. - - - You cannot subclass or instantiate a union. - - - You cannot write Union[X][Y] (what would it mean?). - - - You can use Optional[X] as a shorthand for Union[X, None]. - """ - - # Unsubscripted Union type has params set to None. - __union_params__ = None - __union_set_params__ = None - - -class OptionalMeta(TypingMeta): - """Metaclass for Optional.""" - - def __new__(cls, name, bases, namespace, _root=False): - return super().__new__(cls, name, bases, namespace, _root=_root) - - def __getitem__(self, arg): - arg = _type_check(arg, "Optional[t] requires a single type.") - return Union[arg, type(None)] - - -class Optional(Final, metaclass=OptionalMeta, _root=True): - """Optional type. - - Optional[X] is equivalent to Union[X, type(None)]. 
- """ - - __slots__ = () - - -class TupleMeta(TypingMeta): - """Metaclass for Tuple.""" - - def __new__(cls, name, bases, namespace, parameters=None, - use_ellipsis=False, _root=False): - self = super().__new__(cls, name, bases, namespace, _root=_root) - self.__tuple_params__ = parameters - self.__tuple_use_ellipsis__ = use_ellipsis - return self - - def _has_type_var(self): - if self.__tuple_params__: - for t in self.__tuple_params__: - if _has_type_var(t): - return True - return False - - def _eval_type(self, globalns, localns): - tp = self.__tuple_params__ - if tp is None: - return self - p = tuple(_eval_type(t, globalns, localns) for t in tp) - if p == self.__tuple_params__: - return self - else: - return self.__class__(self.__name__, self.__bases__, {}, - p, _root=True) - - def __repr__(self): - r = super().__repr__() - if self.__tuple_params__ is not None: - params = [_type_repr(p) for p in self.__tuple_params__] - if self.__tuple_use_ellipsis__: - params.append('...') - r += '[%s]' % ( - ', '.join(params)) - return r - - def __getitem__(self, parameters): - if self.__tuple_params__ is not None: - raise TypeError("Cannot re-parameterize %r" % (self,)) - if not isinstance(parameters, tuple): - parameters = (parameters,) - if len(parameters) == 2 and parameters[1] == Ellipsis: - parameters = parameters[:1] - use_ellipsis = True - msg = "Tuple[t, ...]: t must be a type." - else: - use_ellipsis = False - msg = "Tuple[t0, t1, ...]: each t must be a type." - parameters = tuple(_type_check(p, msg) for p in parameters) - return self.__class__(self.__name__, self.__bases__, - dict(self.__dict__), parameters, - use_ellipsis=use_ellipsis, _root=True) - - def __eq__(self, other): - if not isinstance(other, TupleMeta): - return NotImplemented - return self.__tuple_params__ == other.__tuple_params__ - - def __hash__(self): - return hash(self.__tuple_params__) - - def __instancecheck__(self, obj): - raise TypeError("Tuples cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - if cls is Any: - return True - if not isinstance(cls, type): - return super().__subclasscheck__(cls) # To TypeError. - if issubclass(cls, tuple): - return True # Special case. - if not isinstance(cls, TupleMeta): - return super().__subclasscheck__(cls) # False. - if self.__tuple_params__ is None: - return True - if cls.__tuple_params__ is None: - return False # ??? - if cls.__tuple_use_ellipsis__ != self.__tuple_use_ellipsis__: - return False - # Covariance. - return (len(self.__tuple_params__) == len(cls.__tuple_params__) and - all(issubclass(x, p) - for x, p in zip(cls.__tuple_params__, - self.__tuple_params__))) - - -class Tuple(Final, metaclass=TupleMeta, _root=True): - """Tuple type; Tuple[X, Y] is the cross-product type of X and Y. - - Example: Tuple[T1, T2] is a tuple of two elements corresponding - to type variables T1 and T2. Tuple[int, float, str] is a tuple - of an int, a float and a string. - - To specify a variable-length tuple of homogeneous type, use Sequence[T]. - """ - - __slots__ = () - - -class CallableMeta(TypingMeta): - """Metaclass for Callable.""" - - def __new__(cls, name, bases, namespace, _root=False, - args=None, result=None): - if args is None and result is None: - pass # Must be 'class Callable'. - else: - if args is not Ellipsis: - if not isinstance(args, list): - raise TypeError("Callable[args, result]: " - "args must be a list." - " Got %.100r." % (args,)) - msg = "Callable[[arg, ...], result]: each arg must be a type." 
- args = tuple(_type_check(arg, msg) for arg in args) - msg = "Callable[args, result]: result must be a type." - result = _type_check(result, msg) - self = super().__new__(cls, name, bases, namespace, _root=_root) - self.__args__ = args - self.__result__ = result - return self - - def _has_type_var(self): - if self.__args__: - for t in self.__args__: - if _has_type_var(t): - return True - return _has_type_var(self.__result__) - - def _eval_type(self, globalns, localns): - if self.__args__ is None and self.__result__ is None: - return self - if self.__args__ is Ellipsis: - args = self.__args__ - else: - args = [_eval_type(t, globalns, localns) for t in self.__args__] - result = _eval_type(self.__result__, globalns, localns) - if args == self.__args__ and result == self.__result__: - return self - else: - return self.__class__(self.__name__, self.__bases__, {}, - args=args, result=result, _root=True) - - def __repr__(self): - r = super().__repr__() - if self.__args__ is not None or self.__result__ is not None: - if self.__args__ is Ellipsis: - args_r = '...' - else: - args_r = '[%s]' % ', '.join(_type_repr(t) - for t in self.__args__) - r += '[%s, %s]' % (args_r, _type_repr(self.__result__)) - return r - - def __getitem__(self, parameters): - if self.__args__ is not None or self.__result__ is not None: - raise TypeError("This Callable type is already parameterized.") - if not isinstance(parameters, tuple) or len(parameters) != 2: - raise TypeError( - "Callable must be used as Callable[[arg, ...], result].") - args, result = parameters - return self.__class__(self.__name__, self.__bases__, - dict(self.__dict__), _root=True, - args=args, result=result) - - def __eq__(self, other): - if not isinstance(other, CallableMeta): - return NotImplemented - return (self.__args__ == other.__args__ and - self.__result__ == other.__result__) - - def __hash__(self): - return hash(self.__args__) ^ hash(self.__result__) - - def __instancecheck__(self, obj): - # For unparametrized Callable we allow this, because - # typing.Callable should be equivalent to - # collections.abc.Callable. - if self.__args__ is None and self.__result__ is None: - return isinstance(obj, collections_abc.Callable) - else: - raise TypeError("Callable[] cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - if cls is Any: - return True - if not isinstance(cls, CallableMeta): - return super().__subclasscheck__(cls) - if self.__args__ is None and self.__result__ is None: - return True - # We're not doing covariance or contravariance -- this is *invariance*. - return self == cls - - -class Callable(Final, metaclass=CallableMeta, _root=True): - """Callable type; Callable[[int], str] is a function of (int) -> str. - - The subscription syntax must always be used with exactly two - values: the argument list and the return type. The argument list - must be a list of types; the return type must be a single type. - - There is no syntax to indicate optional or keyword arguments, - such function types are rarely used as callback types. - """ - - __slots__ = () - - -def _gorg(a): - """Return the farthest origin of a generic class.""" - assert isinstance(a, GenericMeta) - while a.__origin__ is not None: - a = a.__origin__ - return a - - -def _geqv(a, b): - """Return whether two generic classes are equivalent. - - The intention is to consider generic class X and any of its - parameterized forms (X[T], X[int], etc.) as equivalent. - - However, X is not equivalent to a subclass of X. 
- - The relation is reflexive, symmetric and transitive. - """ - assert isinstance(a, GenericMeta) and isinstance(b, GenericMeta) - # Reduce each to its origin. - return _gorg(a) is _gorg(b) - - -class GenericMeta(TypingMeta, abc.ABCMeta): - """Metaclass for generic types.""" - - # TODO: Constrain more how Generic is used; only a few - # standard patterns should be allowed. - - # TODO: Use a more precise rule than matching __name__ to decide - # whether two classes are the same. Also, save the formal - # parameters. (These things are related! A solution lies in - # using origin.) - - __extra__ = None - - def __new__(cls, name, bases, namespace, - parameters=None, origin=None, extra=None): - if parameters is None: - # Extract parameters from direct base classes. Only - # direct bases are considered and only those that are - # themselves generic, and parameterized with type - # variables. Don't use bases like Any, Union, Tuple, - # Callable or type variables. - params = None - for base in bases: - if isinstance(base, TypingMeta): - if not isinstance(base, GenericMeta): - raise TypeError( - "You cannot inherit from magic class %s" % - repr(base)) - if base.__parameters__ is None: - continue # The base is unparameterized. - for bp in base.__parameters__: - if _has_type_var(bp) and not isinstance(bp, TypeVar): - raise TypeError( - "Cannot inherit from a generic class " - "parameterized with " - "non-type-variable %s" % bp) - if params is None: - params = [] - if bp not in params: - params.append(bp) - if params is not None: - parameters = tuple(params) - self = super().__new__(cls, name, bases, namespace, _root=True) - self.__parameters__ = parameters - if extra is not None: - self.__extra__ = extra - # Else __extra__ is inherited, eventually from the - # (meta-)class default above. - self.__origin__ = origin - return self - - def _has_type_var(self): - if self.__parameters__: - for t in self.__parameters__: - if _has_type_var(t): - return True - return False - - def __repr__(self): - r = super().__repr__() - if self.__parameters__ is not None: - r += '[%s]' % ( - ', '.join(_type_repr(p) for p in self.__parameters__)) - return r - - def __eq__(self, other): - if not isinstance(other, GenericMeta): - return NotImplemented - return (_geqv(self, other) and - self.__parameters__ == other.__parameters__) - - def __hash__(self): - return hash((self.__name__, self.__parameters__)) - - def __getitem__(self, params): - if not isinstance(params, tuple): - params = (params,) - if not params: - raise TypeError("Cannot have empty parameter list") - msg = "Parameters to generic types must be types." - params = tuple(_type_check(p, msg) for p in params) - if self.__parameters__ is None: - for p in params: - if not isinstance(p, TypeVar): - raise TypeError("Initial parameters must be " - "type variables; got %s" % p) - if len(set(params)) != len(params): - raise TypeError( - "All type variables in Generic[...] must be distinct.") - else: - if len(params) != len(self.__parameters__): - raise TypeError("Cannot change parameter count from %d to %d" % - (len(self.__parameters__), len(params))) - for new, old in zip(params, self.__parameters__): - if isinstance(old, TypeVar): - if not old.__constraints__: - # Substituting for an unconstrained TypeVar is OK. - continue - if issubclass(new, Union[old.__constraints__]): - # Specializing a constrained type variable is OK. 
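-                        # (For example, substituting str for AnyStr passes,
-                        # since issubclass(str, Union[str, bytes]) holds under
-                        # this module's subclass rules.)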
-                        continue
-                if not issubclass(new, old):
-                    raise TypeError(
-                        "Cannot substitute %s for %s in %s" %
-                        (_type_repr(new), _type_repr(old), self))
-
-        return self.__class__(self.__name__, (self,) + self.__bases__,
-                              dict(self.__dict__),
-                              parameters=params,
-                              origin=self,
-                              extra=self.__extra__)
-
-    def __instancecheck__(self, instance):
-        # Since we extend ABC.__subclasscheck__ and
-        # ABC.__instancecheck__ inlines the cache checking done by the
-        # latter, we must extend __instancecheck__ too. For simplicity
-        # we just skip the cache check -- instance checks for generic
-        # classes are supposed to be rare anyways.
-        return self.__subclasscheck__(instance.__class__)
-
-    def __subclasscheck__(self, cls):
-        if cls is Any:
-            return True
-        if isinstance(cls, GenericMeta):
-            # For a class C(Generic[T]) where T is co-variant,
-            # C[X] is a subclass of C[Y] iff X is a subclass of Y.
-            origin = self.__origin__
-            if origin is not None and origin is cls.__origin__:
-                assert len(self.__parameters__) == len(origin.__parameters__)
-                assert len(cls.__parameters__) == len(origin.__parameters__)
-                for p_self, p_cls, p_origin in zip(self.__parameters__,
-                                                   cls.__parameters__,
-                                                   origin.__parameters__):
-                    if isinstance(p_origin, TypeVar):
-                        if p_origin.__covariant__:
-                            # Covariant -- p_cls must be a subclass of p_self.
-                            if not issubclass(p_cls, p_self):
-                                break
-                        elif p_origin.__contravariant__:
-                            # Contravariant. I think it's the opposite. :-)
-                            if not issubclass(p_self, p_cls):
-                                break
-                        else:
-                            # Invariant -- p_cls and p_self must equal.
-                            if p_self != p_cls:
-                                break
-                    else:
-                        # If the origin's parameter is not a typevar,
-                        # insist on invariance.
-                        if p_self != p_cls:
-                            break
-                else:
-                    return True
-                # If we break out of the loop, the superclass gets a chance.
-        if super().__subclasscheck__(cls):
-            return True
-        if self.__extra__ is None or isinstance(cls, GenericMeta):
-            return False
-        return issubclass(cls, self.__extra__)
-
-
-class Generic(metaclass=GenericMeta):
-    """Abstract base class for generic types.
-
-    A generic type is typically declared by inheriting from an
-    instantiation of this class with one or more type variables.
-    For example, a generic mapping type might be defined as::
-
-      class Mapping(Generic[KT, VT]):
-          def __getitem__(self, key: KT) -> VT:
-              ...
-          # Etc.
-
-    This class can then be used as follows::
-
-      def lookup_name(mapping: Mapping[KT, VT], key: KT, default: VT) -> VT:
-          try:
-              return mapping[key]
-          except KeyError:
-              return default
-
-    For clarity the type variables may be redefined, e.g.::
-
-      X = TypeVar('X')
-      Y = TypeVar('Y')
-      def lookup_name(mapping: Mapping[X, Y], key: X, default: Y) -> Y:
-          # Same body as above.
-    """
-
-    __slots__ = ()
-
-    def __new__(cls, *args, **kwds):
-        next_in_mro = object
-        # Look for the last occurrence of Generic or Generic[...].
-        for i, c in enumerate(cls.__mro__[:-1]):
-            if isinstance(c, GenericMeta) and _gorg(c) is Generic:
-                next_in_mro = cls.__mro__[i+1]
-        return next_in_mro.__new__(_gorg(cls))
-
-
-def cast(typ, val):
-    """Cast a value to a type.
-
-    This returns the value unchanged. To the type checker this
-    signals that the return value has the designated type, but at
-    runtime we intentionally don't check anything (we want this
-    to be as fast as possible).
- """ - return val - - -def _get_defaults(func): - """Internal helper to extract the default arguments, by name.""" - code = func.__code__ - pos_count = code.co_argcount - kw_count = code.co_kwonlyargcount - arg_names = code.co_varnames - kwarg_names = arg_names[pos_count:pos_count + kw_count] - arg_names = arg_names[:pos_count] - defaults = func.__defaults__ or () - kwdefaults = func.__kwdefaults__ - res = dict(kwdefaults) if kwdefaults else {} - pos_offset = pos_count - len(defaults) - for name, value in zip(arg_names[pos_offset:], defaults): - assert name not in res - res[name] = value - return res - - -def get_type_hints(obj, globalns=None, localns=None): - """Return type hints for a function or method object. - - This is often the same as obj.__annotations__, but it handles - forward references encoded as string literals, and if necessary - adds Optional[t] if a default value equal to None is set. - - BEWARE -- the behavior of globalns and localns is counterintuitive - (unless you are familiar with how eval() and exec() work). The - search order is locals first, then globals. - - - If no dict arguments are passed, an attempt is made to use the - globals from obj, and these are also used as the locals. If the - object does not appear to have globals, an exception is raised. - - - If one dict argument is passed, it is used for both globals and - locals. - - - If two dict arguments are passed, they specify globals and - locals, respectively. - """ - if getattr(obj, '__no_type_check__', None): - return {} - if globalns is None: - globalns = getattr(obj, '__globals__', {}) - if localns is None: - localns = globalns - elif localns is None: - localns = globalns - defaults = _get_defaults(obj) - hints = dict(obj.__annotations__) - for name, value in hints.items(): - if isinstance(value, str): - value = _ForwardRef(value) - value = _eval_type(value, globalns, localns) - if name in defaults and defaults[name] is None: - value = Optional[value] - hints[name] = value - return hints - - -# TODO: Also support this as a class decorator. -def no_type_check(arg): - """Decorator to indicate that annotations are not type hints. - - The argument must be a class or function; if it is a class, it - applies recursively to all methods defined in that class (but not - to methods defined in its superclasses or subclasses). - - This mutates the function(s) in place. - """ - if isinstance(arg, type): - for obj in arg.__dict__.values(): - if isinstance(obj, types.FunctionType): - obj.__no_type_check__ = True - else: - arg.__no_type_check__ = True - return arg - - -def no_type_check_decorator(decorator): - """Decorator to give another decorator the @no_type_check effect. - - This wraps the decorator with something that wraps the decorated - function in @no_type_check. - """ - - @functools.wraps(decorator) - def wrapped_decorator(*args, **kwds): - func = decorator(*args, **kwds) - func = no_type_check(func) - return func - - return wrapped_decorator - - -def overload(func): - raise RuntimeError("Overloading is only supported in library stubs") - - -class _ProtocolMeta(GenericMeta): - """Internal metaclass for _Protocol. - - This exists so _Protocol classes can be generic without deriving - from Generic. - """ - - def __instancecheck__(self, obj): - raise TypeError("Protocols cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - if not self._is_protocol: - # No structural checks since this isn't a protocol. 
- return NotImplemented - - if self is _Protocol: - # Every class is a subclass of the empty protocol. - return True - - # Find all attributes defined in the protocol. - attrs = self._get_protocol_attrs() - - for attr in attrs: - if not any(attr in d.__dict__ for d in cls.__mro__): - return False - return True - - def _get_protocol_attrs(self): - # Get all Protocol base classes. - protocol_bases = [] - for c in self.__mro__: - if getattr(c, '_is_protocol', False) and c.__name__ != '_Protocol': - protocol_bases.append(c) - - # Get attributes included in protocol. - attrs = set() - for base in protocol_bases: - for attr in base.__dict__.keys(): - # Include attributes not defined in any non-protocol bases. - for c in self.__mro__: - if (c is not base and attr in c.__dict__ and - not getattr(c, '_is_protocol', False)): - break - else: - if (not attr.startswith('_abc_') and - attr != '__abstractmethods__' and - attr != '_is_protocol' and - attr != '__dict__' and - attr != '__slots__' and - attr != '_get_protocol_attrs' and - attr != '__parameters__' and - attr != '__origin__' and - attr != '__module__'): - attrs.add(attr) - - return attrs - - -class _Protocol(metaclass=_ProtocolMeta): - """Internal base class for protocol classes. - - This implements a simple-minded structural isinstance check - (similar but more general than the one-offs in collections.abc - such as Hashable). - """ - - __slots__ = () - - _is_protocol = True - - -# Various ABCs mimicking those in collections.abc. -# A few are simply re-exported for completeness. - -Hashable = collections_abc.Hashable # Not generic. - - -class Iterable(Generic[T_co], extra=collections_abc.Iterable): - __slots__ = () - - -class Iterator(Iterable[T_co], extra=collections_abc.Iterator): - __slots__ = () - - -class SupportsInt(_Protocol): - __slots__ = () - - @abstractmethod - def __int__(self) -> int: - pass - - -class SupportsFloat(_Protocol): - __slots__ = () - - @abstractmethod - def __float__(self) -> float: - pass - - -class SupportsComplex(_Protocol): - __slots__ = () - - @abstractmethod - def __complex__(self) -> complex: - pass - - -class SupportsBytes(_Protocol): - __slots__ = () - - @abstractmethod - def __bytes__(self) -> bytes: - pass - - -class SupportsAbs(_Protocol[T_co]): - __slots__ = () - - @abstractmethod - def __abs__(self) -> T_co: - pass - - -class SupportsRound(_Protocol[T_co]): - __slots__ = () - - @abstractmethod - def __round__(self, ndigits: int = 0) -> T_co: - pass - - -class Reversible(_Protocol[T_co]): - __slots__ = () - - @abstractmethod - def __reversed__(self) -> 'Iterator[T_co]': - pass - - -Sized = collections_abc.Sized # Not generic. - - -class Container(Generic[T_co], extra=collections_abc.Container): - __slots__ = () - - -# Callable was defined earlier. - - -class AbstractSet(Sized, Iterable[T_co], Container[T_co], - extra=collections_abc.Set): - pass - - -class MutableSet(AbstractSet[T], extra=collections_abc.MutableSet): - pass - - -# NOTE: Only the value type is covariant. 
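# Editor's sketch (not part of the original file): the _Protocol classes
# above support structural issubclass() checks only, e.g.
#
#     issubclass(int, SupportsAbs)   # True -- int defines __abs__
#     issubclass(str, SupportsAbs)   # False -- it does not
#
# while isinstance() with a protocol raises TypeError by design.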
-class Mapping(Sized, Iterable[KT], Container[KT], Generic[VT_co],
-              extra=collections_abc.Mapping):
-    pass
-
-
-class MutableMapping(Mapping[KT, VT], extra=collections_abc.MutableMapping):
-    pass
-
-
-class Sequence(Sized, Iterable[T_co], Container[T_co],
-               extra=collections_abc.Sequence):
-    pass
-
-
-class MutableSequence(Sequence[T], extra=collections_abc.MutableSequence):
-    pass
-
-
-class ByteString(Sequence[int], extra=collections_abc.ByteString):
-    pass
-
-
-ByteString.register(type(memoryview(b'')))
-
-
-class List(list, MutableSequence[T]):
-
-    def __new__(cls, *args, **kwds):
-        if _geqv(cls, List):
-            raise TypeError("Type List cannot be instantiated; "
-                            "use list() instead")
-        return list.__new__(cls, *args, **kwds)
-
-
-class Set(set, MutableSet[T]):
-
-    def __new__(cls, *args, **kwds):
-        if _geqv(cls, Set):
-            raise TypeError("Type Set cannot be instantiated; "
-                            "use set() instead")
-        return set.__new__(cls, *args, **kwds)
-
-
-class _FrozenSetMeta(GenericMeta):
-    """This metaclass ensures set is not a subclass of FrozenSet.
-
-    Without this metaclass, set would be considered a subclass of
-    FrozenSet, because FrozenSet.__extra__ is collections.abc.Set, and
-    set is a subclass of that.
-    """
-
-    def __subclasscheck__(self, cls):
-        if issubclass(cls, Set):
-            return False
-        return super().__subclasscheck__(cls)
-
-
-class FrozenSet(frozenset, AbstractSet[T_co], metaclass=_FrozenSetMeta):
-    __slots__ = ()
-
-    def __new__(cls, *args, **kwds):
-        if _geqv(cls, FrozenSet):
-            raise TypeError("Type FrozenSet cannot be instantiated; "
-                            "use frozenset() instead")
-        return frozenset.__new__(cls, *args, **kwds)
-
-
-class MappingView(Sized, Iterable[T_co], extra=collections_abc.MappingView):
-    pass
-
-
-class KeysView(MappingView[KT], AbstractSet[KT],
-               extra=collections_abc.KeysView):
-    pass
-
-
-# TODO: Enable Set[Tuple[KT, VT_co]] instead of Generic[KT, VT_co].
-class ItemsView(MappingView, Generic[KT, VT_co],
-                extra=collections_abc.ItemsView):
-    pass
-
-
-class ValuesView(MappingView[VT_co], extra=collections_abc.ValuesView):
-    pass
-
-
-class Dict(dict, MutableMapping[KT, VT]):
-
-    def __new__(cls, *args, **kwds):
-        if _geqv(cls, Dict):
-            raise TypeError("Type Dict cannot be instantiated; "
-                            "use dict() instead")
-        return dict.__new__(cls, *args, **kwds)
-
-
-# Determine what base class to use for Generator.
-if hasattr(collections_abc, 'Generator'):
-    # Sufficiently recent versions of 3.5 have a Generator ABC.
-    _G_base = collections_abc.Generator
-else:
-    # Fall back on the exact type.
-    _G_base = types.GeneratorType
-
-
-class Generator(Iterator[T_co], Generic[T_co, T_contra, V_co],
-                extra=_G_base):
-    __slots__ = ()
-
-    def __new__(cls, *args, **kwds):
-        if _geqv(cls, Generator):
-            raise TypeError("Type Generator cannot be instantiated; "
-                            "create a subclass instead")
-        return super().__new__(cls, *args, **kwds)
-
-
-def NamedTuple(typename, fields):
-    """Typed version of namedtuple.
-
-    Usage::
-
-        Employee = typing.NamedTuple('Employee', [('name', str), ('id', int)])
-
-    This is equivalent to::
-
-        Employee = collections.namedtuple('Employee', ['name', 'id'])
-
-    The resulting class has one extra attribute: _field_types,
-    giving a dict mapping field names to types. (The field names
-    are in the _fields attribute, which is part of the namedtuple
-    API.)
-    """
-    fields = [(n, t) for n, t in fields]
-    cls = collections.namedtuple(typename, [n for n, t in fields])
-    cls._field_types = dict(fields)
-    # Set the module to the caller's module (otherwise it'd be 'typing').
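    # Editor's illustration of the docstring above (hypothetical names,
    # not in the original source):
    #
    #     Employee = NamedTuple('Employee', [('name', str), ('id', int)])
    #     e = Employee('Guido', 1)
    #     e.name                  # 'Guido' -- normal namedtuple access
    #     Employee._field_types   # {'name': str, 'id': int}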
- try: - cls.__module__ = sys._getframe(1).f_globals.get('__name__', '__main__') - except (AttributeError, ValueError): - pass - return cls - - -class IO(Generic[AnyStr]): - """Generic base class for TextIO and BinaryIO. - - This is an abstract, generic version of the return of open(). - - NOTE: This does not distinguish between the different possible - classes (text vs. binary, read vs. write vs. read/write, - append-only, unbuffered). The TextIO and BinaryIO subclasses - below capture the distinctions between text vs. binary, which is - pervasive in the interface; however we currently do not offer a - way to track the other distinctions in the type system. - """ - - __slots__ = () - - @abstractproperty - def mode(self) -> str: - pass - - @abstractproperty - def name(self) -> str: - pass - - @abstractmethod - def close(self) -> None: - pass - - @abstractmethod - def closed(self) -> bool: - pass - - @abstractmethod - def fileno(self) -> int: - pass - - @abstractmethod - def flush(self) -> None: - pass - - @abstractmethod - def isatty(self) -> bool: - pass - - @abstractmethod - def read(self, n: int = -1) -> AnyStr: - pass - - @abstractmethod - def readable(self) -> bool: - pass - - @abstractmethod - def readline(self, limit: int = -1) -> AnyStr: - pass - - @abstractmethod - def readlines(self, hint: int = -1) -> List[AnyStr]: - pass - - @abstractmethod - def seek(self, offset: int, whence: int = 0) -> int: - pass - - @abstractmethod - def seekable(self) -> bool: - pass - - @abstractmethod - def tell(self) -> int: - pass - - @abstractmethod - def truncate(self, size: int = None) -> int: - pass - - @abstractmethod - def writable(self) -> bool: - pass - - @abstractmethod - def write(self, s: AnyStr) -> int: - pass - - @abstractmethod - def writelines(self, lines: List[AnyStr]) -> None: - pass - - @abstractmethod - def __enter__(self) -> 'IO[AnyStr]': - pass - - @abstractmethod - def __exit__(self, type, value, traceback) -> None: - pass - - -class BinaryIO(IO[bytes]): - """Typed version of the return of open() in binary mode.""" - - __slots__ = () - - @abstractmethod - def write(self, s: Union[bytes, bytearray]) -> int: - pass - - @abstractmethod - def __enter__(self) -> 'BinaryIO': - pass - - -class TextIO(IO[str]): - """Typed version of the return of open() in text mode.""" - - __slots__ = () - - @abstractproperty - def buffer(self) -> BinaryIO: - pass - - @abstractproperty - def encoding(self) -> str: - pass - - @abstractproperty - def errors(self) -> str: - pass - - @abstractproperty - def line_buffering(self) -> bool: - pass - - @abstractproperty - def newlines(self) -> Any: - pass - - @abstractmethod - def __enter__(self) -> 'TextIO': - pass - - -class io: - """Wrapper namespace for IO generic classes.""" - - __all__ = ['IO', 'TextIO', 'BinaryIO'] - IO = IO - TextIO = TextIO - BinaryIO = BinaryIO - -io.__name__ = __name__ + '.io' -sys.modules[io.__name__] = io - - -Pattern = _TypeAlias('Pattern', AnyStr, type(stdlib_re.compile('')), - lambda p: p.pattern) -Match = _TypeAlias('Match', AnyStr, type(stdlib_re.match('', '')), - lambda m: m.re.pattern) - - -class re: - """Wrapper namespace for re type aliases.""" - - __all__ = ['Pattern', 'Match'] - Pattern = Pattern - Match = Match - -re.__name__ = __name__ + '.re' -sys.modules[re.__name__] = re diff --git a/typing_extensions/test_data/python-3.5.2/_collections_abc.py b/typing_extensions/test_data/python-3.5.2/_collections_abc.py deleted file mode 100644 index fc9c9f1c..00000000 --- 
a/typing_extensions/test_data/python-3.5.2/_collections_abc.py
+++ /dev/null
@@ -1,939 +0,0 @@
-# Copyright 2007 Google, Inc. All Rights Reserved.
-# Licensed to PSF under a Contributor Agreement.
-
-"""Abstract Base Classes (ABCs) for collections, according to PEP 3119.
-
-Unit tests are in test_collections.
-"""
-
-from abc import ABCMeta, abstractmethod
-import sys
-
-__all__ = ["Awaitable", "Coroutine", "AsyncIterable", "AsyncIterator",
-           "Hashable", "Iterable", "Iterator", "Generator",
-           "Sized", "Container", "Callable",
-           "Set", "MutableSet",
-           "Mapping", "MutableMapping",
-           "MappingView", "KeysView", "ItemsView", "ValuesView",
-           "Sequence", "MutableSequence",
-           "ByteString",
-           ]
-
-# This module has been renamed from collections.abc to _collections_abc to
-# speed up interpreter startup. Some of the types such as MutableMapping are
-# required early but collections module imports a lot of other modules.
-# See issue #19218
-__name__ = "collections.abc"
-
-# Private list of types that we want to register with the various ABCs
-# so that they will pass tests like:
-#       it = iter(somebytearray)
-#       assert isinstance(it, Iterable)
-# Note: in other implementations, these types may not be distinct
-# and they may have their own implementation specific types that
-# are not included on this list.
-bytes_iterator = type(iter(b''))
-bytearray_iterator = type(iter(bytearray()))
-#callable_iterator = ???
-dict_keyiterator = type(iter({}.keys()))
-dict_valueiterator = type(iter({}.values()))
-dict_itemiterator = type(iter({}.items()))
-list_iterator = type(iter([]))
-list_reverseiterator = type(iter(reversed([])))
-range_iterator = type(iter(range(0)))
-set_iterator = type(iter(set()))
-str_iterator = type(iter(""))
-tuple_iterator = type(iter(()))
-zip_iterator = type(iter(zip()))
-## views ##
-dict_keys = type({}.keys())
-dict_values = type({}.values())
-dict_items = type({}.items())
-## misc ##
-mappingproxy = type(type.__dict__)
-generator = type((lambda: (yield))())
-## coroutine ##
-async def _coro(): pass
-_coro = _coro()
-coroutine = type(_coro)
-_coro.close()  # Prevent ResourceWarning
-del _coro
-
-
-### ONE-TRICK PONIES ###
-
-class Hashable(metaclass=ABCMeta):
-
-    __slots__ = ()
-
-    @abstractmethod
-    def __hash__(self):
-        return 0
-
-    @classmethod
-    def __subclasshook__(cls, C):
-        if cls is Hashable:
-            for B in C.__mro__:
-                if "__hash__" in B.__dict__:
-                    if B.__dict__["__hash__"]:
-                        return True
-                    break
-        return NotImplemented
-
-
-class Awaitable(metaclass=ABCMeta):
-
-    __slots__ = ()
-
-    @abstractmethod
-    def __await__(self):
-        yield
-
-    @classmethod
-    def __subclasshook__(cls, C):
-        if cls is Awaitable:
-            for B in C.__mro__:
-                if "__await__" in B.__dict__:
-                    if B.__dict__["__await__"]:
-                        return True
-                    break
-        return NotImplemented
-
-
-class Coroutine(Awaitable):
-
-    __slots__ = ()
-
-    @abstractmethod
-    def send(self, value):
-        """Send a value into the coroutine.
-        Return next yielded value or raise StopIteration.
-        """
-        raise StopIteration
-
-    @abstractmethod
-    def throw(self, typ, val=None, tb=None):
-        """Raise an exception in the coroutine.
-        Return next yielded value or raise StopIteration.
-        """
-        if val is None:
-            if tb is None:
-                raise typ
-            val = typ()
-        if tb is not None:
-            val = val.with_traceback(tb)
-        raise val
-
-    def close(self):
-        """Raise GeneratorExit inside coroutine.
- """ - try: - self.throw(GeneratorExit) - except (GeneratorExit, StopIteration): - pass - else: - raise RuntimeError("coroutine ignored GeneratorExit") - - @classmethod - def __subclasshook__(cls, C): - if cls is Coroutine: - mro = C.__mro__ - for method in ('__await__', 'send', 'throw', 'close'): - for base in mro: - if method in base.__dict__: - break - else: - return NotImplemented - return True - return NotImplemented - - -Coroutine.register(coroutine) - - -class AsyncIterable(metaclass=ABCMeta): - - __slots__ = () - - @abstractmethod - def __aiter__(self): - return AsyncIterator() - - @classmethod - def __subclasshook__(cls, C): - if cls is AsyncIterable: - if any("__aiter__" in B.__dict__ for B in C.__mro__): - return True - return NotImplemented - - -class AsyncIterator(AsyncIterable): - - __slots__ = () - - @abstractmethod - async def __anext__(self): - """Return the next item or raise StopAsyncIteration when exhausted.""" - raise StopAsyncIteration - - def __aiter__(self): - return self - - @classmethod - def __subclasshook__(cls, C): - if cls is AsyncIterator: - if (any("__anext__" in B.__dict__ for B in C.__mro__) and - any("__aiter__" in B.__dict__ for B in C.__mro__)): - return True - return NotImplemented - - -class Iterable(metaclass=ABCMeta): - - __slots__ = () - - @abstractmethod - def __iter__(self): - while False: - yield None - - @classmethod - def __subclasshook__(cls, C): - if cls is Iterable: - if any("__iter__" in B.__dict__ for B in C.__mro__): - return True - return NotImplemented - - -class Iterator(Iterable): - - __slots__ = () - - @abstractmethod - def __next__(self): - 'Return the next item from the iterator. When exhausted, raise StopIteration' - raise StopIteration - - def __iter__(self): - return self - - @classmethod - def __subclasshook__(cls, C): - if cls is Iterator: - if (any("__next__" in B.__dict__ for B in C.__mro__) and - any("__iter__" in B.__dict__ for B in C.__mro__)): - return True - return NotImplemented - -Iterator.register(bytes_iterator) -Iterator.register(bytearray_iterator) -#Iterator.register(callable_iterator) -Iterator.register(dict_keyiterator) -Iterator.register(dict_valueiterator) -Iterator.register(dict_itemiterator) -Iterator.register(list_iterator) -Iterator.register(list_reverseiterator) -Iterator.register(range_iterator) -Iterator.register(set_iterator) -Iterator.register(str_iterator) -Iterator.register(tuple_iterator) -Iterator.register(zip_iterator) - - -class Generator(Iterator): - - __slots__ = () - - def __next__(self): - """Return the next item from the generator. - When exhausted, raise StopIteration. - """ - return self.send(None) - - @abstractmethod - def send(self, value): - """Send a value into the generator. - Return next yielded value or raise StopIteration. - """ - raise StopIteration - - @abstractmethod - def throw(self, typ, val=None, tb=None): - """Raise an exception in the generator. - Return next yielded value or raise StopIteration. - """ - if val is None: - if tb is None: - raise typ - val = typ() - if tb is not None: - val = val.with_traceback(tb) - raise val - - def close(self): - """Raise GeneratorExit inside generator. 
- """ - try: - self.throw(GeneratorExit) - except (GeneratorExit, StopIteration): - pass - else: - raise RuntimeError("generator ignored GeneratorExit") - - @classmethod - def __subclasshook__(cls, C): - if cls is Generator: - mro = C.__mro__ - for method in ('__iter__', '__next__', 'send', 'throw', 'close'): - for base in mro: - if method in base.__dict__: - break - else: - return NotImplemented - return True - return NotImplemented - - -Generator.register(generator) - - -class Sized(metaclass=ABCMeta): - - __slots__ = () - - @abstractmethod - def __len__(self): - return 0 - - @classmethod - def __subclasshook__(cls, C): - if cls is Sized: - if any("__len__" in B.__dict__ for B in C.__mro__): - return True - return NotImplemented - - -class Container(metaclass=ABCMeta): - - __slots__ = () - - @abstractmethod - def __contains__(self, x): - return False - - @classmethod - def __subclasshook__(cls, C): - if cls is Container: - if any("__contains__" in B.__dict__ for B in C.__mro__): - return True - return NotImplemented - - -class Callable(metaclass=ABCMeta): - - __slots__ = () - - @abstractmethod - def __call__(self, *args, **kwds): - return False - - @classmethod - def __subclasshook__(cls, C): - if cls is Callable: - if any("__call__" in B.__dict__ for B in C.__mro__): - return True - return NotImplemented - - -### SETS ### - - -class Set(Sized, Iterable, Container): - - """A set is a finite, iterable container. - - This class provides concrete generic implementations of all - methods except for __contains__, __iter__ and __len__. - - To override the comparisons (presumably for speed, as the - semantics are fixed), redefine __le__ and __ge__, - then the other operations will automatically follow suit. - """ - - __slots__ = () - - def __le__(self, other): - if not isinstance(other, Set): - return NotImplemented - if len(self) > len(other): - return False - for elem in self: - if elem not in other: - return False - return True - - def __lt__(self, other): - if not isinstance(other, Set): - return NotImplemented - return len(self) < len(other) and self.__le__(other) - - def __gt__(self, other): - if not isinstance(other, Set): - return NotImplemented - return len(self) > len(other) and self.__ge__(other) - - def __ge__(self, other): - if not isinstance(other, Set): - return NotImplemented - if len(self) < len(other): - return False - for elem in other: - if elem not in self: - return False - return True - - def __eq__(self, other): - if not isinstance(other, Set): - return NotImplemented - return len(self) == len(other) and self.__le__(other) - - @classmethod - def _from_iterable(cls, it): - '''Construct an instance of the class from any iterable input. - - Must override this method if the class constructor signature - does not accept an iterable for an input. - ''' - return cls(it) - - def __and__(self, other): - if not isinstance(other, Iterable): - return NotImplemented - return self._from_iterable(value for value in other if value in self) - - __rand__ = __and__ - - def isdisjoint(self, other): - 'Return True if two sets have a null intersection.' 
-        for value in other:
-            if value in self:
-                return False
-        return True
-
-    def __or__(self, other):
-        if not isinstance(other, Iterable):
-            return NotImplemented
-        chain = (e for s in (self, other) for e in s)
-        return self._from_iterable(chain)
-
-    __ror__ = __or__
-
-    def __sub__(self, other):
-        if not isinstance(other, Set):
-            if not isinstance(other, Iterable):
-                return NotImplemented
-            other = self._from_iterable(other)
-        return self._from_iterable(value for value in self
-                                   if value not in other)
-
-    def __rsub__(self, other):
-        if not isinstance(other, Set):
-            if not isinstance(other, Iterable):
-                return NotImplemented
-            other = self._from_iterable(other)
-        return self._from_iterable(value for value in other
-                                   if value not in self)
-
-    def __xor__(self, other):
-        if not isinstance(other, Set):
-            if not isinstance(other, Iterable):
-                return NotImplemented
-            other = self._from_iterable(other)
-        return (self - other) | (other - self)
-
-    __rxor__ = __xor__
-
-    def _hash(self):
-        """Compute the hash value of a set.
-
-        Note that we don't define __hash__: not all sets are hashable.
-        But if you define a hashable set type, its __hash__ should
-        call this function.
-
-        This must be compatible with __eq__.
-
-        All sets ought to compare equal if they contain the same
-        elements, regardless of how they are implemented, and
-        regardless of the order of the elements; so there's not much
-        freedom for __eq__ or __hash__. We match the algorithm used
-        by the built-in frozenset type.
-        """
-        MAX = sys.maxsize
-        MASK = 2 * MAX + 1
-        n = len(self)
-        h = 1927868237 * (n + 1)
-        h &= MASK
-        for x in self:
-            hx = hash(x)
-            h ^= (hx ^ (hx << 16) ^ 89869747) * 3644798167
-            h &= MASK
-        h = h * 69069 + 907133923
-        h &= MASK
-        if h > MAX:
-            h -= MASK + 1
-        if h == -1:
-            h = 590923713
-        return h
-
-Set.register(frozenset)
-
-
-class MutableSet(Set):
-    """A mutable set is a finite, iterable container.
-
-    This class provides concrete generic implementations of all
-    methods except for __contains__, __iter__, __len__,
-    add(), and discard().
-
-    To override the comparisons (presumably for speed, as the
-    semantics are fixed), all you have to do is redefine __le__ and
-    then the other operations will automatically follow suit.
-    """
-
-    __slots__ = ()
-
-    @abstractmethod
-    def add(self, value):
-        """Add an element."""
-        raise NotImplementedError
-
-    @abstractmethod
-    def discard(self, value):
-        """Remove an element. Do not raise an exception if absent."""
-        raise NotImplementedError
-
-    def remove(self, value):
-        """Remove an element. If not a member, raise a KeyError."""
-        if value not in self:
-            raise KeyError(value)
-        self.discard(value)
-
-    def pop(self):
-        """Return the popped value. Raise KeyError if empty."""
-        it = iter(self)
-        try:
-            value = next(it)
-        except StopIteration:
-            raise KeyError
-        self.discard(value)
-        return value
-
-    def clear(self):
-        """This is slow (creates N new iterators!)
but effective.""" - try: - while True: - self.pop() - except KeyError: - pass - - def __ior__(self, it): - for value in it: - self.add(value) - return self - - def __iand__(self, it): - for value in (self - it): - self.discard(value) - return self - - def __ixor__(self, it): - if it is self: - self.clear() - else: - if not isinstance(it, Set): - it = self._from_iterable(it) - for value in it: - if value in self: - self.discard(value) - else: - self.add(value) - return self - - def __isub__(self, it): - if it is self: - self.clear() - else: - for value in it: - self.discard(value) - return self - -MutableSet.register(set) - - -### MAPPINGS ### - - -class Mapping(Sized, Iterable, Container): - - __slots__ = () - - """A Mapping is a generic container for associating key/value - pairs. - - This class provides concrete generic implementations of all - methods except for __getitem__, __iter__, and __len__. - - """ - - @abstractmethod - def __getitem__(self, key): - raise KeyError - - def get(self, key, default=None): - 'D.get(k[,d]) -> D[k] if k in D, else d. d defaults to None.' - try: - return self[key] - except KeyError: - return default - - def __contains__(self, key): - try: - self[key] - except KeyError: - return False - else: - return True - - def keys(self): - "D.keys() -> a set-like object providing a view on D's keys" - return KeysView(self) - - def items(self): - "D.items() -> a set-like object providing a view on D's items" - return ItemsView(self) - - def values(self): - "D.values() -> an object providing a view on D's values" - return ValuesView(self) - - def __eq__(self, other): - if not isinstance(other, Mapping): - return NotImplemented - return dict(self.items()) == dict(other.items()) - -Mapping.register(mappingproxy) - - -class MappingView(Sized): - - __slots__ = '_mapping', - - def __init__(self, mapping): - self._mapping = mapping - - def __len__(self): - return len(self._mapping) - - def __repr__(self): - return '{0.__class__.__name__}({0._mapping!r})'.format(self) - - -class KeysView(MappingView, Set): - - __slots__ = () - - @classmethod - def _from_iterable(self, it): - return set(it) - - def __contains__(self, key): - return key in self._mapping - - def __iter__(self): - yield from self._mapping - -KeysView.register(dict_keys) - - -class ItemsView(MappingView, Set): - - __slots__ = () - - @classmethod - def _from_iterable(self, it): - return set(it) - - def __contains__(self, item): - key, value = item - try: - v = self._mapping[key] - except KeyError: - return False - else: - return v == value - - def __iter__(self): - for key in self._mapping: - yield (key, self._mapping[key]) - -ItemsView.register(dict_items) - - -class ValuesView(MappingView): - - __slots__ = () - - def __contains__(self, value): - for key in self._mapping: - if value == self._mapping[key]: - return True - return False - - def __iter__(self): - for key in self._mapping: - yield self._mapping[key] - -ValuesView.register(dict_values) - - -class MutableMapping(Mapping): - - __slots__ = () - - """A MutableMapping is a generic container for associating - key/value pairs. - - This class provides concrete generic implementations of all - methods except for __getitem__, __setitem__, __delitem__, - __iter__, and __len__. 
- - """ - - @abstractmethod - def __setitem__(self, key, value): - raise KeyError - - @abstractmethod - def __delitem__(self, key): - raise KeyError - - __marker = object() - - def pop(self, key, default=__marker): - '''D.pop(k[,d]) -> v, remove specified key and return the corresponding value. - If key is not found, d is returned if given, otherwise KeyError is raised. - ''' - try: - value = self[key] - except KeyError: - if default is self.__marker: - raise - return default - else: - del self[key] - return value - - def popitem(self): - '''D.popitem() -> (k, v), remove and return some (key, value) pair - as a 2-tuple; but raise KeyError if D is empty. - ''' - try: - key = next(iter(self)) - except StopIteration: - raise KeyError - value = self[key] - del self[key] - return key, value - - def clear(self): - 'D.clear() -> None. Remove all items from D.' - try: - while True: - self.popitem() - except KeyError: - pass - - def update(*args, **kwds): - ''' D.update([E, ]**F) -> None. Update D from mapping/iterable E and F. - If E present and has a .keys() method, does: for k in E: D[k] = E[k] - If E present and lacks .keys() method, does: for (k, v) in E: D[k] = v - In either case, this is followed by: for k, v in F.items(): D[k] = v - ''' - if not args: - raise TypeError("descriptor 'update' of 'MutableMapping' object " - "needs an argument") - self, *args = args - if len(args) > 1: - raise TypeError('update expected at most 1 arguments, got %d' % - len(args)) - if args: - other = args[0] - if isinstance(other, Mapping): - for key in other: - self[key] = other[key] - elif hasattr(other, "keys"): - for key in other.keys(): - self[key] = other[key] - else: - for key, value in other: - self[key] = value - for key, value in kwds.items(): - self[key] = value - - def setdefault(self, key, default=None): - 'D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D' - try: - return self[key] - except KeyError: - self[key] = default - return default - -MutableMapping.register(dict) - - -### SEQUENCES ### - - -class Sequence(Sized, Iterable, Container): - - """All the operations on a read-only sequence. - - Concrete subclasses must override __new__ or __init__, - __getitem__, and __len__. - """ - - __slots__ = () - - @abstractmethod - def __getitem__(self, index): - raise IndexError - - def __iter__(self): - i = 0 - try: - while True: - v = self[i] - yield v - i += 1 - except IndexError: - return - - def __contains__(self, value): - for v in self: - if v == value: - return True - return False - - def __reversed__(self): - for i in reversed(range(len(self))): - yield self[i] - - def index(self, value, start=0, stop=None): - '''S.index(value, [start, [stop]]) -> integer -- return first index of value. - Raises ValueError if the value is not present. - ''' - if start is not None and start < 0: - start = max(len(self) + start, 0) - if stop is not None and stop < 0: - stop += len(self) - - i = start - while stop is None or i < stop: - try: - if self[i] == value: - return i - except IndexError: - break - i += 1 - raise ValueError - - def count(self, value): - 'S.count(value) -> integer -- return number of occurrences of value' - return sum(1 for v in self if v == value) - -Sequence.register(tuple) -Sequence.register(str) -Sequence.register(range) -Sequence.register(memoryview) - - -class ByteString(Sequence): - - """This unifies bytes and bytearray. - - XXX Should add all their methods. 
- """ - - __slots__ = () - -ByteString.register(bytes) -ByteString.register(bytearray) - - -class MutableSequence(Sequence): - - __slots__ = () - - """All the operations on a read-write sequence. - - Concrete subclasses must provide __new__ or __init__, - __getitem__, __setitem__, __delitem__, __len__, and insert(). - - """ - - @abstractmethod - def __setitem__(self, index, value): - raise IndexError - - @abstractmethod - def __delitem__(self, index): - raise IndexError - - @abstractmethod - def insert(self, index, value): - 'S.insert(index, value) -- insert value before index' - raise IndexError - - def append(self, value): - 'S.append(value) -- append value to the end of the sequence' - self.insert(len(self), value) - - def clear(self): - 'S.clear() -> None -- remove all items from S' - try: - while True: - self.pop() - except IndexError: - pass - - def reverse(self): - 'S.reverse() -- reverse *IN PLACE*' - n = len(self) - for i in range(n//2): - self[i], self[n-i-1] = self[n-i-1], self[i] - - def extend(self, values): - 'S.extend(iterable) -- extend sequence by appending elements from the iterable' - for v in values: - self.append(v) - - def pop(self, index=-1): - '''S.pop([index]) -> item -- remove and return item at index (default last). - Raise IndexError if list is empty or index is out of range. - ''' - v = self[index] - del self[index] - return v - - def remove(self, value): - '''S.remove(value) -- remove first occurrence of value. - Raise ValueError if the value is not present. - ''' - del self[self.index(value)] - - def __iadd__(self, values): - self.extend(values) - return self - -MutableSequence.register(list) -MutableSequence.register(bytearray) # Multiply inheriting, see ByteString diff --git a/typing_extensions/test_data/python-3.5.2/abc.py b/typing_extensions/test_data/python-3.5.2/abc.py deleted file mode 100644 index 1cbf96a6..00000000 --- a/typing_extensions/test_data/python-3.5.2/abc.py +++ /dev/null @@ -1,248 +0,0 @@ -# Copyright 2007 Google, Inc. All Rights Reserved. -# Licensed to PSF under a Contributor Agreement. - -"""Abstract Base Classes (ABCs) according to PEP 3119.""" - -from _weakrefset import WeakSet - - -def abstractmethod(funcobj): - """A decorator indicating abstract methods. - - Requires that the metaclass is ABCMeta or derived from it. A - class that has a metaclass derived from ABCMeta cannot be - instantiated unless all of its abstract methods are overridden. - The abstract methods can be called using any of the normal - 'super' call mechanisms. - - Usage: - - class C(metaclass=ABCMeta): - @abstractmethod - def my_abstract_method(self, ...): - ... - """ - funcobj.__isabstractmethod__ = True - return funcobj - - -class abstractclassmethod(classmethod): - """ - A decorator indicating abstract classmethods. - - Similar to abstractmethod. - - Usage: - - class C(metaclass=ABCMeta): - @abstractclassmethod - def my_abstract_classmethod(cls, ...): - ... - - 'abstractclassmethod' is deprecated. Use 'classmethod' with - 'abstractmethod' instead. - """ - - __isabstractmethod__ = True - - def __init__(self, callable): - callable.__isabstractmethod__ = True - super().__init__(callable) - - -class abstractstaticmethod(staticmethod): - """ - A decorator indicating abstract staticmethods. - - Similar to abstractmethod. - - Usage: - - class C(metaclass=ABCMeta): - @abstractstaticmethod - def my_abstract_staticmethod(...): - ... - - 'abstractstaticmethod' is deprecated. Use 'staticmethod' with - 'abstractmethod' instead. 
- """ - - __isabstractmethod__ = True - - def __init__(self, callable): - callable.__isabstractmethod__ = True - super().__init__(callable) - - -class abstractproperty(property): - """ - A decorator indicating abstract properties. - - Requires that the metaclass is ABCMeta or derived from it. A - class that has a metaclass derived from ABCMeta cannot be - instantiated unless all of its abstract properties are overridden. - The abstract properties can be called using any of the normal - 'super' call mechanisms. - - Usage: - - class C(metaclass=ABCMeta): - @abstractproperty - def my_abstract_property(self): - ... - - This defines a read-only property; you can also define a read-write - abstract property using the 'long' form of property declaration: - - class C(metaclass=ABCMeta): - def getx(self): ... - def setx(self, value): ... - x = abstractproperty(getx, setx) - - 'abstractproperty' is deprecated. Use 'property' with 'abstractmethod' - instead. - """ - - __isabstractmethod__ = True - - -class ABCMeta(type): - - """Metaclass for defining Abstract Base Classes (ABCs). - - Use this metaclass to create an ABC. An ABC can be subclassed - directly, and then acts as a mix-in class. You can also register - unrelated concrete classes (even built-in classes) and unrelated - ABCs as 'virtual subclasses' -- these and their descendants will - be considered subclasses of the registering ABC by the built-in - issubclass() function, but the registering ABC won't show up in - their MRO (Method Resolution Order) nor will method - implementations defined by the registering ABC be callable (not - even via super()). - - """ - - # A global counter that is incremented each time a class is - # registered as a virtual subclass of anything. It forces the - # negative cache to be cleared before its next use. - # Note: this counter is private. Use `abc.get_cache_token()` for - # external code. - _abc_invalidation_counter = 0 - - def __new__(mcls, name, bases, namespace): - cls = super().__new__(mcls, name, bases, namespace) - # Compute set of abstract method names - abstracts = {name - for name, value in namespace.items() - if getattr(value, "__isabstractmethod__", False)} - for base in bases: - for name in getattr(base, "__abstractmethods__", set()): - value = getattr(cls, name, None) - if getattr(value, "__isabstractmethod__", False): - abstracts.add(name) - cls.__abstractmethods__ = frozenset(abstracts) - # Set up inheritance registry - cls._abc_registry = WeakSet() - cls._abc_cache = WeakSet() - cls._abc_negative_cache = WeakSet() - cls._abc_negative_cache_version = ABCMeta._abc_invalidation_counter - return cls - - def register(cls, subclass): - """Register a virtual subclass of an ABC. - - Returns the subclass, to allow usage as a class decorator. - """ - if not isinstance(subclass, type): - raise TypeError("Can only register classes") - if issubclass(subclass, cls): - return subclass # Already a subclass - # Subtle: test for cycles *after* testing for "already a subclass"; - # this means we allow X.register(X) and interpret it as a no-op. 
- if issubclass(cls, subclass): - # This would create a cycle, which is bad for the algorithm below - raise RuntimeError("Refusing to create an inheritance cycle") - cls._abc_registry.add(subclass) - ABCMeta._abc_invalidation_counter += 1 # Invalidate negative cache - return subclass - - def _dump_registry(cls, file=None): - """Debug helper to print the ABC registry.""" - print("Class: %s.%s" % (cls.__module__, cls.__qualname__), file=file) - print("Inv.counter: %s" % ABCMeta._abc_invalidation_counter, file=file) - for name in sorted(cls.__dict__.keys()): - if name.startswith("_abc_"): - value = getattr(cls, name) - print("%s: %r" % (name, value), file=file) - - def __instancecheck__(cls, instance): - """Override for isinstance(instance, cls).""" - # Inline the cache checking - subclass = instance.__class__ - if subclass in cls._abc_cache: - return True - subtype = type(instance) - if subtype is subclass: - if (cls._abc_negative_cache_version == - ABCMeta._abc_invalidation_counter and - subclass in cls._abc_negative_cache): - return False - # Fall back to the subclass check. - return cls.__subclasscheck__(subclass) - return any(cls.__subclasscheck__(c) for c in {subclass, subtype}) - - def __subclasscheck__(cls, subclass): - """Override for issubclass(subclass, cls).""" - # Check cache - if subclass in cls._abc_cache: - return True - # Check negative cache; may have to invalidate - if cls._abc_negative_cache_version < ABCMeta._abc_invalidation_counter: - # Invalidate the negative cache - cls._abc_negative_cache = WeakSet() - cls._abc_negative_cache_version = ABCMeta._abc_invalidation_counter - elif subclass in cls._abc_negative_cache: - return False - # Check the subclass hook - ok = cls.__subclasshook__(subclass) - if ok is not NotImplemented: - assert isinstance(ok, bool) - if ok: - cls._abc_cache.add(subclass) - else: - cls._abc_negative_cache.add(subclass) - return ok - # Check if it's a direct subclass - if cls in getattr(subclass, '__mro__', ()): - cls._abc_cache.add(subclass) - return True - # Check if it's a subclass of a registered class (recursive) - for rcls in cls._abc_registry: - if issubclass(subclass, rcls): - cls._abc_cache.add(subclass) - return True - # Check if it's a subclass of a subclass (recursive) - for scls in cls.__subclasses__(): - if issubclass(subclass, scls): - cls._abc_cache.add(subclass) - return True - # No dice; update negative cache - cls._abc_negative_cache.add(subclass) - return False - - -class ABC(metaclass=ABCMeta): - """Helper class that provides a standard way to create an ABC using - inheritance. - """ - pass - - -def get_cache_token(): - """Returns the current ABC cache token. - - The token is an opaque object (supporting equality testing) identifying the - current version of the ABC cache for virtual subclasses. The token changes - with every call to ``register()`` on any ABC. - """ - return ABCMeta._abc_invalidation_counter diff --git a/typing_extensions/test_data/python-3.5.2/collections/__init__.py b/typing_extensions/test_data/python-3.5.2/collections/__init__.py deleted file mode 100644 index ebe8ee7a..00000000 --- a/typing_extensions/test_data/python-3.5.2/collections/__init__.py +++ /dev/null @@ -1,1239 +0,0 @@ -'''This module implements specialized container datatypes providing -alternatives to Python's general purpose built-in containers, dict, -list, set, and tuple. 
- -* namedtuple factory function for creating tuple subclasses with named fields -* deque list-like container with fast appends and pops on either end -* ChainMap dict-like class for creating a single view of multiple mappings -* Counter dict subclass for counting hashable objects -* OrderedDict dict subclass that remembers the order entries were added -* defaultdict dict subclass that calls a factory function to supply missing values -* UserDict wrapper around dictionary objects for easier dict subclassing -* UserList wrapper around list objects for easier list subclassing -* UserString wrapper around string objects for easier string subclassing - -''' - -__all__ = ['deque', 'defaultdict', 'namedtuple', 'UserDict', 'UserList', - 'UserString', 'Counter', 'OrderedDict', 'ChainMap'] - -# For backwards compatibility, continue to make the collections ABCs -# available through the collections module. -from _collections_abc import * -import _collections_abc -__all__ += _collections_abc.__all__ - -from operator import itemgetter as _itemgetter, eq as _eq -from keyword import iskeyword as _iskeyword -import sys as _sys -import heapq as _heapq -from _weakref import proxy as _proxy -from itertools import repeat as _repeat, chain as _chain, starmap as _starmap -from reprlib import recursive_repr as _recursive_repr - -try: - from _collections import deque -except ImportError: - pass -else: - MutableSequence.register(deque) - -try: - from _collections import defaultdict -except ImportError: - pass - - -################################################################################ -### OrderedDict -################################################################################ - -class _OrderedDictKeysView(KeysView): - - def __reversed__(self): - yield from reversed(self._mapping) - -class _OrderedDictItemsView(ItemsView): - - def __reversed__(self): - for key in reversed(self._mapping): - yield (key, self._mapping[key]) - -class _OrderedDictValuesView(ValuesView): - - def __reversed__(self): - for key in reversed(self._mapping): - yield self._mapping[key] - -class _Link(object): - __slots__ = 'prev', 'next', 'key', '__weakref__' - -class OrderedDict(dict): - 'Dictionary that remembers insertion order' - # An inherited dict maps keys to values. - # The inherited dict provides __getitem__, __len__, __contains__, and get. - # The remaining methods are order-aware. - # Big-O running times for all methods are the same as regular dictionaries. - - # The internal self.__map dict maps keys to links in a doubly linked list. - # The circular doubly linked list starts and ends with a sentinel element. - # The sentinel element never gets deleted (this simplifies the algorithm). - # The sentinel is in self.__hardroot with a weakref proxy in self.__root. - # The prev links are weakref proxies (to prevent circular references). - # Individual links are kept alive by the hard reference in self.__map. - # Those hard references disappear when a key is deleted from an OrderedDict. - - def __init__(*args, **kwds): - '''Initialize an ordered dictionary. The signature is the same as - regular dictionaries, but keyword arguments are not recommended because - their insertion order is arbitrary. 
- - ''' - if not args: - raise TypeError("descriptor '__init__' of 'OrderedDict' object " - "needs an argument") - self, *args = args - if len(args) > 1: - raise TypeError('expected at most 1 arguments, got %d' % len(args)) - try: - self.__root - except AttributeError: - self.__hardroot = _Link() - self.__root = root = _proxy(self.__hardroot) - root.prev = root.next = root - self.__map = {} - self.__update(*args, **kwds) - - def __setitem__(self, key, value, - dict_setitem=dict.__setitem__, proxy=_proxy, Link=_Link): - 'od.__setitem__(i, y) <==> od[i]=y' - # Setting a new item creates a new link at the end of the linked list, - # and the inherited dictionary is updated with the new key/value pair. - if key not in self: - self.__map[key] = link = Link() - root = self.__root - last = root.prev - link.prev, link.next, link.key = last, root, key - last.next = link - root.prev = proxy(link) - dict_setitem(self, key, value) - - def __delitem__(self, key, dict_delitem=dict.__delitem__): - 'od.__delitem__(y) <==> del od[y]' - # Deleting an existing item uses self.__map to find the link which gets - # removed by updating the links in the predecessor and successor nodes. - dict_delitem(self, key) - link = self.__map.pop(key) - link_prev = link.prev - link_next = link.next - link_prev.next = link_next - link_next.prev = link_prev - link.prev = None - link.next = None - - def __iter__(self): - 'od.__iter__() <==> iter(od)' - # Traverse the linked list in order. - root = self.__root - curr = root.next - while curr is not root: - yield curr.key - curr = curr.next - - def __reversed__(self): - 'od.__reversed__() <==> reversed(od)' - # Traverse the linked list in reverse order. - root = self.__root - curr = root.prev - while curr is not root: - yield curr.key - curr = curr.prev - - def clear(self): - 'od.clear() -> None. Remove all items from od.' - root = self.__root - root.prev = root.next = root - self.__map.clear() - dict.clear(self) - - def popitem(self, last=True): - '''od.popitem() -> (k, v), return and remove a (key, value) pair. - Pairs are returned in LIFO order if last is true or FIFO order if false. - - ''' - if not self: - raise KeyError('dictionary is empty') - root = self.__root - if last: - link = root.prev - link_prev = link.prev - link_prev.next = root - root.prev = link_prev - else: - link = root.next - link_next = link.next - root.next = link_next - link_next.prev = root - key = link.key - del self.__map[key] - value = dict.pop(self, key) - return key, value - - def move_to_end(self, key, last=True): - '''Move an existing element to the end (or beginning if last==False). - - Raises KeyError if the element does not exist. - When last=True, acts like a fast version of self[key]=self.pop(key). 
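        An editor's doctest-style sketch (not part of the original
        docstring):

            >>> od = OrderedDict.fromkeys('abc')
            >>> od.move_to_end('a')
            >>> ''.join(od)
            'bca'
            >>> od.move_to_end('a', last=False)
            >>> ''.join(od)
            'abc'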
- - ''' - link = self.__map[key] - link_prev = link.prev - link_next = link.next - link_prev.next = link_next - link_next.prev = link_prev - root = self.__root - if last: - last = root.prev - link.prev = last - link.next = root - last.next = root.prev = link - else: - first = root.next - link.prev = root - link.next = first - root.next = first.prev = link - - def __sizeof__(self): - sizeof = _sys.getsizeof - n = len(self) + 1 # number of links including root - size = sizeof(self.__dict__) # instance dictionary - size += sizeof(self.__map) * 2 # internal dict and inherited dict - size += sizeof(self.__hardroot) * n # link objects - size += sizeof(self.__root) * n # proxy objects - return size - - update = __update = MutableMapping.update - - def keys(self): - "D.keys() -> a set-like object providing a view on D's keys" - return _OrderedDictKeysView(self) - - def items(self): - "D.items() -> a set-like object providing a view on D's items" - return _OrderedDictItemsView(self) - - def values(self): - "D.values() -> an object providing a view on D's values" - return _OrderedDictValuesView(self) - - __ne__ = MutableMapping.__ne__ - - __marker = object() - - def pop(self, key, default=__marker): - '''od.pop(k[,d]) -> v, remove specified key and return the corresponding - value. If key is not found, d is returned if given, otherwise KeyError - is raised. - - ''' - if key in self: - result = self[key] - del self[key] - return result - if default is self.__marker: - raise KeyError(key) - return default - - def setdefault(self, key, default=None): - 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' - if key in self: - return self[key] - self[key] = default - return default - - @_recursive_repr() - def __repr__(self): - 'od.__repr__() <==> repr(od)' - if not self: - return '%s()' % (self.__class__.__name__,) - return '%s(%r)' % (self.__class__.__name__, list(self.items())) - - def __reduce__(self): - 'Return state information for pickling' - inst_dict = vars(self).copy() - for k in vars(OrderedDict()): - inst_dict.pop(k, None) - return self.__class__, (), inst_dict or None, None, iter(self.items()) - - def copy(self): - 'od.copy() -> a shallow copy of od' - return self.__class__(self) - - @classmethod - def fromkeys(cls, iterable, value=None): - '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S. - If not specified, the value defaults to None. - - ''' - self = cls() - for key in iterable: - self[key] = value - return self - - def __eq__(self, other): - '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive - while comparison to a regular mapping is order-insensitive. - - ''' - if isinstance(other, OrderedDict): - return dict.__eq__(self, other) and all(map(_eq, self, other)) - return dict.__eq__(self, other) - - -try: - from _collections import OrderedDict -except ImportError: - # Leave the pure Python version in place. 
- pass - - -################################################################################ -### namedtuple -################################################################################ - -_class_template = """\ -from builtins import property as _property, tuple as _tuple -from operator import itemgetter as _itemgetter -from collections import OrderedDict - -class {typename}(tuple): - '{typename}({arg_list})' - - __slots__ = () - - _fields = {field_names!r} - - def __new__(_cls, {arg_list}): - 'Create new instance of {typename}({arg_list})' - return _tuple.__new__(_cls, ({arg_list})) - - @classmethod - def _make(cls, iterable, new=tuple.__new__, len=len): - 'Make a new {typename} object from a sequence or iterable' - result = new(cls, iterable) - if len(result) != {num_fields:d}: - raise TypeError('Expected {num_fields:d} arguments, got %d' % len(result)) - return result - - def _replace(_self, **kwds): - 'Return a new {typename} object replacing specified fields with new values' - result = _self._make(map(kwds.pop, {field_names!r}, _self)) - if kwds: - raise ValueError('Got unexpected field names: %r' % list(kwds)) - return result - - def __repr__(self): - 'Return a nicely formatted representation string' - return self.__class__.__name__ + '({repr_fmt})' % self - - def _asdict(self): - 'Return a new OrderedDict which maps field names to their values.' - return OrderedDict(zip(self._fields, self)) - - def __getnewargs__(self): - 'Return self as a plain tuple. Used by copy and pickle.' - return tuple(self) - -{field_defs} -""" - -_repr_template = '{name}=%r' - -_field_template = '''\ - {name} = _property(_itemgetter({index:d}), doc='Alias for field number {index:d}') -''' - -def namedtuple(typename, field_names, verbose=False, rename=False): - """Returns a new subclass of tuple with named fields. - - >>> Point = namedtuple('Point', ['x', 'y']) - >>> Point.__doc__ # docstring for the new class - 'Point(x, y)' - >>> p = Point(11, y=22) # instantiate with positional args or keywords - >>> p[0] + p[1] # indexable like a plain tuple - 33 - >>> x, y = p # unpack like a regular tuple - >>> x, y - (11, 22) - >>> p.x + p.y # fields also accessible by name - 33 - >>> d = p._asdict() # convert to a dictionary - >>> d['x'] - 11 - >>> Point(**d) # convert from a dictionary - Point(x=11, y=22) - >>> p._replace(x=100) # _replace() is like str.replace() but targets named fields - Point(x=100, y=22) - - """ - - # Validate the field names. At the user's option, either generate an error - # message or automatically replace the field name with a valid name. 
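    # Editor's illustration of the rename option handled below
    # (hypothetical call): invalid or duplicate names are replaced
    # positionally, so
    #
    #     namedtuple('Row', ['id', 'class', 'id'], rename=True)._fields
    #     # -> ('id', '_1', '_2')
    #
    # because 'class' is a keyword and the second 'id' is a duplicate.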
- if isinstance(field_names, str): - field_names = field_names.replace(',', ' ').split() - field_names = list(map(str, field_names)) - typename = str(typename) - if rename: - seen = set() - for index, name in enumerate(field_names): - if (not name.isidentifier() - or _iskeyword(name) - or name.startswith('_') - or name in seen): - field_names[index] = '_%d' % index - seen.add(name) - for name in [typename] + field_names: - if type(name) != str: - raise TypeError('Type names and field names must be strings') - if not name.isidentifier(): - raise ValueError('Type names and field names must be valid ' - 'identifiers: %r' % name) - if _iskeyword(name): - raise ValueError('Type names and field names cannot be a ' - 'keyword: %r' % name) - seen = set() - for name in field_names: - if name.startswith('_') and not rename: - raise ValueError('Field names cannot start with an underscore: ' - '%r' % name) - if name in seen: - raise ValueError('Encountered duplicate field name: %r' % name) - seen.add(name) - - # Fill-in the class template - class_definition = _class_template.format( - typename = typename, - field_names = tuple(field_names), - num_fields = len(field_names), - arg_list = repr(tuple(field_names)).replace("'", "")[1:-1], - repr_fmt = ', '.join(_repr_template.format(name=name) - for name in field_names), - field_defs = '\n'.join(_field_template.format(index=index, name=name) - for index, name in enumerate(field_names)) - ) - - # Execute the template string in a temporary namespace and support - # tracing utilities by setting a value for frame.f_globals['__name__'] - namespace = dict(__name__='namedtuple_%s' % typename) - exec(class_definition, namespace) - result = namespace[typename] - result._source = class_definition - if verbose: - print(result._source) - - # For pickling to work, the __module__ variable needs to be set to the frame - # where the named tuple is created. Bypass this step in environments where - # sys._getframe is not defined (Jython for example) or sys._getframe is not - # defined for arguments greater than 0 (IronPython). - try: - result.__module__ = _sys._getframe(1).f_globals.get('__name__', '__main__') - except (AttributeError, ValueError): - pass - - return result - - -######################################################################## -### Counter -######################################################################## - -def _count_elements(mapping, iterable): - 'Tally elements from the iterable.' - mapping_get = mapping.get - for elem in iterable: - mapping[elem] = mapping_get(elem, 0) + 1 - -try: # Load C helper function if available - from _collections import _count_elements -except ImportError: - pass - -class Counter(dict): - '''Dict subclass for counting hashable items. Sometimes called a bag - or multiset. Elements are stored as dictionary keys and their counts - are stored as dictionary values. - - >>> c = Counter('abcdeabcdabcaba') # count elements from a string - - >>> c.most_common(3) # three most common elements - [('a', 5), ('b', 4), ('c', 3)] - >>> sorted(c) # list all unique elements - ['a', 'b', 'c', 'd', 'e'] - >>> ''.join(sorted(c.elements())) # list elements with repetitions - 'aaaaabbbbcccdde' - >>> sum(c.values()) # total of all counts - 15 - - >>> c['a'] # count of letter 'a' - 5 - >>> for elem in 'shazam': # update counts from an iterable - ... 
c[elem] += 1 # by adding 1 to each element's count - >>> c['a'] # now there are seven 'a' - 7 - >>> del c['b'] # remove all 'b' - >>> c['b'] # now there are zero 'b' - 0 - - >>> d = Counter('simsalabim') # make another counter - >>> c.update(d) # add in the second counter - >>> c['a'] # now there are nine 'a' - 9 - - >>> c.clear() # empty the counter - >>> c - Counter() - - Note: If a count is set to zero or reduced to zero, it will remain - in the counter until the entry is deleted or the counter is cleared: - - >>> c = Counter('aaabbc') - >>> c['b'] -= 2 # reduce the count of 'b' by two - >>> c.most_common() # 'b' is still in, but its count is zero - [('a', 3), ('c', 1), ('b', 0)] - - ''' - # References: - # http://en.wikipedia.org/wiki/Multiset - # http://www.gnu.org/software/smalltalk/manual-base/html_node/Bag.html - # http://www.demo2s.com/Tutorial/Cpp/0380__set-multiset/Catalog0380__set-multiset.htm - # http://code.activestate.com/recipes/259174/ - # Knuth, TAOCP Vol. II section 4.6.3 - - def __init__(*args, **kwds): - '''Create a new, empty Counter object. And if given, count elements - from an input iterable. Or, initialize the count from another mapping - of elements to their counts. - - >>> c = Counter() # a new, empty counter - >>> c = Counter('gallahad') # a new counter from an iterable - >>> c = Counter({'a': 4, 'b': 2}) # a new counter from a mapping - >>> c = Counter(a=4, b=2) # a new counter from keyword args - - ''' - if not args: - raise TypeError("descriptor '__init__' of 'Counter' object " - "needs an argument") - self, *args = args - if len(args) > 1: - raise TypeError('expected at most 1 arguments, got %d' % len(args)) - super(Counter, self).__init__() - self.update(*args, **kwds) - - def __missing__(self, key): - 'The count of elements not in the Counter is zero.' - # Needed so that self[missing_item] does not raise KeyError - return 0 - - def most_common(self, n=None): - '''List the n most common elements and their counts from the most - common to the least. If n is None, then list all element counts. - - >>> Counter('abcdeabcdabcaba').most_common(3) - [('a', 5), ('b', 4), ('c', 3)] - - ''' - # Emulate Bag.sortedByCount from Smalltalk - if n is None: - return sorted(self.items(), key=_itemgetter(1), reverse=True) - return _heapq.nlargest(n, self.items(), key=_itemgetter(1)) - - def elements(self): - '''Iterator over elements repeating each as many times as its count. - - >>> c = Counter('ABCABC') - >>> sorted(c.elements()) - ['A', 'A', 'B', 'B', 'C', 'C'] - - # Knuth's example for prime factors of 1836: 2**2 * 3**3 * 17**1 - >>> prime_factors = Counter({2: 2, 3: 3, 17: 1}) - >>> product = 1 - >>> for factor in prime_factors.elements(): # loop over factors - ... product *= factor # and multiply them - >>> product - 1836 - - Note, if an element's count has been set to zero or is a negative - number, elements() will ignore it. - - ''' - # Emulate Bag.do from Smalltalk and Multiset.begin from C++. - return _chain.from_iterable(_starmap(_repeat, self.items())) - - # Override dict methods where necessary - - @classmethod - def fromkeys(cls, iterable, v=None): - # There is no equivalent method for counters because setting v=1 - # means that no element can have a count greater than one. - raise NotImplementedError( - 'Counter.fromkeys() is undefined. Use Counter(iterable) instead.') - - def update(*args, **kwds): - '''Like dict.update() but add counts instead of replacing them. - - Source can be an iterable, a dictionary, or another Counter instance. 
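# --- Editor's aside: a minimal sketch (not part of the vendored file) of
# the zero/negative-count semantics described in the Counter docstring and
# __missing__ above; unary plus (defined further below) strips them.
from collections import Counter
c = Counter(a=2, b=1)
assert c['nope'] == 0            # missing elements read as zero
c['b'] -= 2                      # counts may drop to zero or below...
assert 'b' in c and c['b'] == -1
assert +c == Counter(a=2)        # ...until stripped, e.g. by +c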
- - >>> c = Counter('which') - >>> c.update('witch') # add elements from another iterable - >>> d = Counter('watch') - >>> c.update(d) # add elements from another counter - >>> c['h'] # four 'h' in which, witch, and watch - 4 - - ''' - # The regular dict.update() operation makes no sense here because the - # replace behavior results in some of the original untouched counts - # being mixed in with all of the other counts for a mishmash that - # doesn't have a straightforward interpretation in most counting - # contexts. Instead, we implement straight-addition. Both the inputs - # and outputs are allowed to contain zero and negative counts. - - if not args: - raise TypeError("descriptor 'update' of 'Counter' object " - "needs an argument") - self, *args = args - if len(args) > 1: - raise TypeError('expected at most 1 arguments, got %d' % len(args)) - iterable = args[0] if args else None - if iterable is not None: - if isinstance(iterable, Mapping): - if self: - self_get = self.get - for elem, count in iterable.items(): - self[elem] = count + self_get(elem, 0) - else: - super(Counter, self).update(iterable) # fast path when counter is empty - else: - _count_elements(self, iterable) - if kwds: - self.update(kwds) - - def subtract(*args, **kwds): - '''Like dict.update() but subtracts counts instead of replacing them. - Counts can be reduced below zero. Both the inputs and outputs are - allowed to contain zero and negative counts. - - Source can be an iterable, a dictionary, or another Counter instance. - - >>> c = Counter('which') - >>> c.subtract('witch') # subtract elements from another iterable - >>> c.subtract(Counter('watch')) # subtract elements from another counter - >>> c['h'] # 2 in which, minus 1 in witch, minus 1 in watch - 0 - >>> c['w'] # 1 in which, minus 1 in witch, minus 1 in watch - -1 - - ''' - if not args: - raise TypeError("descriptor 'subtract' of 'Counter' object " - "needs an argument") - self, *args = args - if len(args) > 1: - raise TypeError('expected at most 1 arguments, got %d' % len(args)) - iterable = args[0] if args else None - if iterable is not None: - self_get = self.get - if isinstance(iterable, Mapping): - for elem, count in iterable.items(): - self[elem] = self_get(elem, 0) - count - else: - for elem in iterable: - self[elem] = self_get(elem, 0) - 1 - if kwds: - self.subtract(kwds) - - def copy(self): - 'Return a shallow copy.' - return self.__class__(self) - - def __reduce__(self): - return self.__class__, (dict(self),) - - def __delitem__(self, elem): - 'Like dict.__delitem__() but does not raise KeyError for missing values.' - if elem in self: - super().__delitem__(elem) - - def __repr__(self): - if not self: - return '%s()' % self.__class__.__name__ - try: - items = ', '.join(map('%r: %r'.__mod__, self.most_common())) - return '%s({%s})' % (self.__class__.__name__, items) - except TypeError: - # handle case where values are not orderable - return '{0}({1!r})'.format(self.__class__.__name__, dict(self)) - - # Multiset-style mathematical operations discussed in: - # Knuth TAOCP Volume II section 4.6.3 exercise 19 - # and at http://en.wikipedia.org/wiki/Multiset - # - # Outputs guaranteed to only include positive counts. - # - # To strip negative and zero counts, add-in an empty counter: - # c += Counter() - - def __add__(self, other): - '''Add counts from two counters.
- - >>> Counter('abbb') + Counter('bcc') - Counter({'b': 4, 'c': 2, 'a': 1}) - - ''' - if not isinstance(other, Counter): - return NotImplemented - result = Counter() - for elem, count in self.items(): - newcount = count + other[elem] - if newcount > 0: - result[elem] = newcount - for elem, count in other.items(): - if elem not in self and count > 0: - result[elem] = count - return result - - def __sub__(self, other): - ''' Subtract count, but keep only results with positive counts. - - >>> Counter('abbbc') - Counter('bccd') - Counter({'b': 2, 'a': 1}) - - ''' - if not isinstance(other, Counter): - return NotImplemented - result = Counter() - for elem, count in self.items(): - newcount = count - other[elem] - if newcount > 0: - result[elem] = newcount - for elem, count in other.items(): - if elem not in self and count < 0: - result[elem] = 0 - count - return result - - def __or__(self, other): - '''Union is the maximum of value in either of the input counters. - - >>> Counter('abbb') | Counter('bcc') - Counter({'b': 3, 'c': 2, 'a': 1}) - - ''' - if not isinstance(other, Counter): - return NotImplemented - result = Counter() - for elem, count in self.items(): - other_count = other[elem] - newcount = other_count if count < other_count else count - if newcount > 0: - result[elem] = newcount - for elem, count in other.items(): - if elem not in self and count > 0: - result[elem] = count - return result - - def __and__(self, other): - ''' Intersection is the minimum of corresponding counts. - - >>> Counter('abbb') & Counter('bcc') - Counter({'b': 1}) - - ''' - if not isinstance(other, Counter): - return NotImplemented - result = Counter() - for elem, count in self.items(): - other_count = other[elem] - newcount = count if count < other_count else other_count - if newcount > 0: - result[elem] = newcount - return result - - def __pos__(self): - 'Adds an empty counter, effectively stripping negative and zero counts' - result = Counter() - for elem, count in self.items(): - if count > 0: - result[elem] = count - return result - - def __neg__(self): - '''Subtracts from an empty counter. Strips positive and zero counts, - and flips the sign on negative counts. - - ''' - result = Counter() - for elem, count in self.items(): - if count < 0: - result[elem] = 0 - count - return result - - def _keep_positive(self): - '''Internal method to strip elements with a negative or zero count''' - nonpositive = [elem for elem, count in self.items() if not count > 0] - for elem in nonpositive: - del self[elem] - return self - - def __iadd__(self, other): - '''Inplace add from another counter, keeping only positive counts. - - >>> c = Counter('abbb') - >>> c += Counter('bcc') - >>> c - Counter({'b': 4, 'c': 2, 'a': 1}) - - ''' - for elem, count in other.items(): - self[elem] += count - return self._keep_positive() - - def __isub__(self, other): - '''Inplace subtract counter, but keep only results with positive counts. - - >>> c = Counter('abbbc') - >>> c -= Counter('bccd') - >>> c - Counter({'b': 2, 'a': 1}) - - ''' - for elem, count in other.items(): - self[elem] -= count - return self._keep_positive() - - def __ior__(self, other): - '''Inplace union is the maximum of value from either counter. 
- - >>> c = Counter('abbb') - >>> c |= Counter('bcc') - >>> c - Counter({'b': 3, 'c': 2, 'a': 1}) - - ''' - for elem, other_count in other.items(): - count = self[elem] - if other_count > count: - self[elem] = other_count - return self._keep_positive() - - def __iand__(self, other): - '''Inplace intersection is the minimum of corresponding counts. - - >>> c = Counter('abbb') - >>> c &= Counter('bcc') - >>> c - Counter({'b': 1}) - - ''' - for elem, count in self.items(): - other_count = other[elem] - if other_count < count: - self[elem] = other_count - return self._keep_positive() - - -######################################################################## -### ChainMap (helper for configparser and string.Template) -######################################################################## - -class ChainMap(MutableMapping): - ''' A ChainMap groups multiple dicts (or other mappings) together - to create a single, updateable view. - - The underlying mappings are stored in a list. That list is public and can - be accessed or updated using the *maps* attribute. There is no other - state. - - Lookups search the underlying mappings successively until a key is found. - In contrast, writes, updates, and deletions only operate on the first - mapping. - - ''' - - def __init__(self, *maps): - '''Initialize a ChainMap by setting *maps* to the given mappings. - If no mappings are provided, a single empty dictionary is used. - - ''' - self.maps = list(maps) or [{}] # always at least one map - - def __missing__(self, key): - raise KeyError(key) - - def __getitem__(self, key): - for mapping in self.maps: - try: - return mapping[key] # can't use 'key in mapping' with defaultdict - except KeyError: - pass - return self.__missing__(key) # support subclasses that define __missing__ - - def get(self, key, default=None): - return self[key] if key in self else default - - def __len__(self): - return len(set().union(*self.maps)) # reuses stored hash values if possible - - def __iter__(self): - return iter(set().union(*self.maps)) - - def __contains__(self, key): - return any(key in m for m in self.maps) - - def __bool__(self): - return any(self.maps) - - @_recursive_repr() - def __repr__(self): - return '{0.__class__.__name__}({1})'.format( - self, ', '.join(map(repr, self.maps))) - - @classmethod - def fromkeys(cls, iterable, *args): - 'Create a ChainMap with a single dict created from the iterable.' - return cls(dict.fromkeys(iterable, *args)) - - def copy(self): - 'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]' - return self.__class__(self.maps[0].copy(), *self.maps[1:]) - - __copy__ = copy - - def new_child(self, m=None): # like Django's Context.push() - '''New ChainMap with a new map followed by all previous maps. - If no map is provided, an empty dict is used. - ''' - if m is None: - m = {} - return self.__class__(m, *self.maps) - - @property - def parents(self): # like Django's Context.pop() - 'New ChainMap from maps[1:].' - return self.__class__(*self.maps[1:]) - - def __setitem__(self, key, value): - self.maps[0][key] = value - - def __delitem__(self, key): - try: - del self.maps[0][key] - except KeyError: - raise KeyError('Key not found in the first mapping: {!r}'.format(key)) - - def popitem(self): - 'Remove and return an item pair from maps[0]. Raise KeyError if maps[0] is empty.' - try: - return self.maps[0].popitem() - except KeyError: - raise KeyError('No keys found in the first mapping.') - - def pop(self, key, *args): - 'Remove *key* from maps[0] and return its value. 
Raise KeyError if *key* not in maps[0].' - try: - return self.maps[0].pop(key, *args) - except KeyError: - raise KeyError('Key not found in the first mapping: {!r}'.format(key)) - - def clear(self): - 'Clear maps[0], leaving maps[1:] intact.' - self.maps[0].clear() - - -################################################################################ -### UserDict -################################################################################ - -class UserDict(MutableMapping): - - # Start by filling-out the abstract methods - def __init__(*args, **kwargs): - if not args: - raise TypeError("descriptor '__init__' of 'UserDict' object " - "needs an argument") - self, *args = args - if len(args) > 1: - raise TypeError('expected at most 1 arguments, got %d' % len(args)) - if args: - dict = args[0] - elif 'dict' in kwargs: - dict = kwargs.pop('dict') - import warnings - warnings.warn("Passing 'dict' as keyword argument is deprecated", - PendingDeprecationWarning, stacklevel=2) - else: - dict = None - self.data = {} - if dict is not None: - self.update(dict) - if len(kwargs): - self.update(kwargs) - def __len__(self): return len(self.data) - def __getitem__(self, key): - if key in self.data: - return self.data[key] - if hasattr(self.__class__, "__missing__"): - return self.__class__.__missing__(self, key) - raise KeyError(key) - def __setitem__(self, key, item): self.data[key] = item - def __delitem__(self, key): del self.data[key] - def __iter__(self): - return iter(self.data) - - # Modify __contains__ to work correctly when __missing__ is present - def __contains__(self, key): - return key in self.data - - # Now, add the methods in dicts but not in MutableMapping - def __repr__(self): return repr(self.data) - def copy(self): - if self.__class__ is UserDict: - return UserDict(self.data.copy()) - import copy - data = self.data - try: - self.data = {} - c = copy.copy(self) - finally: - self.data = data - c.update(self) - return c - @classmethod - def fromkeys(cls, iterable, value=None): - d = cls() - for key in iterable: - d[key] = value - return d - - - -################################################################################ -### UserList -################################################################################ - -class UserList(MutableSequence): - """A more or less complete user-defined wrapper around list objects.""" - def __init__(self, initlist=None): - self.data = [] - if initlist is not None: - # XXX should this accept an arbitrary sequence? 
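# --- Editor's aside: a minimal sketch (not part of the vendored file) of
# the ChainMap contract documented above: lookups search maps left to
# right, while writes and deletions touch only maps[0].
from collections import ChainMap
defaults = {'user': 'guest', 'color': 'red'}
cm = ChainMap({}, defaults)
cm['user'] = 'admin'                   # lands in maps[0]
assert cm['user'] == 'admin' and cm['color'] == 'red'
assert defaults['user'] == 'guest'     # underlying mapping untouched
assert cm.parents['user'] == 'guest'   # view that skips maps[0]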
- if type(initlist) == type(self.data): - self.data[:] = initlist - elif isinstance(initlist, UserList): - self.data[:] = initlist.data[:] - else: - self.data = list(initlist) - def __repr__(self): return repr(self.data) - def __lt__(self, other): return self.data < self.__cast(other) - def __le__(self, other): return self.data <= self.__cast(other) - def __eq__(self, other): return self.data == self.__cast(other) - def __gt__(self, other): return self.data > self.__cast(other) - def __ge__(self, other): return self.data >= self.__cast(other) - def __cast(self, other): - return other.data if isinstance(other, UserList) else other - def __contains__(self, item): return item in self.data - def __len__(self): return len(self.data) - def __getitem__(self, i): return self.data[i] - def __setitem__(self, i, item): self.data[i] = item - def __delitem__(self, i): del self.data[i] - def __add__(self, other): - if isinstance(other, UserList): - return self.__class__(self.data + other.data) - elif isinstance(other, type(self.data)): - return self.__class__(self.data + other) - return self.__class__(self.data + list(other)) - def __radd__(self, other): - if isinstance(other, UserList): - return self.__class__(other.data + self.data) - elif isinstance(other, type(self.data)): - return self.__class__(other + self.data) - return self.__class__(list(other) + self.data) - def __iadd__(self, other): - if isinstance(other, UserList): - self.data += other.data - elif isinstance(other, type(self.data)): - self.data += other - else: - self.data += list(other) - return self - def __mul__(self, n): - return self.__class__(self.data*n) - __rmul__ = __mul__ - def __imul__(self, n): - self.data *= n - return self - def append(self, item): self.data.append(item) - def insert(self, i, item): self.data.insert(i, item) - def pop(self, i=-1): return self.data.pop(i) - def remove(self, item): self.data.remove(item) - def clear(self): self.data.clear() - def copy(self): return self.__class__(self) - def count(self, item): return self.data.count(item) - def index(self, item, *args): return self.data.index(item, *args) - def reverse(self): self.data.reverse() - def sort(self, *args, **kwds): self.data.sort(*args, **kwds) - def extend(self, other): - if isinstance(other, UserList): - self.data.extend(other.data) - else: - self.data.extend(other) - - - -################################################################################ -### UserString -################################################################################ - -class UserString(Sequence): - def __init__(self, seq): - if isinstance(seq, str): - self.data = seq - elif isinstance(seq, UserString): - self.data = seq.data[:] - else: - self.data = str(seq) - def __str__(self): return str(self.data) - def __repr__(self): return repr(self.data) - def __int__(self): return int(self.data) - def __float__(self): return float(self.data) - def __complex__(self): return complex(self.data) - def __hash__(self): return hash(self.data) - def __getnewargs__(self): - return (self.data[:],) - - def __eq__(self, string): - if isinstance(string, UserString): - return self.data == string.data - return self.data == string - def __lt__(self, string): - if isinstance(string, UserString): - return self.data < string.data - return self.data < string - def __le__(self, string): - if isinstance(string, UserString): - return self.data <= string.data - return self.data <= string - def __gt__(self, string): - if isinstance(string, UserString): - return self.data > string.data - return 
self.data > string - def __ge__(self, string): - if isinstance(string, UserString): - return self.data >= string.data - return self.data >= string - - def __contains__(self, char): - if isinstance(char, UserString): - char = char.data - return char in self.data - - def __len__(self): return len(self.data) - def __getitem__(self, index): return self.__class__(self.data[index]) - def __add__(self, other): - if isinstance(other, UserString): - return self.__class__(self.data + other.data) - elif isinstance(other, str): - return self.__class__(self.data + other) - return self.__class__(self.data + str(other)) - def __radd__(self, other): - if isinstance(other, str): - return self.__class__(other + self.data) - return self.__class__(str(other) + self.data) - def __mul__(self, n): - return self.__class__(self.data*n) - __rmul__ = __mul__ - def __mod__(self, args): - return self.__class__(self.data % args) - def __rmod__(self, template): - return self.__class__(str(template) % self) - - # the following methods are defined in alphabetical order: - def capitalize(self): return self.__class__(self.data.capitalize()) - def casefold(self): - return self.__class__(self.data.casefold()) - def center(self, width, *args): - return self.__class__(self.data.center(width, *args)) - def count(self, sub, start=0, end=_sys.maxsize): - if isinstance(sub, UserString): - sub = sub.data - return self.data.count(sub, start, end) - def encode(self, encoding=None, errors=None): # XXX improve this? - if encoding: - if errors: - return self.__class__(self.data.encode(encoding, errors)) - return self.__class__(self.data.encode(encoding)) - return self.__class__(self.data.encode()) - def endswith(self, suffix, start=0, end=_sys.maxsize): - return self.data.endswith(suffix, start, end) - def expandtabs(self, tabsize=8): - return self.__class__(self.data.expandtabs(tabsize)) - def find(self, sub, start=0, end=_sys.maxsize): - if isinstance(sub, UserString): - sub = sub.data - return self.data.find(sub, start, end) - def format(self, *args, **kwds): - return self.data.format(*args, **kwds) - def format_map(self, mapping): - return self.data.format_map(mapping) - def index(self, sub, start=0, end=_sys.maxsize): - return self.data.index(sub, start, end) - def isalpha(self): return self.data.isalpha() - def isalnum(self): return self.data.isalnum() - def isdecimal(self): return self.data.isdecimal() - def isdigit(self): return self.data.isdigit() - def isidentifier(self): return self.data.isidentifier() - def islower(self): return self.data.islower() - def isnumeric(self): return self.data.isnumeric() - def isprintable(self): return self.data.isprintable() - def isspace(self): return self.data.isspace() - def istitle(self): return self.data.istitle() - def isupper(self): return self.data.isupper() - def join(self, seq): return self.data.join(seq) - def ljust(self, width, *args): - return self.__class__(self.data.ljust(width, *args)) - def lower(self): return self.__class__(self.data.lower()) - def lstrip(self, chars=None): return self.__class__(self.data.lstrip(chars)) - maketrans = str.maketrans - def partition(self, sep): - return self.data.partition(sep) - def replace(self, old, new, maxsplit=-1): - if isinstance(old, UserString): - old = old.data - if isinstance(new, UserString): - new = new.data - return self.__class__(self.data.replace(old, new, maxsplit)) - def rfind(self, sub, start=0, end=_sys.maxsize): - if isinstance(sub, UserString): - sub = sub.data - return self.data.rfind(sub, start, end) - def rindex(self, sub, start=0, 
end=_sys.maxsize): - return self.data.rindex(sub, start, end) - def rjust(self, width, *args): - return self.__class__(self.data.rjust(width, *args)) - def rpartition(self, sep): - return self.data.rpartition(sep) - def rstrip(self, chars=None): - return self.__class__(self.data.rstrip(chars)) - def split(self, sep=None, maxsplit=-1): - return self.data.split(sep, maxsplit) - def rsplit(self, sep=None, maxsplit=-1): - return self.data.rsplit(sep, maxsplit) - def splitlines(self, keepends=False): return self.data.splitlines(keepends) - def startswith(self, prefix, start=0, end=_sys.maxsize): - return self.data.startswith(prefix, start, end) - def strip(self, chars=None): return self.__class__(self.data.strip(chars)) - def swapcase(self): return self.__class__(self.data.swapcase()) - def title(self): return self.__class__(self.data.title()) - def translate(self, *args): - return self.__class__(self.data.translate(*args)) - def upper(self): return self.__class__(self.data.upper()) - def zfill(self, width): return self.__class__(self.data.zfill(width)) diff --git a/typing_extensions/test_data/python-3.5.2/collections/__main__.py b/typing_extensions/test_data/python-3.5.2/collections/__main__.py deleted file mode 100644 index 763e38e0..00000000 --- a/typing_extensions/test_data/python-3.5.2/collections/__main__.py +++ /dev/null @@ -1,38 +0,0 @@ -################################################################################ -### Simple tests -################################################################################ - -# verify that instances can be pickled -from collections import namedtuple -from pickle import loads, dumps -Point = namedtuple('Point', 'x, y', True) -p = Point(x=10, y=20) -assert p == loads(dumps(p)) - -# test and demonstrate ability to override methods -class Point(namedtuple('Point', 'x y')): - __slots__ = () - @property - def hypot(self): - return (self.x ** 2 + self.y ** 2) ** 0.5 - def __str__(self): - return 'Point: x=%6.3f y=%6.3f hypot=%6.3f' % (self.x, self.y, self.hypot) - -for p in Point(3, 4), Point(14, 5/7.): - print (p) - -class Point(namedtuple('Point', 'x y')): - 'Point class with optimized _make() and _replace() without error-checking' - __slots__ = () - _make = classmethod(tuple.__new__) - def _replace(self, _map=map, **kwds): - return self._make(_map(kwds.get, ('x', 'y'), self)) - -print(Point(11, 22)._replace(x=100)) - -Point3D = namedtuple('Point3D', Point._fields + ('z',)) -print(Point3D.__doc__) - -import doctest, collections -TestResults = namedtuple('TestResults', 'failed attempted') -print(TestResults(*doctest.testmod(collections))) diff --git a/typing_extensions/test_data/python-3.5.2/collections/abc.py b/typing_extensions/test_data/python-3.5.2/collections/abc.py deleted file mode 100644 index 891600d1..00000000 --- a/typing_extensions/test_data/python-3.5.2/collections/abc.py +++ /dev/null @@ -1,2 +0,0 @@ -from _collections_abc import * -from _collections_abc import __all__ diff --git a/typing_extensions/test_data/python-3.5.2/typing.py b/typing_extensions/test_data/python-3.5.2/typing.py deleted file mode 100644 index 4cac66cd..00000000 --- a/typing_extensions/test_data/python-3.5.2/typing.py +++ /dev/null @@ -1,1843 +0,0 @@ -import abc -from abc import abstractmethod, abstractproperty -import collections -import contextlib -import functools -import re as stdlib_re # Avoid confusion with the re we export. 
-import sys -import types -try: - import collections.abc as collections_abc -except ImportError: - import collections as collections_abc # Fallback for PY3.2. - - -# Please keep __all__ alphabetized within each category. -__all__ = [ - # Super-special typing primitives. - 'Any', - 'Callable', - 'Generic', - 'Optional', - 'Tuple', - 'Type', - 'TypeVar', - 'Union', - - # ABCs (from collections.abc). - 'AbstractSet', # collections.abc.Set. - 'Awaitable', - 'AsyncIterator', - 'AsyncIterable', - 'ByteString', - 'Container', - 'Hashable', - 'ItemsView', - 'Iterable', - 'Iterator', - 'KeysView', - 'Mapping', - 'MappingView', - 'MutableMapping', - 'MutableSequence', - 'MutableSet', - 'Sequence', - 'Sized', - 'ValuesView', - - # Structural checks, a.k.a. protocols. - 'Reversible', - 'SupportsAbs', - 'SupportsFloat', - 'SupportsInt', - 'SupportsRound', - - # Concrete collection types. - 'Dict', - 'DefaultDict', - 'List', - 'Set', - 'NamedTuple', # Not really a type. - 'Generator', - - # One-off things. - 'AnyStr', - 'cast', - 'get_type_hints', - 'NewType', - 'no_type_check', - 'no_type_check_decorator', - 'overload', - 'Text', - 'TYPE_CHECKING', -] - -# The pseudo-submodules 're' and 'io' are part of the public -# namespace, but excluded from __all__ because they might stomp on -# legitimate imports of those modules. - - -def _qualname(x): - if sys.version_info[:2] >= (3, 3): - return x.__qualname__ - else: - # Fall back to just name. - return x.__name__ - - -class TypingMeta(type): - """Metaclass for every type defined below. - - This overrides __new__() to require an extra keyword parameter - '_root', which serves as a guard against naive subclassing of the - typing classes. Any legitimate class defined using a metaclass - derived from TypingMeta (including internal subclasses created by - e.g. Union[X, Y]) must pass _root=True. - - This also defines a dummy constructor (all the work is done in - __new__) and a nicer repr(). - """ - - _is_protocol = False - - def __new__(cls, name, bases, namespace, *, _root=False): - if not _root: - raise TypeError("Cannot subclass %s" % - (', '.join(map(_type_repr, bases)) or '()')) - return super().__new__(cls, name, bases, namespace) - - def __init__(self, *args, **kwds): - pass - - def _eval_type(self, globalns, localns): - """Override this in subclasses to interpret forward references. - - For example, Union['C'] is internally stored as - Union[_ForwardRef('C')], which should evaluate to _Union[C], - where C is an object found in globalns or localns (searching - localns first, of course). 
- """ - return self - - def _get_type_vars(self, tvars): - pass - - def __repr__(self): - return '%s.%s' % (self.__module__, _qualname(self)) - - -class Final: - """Mix-in class to prevent instantiation.""" - - __slots__ = () - - def __new__(self, *args, **kwds): - raise TypeError("Cannot instantiate %r" % self.__class__) - - -class _ForwardRef(TypingMeta): - """Wrapper to hold a forward reference.""" - - def __new__(cls, arg): - if not isinstance(arg, str): - raise TypeError('ForwardRef must be a string -- got %r' % (arg,)) - try: - code = compile(arg, '', 'eval') - except SyntaxError: - raise SyntaxError('ForwardRef must be an expression -- got %r' % - (arg,)) - self = super().__new__(cls, arg, (), {}, _root=True) - self.__forward_arg__ = arg - self.__forward_code__ = code - self.__forward_evaluated__ = False - self.__forward_value__ = None - typing_globals = globals() - frame = sys._getframe(1) - while frame is not None and frame.f_globals is typing_globals: - frame = frame.f_back - assert frame is not None - self.__forward_frame__ = frame - return self - - def _eval_type(self, globalns, localns): - if not isinstance(localns, dict): - raise TypeError('ForwardRef localns must be a dict -- got %r' % - (localns,)) - if not isinstance(globalns, dict): - raise TypeError('ForwardRef globalns must be a dict -- got %r' % - (globalns,)) - if not self.__forward_evaluated__: - if globalns is None and localns is None: - globalns = localns = {} - elif globalns is None: - globalns = localns - elif localns is None: - localns = globalns - self.__forward_value__ = _type_check( - eval(self.__forward_code__, globalns, localns), - "Forward references must evaluate to types.") - self.__forward_evaluated__ = True - return self.__forward_value__ - - def __instancecheck__(self, obj): - raise TypeError("Forward references cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - if not self.__forward_evaluated__: - globalns = self.__forward_frame__.f_globals - localns = self.__forward_frame__.f_locals - try: - self._eval_type(globalns, localns) - except NameError: - return False # Too early. - return issubclass(cls, self.__forward_value__) - - def __repr__(self): - return '_ForwardRef(%r)' % (self.__forward_arg__,) - - -class _TypeAlias: - """Internal helper class for defining generic variants of concrete types. - - Note that this is not a type; let's call it a pseudo-type. It can - be used in instance and subclass checks, e.g. isinstance(m, Match) - or issubclass(type(m), Match). However, it cannot be itself the - target of an issubclass() call; e.g. issubclass(Match, C) (for - some arbitrary class C) raises TypeError rather than returning - False. - """ - - __slots__ = ('name', 'type_var', 'impl_type', 'type_checker') - - def __new__(cls, *args, **kwds): - """Constructor. - - This only exists to give a better error message in case - someone tries to subclass a type alias (not a good idea). - """ - if (len(args) == 3 and - isinstance(args[0], str) and - isinstance(args[1], tuple)): - # Close enough. - raise TypeError("A type alias cannot be subclassed") - return object.__new__(cls) - - def __init__(self, name, type_var, impl_type, type_checker): - """Initializer. - - Args: - name: The name, e.g. 'Pattern'. - type_var: The type parameter, e.g. AnyStr, or the - specific type, e.g. str. - impl_type: The implementation type. - type_checker: Function that takes an impl_type instance. - and returns a value that should be a type_var instance. 
- """ - assert isinstance(name, str), repr(name) - assert isinstance(type_var, type), repr(type_var) - assert isinstance(impl_type, type), repr(impl_type) - assert not isinstance(impl_type, TypingMeta), repr(impl_type) - self.name = name - self.type_var = type_var - self.impl_type = impl_type - self.type_checker = type_checker - - def __repr__(self): - return "%s[%s]" % (self.name, _type_repr(self.type_var)) - - def __getitem__(self, parameter): - assert isinstance(parameter, type), repr(parameter) - if not isinstance(self.type_var, TypeVar): - raise TypeError("%s cannot be further parameterized." % self) - if self.type_var.__constraints__: - if not issubclass(parameter, Union[self.type_var.__constraints__]): - raise TypeError("%s is not a valid substitution for %s." % - (parameter, self.type_var)) - return self.__class__(self.name, parameter, - self.impl_type, self.type_checker) - - def __instancecheck__(self, obj): - raise TypeError("Type aliases cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - if cls is Any: - return True - if isinstance(cls, _TypeAlias): - # Covariance. For now, we compare by name. - return (cls.name == self.name and - issubclass(cls.type_var, self.type_var)) - else: - # Note that this is too lenient, because the - # implementation type doesn't carry information about - # whether it is about bytes or str (for example). - return issubclass(cls, self.impl_type) - - -def _get_type_vars(types, tvars): - for t in types: - if isinstance(t, TypingMeta): - t._get_type_vars(tvars) - - -def _type_vars(types): - tvars = [] - _get_type_vars(types, tvars) - return tuple(tvars) - - -def _eval_type(t, globalns, localns): - if isinstance(t, TypingMeta): - return t._eval_type(globalns, localns) - else: - return t - - -def _type_check(arg, msg): - """Check that the argument is a type, and return it. - - As a special case, accept None and return type(None) instead. - Also, _TypeAlias instances (e.g. Match, Pattern) are acceptable. - - The msg argument is a human-readable error message, e.g. - - "Union[arg, ...]: arg should be a type." - - We append the repr() of the actual value (truncated to 100 chars). - """ - if arg is None: - return type(None) - if isinstance(arg, str): - arg = _ForwardRef(arg) - if not isinstance(arg, (type, _TypeAlias)) and not callable(arg): - raise TypeError(msg + " Got %.100r." % (arg,)) - return arg - - -def _type_repr(obj): - """Return the repr() of an object, special-casing types. - - If obj is a type, we return a shorter version than the default - type.__repr__, based on the module and qualified name, which is - typically enough to uniquely identify a type. For everything - else, we fall back on repr(obj). - """ - if isinstance(obj, type) and not isinstance(obj, TypingMeta): - if obj.__module__ == 'builtins': - return _qualname(obj) - else: - return '%s.%s' % (obj.__module__, _qualname(obj)) - else: - return repr(obj) - - -class AnyMeta(TypingMeta): - """Metaclass for Any.""" - - def __new__(cls, name, bases, namespace, _root=False): - self = super().__new__(cls, name, bases, namespace, _root=_root) - return self - - def __instancecheck__(self, obj): - raise TypeError("Any cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - if not isinstance(cls, type): - return super().__subclasscheck__(cls) # To TypeError. - return True - - -class Any(Final, metaclass=AnyMeta, _root=True): - """Special type indicating an unconstrained type. - - - Any object is an instance of Any. - - Any class is a subclass of Any. 
- - As a special case, Any and object are subclasses of each other. - """ - - __slots__ = () - - -class TypeVar(TypingMeta, metaclass=TypingMeta, _root=True): - """Type variable. - - Usage:: - - T = TypeVar('T') # Can be anything - A = TypeVar('A', str, bytes) # Must be str or bytes - - Type variables exist primarily for the benefit of static type - checkers. They serve as the parameters for generic types as well - as for generic function definitions. See class Generic for more - information on generic types. Generic functions work as follows: - - def repeat(x: T, n: int) -> Sequence[T]: - '''Return a list containing n references to x.''' - return [x]*n - - def longest(x: A, y: A) -> A: - '''Return the longest of two strings.''' - return x if len(x) >= len(y) else y - - The latter example's signature is essentially the overloading - of (str, str) -> str and (bytes, bytes) -> bytes. Also note - that if the arguments are instances of some subclass of str, - the return type is still plain str. - - At runtime, isinstance(x, T) will raise TypeError. However, - issubclass(C, T) is true for any class C, and issubclass(str, A) - and issubclass(bytes, A) are true, and issubclass(int, A) is - false. (TODO: Why is this needed? This may change. See #136.) - - Type variables may be marked covariant or contravariant by passing - covariant=True or contravariant=True. See PEP 484 for more - details. By default type variables are invariant. - - Type variables can be introspected. e.g.: - - T.__name__ == 'T' - T.__constraints__ == () - T.__covariant__ == False - T.__contravariant__ = False - A.__constraints__ == (str, bytes) - """ - - def __new__(cls, name, *constraints, bound=None, - covariant=False, contravariant=False): - self = super().__new__(cls, name, (Final,), {}, _root=True) - if covariant and contravariant: - raise ValueError("Bivariant type variables are not supported.") - self.__covariant__ = bool(covariant) - self.__contravariant__ = bool(contravariant) - if constraints and bound is not None: - raise TypeError("Constraints cannot be combined with bound=...") - if constraints and len(constraints) == 1: - raise TypeError("A single constraint is not allowed") - msg = "TypeVar(name, constraint, ...): constraints must be types." - self.__constraints__ = tuple(_type_check(t, msg) for t in constraints) - if bound: - self.__bound__ = _type_check(bound, "Bound must be a type.") - else: - self.__bound__ = None - return self - - def _get_type_vars(self, tvars): - if self not in tvars: - tvars.append(self) - - def __repr__(self): - if self.__covariant__: - prefix = '+' - elif self.__contravariant__: - prefix = '-' - else: - prefix = '~' - return prefix + self.__name__ - - def __instancecheck__(self, instance): - raise TypeError("Type variables cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - # TODO: Make this raise TypeError too? - if cls is self: - return True - if cls is Any: - return True - if self.__bound__ is not None: - return issubclass(cls, self.__bound__) - if self.__constraints__: - return any(issubclass(cls, c) for c in self.__constraints__) - return True - - -# Some unconstrained type variables. These are used by the container types. -# (These are not for export.) -T = TypeVar('T') # Any type. -KT = TypeVar('KT') # Key type. -VT = TypeVar('VT') # Value type. -T_co = TypeVar('T_co', covariant=True) # Any type covariant containers. -V_co = TypeVar('V_co', covariant=True) # Any type covariant containers. 
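# --- Editor's aside: a minimal sketch (not part of the vendored file) of
# the introspection attributes promised by the TypeVar docstring above
# (fresh names S2/A2 chosen to avoid clashing with the module's own).
from typing import TypeVar
S2 = TypeVar('S2')              # unconstrained
A2 = TypeVar('A2', str, bytes)  # constrained, like AnyStr below
assert A2.__constraints__ == (str, bytes)
assert S2.__constraints__ == () and not S2.__covariant__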
-VT_co = TypeVar('VT_co', covariant=True) # Value type covariant containers. -T_contra = TypeVar('T_contra', contravariant=True) # Ditto contravariant. - -# A useful type variable with constraints. This represents string types. -# (This one *is* for export!) -AnyStr = TypeVar('AnyStr', bytes, str) - - -class UnionMeta(TypingMeta): - """Metaclass for Union.""" - - def __new__(cls, name, bases, namespace, parameters=None, _root=False): - if parameters is None: - return super().__new__(cls, name, bases, namespace, _root=_root) - if not isinstance(parameters, tuple): - raise TypeError("Expected parameters=") - # Flatten out Union[Union[...], ...] and type-check non-Union args. - params = [] - msg = "Union[arg, ...]: each arg must be a type." - for p in parameters: - if isinstance(p, UnionMeta): - params.extend(p.__union_params__) - else: - params.append(_type_check(p, msg)) - # Weed out strict duplicates, preserving the first of each occurrence. - all_params = set(params) - if len(all_params) < len(params): - new_params = [] - for t in params: - if t in all_params: - new_params.append(t) - all_params.remove(t) - params = new_params - assert not all_params, all_params - # Weed out subclasses. - # E.g. Union[int, Employee, Manager] == Union[int, Employee]. - # If Any or object is present it will be the sole survivor. - # If both Any and object are present, Any wins. - # Never discard type variables, except against Any. - # (In particular, Union[str, AnyStr] != AnyStr.) - all_params = set(params) - for t1 in params: - if t1 is Any: - return Any - if isinstance(t1, TypeVar): - continue - if isinstance(t1, _TypeAlias): - # _TypeAlias is not a real class. - continue - if not isinstance(t1, type): - assert callable(t1) # A callable might sneak through. - continue - if any(isinstance(t2, type) and issubclass(t1, t2) - for t2 in all_params - {t1} if not isinstance(t2, TypeVar)): - all_params.remove(t1) - # It's not a union if there's only one type left. - if len(all_params) == 1: - return all_params.pop() - # Create a new class with these params. - self = super().__new__(cls, name, bases, {}, _root=True) - self.__union_params__ = tuple(t for t in params if t in all_params) - self.__union_set_params__ = frozenset(self.__union_params__) - return self - - def _eval_type(self, globalns, localns): - p = tuple(_eval_type(t, globalns, localns) - for t in self.__union_params__) - if p == self.__union_params__: - return self - else: - return self.__class__(self.__name__, self.__bases__, {}, - p, _root=True) - - def _get_type_vars(self, tvars): - if self.__union_params__: - _get_type_vars(self.__union_params__, tvars) - - def __repr__(self): - r = super().__repr__() - if self.__union_params__: - r += '[%s]' % (', '.join(_type_repr(t) - for t in self.__union_params__)) - return r - - def __getitem__(self, parameters): - if self.__union_params__ is not None: - raise TypeError( - "Cannot subscript an existing Union. 
Use Union[u, t] instead.") - if parameters == (): - raise TypeError("Cannot take a Union of no types.") - if not isinstance(parameters, tuple): - parameters = (parameters,) - return self.__class__(self.__name__, self.__bases__, - dict(self.__dict__), parameters, _root=True) - - def __eq__(self, other): - if not isinstance(other, UnionMeta): - return NotImplemented - return self.__union_set_params__ == other.__union_set_params__ - - def __hash__(self): - return hash(self.__union_set_params__) - - def __instancecheck__(self, obj): - raise TypeError("Unions cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - if cls is Any: - return True - if self.__union_params__ is None: - return isinstance(cls, UnionMeta) - elif isinstance(cls, UnionMeta): - if cls.__union_params__ is None: - return False - return all(issubclass(c, self) for c in (cls.__union_params__)) - elif isinstance(cls, TypeVar): - if cls in self.__union_params__: - return True - if cls.__constraints__: - return issubclass(Union[cls.__constraints__], self) - return False - else: - return any(issubclass(cls, t) for t in self.__union_params__) - - -class Union(Final, metaclass=UnionMeta, _root=True): - """Union type; Union[X, Y] means either X or Y. - - To define a union, use e.g. Union[int, str]. Details: - - - The arguments must be types and there must be at least one. - - - None as an argument is a special case and is replaced by - type(None). - - - Unions of unions are flattened, e.g.:: - - Union[Union[int, str], float] == Union[int, str, float] - - - Unions of a single argument vanish, e.g.:: - - Union[int] == int # The constructor actually returns int - - - Redundant arguments are skipped, e.g.:: - - Union[int, str, int] == Union[int, str] - - - When comparing unions, the argument order is ignored, e.g.:: - - Union[int, str] == Union[str, int] - - - When two arguments have a subclass relationship, the least - derived argument is kept, e.g.:: - - class Employee: pass - class Manager(Employee): pass - Union[int, Employee, Manager] == Union[int, Employee] - Union[Manager, int, Employee] == Union[int, Employee] - Union[Employee, Manager] == Employee - - - Corollary: if Any is present it is the sole survivor, e.g.:: - - Union[int, Any] == Any - - - Similar for object:: - - Union[int, object] == object - - - To cut a tie: Union[object, Any] == Union[Any, object] == Any. - - - You cannot subclass or instantiate a union. - - - You cannot write Union[X][Y] (what would it mean?). - - - You can use Optional[X] as a shorthand for Union[X, None]. - """ - - # Unsubscripted Union type has params set to None. - __union_params__ = None - __union_set_params__ = None - - -class OptionalMeta(TypingMeta): - """Metaclass for Optional.""" - - def __new__(cls, name, bases, namespace, _root=False): - return super().__new__(cls, name, bases, namespace, _root=_root) - - def __getitem__(self, arg): - arg = _type_check(arg, "Optional[t] requires a single type.") - return Union[arg, type(None)] - - -class Optional(Final, metaclass=OptionalMeta, _root=True): - """Optional type. - - Optional[X] is equivalent to Union[X, type(None)]. 
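# --- Editor's aside: a minimal sketch (not part of the vendored file):
# OptionalMeta.__getitem__ above simply rewrites Optional[X] as a Union,
# so the usual Union identities apply.
from typing import Optional, Union
assert Optional[int] == Union[int, None]   # None becomes type(None)
assert Optional[int] == Union[None, int]   # argument order is ignored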
- """ - - __slots__ = () - - -class TupleMeta(TypingMeta): - """Metaclass for Tuple.""" - - def __new__(cls, name, bases, namespace, parameters=None, - use_ellipsis=False, _root=False): - self = super().__new__(cls, name, bases, namespace, _root=_root) - self.__tuple_params__ = parameters - self.__tuple_use_ellipsis__ = use_ellipsis - return self - - def _get_type_vars(self, tvars): - if self.__tuple_params__: - _get_type_vars(self.__tuple_params__, tvars) - - def _eval_type(self, globalns, localns): - tp = self.__tuple_params__ - if tp is None: - return self - p = tuple(_eval_type(t, globalns, localns) for t in tp) - if p == self.__tuple_params__: - return self - else: - return self.__class__(self.__name__, self.__bases__, {}, - p, _root=True) - - def __repr__(self): - r = super().__repr__() - if self.__tuple_params__ is not None: - params = [_type_repr(p) for p in self.__tuple_params__] - if self.__tuple_use_ellipsis__: - params.append('...') - if not params: - params.append('()') - r += '[%s]' % ( - ', '.join(params)) - return r - - def __getitem__(self, parameters): - if self.__tuple_params__ is not None: - raise TypeError("Cannot re-parameterize %r" % (self,)) - if not isinstance(parameters, tuple): - parameters = (parameters,) - if len(parameters) == 2 and parameters[1] == Ellipsis: - parameters = parameters[:1] - use_ellipsis = True - msg = "Tuple[t, ...]: t must be a type." - else: - use_ellipsis = False - msg = "Tuple[t0, t1, ...]: each t must be a type." - parameters = tuple(_type_check(p, msg) for p in parameters) - return self.__class__(self.__name__, self.__bases__, - dict(self.__dict__), parameters, - use_ellipsis=use_ellipsis, _root=True) - - def __eq__(self, other): - if not isinstance(other, TupleMeta): - return NotImplemented - return (self.__tuple_params__ == other.__tuple_params__ and - self.__tuple_use_ellipsis__ == other.__tuple_use_ellipsis__) - - def __hash__(self): - return hash(self.__tuple_params__) - - def __instancecheck__(self, obj): - raise TypeError("Tuples cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - if cls is Any: - return True - if not isinstance(cls, type): - return super().__subclasscheck__(cls) # To TypeError. - if issubclass(cls, tuple): - return True # Special case. - if not isinstance(cls, TupleMeta): - return super().__subclasscheck__(cls) # False. - if self.__tuple_params__ is None: - return True - if cls.__tuple_params__ is None: - return False # ??? - if cls.__tuple_use_ellipsis__ != self.__tuple_use_ellipsis__: - return False - # Covariance. - return (len(self.__tuple_params__) == len(cls.__tuple_params__) and - all(issubclass(x, p) - for x, p in zip(cls.__tuple_params__, - self.__tuple_params__))) - - -class Tuple(Final, metaclass=TupleMeta, _root=True): - """Tuple type; Tuple[X, Y] is the cross-product type of X and Y. - - Example: Tuple[T1, T2] is a tuple of two elements corresponding - to type variables T1 and T2. Tuple[int, float, str] is a tuple - of an int, a float and a string. - - To specify a variable-length tuple of homogeneous type, use Sequence[T]. - """ - - __slots__ = () - - -class CallableMeta(TypingMeta): - """Metaclass for Callable.""" - - def __new__(cls, name, bases, namespace, _root=False, - args=None, result=None): - if args is None and result is None: - pass # Must be 'class Callable'. - else: - if args is not Ellipsis: - if not isinstance(args, list): - raise TypeError("Callable[args, result]: " - "args must be a list." - " Got %.100r." 
% (args,)) - msg = "Callable[[arg, ...], result]: each arg must be a type." - args = tuple(_type_check(arg, msg) for arg in args) - msg = "Callable[args, result]: result must be a type." - result = _type_check(result, msg) - self = super().__new__(cls, name, bases, namespace, _root=_root) - self.__args__ = args - self.__result__ = result - return self - - def _get_type_vars(self, tvars): - if self.__args__: - _get_type_vars(self.__args__, tvars) - - def _eval_type(self, globalns, localns): - if self.__args__ is None and self.__result__ is None: - return self - if self.__args__ is Ellipsis: - args = self.__args__ - else: - args = [_eval_type(t, globalns, localns) for t in self.__args__] - result = _eval_type(self.__result__, globalns, localns) - if args == self.__args__ and result == self.__result__: - return self - else: - return self.__class__(self.__name__, self.__bases__, {}, - args=args, result=result, _root=True) - - def __repr__(self): - r = super().__repr__() - if self.__args__ is not None or self.__result__ is not None: - if self.__args__ is Ellipsis: - args_r = '...' - else: - args_r = '[%s]' % ', '.join(_type_repr(t) - for t in self.__args__) - r += '[%s, %s]' % (args_r, _type_repr(self.__result__)) - return r - - def __getitem__(self, parameters): - if self.__args__ is not None or self.__result__ is not None: - raise TypeError("This Callable type is already parameterized.") - if not isinstance(parameters, tuple) or len(parameters) != 2: - raise TypeError( - "Callable must be used as Callable[[arg, ...], result].") - args, result = parameters - return self.__class__(self.__name__, self.__bases__, - dict(self.__dict__), _root=True, - args=args, result=result) - - def __eq__(self, other): - if not isinstance(other, CallableMeta): - return NotImplemented - return (self.__args__ == other.__args__ and - self.__result__ == other.__result__) - - def __hash__(self): - return hash(self.__args__) ^ hash(self.__result__) - - def __instancecheck__(self, obj): - # For unparametrized Callable we allow this, because - # typing.Callable should be equivalent to - # collections.abc.Callable. - if self.__args__ is None and self.__result__ is None: - return isinstance(obj, collections_abc.Callable) - else: - raise TypeError("Callable[] cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - if cls is Any: - return True - if not isinstance(cls, CallableMeta): - return super().__subclasscheck__(cls) - if self.__args__ is None and self.__result__ is None: - return True - # We're not doing covariance or contravariance -- this is *invariance*. - return self == cls - - -class Callable(Final, metaclass=CallableMeta, _root=True): - """Callable type; Callable[[int], str] is a function of (int) -> str. - - The subscription syntax must always be used with exactly two - values: the argument list and the return type. The argument list - must be a list of types; the return type must be a single type. - - There is no syntax to indicate optional or keyword arguments, - such function types are rarely used as callback types. - """ - - __slots__ = () - - -def _gorg(a): - """Return the farthest origin of a generic class.""" - assert isinstance(a, GenericMeta) - while a.__origin__ is not None: - a = a.__origin__ - return a - - -def _geqv(a, b): - """Return whether two generic classes are equivalent. - - The intention is to consider generic class X and any of its - parameterized forms (X[T], X[int], etc.) as equivalent. - - However, X is not equivalent to a subclass of X. 
- - The relation is reflexive, symmetric and transitive. - """ - assert isinstance(a, GenericMeta) and isinstance(b, GenericMeta) - # Reduce each to its origin. - return _gorg(a) is _gorg(b) - - -def _next_in_mro(cls): - """Helper for Generic.__new__. - - Returns the class after the last occurrence of Generic or - Generic[...] in cls.__mro__. - """ - next_in_mro = object - # Look for the last occurrence of Generic or Generic[...]. - for i, c in enumerate(cls.__mro__[:-1]): - if isinstance(c, GenericMeta) and _gorg(c) is Generic: - next_in_mro = cls.__mro__[i+1] - return next_in_mro - - -class GenericMeta(TypingMeta, abc.ABCMeta): - """Metaclass for generic types.""" - - def __new__(cls, name, bases, namespace, - tvars=None, args=None, origin=None, extra=None): - self = super().__new__(cls, name, bases, namespace, _root=True) - - if tvars is not None: - # Called from __getitem__() below. - assert origin is not None - assert all(isinstance(t, TypeVar) for t in tvars), tvars - else: - # Called from class statement. - assert tvars is None, tvars - assert args is None, args - assert origin is None, origin - - # Get the full set of tvars from the bases. - tvars = _type_vars(bases) - # Look for Generic[T1, ..., Tn]. - # If found, tvars must be a subset of it. - # If not found, tvars is it. - # Also check for and reject plain Generic, - # and reject multiple Generic[...]. - gvars = None - for base in bases: - if base is Generic: - raise TypeError("Cannot inherit from plain Generic") - if (isinstance(base, GenericMeta) and - base.__origin__ is Generic): - if gvars is not None: - raise TypeError( - "Cannot inherit from Generic[...] multiple types.") - gvars = base.__parameters__ - if gvars is None: - gvars = tvars - else: - tvarset = set(tvars) - gvarset = set(gvars) - if not tvarset <= gvarset: - raise TypeError( - "Some type variables (%s) " - "are not listed in Generic[%s]" % - (", ".join(str(t) for t in tvars if t not in gvarset), - ", ".join(str(g) for g in gvars))) - tvars = gvars - - self.__parameters__ = tvars - self.__args__ = args - self.__origin__ = origin - self.__extra__ = extra - # Speed hack (https://github.com/python/typing/issues/196). - self.__next_in_mro__ = _next_in_mro(self) - return self - - def _get_type_vars(self, tvars): - if self.__origin__ and self.__parameters__: - _get_type_vars(self.__parameters__, tvars) - - def __repr__(self): - if self.__origin__ is not None: - r = repr(self.__origin__) - else: - r = super().__repr__() - if self.__args__: - r += '[%s]' % ( - ', '.join(_type_repr(p) for p in self.__args__)) - if self.__parameters__: - r += '<%s>' % ( - ', '.join(_type_repr(p) for p in self.__parameters__)) - return r - - def __eq__(self, other): - if not isinstance(other, GenericMeta): - return NotImplemented - if self.__origin__ is not None: - return (self.__origin__ is other.__origin__ and - self.__args__ == other.__args__ and - self.__parameters__ == other.__parameters__) - else: - return self is other - - def __hash__(self): - return hash((self.__name__, self.__parameters__)) - - def __getitem__(self, params): - if not isinstance(params, tuple): - params = (params,) - if not params: - raise TypeError( - "Parameter list to %s[...] cannot be empty" % _qualname(self)) - msg = "Parameters to generic types must be types." - params = tuple(_type_check(p, msg) for p in params) - if self is Generic: - # Generic can only be subscripted with unique type variables. - if not all(isinstance(p, TypeVar) for p in params): - raise TypeError( - "Parameters to Generic[...] 
must all be type variables") - if len(set(params)) != len(params): - raise TypeError( - "Parameters to Generic[...] must all be unique") - tvars = params - args = None - elif self is _Protocol: - # _Protocol is internal, don't check anything. - tvars = params - args = None - elif self.__origin__ in (Generic, _Protocol): - # Can't subscript Generic[...] or _Protocol[...]. - raise TypeError("Cannot subscript already-subscripted %s" % - repr(self)) - else: - # Subscripting a regular Generic subclass. - if not self.__parameters__: - raise TypeError("%s is not a generic class" % repr(self)) - alen = len(params) - elen = len(self.__parameters__) - if alen != elen: - raise TypeError( - "Too %s parameters for %s; actual %s, expected %s" % - ("many" if alen > elen else "few", repr(self), alen, elen)) - tvars = _type_vars(params) - args = params - return self.__class__(self.__name__, - (self,) + self.__bases__, - dict(self.__dict__), - tvars=tvars, - args=args, - origin=self, - extra=self.__extra__) - - def __instancecheck__(self, instance): - # Since we extend ABC.__subclasscheck__ and - # ABC.__instancecheck__ inlines the cache checking done by the - # latter, we must extend __instancecheck__ too. For simplicity - # we just skip the cache check -- instance checks for generic - # classes are supposed to be rare anyways. - return self.__subclasscheck__(instance.__class__) - - def __subclasscheck__(self, cls): - if cls is Any: - return True - if isinstance(cls, GenericMeta): - # For a class C(Generic[T]) where T is co-variant, - # C[X] is a subclass of C[Y] iff X is a subclass of Y. - origin = self.__origin__ - if origin is not None and origin is cls.__origin__: - assert len(self.__args__) == len(origin.__parameters__) - assert len(cls.__args__) == len(origin.__parameters__) - for p_self, p_cls, p_origin in zip(self.__args__, - cls.__args__, - origin.__parameters__): - if isinstance(p_origin, TypeVar): - if p_origin.__covariant__: - # Covariant -- p_cls must be a subclass of p_self. - if not issubclass(p_cls, p_self): - break - elif p_origin.__contravariant__: - # Contravariant. I think it's the opposite. :-) - if not issubclass(p_self, p_cls): - break - else: - # Invariant -- p_cls and p_self must equal. - if p_self != p_cls: - break - else: - # If the origin's parameter is not a typevar, - # insist on invariance. - if p_self != p_cls: - break - else: - return True - # If we break out of the loop, the superclass gets a chance. - if super().__subclasscheck__(cls): - return True - if self.__extra__ is None or isinstance(cls, GenericMeta): - return False - return issubclass(cls, self.__extra__) - - -# Prevent checks for Generic to crash when defining Generic. -Generic = None - - -class Generic(metaclass=GenericMeta): - """Abstract base class for generic types. - - A generic type is typically declared by inheriting from an - instantiation of this class with one or more type variables. - For example, a generic mapping type might be defined as:: - - class Mapping(Generic[KT, VT]): - def __getitem__(self, key: KT) -> VT: - ... - # Etc. 
- - This class can then be used as follows:: - - def lookup_name(mapping: Mapping[KT, VT], key: KT, default: VT) -> VT: - try: - return mapping[key] - except KeyError: - return default - """ - - __slots__ = () - - def __new__(cls, *args, **kwds): - if cls.__origin__ is None: - return cls.__next_in_mro__.__new__(cls) - else: - origin = _gorg(cls) - obj = cls.__next_in_mro__.__new__(origin) - obj.__init__(*args, **kwds) - return obj - - -def cast(typ, val): - """Cast a value to a type. - - This returns the value unchanged. To the type checker this - signals that the return value has the designated type, but at - runtime we intentionally don't check anything (we want this - to be as fast as possible). - """ - return val - - -def _get_defaults(func): - """Internal helper to extract the default arguments, by name.""" - code = func.__code__ - pos_count = code.co_argcount - arg_names = code.co_varnames - arg_names = arg_names[:pos_count] - defaults = func.__defaults__ or () - kwdefaults = func.__kwdefaults__ - res = dict(kwdefaults) if kwdefaults else {} - pos_offset = pos_count - len(defaults) - for name, value in zip(arg_names[pos_offset:], defaults): - assert name not in res - res[name] = value - return res - - -def get_type_hints(obj, globalns=None, localns=None): - """Return type hints for a function or method object. - - This is often the same as obj.__annotations__, but it handles - forward references encoded as string literals, and if necessary - adds Optional[t] if a default value equal to None is set. - - BEWARE -- the behavior of globalns and localns is counterintuitive - (unless you are familiar with how eval() and exec() work). The - search order is locals first, then globals. - - - If no dict arguments are passed, an attempt is made to use the - globals from obj, and these are also used as the locals. If the - object does not appear to have globals, an exception is raised. - - - If one dict argument is passed, it is used for both globals and - locals. - - - If two dict arguments are passed, they specify globals and - locals, respectively. - """ - if getattr(obj, '__no_type_check__', None): - return {} - if globalns is None: - globalns = getattr(obj, '__globals__', {}) - if localns is None: - localns = globalns - elif localns is None: - localns = globalns - defaults = _get_defaults(obj) - hints = dict(obj.__annotations__) - for name, value in hints.items(): - if isinstance(value, str): - value = _ForwardRef(value) - value = _eval_type(value, globalns, localns) - if name in defaults and defaults[name] is None: - value = Optional[value] - hints[name] = value - return hints - - -def no_type_check(arg): - """Decorator to indicate that annotations are not type hints. - - The argument must be a class or function; if it is a class, it - applies recursively to all methods defined in that class (but not - to methods defined in its superclasses or subclasses). - - This mutates the function(s) in place. - """ - if isinstance(arg, type): - for obj in arg.__dict__.values(): - if isinstance(obj, types.FunctionType): - obj.__no_type_check__ = True - else: - arg.__no_type_check__ = True - return arg - - -def no_type_check_decorator(decorator): - """Decorator to give another decorator the @no_type_check effect. - - This wraps the decorator with something that wraps the decorated - function in @no_type_check. 
- """ - - @functools.wraps(decorator) - def wrapped_decorator(*args, **kwds): - func = decorator(*args, **kwds) - func = no_type_check(func) - return func - - return wrapped_decorator - - -def _overload_dummy(*args, **kwds): - """Helper for @overload to raise when called.""" - raise NotImplementedError( - "You should not call an overloaded function. " - "A series of @overload-decorated functions " - "outside a stub module should always be followed " - "by an implementation that is not @overload-ed.") - - -def overload(func): - """Decorator for overloaded functions/methods. - - In a stub file, place two or more stub definitions for the same - function in a row, each decorated with @overload. For example: - - @overload - def utf8(value: None) -> None: ... - @overload - def utf8(value: bytes) -> bytes: ... - @overload - def utf8(value: str) -> bytes: ... - - In a non-stub file (i.e. a regular .py file), do the same but - follow it with an implementation. The implementation should *not* - be decorated with @overload. For example: - - @overload - def utf8(value: None) -> None: ... - @overload - def utf8(value: bytes) -> bytes: ... - @overload - def utf8(value: str) -> bytes: ... - def utf8(value): - # implementation goes here - """ - return _overload_dummy - - -class _ProtocolMeta(GenericMeta): - """Internal metaclass for _Protocol. - - This exists so _Protocol classes can be generic without deriving - from Generic. - """ - - def __instancecheck__(self, obj): - raise TypeError("Protocols cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - if not self._is_protocol: - # No structural checks since this isn't a protocol. - return NotImplemented - - if self is _Protocol: - # Every class is a subclass of the empty protocol. - return True - - # Find all attributes defined in the protocol. - attrs = self._get_protocol_attrs() - - for attr in attrs: - if not any(attr in d.__dict__ for d in cls.__mro__): - return False - return True - - def _get_protocol_attrs(self): - # Get all Protocol base classes. - protocol_bases = [] - for c in self.__mro__: - if getattr(c, '_is_protocol', False) and c.__name__ != '_Protocol': - protocol_bases.append(c) - - # Get attributes included in protocol. - attrs = set() - for base in protocol_bases: - for attr in base.__dict__.keys(): - # Include attributes not defined in any non-protocol bases. - for c in self.__mro__: - if (c is not base and attr in c.__dict__ and - not getattr(c, '_is_protocol', False)): - break - else: - if (not attr.startswith('_abc_') and - attr != '__abstractmethods__' and - attr != '_is_protocol' and - attr != '__dict__' and - attr != '__args__' and - attr != '__slots__' and - attr != '_get_protocol_attrs' and - attr != '__next_in_mro__' and - attr != '__parameters__' and - attr != '__origin__' and - attr != '__extra__' and - attr != '__module__'): - attrs.add(attr) - - return attrs - - -class _Protocol(metaclass=_ProtocolMeta): - """Internal base class for protocol classes. - - This implements a simple-minded structural isinstance check - (similar but more general than the one-offs in collections.abc - such as Hashable). - """ - - __slots__ = () - - _is_protocol = True - - -# Various ABCs mimicking those in collections.abc. -# A few are simply re-exported for completeness. - -Hashable = collections_abc.Hashable # Not generic. 
- - -if hasattr(collections_abc, 'Awaitable'): - class Awaitable(Generic[T_co], extra=collections_abc.Awaitable): - __slots__ = () -else: - Awaitable = None - - -if hasattr(collections_abc, 'AsyncIterable'): - - class AsyncIterable(Generic[T_co], extra=collections_abc.AsyncIterable): - __slots__ = () - - class AsyncIterator(AsyncIterable[T_co], - extra=collections_abc.AsyncIterator): - __slots__ = () - -else: - AsyncIterable = None - AsyncIterator = None - - -class Iterable(Generic[T_co], extra=collections_abc.Iterable): - __slots__ = () - - -class Iterator(Iterable[T_co], extra=collections_abc.Iterator): - __slots__ = () - - -class SupportsInt(_Protocol): - __slots__ = () - - @abstractmethod - def __int__(self) -> int: - pass - - -class SupportsFloat(_Protocol): - __slots__ = () - - @abstractmethod - def __float__(self) -> float: - pass - - -class SupportsComplex(_Protocol): - __slots__ = () - - @abstractmethod - def __complex__(self) -> complex: - pass - - -class SupportsBytes(_Protocol): - __slots__ = () - - @abstractmethod - def __bytes__(self) -> bytes: - pass - - -class SupportsAbs(_Protocol[T_co]): - __slots__ = () - - @abstractmethod - def __abs__(self) -> T_co: - pass - - -class SupportsRound(_Protocol[T_co]): - __slots__ = () - - @abstractmethod - def __round__(self, ndigits: int = 0) -> T_co: - pass - - -if hasattr(collections_abc, 'Reversible'): - class Reversible(Iterable[T_co], extra=collections_abc.Reversible): - __slots__ = () -else: - class Reversible(_Protocol[T_co]): - __slots__ = () - - @abstractmethod - def __reversed__(self) -> 'Iterator[T_co]': - pass - - -Sized = collections_abc.Sized # Not generic. - - -class Container(Generic[T_co], extra=collections_abc.Container): - __slots__ = () - - -# Callable was defined earlier. - - -class AbstractSet(Sized, Iterable[T_co], Container[T_co], - extra=collections_abc.Set): - pass - - -class MutableSet(AbstractSet[T], extra=collections_abc.MutableSet): - pass - - -# NOTE: Only the value type is covariant. -class Mapping(Sized, Iterable[KT], Container[KT], Generic[KT, VT_co], - extra=collections_abc.Mapping): - pass - - -class MutableMapping(Mapping[KT, VT], extra=collections_abc.MutableMapping): - pass - -if hasattr(collections_abc, 'Reversible'): - class Sequence(Sized, Reversible[T_co], Container[T_co], - extra=collections_abc.Sequence): - pass -else: - class Sequence(Sized, Iterable[T_co], Container[T_co], - extra=collections_abc.Sequence): - pass - - -class MutableSequence(Sequence[T], extra=collections_abc.MutableSequence): - pass - - -class ByteString(Sequence[int], extra=collections_abc.ByteString): - pass - - -ByteString.register(type(memoryview(b''))) - - -class List(list, MutableSequence[T], extra=list): - - def __new__(cls, *args, **kwds): - if _geqv(cls, List): - raise TypeError("Type List cannot be instantiated; " - "use list() instead") - return list.__new__(cls, *args, **kwds) - - -class Set(set, MutableSet[T], extra=set): - - def __new__(cls, *args, **kwds): - if _geqv(cls, Set): - raise TypeError("Type Set cannot be instantiated; " - "use set() instead") - return set.__new__(cls, *args, **kwds) - - -class _FrozenSetMeta(GenericMeta): - """This metaclass ensures set is not a subclass of FrozenSet. - - Without this metaclass, set would be considered a subclass of - FrozenSet, because FrozenSet.__extra__ is collections.abc.Set, and - set is a subclass of that. 
- """ - - def __subclasscheck__(self, cls): - if issubclass(cls, Set): - return False - return super().__subclasscheck__(cls) - - -class FrozenSet(frozenset, AbstractSet[T_co], metaclass=_FrozenSetMeta, - extra=frozenset): - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, FrozenSet): - raise TypeError("Type FrozenSet cannot be instantiated; " - "use frozenset() instead") - return frozenset.__new__(cls, *args, **kwds) - - -class MappingView(Sized, Iterable[T_co], extra=collections_abc.MappingView): - pass - - -class KeysView(MappingView[KT], AbstractSet[KT], - extra=collections_abc.KeysView): - pass - - -class ItemsView(MappingView[Tuple[KT, VT_co]], - AbstractSet[Tuple[KT, VT_co]], - Generic[KT, VT_co], - extra=collections_abc.ItemsView): - pass - - -class ValuesView(MappingView[VT_co], extra=collections_abc.ValuesView): - pass - - -if hasattr(contextlib, 'AbstractContextManager'): - class ContextManager(Generic[T_co], extra=contextlib.AbstractContextManager): - __slots__ = () - __all__.append('ContextManager') - - -class Dict(dict, MutableMapping[KT, VT], extra=dict): - - def __new__(cls, *args, **kwds): - if _geqv(cls, Dict): - raise TypeError("Type Dict cannot be instantiated; " - "use dict() instead") - return dict.__new__(cls, *args, **kwds) - -class DefaultDict(collections.defaultdict, MutableMapping[KT, VT], - extra=collections.defaultdict): - - def __new__(cls, *args, **kwds): - if _geqv(cls, DefaultDict): - raise TypeError("Type DefaultDict cannot be instantiated; " - "use collections.defaultdict() instead") - return collections.defaultdict.__new__(cls, *args, **kwds) - -# Determine what base class to use for Generator. -if hasattr(collections_abc, 'Generator'): - # Sufficiently recent versions of 3.5 have a Generator ABC. - _G_base = collections_abc.Generator -else: - # Fall back on the exact type. - _G_base = types.GeneratorType - - -class Generator(Iterator[T_co], Generic[T_co, T_contra, V_co], - extra=_G_base): - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, Generator): - raise TypeError("Type Generator cannot be instantiated; " - "create a subclass instead") - return super().__new__(cls, *args, **kwds) - - -# Internal type variable used for Type[]. -CT = TypeVar('CT', covariant=True, bound=type) - - -# This is not a real generic class. Don't use outside annotations. -class Type(type, Generic[CT], extra=type): - """A special construct usable to annotate class objects. - - For example, suppose we have the following classes:: - - class User: ... # Abstract base for User classes - class BasicUser(User): ... - class ProUser(User): ... - class TeamUser(User): ... - - And a function that takes a class argument that's a subclass of - User and returns an instance of the corresponding class:: - - U = TypeVar('U', bound=User) - def new_user(user_class: Type[U]) -> U: - user = user_class() - # (Here we could write the user object to a database) - return user - - joe = new_user(BasicUser) - - At this point the type checker knows that joe has type BasicUser. - """ - - -def NamedTuple(typename, fields): - """Typed version of namedtuple. - - Usage:: - - Employee = typing.NamedTuple('Employee', [('name', str), 'id', int)]) - - This is equivalent to:: - - Employee = collections.namedtuple('Employee', ['name', 'id']) - - The resulting class has one extra attribute: _field_types, - giving a dict mapping field names to types. (The field names - are in the _fields attribute, which is part of the namedtuple - API.) 
- """ - fields = [(n, t) for n, t in fields] - cls = collections.namedtuple(typename, [n for n, t in fields]) - cls._field_types = dict(fields) - # Set the module to the caller's module (otherwise it'd be 'typing'). - try: - cls.__module__ = sys._getframe(1).f_globals.get('__name__', '__main__') - except (AttributeError, ValueError): - pass - return cls - - -def NewType(name, tp): - """NewType creates simple unique types with almost zero - runtime overhead. NewType(name, tp) is considered a subtype of tp - by static type checkers. At runtime, NewType(name, tp) returns - a dummy function that simply returns its argument. Usage:: - - UserId = NewType('UserId', int) - - def name_by_id(user_id: UserId) -> str: - ... - - UserId('user') # Fails type check - - name_by_id(42) # Fails type check - name_by_id(UserId(42)) # OK - - num = UserId(5) + 1 # type: int - """ - - def new_type(x): - return x - - new_type.__name__ = name - new_type.__supertype__ = tp - return new_type - - -# Python-version-specific alias (Python 2: unicode; Python 3: str) -Text = str - - -# Constant that's True when type checking, but False here. -TYPE_CHECKING = False - - -class IO(Generic[AnyStr]): - """Generic base class for TextIO and BinaryIO. - - This is an abstract, generic version of the return of open(). - - NOTE: This does not distinguish between the different possible - classes (text vs. binary, read vs. write vs. read/write, - append-only, unbuffered). The TextIO and BinaryIO subclasses - below capture the distinctions between text vs. binary, which is - pervasive in the interface; however we currently do not offer a - way to track the other distinctions in the type system. - """ - - __slots__ = () - - @abstractproperty - def mode(self) -> str: - pass - - @abstractproperty - def name(self) -> str: - pass - - @abstractmethod - def close(self) -> None: - pass - - @abstractmethod - def closed(self) -> bool: - pass - - @abstractmethod - def fileno(self) -> int: - pass - - @abstractmethod - def flush(self) -> None: - pass - - @abstractmethod - def isatty(self) -> bool: - pass - - @abstractmethod - def read(self, n: int = -1) -> AnyStr: - pass - - @abstractmethod - def readable(self) -> bool: - pass - - @abstractmethod - def readline(self, limit: int = -1) -> AnyStr: - pass - - @abstractmethod - def readlines(self, hint: int = -1) -> List[AnyStr]: - pass - - @abstractmethod - def seek(self, offset: int, whence: int = 0) -> int: - pass - - @abstractmethod - def seekable(self) -> bool: - pass - - @abstractmethod - def tell(self) -> int: - pass - - @abstractmethod - def truncate(self, size: int = None) -> int: - pass - - @abstractmethod - def writable(self) -> bool: - pass - - @abstractmethod - def write(self, s: AnyStr) -> int: - pass - - @abstractmethod - def writelines(self, lines: List[AnyStr]) -> None: - pass - - @abstractmethod - def __enter__(self) -> 'IO[AnyStr]': - pass - - @abstractmethod - def __exit__(self, type, value, traceback) -> None: - pass - - -class BinaryIO(IO[bytes]): - """Typed version of the return of open() in binary mode.""" - - __slots__ = () - - @abstractmethod - def write(self, s: Union[bytes, bytearray]) -> int: - pass - - @abstractmethod - def __enter__(self) -> 'BinaryIO': - pass - - -class TextIO(IO[str]): - """Typed version of the return of open() in text mode.""" - - __slots__ = () - - @abstractproperty - def buffer(self) -> BinaryIO: - pass - - @abstractproperty - def encoding(self) -> str: - pass - - @abstractproperty - def errors(self) -> str: - pass - - @abstractproperty - def 
line_buffering(self) -> bool: - pass - - @abstractproperty - def newlines(self) -> Any: - pass - - @abstractmethod - def __enter__(self) -> 'TextIO': - pass - - -class io: - """Wrapper namespace for IO generic classes.""" - - __all__ = ['IO', 'TextIO', 'BinaryIO'] - IO = IO - TextIO = TextIO - BinaryIO = BinaryIO - -io.__name__ = __name__ + '.io' -sys.modules[io.__name__] = io - - -Pattern = _TypeAlias('Pattern', AnyStr, type(stdlib_re.compile('')), - lambda p: p.pattern) -Match = _TypeAlias('Match', AnyStr, type(stdlib_re.match('', '')), - lambda m: m.re.pattern) - - -class re: - """Wrapper namespace for re type aliases.""" - - __all__ = ['Pattern', 'Match'] - Pattern = Pattern - Match = Match - -re.__name__ = __name__ + '.re' -sys.modules[re.__name__] = re diff --git a/typing_extensions/test_data/python-3.5.3/_collections_abc.py b/typing_extensions/test_data/python-3.5.3/_collections_abc.py deleted file mode 100644 index 8bebd69b..00000000 --- a/typing_extensions/test_data/python-3.5.3/_collections_abc.py +++ /dev/null @@ -1,941 +0,0 @@ -# Copyright 2007 Google, Inc. All Rights Reserved. -# Licensed to PSF under a Contributor Agreement. - -"""Abstract Base Classes (ABCs) for collections, according to PEP 3119. - -Unit tests are in test_collections. -""" - -from abc import ABCMeta, abstractmethod -import sys - -__all__ = ["Awaitable", "Coroutine", "AsyncIterable", "AsyncIterator", - "Hashable", "Iterable", "Iterator", "Generator", - "Sized", "Container", "Callable", - "Set", "MutableSet", - "Mapping", "MutableMapping", - "MappingView", "KeysView", "ItemsView", "ValuesView", - "Sequence", "MutableSequence", - "ByteString", - ] - -# This module has been renamed from collections.abc to _collections_abc to -# speed up interpreter startup. Some of the types such as MutableMapping are -# required early but collections module imports a lot of other modules. -# See issue #19218 -__name__ = "collections.abc" - -# Private list of types that we want to register with the various ABCs -# so that they will pass tests like: -# it = iter(somebytearray) -# assert isinstance(it, Iterable) -# Note: in other implementations, these types might not be distinct -# and they may have their own implementation specific types that -# are not included on this list. -bytes_iterator = type(iter(b'')) -bytearray_iterator = type(iter(bytearray())) -#callable_iterator = ??? 
-dict_keyiterator = type(iter({}.keys())) -dict_valueiterator = type(iter({}.values())) -dict_itemiterator = type(iter({}.items())) -list_iterator = type(iter([])) -list_reverseiterator = type(iter(reversed([]))) -range_iterator = type(iter(range(0))) -longrange_iterator = type(iter(range(1 << 1000))) -set_iterator = type(iter(set())) -str_iterator = type(iter("")) -tuple_iterator = type(iter(())) -zip_iterator = type(iter(zip())) -## views ## -dict_keys = type({}.keys()) -dict_values = type({}.values()) -dict_items = type({}.items()) -## misc ## -mappingproxy = type(type.__dict__) -generator = type((lambda: (yield))()) -## coroutine ## -async def _coro(): pass -_coro = _coro() -coroutine = type(_coro) -_coro.close() # Prevent ResourceWarning -del _coro - - -### ONE-TRICK PONIES ### - -class Hashable(metaclass=ABCMeta): - - __slots__ = () - - @abstractmethod - def __hash__(self): - return 0 - - @classmethod - def __subclasshook__(cls, C): - if cls is Hashable: - for B in C.__mro__: - if "__hash__" in B.__dict__: - if B.__dict__["__hash__"]: - return True - break - return NotImplemented - - -class Awaitable(metaclass=ABCMeta): - - __slots__ = () - - @abstractmethod - def __await__(self): - yield - - @classmethod - def __subclasshook__(cls, C): - if cls is Awaitable: - for B in C.__mro__: - if "__await__" in B.__dict__: - if B.__dict__["__await__"]: - return True - break - return NotImplemented - - -class Coroutine(Awaitable): - - __slots__ = () - - @abstractmethod - def send(self, value): - """Send a value into the coroutine. - Return next yielded value or raise StopIteration. - """ - raise StopIteration - - @abstractmethod - def throw(self, typ, val=None, tb=None): - """Raise an exception in the coroutine. - Return next yielded value or raise StopIteration. - """ - if val is None: - if tb is None: - raise typ - val = typ() - if tb is not None: - val = val.with_traceback(tb) - raise val - - def close(self): - """Raise GeneratorExit inside coroutine. 
- """ - try: - self.throw(GeneratorExit) - except (GeneratorExit, StopIteration): - pass - else: - raise RuntimeError("coroutine ignored GeneratorExit") - - @classmethod - def __subclasshook__(cls, C): - if cls is Coroutine: - mro = C.__mro__ - for method in ('__await__', 'send', 'throw', 'close'): - for base in mro: - if method in base.__dict__: - break - else: - return NotImplemented - return True - return NotImplemented - - -Coroutine.register(coroutine) - - -class AsyncIterable(metaclass=ABCMeta): - - __slots__ = () - - @abstractmethod - def __aiter__(self): - return AsyncIterator() - - @classmethod - def __subclasshook__(cls, C): - if cls is AsyncIterable: - if any("__aiter__" in B.__dict__ for B in C.__mro__): - return True - return NotImplemented - - -class AsyncIterator(AsyncIterable): - - __slots__ = () - - @abstractmethod - async def __anext__(self): - """Return the next item or raise StopAsyncIteration when exhausted.""" - raise StopAsyncIteration - - def __aiter__(self): - return self - - @classmethod - def __subclasshook__(cls, C): - if cls is AsyncIterator: - if (any("__anext__" in B.__dict__ for B in C.__mro__) and - any("__aiter__" in B.__dict__ for B in C.__mro__)): - return True - return NotImplemented - - -class Iterable(metaclass=ABCMeta): - - __slots__ = () - - @abstractmethod - def __iter__(self): - while False: - yield None - - @classmethod - def __subclasshook__(cls, C): - if cls is Iterable: - if any("__iter__" in B.__dict__ for B in C.__mro__): - return True - return NotImplemented - - -class Iterator(Iterable): - - __slots__ = () - - @abstractmethod - def __next__(self): - 'Return the next item from the iterator. When exhausted, raise StopIteration' - raise StopIteration - - def __iter__(self): - return self - - @classmethod - def __subclasshook__(cls, C): - if cls is Iterator: - if (any("__next__" in B.__dict__ for B in C.__mro__) and - any("__iter__" in B.__dict__ for B in C.__mro__)): - return True - return NotImplemented - -Iterator.register(bytes_iterator) -Iterator.register(bytearray_iterator) -#Iterator.register(callable_iterator) -Iterator.register(dict_keyiterator) -Iterator.register(dict_valueiterator) -Iterator.register(dict_itemiterator) -Iterator.register(list_iterator) -Iterator.register(list_reverseiterator) -Iterator.register(range_iterator) -Iterator.register(longrange_iterator) -Iterator.register(set_iterator) -Iterator.register(str_iterator) -Iterator.register(tuple_iterator) -Iterator.register(zip_iterator) - - -class Generator(Iterator): - - __slots__ = () - - def __next__(self): - """Return the next item from the generator. - When exhausted, raise StopIteration. - """ - return self.send(None) - - @abstractmethod - def send(self, value): - """Send a value into the generator. - Return next yielded value or raise StopIteration. - """ - raise StopIteration - - @abstractmethod - def throw(self, typ, val=None, tb=None): - """Raise an exception in the generator. - Return next yielded value or raise StopIteration. - """ - if val is None: - if tb is None: - raise typ - val = typ() - if tb is not None: - val = val.with_traceback(tb) - raise val - - def close(self): - """Raise GeneratorExit inside generator. 
- """ - try: - self.throw(GeneratorExit) - except (GeneratorExit, StopIteration): - pass - else: - raise RuntimeError("generator ignored GeneratorExit") - - @classmethod - def __subclasshook__(cls, C): - if cls is Generator: - mro = C.__mro__ - for method in ('__iter__', '__next__', 'send', 'throw', 'close'): - for base in mro: - if method in base.__dict__: - break - else: - return NotImplemented - return True - return NotImplemented - - -Generator.register(generator) - - -class Sized(metaclass=ABCMeta): - - __slots__ = () - - @abstractmethod - def __len__(self): - return 0 - - @classmethod - def __subclasshook__(cls, C): - if cls is Sized: - if any("__len__" in B.__dict__ for B in C.__mro__): - return True - return NotImplemented - - -class Container(metaclass=ABCMeta): - - __slots__ = () - - @abstractmethod - def __contains__(self, x): - return False - - @classmethod - def __subclasshook__(cls, C): - if cls is Container: - if any("__contains__" in B.__dict__ for B in C.__mro__): - return True - return NotImplemented - - -class Callable(metaclass=ABCMeta): - - __slots__ = () - - @abstractmethod - def __call__(self, *args, **kwds): - return False - - @classmethod - def __subclasshook__(cls, C): - if cls is Callable: - if any("__call__" in B.__dict__ for B in C.__mro__): - return True - return NotImplemented - - -### SETS ### - - -class Set(Sized, Iterable, Container): - - """A set is a finite, iterable container. - - This class provides concrete generic implementations of all - methods except for __contains__, __iter__ and __len__. - - To override the comparisons (presumably for speed, as the - semantics are fixed), redefine __le__ and __ge__, - then the other operations will automatically follow suit. - """ - - __slots__ = () - - def __le__(self, other): - if not isinstance(other, Set): - return NotImplemented - if len(self) > len(other): - return False - for elem in self: - if elem not in other: - return False - return True - - def __lt__(self, other): - if not isinstance(other, Set): - return NotImplemented - return len(self) < len(other) and self.__le__(other) - - def __gt__(self, other): - if not isinstance(other, Set): - return NotImplemented - return len(self) > len(other) and self.__ge__(other) - - def __ge__(self, other): - if not isinstance(other, Set): - return NotImplemented - if len(self) < len(other): - return False - for elem in other: - if elem not in self: - return False - return True - - def __eq__(self, other): - if not isinstance(other, Set): - return NotImplemented - return len(self) == len(other) and self.__le__(other) - - @classmethod - def _from_iterable(cls, it): - '''Construct an instance of the class from any iterable input. - - Must override this method if the class constructor signature - does not accept an iterable for an input. - ''' - return cls(it) - - def __and__(self, other): - if not isinstance(other, Iterable): - return NotImplemented - return self._from_iterable(value for value in other if value in self) - - __rand__ = __and__ - - def isdisjoint(self, other): - 'Return True if two sets have a null intersection.' 
- for value in other: - if value in self: - return False - return True - - def __or__(self, other): - if not isinstance(other, Iterable): - return NotImplemented - chain = (e for s in (self, other) for e in s) - return self._from_iterable(chain) - - __ror__ = __or__ - - def __sub__(self, other): - if not isinstance(other, Set): - if not isinstance(other, Iterable): - return NotImplemented - other = self._from_iterable(other) - return self._from_iterable(value for value in self - if value not in other) - - def __rsub__(self, other): - if not isinstance(other, Set): - if not isinstance(other, Iterable): - return NotImplemented - other = self._from_iterable(other) - return self._from_iterable(value for value in other - if value not in self) - - def __xor__(self, other): - if not isinstance(other, Set): - if not isinstance(other, Iterable): - return NotImplemented - other = self._from_iterable(other) - return (self - other) | (other - self) - - __rxor__ = __xor__ - - def _hash(self): - """Compute the hash value of a set. - - Note that we don't define __hash__: not all sets are hashable. - But if you define a hashable set type, its __hash__ should - call this function. - - This must be compatible __eq__. - - All sets ought to compare equal if they contain the same - elements, regardless of how they are implemented, and - regardless of the order of the elements; so there's not much - freedom for __eq__ or __hash__. We match the algorithm used - by the built-in frozenset type. - """ - MAX = sys.maxsize - MASK = 2 * MAX + 1 - n = len(self) - h = 1927868237 * (n + 1) - h &= MASK - for x in self: - hx = hash(x) - h ^= (hx ^ (hx << 16) ^ 89869747) * 3644798167 - h &= MASK - h = h * 69069 + 907133923 - h &= MASK - if h > MAX: - h -= MASK + 1 - if h == -1: - h = 590923713 - return h - -Set.register(frozenset) - - -class MutableSet(Set): - """A mutable set is a finite, iterable container. - - This class provides concrete generic implementations of all - methods except for __contains__, __iter__, __len__, - add(), and discard(). - - To override the comparisons (presumably for speed, as the - semantics are fixed), all you have to do is redefine __le__ and - then the other operations will automatically follow suit. - """ - - __slots__ = () - - @abstractmethod - def add(self, value): - """Add an element.""" - raise NotImplementedError - - @abstractmethod - def discard(self, value): - """Remove an element. Do not raise an exception if absent.""" - raise NotImplementedError - - def remove(self, value): - """Remove an element. If not a member, raise a KeyError.""" - if value not in self: - raise KeyError(value) - self.discard(value) - - def pop(self): - """Return the popped value. Raise KeyError if empty.""" - it = iter(self) - try: - value = next(it) - except StopIteration: - raise KeyError - self.discard(value) - return value - - def clear(self): - """This is slow (creates N new iterators!) 
but effective.""" - try: - while True: - self.pop() - except KeyError: - pass - - def __ior__(self, it): - for value in it: - self.add(value) - return self - - def __iand__(self, it): - for value in (self - it): - self.discard(value) - return self - - def __ixor__(self, it): - if it is self: - self.clear() - else: - if not isinstance(it, Set): - it = self._from_iterable(it) - for value in it: - if value in self: - self.discard(value) - else: - self.add(value) - return self - - def __isub__(self, it): - if it is self: - self.clear() - else: - for value in it: - self.discard(value) - return self - -MutableSet.register(set) - - -### MAPPINGS ### - - -class Mapping(Sized, Iterable, Container): - - __slots__ = () - - """A Mapping is a generic container for associating key/value - pairs. - - This class provides concrete generic implementations of all - methods except for __getitem__, __iter__, and __len__. - - """ - - @abstractmethod - def __getitem__(self, key): - raise KeyError - - def get(self, key, default=None): - 'D.get(k[,d]) -> D[k] if k in D, else d. d defaults to None.' - try: - return self[key] - except KeyError: - return default - - def __contains__(self, key): - try: - self[key] - except KeyError: - return False - else: - return True - - def keys(self): - "D.keys() -> a set-like object providing a view on D's keys" - return KeysView(self) - - def items(self): - "D.items() -> a set-like object providing a view on D's items" - return ItemsView(self) - - def values(self): - "D.values() -> an object providing a view on D's values" - return ValuesView(self) - - def __eq__(self, other): - if not isinstance(other, Mapping): - return NotImplemented - return dict(self.items()) == dict(other.items()) - -Mapping.register(mappingproxy) - - -class MappingView(Sized): - - __slots__ = '_mapping', - - def __init__(self, mapping): - self._mapping = mapping - - def __len__(self): - return len(self._mapping) - - def __repr__(self): - return '{0.__class__.__name__}({0._mapping!r})'.format(self) - - -class KeysView(MappingView, Set): - - __slots__ = () - - @classmethod - def _from_iterable(self, it): - return set(it) - - def __contains__(self, key): - return key in self._mapping - - def __iter__(self): - yield from self._mapping - -KeysView.register(dict_keys) - - -class ItemsView(MappingView, Set): - - __slots__ = () - - @classmethod - def _from_iterable(self, it): - return set(it) - - def __contains__(self, item): - key, value = item - try: - v = self._mapping[key] - except KeyError: - return False - else: - return v == value - - def __iter__(self): - for key in self._mapping: - yield (key, self._mapping[key]) - -ItemsView.register(dict_items) - - -class ValuesView(MappingView): - - __slots__ = () - - def __contains__(self, value): - for key in self._mapping: - if value == self._mapping[key]: - return True - return False - - def __iter__(self): - for key in self._mapping: - yield self._mapping[key] - -ValuesView.register(dict_values) - - -class MutableMapping(Mapping): - - __slots__ = () - - """A MutableMapping is a generic container for associating - key/value pairs. - - This class provides concrete generic implementations of all - methods except for __getitem__, __setitem__, __delitem__, - __iter__, and __len__. 
- - """ - - @abstractmethod - def __setitem__(self, key, value): - raise KeyError - - @abstractmethod - def __delitem__(self, key): - raise KeyError - - __marker = object() - - def pop(self, key, default=__marker): - '''D.pop(k[,d]) -> v, remove specified key and return the corresponding value. - If key is not found, d is returned if given, otherwise KeyError is raised. - ''' - try: - value = self[key] - except KeyError: - if default is self.__marker: - raise - return default - else: - del self[key] - return value - - def popitem(self): - '''D.popitem() -> (k, v), remove and return some (key, value) pair - as a 2-tuple; but raise KeyError if D is empty. - ''' - try: - key = next(iter(self)) - except StopIteration: - raise KeyError - value = self[key] - del self[key] - return key, value - - def clear(self): - 'D.clear() -> None. Remove all items from D.' - try: - while True: - self.popitem() - except KeyError: - pass - - def update(*args, **kwds): - ''' D.update([E, ]**F) -> None. Update D from mapping/iterable E and F. - If E present and has a .keys() method, does: for k in E: D[k] = E[k] - If E present and lacks .keys() method, does: for (k, v) in E: D[k] = v - In either case, this is followed by: for k, v in F.items(): D[k] = v - ''' - if not args: - raise TypeError("descriptor 'update' of 'MutableMapping' object " - "needs an argument") - self, *args = args - if len(args) > 1: - raise TypeError('update expected at most 1 arguments, got %d' % - len(args)) - if args: - other = args[0] - if isinstance(other, Mapping): - for key in other: - self[key] = other[key] - elif hasattr(other, "keys"): - for key in other.keys(): - self[key] = other[key] - else: - for key, value in other: - self[key] = value - for key, value in kwds.items(): - self[key] = value - - def setdefault(self, key, default=None): - 'D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D' - try: - return self[key] - except KeyError: - self[key] = default - return default - -MutableMapping.register(dict) - - -### SEQUENCES ### - - -class Sequence(Sized, Iterable, Container): - - """All the operations on a read-only sequence. - - Concrete subclasses must override __new__ or __init__, - __getitem__, and __len__. - """ - - __slots__ = () - - @abstractmethod - def __getitem__(self, index): - raise IndexError - - def __iter__(self): - i = 0 - try: - while True: - v = self[i] - yield v - i += 1 - except IndexError: - return - - def __contains__(self, value): - for v in self: - if v == value: - return True - return False - - def __reversed__(self): - for i in reversed(range(len(self))): - yield self[i] - - def index(self, value, start=0, stop=None): - '''S.index(value, [start, [stop]]) -> integer -- return first index of value. - Raises ValueError if the value is not present. - ''' - if start is not None and start < 0: - start = max(len(self) + start, 0) - if stop is not None and stop < 0: - stop += len(self) - - i = start - while stop is None or i < stop: - try: - if self[i] == value: - return i - except IndexError: - break - i += 1 - raise ValueError - - def count(self, value): - 'S.count(value) -> integer -- return number of occurrences of value' - return sum(1 for v in self if v == value) - -Sequence.register(tuple) -Sequence.register(str) -Sequence.register(range) -Sequence.register(memoryview) - - -class ByteString(Sequence): - - """This unifies bytes and bytearray. - - XXX Should add all their methods. 
- """ - - __slots__ = () - -ByteString.register(bytes) -ByteString.register(bytearray) - - -class MutableSequence(Sequence): - - __slots__ = () - - """All the operations on a read-write sequence. - - Concrete subclasses must provide __new__ or __init__, - __getitem__, __setitem__, __delitem__, __len__, and insert(). - - """ - - @abstractmethod - def __setitem__(self, index, value): - raise IndexError - - @abstractmethod - def __delitem__(self, index): - raise IndexError - - @abstractmethod - def insert(self, index, value): - 'S.insert(index, value) -- insert value before index' - raise IndexError - - def append(self, value): - 'S.append(value) -- append value to the end of the sequence' - self.insert(len(self), value) - - def clear(self): - 'S.clear() -> None -- remove all items from S' - try: - while True: - self.pop() - except IndexError: - pass - - def reverse(self): - 'S.reverse() -- reverse *IN PLACE*' - n = len(self) - for i in range(n//2): - self[i], self[n-i-1] = self[n-i-1], self[i] - - def extend(self, values): - 'S.extend(iterable) -- extend sequence by appending elements from the iterable' - for v in values: - self.append(v) - - def pop(self, index=-1): - '''S.pop([index]) -> item -- remove and return item at index (default last). - Raise IndexError if list is empty or index is out of range. - ''' - v = self[index] - del self[index] - return v - - def remove(self, value): - '''S.remove(value) -- remove first occurrence of value. - Raise ValueError if the value is not present. - ''' - del self[self.index(value)] - - def __iadd__(self, values): - self.extend(values) - return self - -MutableSequence.register(list) -MutableSequence.register(bytearray) # Multiply inheriting, see ByteString diff --git a/typing_extensions/test_data/python-3.5.3/abc.py b/typing_extensions/test_data/python-3.5.3/abc.py deleted file mode 100644 index 1cbf96a6..00000000 --- a/typing_extensions/test_data/python-3.5.3/abc.py +++ /dev/null @@ -1,248 +0,0 @@ -# Copyright 2007 Google, Inc. All Rights Reserved. -# Licensed to PSF under a Contributor Agreement. - -"""Abstract Base Classes (ABCs) according to PEP 3119.""" - -from _weakrefset import WeakSet - - -def abstractmethod(funcobj): - """A decorator indicating abstract methods. - - Requires that the metaclass is ABCMeta or derived from it. A - class that has a metaclass derived from ABCMeta cannot be - instantiated unless all of its abstract methods are overridden. - The abstract methods can be called using any of the normal - 'super' call mechanisms. - - Usage: - - class C(metaclass=ABCMeta): - @abstractmethod - def my_abstract_method(self, ...): - ... - """ - funcobj.__isabstractmethod__ = True - return funcobj - - -class abstractclassmethod(classmethod): - """ - A decorator indicating abstract classmethods. - - Similar to abstractmethod. - - Usage: - - class C(metaclass=ABCMeta): - @abstractclassmethod - def my_abstract_classmethod(cls, ...): - ... - - 'abstractclassmethod' is deprecated. Use 'classmethod' with - 'abstractmethod' instead. - """ - - __isabstractmethod__ = True - - def __init__(self, callable): - callable.__isabstractmethod__ = True - super().__init__(callable) - - -class abstractstaticmethod(staticmethod): - """ - A decorator indicating abstract staticmethods. - - Similar to abstractmethod. - - Usage: - - class C(metaclass=ABCMeta): - @abstractstaticmethod - def my_abstract_staticmethod(...): - ... - - 'abstractstaticmethod' is deprecated. Use 'staticmethod' with - 'abstractmethod' instead. 
- """ - - __isabstractmethod__ = True - - def __init__(self, callable): - callable.__isabstractmethod__ = True - super().__init__(callable) - - -class abstractproperty(property): - """ - A decorator indicating abstract properties. - - Requires that the metaclass is ABCMeta or derived from it. A - class that has a metaclass derived from ABCMeta cannot be - instantiated unless all of its abstract properties are overridden. - The abstract properties can be called using any of the normal - 'super' call mechanisms. - - Usage: - - class C(metaclass=ABCMeta): - @abstractproperty - def my_abstract_property(self): - ... - - This defines a read-only property; you can also define a read-write - abstract property using the 'long' form of property declaration: - - class C(metaclass=ABCMeta): - def getx(self): ... - def setx(self, value): ... - x = abstractproperty(getx, setx) - - 'abstractproperty' is deprecated. Use 'property' with 'abstractmethod' - instead. - """ - - __isabstractmethod__ = True - - -class ABCMeta(type): - - """Metaclass for defining Abstract Base Classes (ABCs). - - Use this metaclass to create an ABC. An ABC can be subclassed - directly, and then acts as a mix-in class. You can also register - unrelated concrete classes (even built-in classes) and unrelated - ABCs as 'virtual subclasses' -- these and their descendants will - be considered subclasses of the registering ABC by the built-in - issubclass() function, but the registering ABC won't show up in - their MRO (Method Resolution Order) nor will method - implementations defined by the registering ABC be callable (not - even via super()). - - """ - - # A global counter that is incremented each time a class is - # registered as a virtual subclass of anything. It forces the - # negative cache to be cleared before its next use. - # Note: this counter is private. Use `abc.get_cache_token()` for - # external code. - _abc_invalidation_counter = 0 - - def __new__(mcls, name, bases, namespace): - cls = super().__new__(mcls, name, bases, namespace) - # Compute set of abstract method names - abstracts = {name - for name, value in namespace.items() - if getattr(value, "__isabstractmethod__", False)} - for base in bases: - for name in getattr(base, "__abstractmethods__", set()): - value = getattr(cls, name, None) - if getattr(value, "__isabstractmethod__", False): - abstracts.add(name) - cls.__abstractmethods__ = frozenset(abstracts) - # Set up inheritance registry - cls._abc_registry = WeakSet() - cls._abc_cache = WeakSet() - cls._abc_negative_cache = WeakSet() - cls._abc_negative_cache_version = ABCMeta._abc_invalidation_counter - return cls - - def register(cls, subclass): - """Register a virtual subclass of an ABC. - - Returns the subclass, to allow usage as a class decorator. - """ - if not isinstance(subclass, type): - raise TypeError("Can only register classes") - if issubclass(subclass, cls): - return subclass # Already a subclass - # Subtle: test for cycles *after* testing for "already a subclass"; - # this means we allow X.register(X) and interpret it as a no-op. 
- if issubclass(cls, subclass): - # This would create a cycle, which is bad for the algorithm below - raise RuntimeError("Refusing to create an inheritance cycle") - cls._abc_registry.add(subclass) - ABCMeta._abc_invalidation_counter += 1 # Invalidate negative cache - return subclass - - def _dump_registry(cls, file=None): - """Debug helper to print the ABC registry.""" - print("Class: %s.%s" % (cls.__module__, cls.__qualname__), file=file) - print("Inv.counter: %s" % ABCMeta._abc_invalidation_counter, file=file) - for name in sorted(cls.__dict__.keys()): - if name.startswith("_abc_"): - value = getattr(cls, name) - print("%s: %r" % (name, value), file=file) - - def __instancecheck__(cls, instance): - """Override for isinstance(instance, cls).""" - # Inline the cache checking - subclass = instance.__class__ - if subclass in cls._abc_cache: - return True - subtype = type(instance) - if subtype is subclass: - if (cls._abc_negative_cache_version == - ABCMeta._abc_invalidation_counter and - subclass in cls._abc_negative_cache): - return False - # Fall back to the subclass check. - return cls.__subclasscheck__(subclass) - return any(cls.__subclasscheck__(c) for c in {subclass, subtype}) - - def __subclasscheck__(cls, subclass): - """Override for issubclass(subclass, cls).""" - # Check cache - if subclass in cls._abc_cache: - return True - # Check negative cache; may have to invalidate - if cls._abc_negative_cache_version < ABCMeta._abc_invalidation_counter: - # Invalidate the negative cache - cls._abc_negative_cache = WeakSet() - cls._abc_negative_cache_version = ABCMeta._abc_invalidation_counter - elif subclass in cls._abc_negative_cache: - return False - # Check the subclass hook - ok = cls.__subclasshook__(subclass) - if ok is not NotImplemented: - assert isinstance(ok, bool) - if ok: - cls._abc_cache.add(subclass) - else: - cls._abc_negative_cache.add(subclass) - return ok - # Check if it's a direct subclass - if cls in getattr(subclass, '__mro__', ()): - cls._abc_cache.add(subclass) - return True - # Check if it's a subclass of a registered class (recursive) - for rcls in cls._abc_registry: - if issubclass(subclass, rcls): - cls._abc_cache.add(subclass) - return True - # Check if it's a subclass of a subclass (recursive) - for scls in cls.__subclasses__(): - if issubclass(subclass, scls): - cls._abc_cache.add(subclass) - return True - # No dice; update negative cache - cls._abc_negative_cache.add(subclass) - return False - - -class ABC(metaclass=ABCMeta): - """Helper class that provides a standard way to create an ABC using - inheritance. - """ - pass - - -def get_cache_token(): - """Returns the current ABC cache token. - - The token is an opaque object (supporting equality testing) identifying the - current version of the ABC cache for virtual subclasses. The token changes - with every call to ``register()`` on any ABC. - """ - return ABCMeta._abc_invalidation_counter diff --git a/typing_extensions/test_data/python-3.5.3/collections/__init__.py b/typing_extensions/test_data/python-3.5.3/collections/__init__.py deleted file mode 100644 index bea811db..00000000 --- a/typing_extensions/test_data/python-3.5.3/collections/__init__.py +++ /dev/null @@ -1,1242 +0,0 @@ -'''This module implements specialized container datatypes providing -alternatives to Python's general purpose built-in containers, dict, -list, set, and tuple. 
- -* namedtuple factory function for creating tuple subclasses with named fields -* deque list-like container with fast appends and pops on either end -* ChainMap dict-like class for creating a single view of multiple mappings -* Counter dict subclass for counting hashable objects -* OrderedDict dict subclass that remembers the order entries were added -* defaultdict dict subclass that calls a factory function to supply missing values -* UserDict wrapper around dictionary objects for easier dict subclassing -* UserList wrapper around list objects for easier list subclassing -* UserString wrapper around string objects for easier string subclassing - -''' - -__all__ = ['deque', 'defaultdict', 'namedtuple', 'UserDict', 'UserList', - 'UserString', 'Counter', 'OrderedDict', 'ChainMap'] - -# For backwards compatibility, continue to make the collections ABCs -# available through the collections module. -from _collections_abc import * -import _collections_abc -__all__ += _collections_abc.__all__ - -from operator import itemgetter as _itemgetter, eq as _eq -from keyword import iskeyword as _iskeyword -import sys as _sys -import heapq as _heapq -from _weakref import proxy as _proxy -from itertools import repeat as _repeat, chain as _chain, starmap as _starmap -from reprlib import recursive_repr as _recursive_repr - -try: - from _collections import deque -except ImportError: - pass -else: - MutableSequence.register(deque) - -try: - from _collections import defaultdict -except ImportError: - pass - - -################################################################################ -### OrderedDict -################################################################################ - -class _OrderedDictKeysView(KeysView): - - def __reversed__(self): - yield from reversed(self._mapping) - -class _OrderedDictItemsView(ItemsView): - - def __reversed__(self): - for key in reversed(self._mapping): - yield (key, self._mapping[key]) - -class _OrderedDictValuesView(ValuesView): - - def __reversed__(self): - for key in reversed(self._mapping): - yield self._mapping[key] - -class _Link(object): - __slots__ = 'prev', 'next', 'key', '__weakref__' - -class OrderedDict(dict): - 'Dictionary that remembers insertion order' - # An inherited dict maps keys to values. - # The inherited dict provides __getitem__, __len__, __contains__, and get. - # The remaining methods are order-aware. - # Big-O running times for all methods are the same as regular dictionaries. - - # The internal self.__map dict maps keys to links in a doubly linked list. - # The circular doubly linked list starts and ends with a sentinel element. - # The sentinel element never gets deleted (this simplifies the algorithm). - # The sentinel is in self.__hardroot with a weakref proxy in self.__root. - # The prev links are weakref proxies (to prevent circular references). - # Individual links are kept alive by the hard reference in self.__map. - # Those hard references disappear when a key is deleted from an OrderedDict. - - def __init__(*args, **kwds): - '''Initialize an ordered dictionary. The signature is the same as - regular dictionaries, but keyword arguments are not recommended because - their insertion order is arbitrary. 
- - ''' - if not args: - raise TypeError("descriptor '__init__' of 'OrderedDict' object " - "needs an argument") - self, *args = args - if len(args) > 1: - raise TypeError('expected at most 1 arguments, got %d' % len(args)) - try: - self.__root - except AttributeError: - self.__hardroot = _Link() - self.__root = root = _proxy(self.__hardroot) - root.prev = root.next = root - self.__map = {} - self.__update(*args, **kwds) - - def __setitem__(self, key, value, - dict_setitem=dict.__setitem__, proxy=_proxy, Link=_Link): - 'od.__setitem__(i, y) <==> od[i]=y' - # Setting a new item creates a new link at the end of the linked list, - # and the inherited dictionary is updated with the new key/value pair. - if key not in self: - self.__map[key] = link = Link() - root = self.__root - last = root.prev - link.prev, link.next, link.key = last, root, key - last.next = link - root.prev = proxy(link) - dict_setitem(self, key, value) - - def __delitem__(self, key, dict_delitem=dict.__delitem__): - 'od.__delitem__(y) <==> del od[y]' - # Deleting an existing item uses self.__map to find the link which gets - # removed by updating the links in the predecessor and successor nodes. - dict_delitem(self, key) - link = self.__map.pop(key) - link_prev = link.prev - link_next = link.next - link_prev.next = link_next - link_next.prev = link_prev - link.prev = None - link.next = None - - def __iter__(self): - 'od.__iter__() <==> iter(od)' - # Traverse the linked list in order. - root = self.__root - curr = root.next - while curr is not root: - yield curr.key - curr = curr.next - - def __reversed__(self): - 'od.__reversed__() <==> reversed(od)' - # Traverse the linked list in reverse order. - root = self.__root - curr = root.prev - while curr is not root: - yield curr.key - curr = curr.prev - - def clear(self): - 'od.clear() -> None. Remove all items from od.' - root = self.__root - root.prev = root.next = root - self.__map.clear() - dict.clear(self) - - def popitem(self, last=True): - '''od.popitem() -> (k, v), return and remove a (key, value) pair. - Pairs are returned in LIFO order if last is true or FIFO order if false. - - ''' - if not self: - raise KeyError('dictionary is empty') - root = self.__root - if last: - link = root.prev - link_prev = link.prev - link_prev.next = root - root.prev = link_prev - else: - link = root.next - link_next = link.next - root.next = link_next - link_next.prev = root - key = link.key - del self.__map[key] - value = dict.pop(self, key) - return key, value - - def move_to_end(self, key, last=True): - '''Move an existing element to the end (or beginning if last==False). - - Raises KeyError if the element does not exist. - When last=True, acts like a fast version of self[key]=self.pop(key). 
- - ''' - link = self.__map[key] - link_prev = link.prev - link_next = link.next - soft_link = link_next.prev - link_prev.next = link_next - link_next.prev = link_prev - root = self.__root - if last: - last = root.prev - link.prev = last - link.next = root - root.prev = soft_link - last.next = link - else: - first = root.next - link.prev = root - link.next = first - first.prev = soft_link - root.next = link - - def __sizeof__(self): - sizeof = _sys.getsizeof - n = len(self) + 1 # number of links including root - size = sizeof(self.__dict__) # instance dictionary - size += sizeof(self.__map) * 2 # internal dict and inherited dict - size += sizeof(self.__hardroot) * n # link objects - size += sizeof(self.__root) * n # proxy objects - return size - - update = __update = MutableMapping.update - - def keys(self): - "D.keys() -> a set-like object providing a view on D's keys" - return _OrderedDictKeysView(self) - - def items(self): - "D.items() -> a set-like object providing a view on D's items" - return _OrderedDictItemsView(self) - - def values(self): - "D.values() -> an object providing a view on D's values" - return _OrderedDictValuesView(self) - - __ne__ = MutableMapping.__ne__ - - __marker = object() - - def pop(self, key, default=__marker): - '''od.pop(k[,d]) -> v, remove specified key and return the corresponding - value. If key is not found, d is returned if given, otherwise KeyError - is raised. - - ''' - if key in self: - result = self[key] - del self[key] - return result - if default is self.__marker: - raise KeyError(key) - return default - - def setdefault(self, key, default=None): - 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' - if key in self: - return self[key] - self[key] = default - return default - - @_recursive_repr() - def __repr__(self): - 'od.__repr__() <==> repr(od)' - if not self: - return '%s()' % (self.__class__.__name__,) - return '%s(%r)' % (self.__class__.__name__, list(self.items())) - - def __reduce__(self): - 'Return state information for pickling' - inst_dict = vars(self).copy() - for k in vars(OrderedDict()): - inst_dict.pop(k, None) - return self.__class__, (), inst_dict or None, None, iter(self.items()) - - def copy(self): - 'od.copy() -> a shallow copy of od' - return self.__class__(self) - - @classmethod - def fromkeys(cls, iterable, value=None): - '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S. - If not specified, the value defaults to None. - - ''' - self = cls() - for key in iterable: - self[key] = value - return self - - def __eq__(self, other): - '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive - while comparison to a regular mapping is order-insensitive. - - ''' - if isinstance(other, OrderedDict): - return dict.__eq__(self, other) and all(map(_eq, self, other)) - return dict.__eq__(self, other) - - -try: - from _collections import OrderedDict -except ImportError: - # Leave the pure Python version in place. 
- pass - - -################################################################################ -### namedtuple -################################################################################ - -_class_template = """\ -from builtins import property as _property, tuple as _tuple -from operator import itemgetter as _itemgetter -from collections import OrderedDict - -class {typename}(tuple): - '{typename}({arg_list})' - - __slots__ = () - - _fields = {field_names!r} - - def __new__(_cls, {arg_list}): - 'Create new instance of {typename}({arg_list})' - return _tuple.__new__(_cls, ({arg_list})) - - @classmethod - def _make(cls, iterable, new=tuple.__new__, len=len): - 'Make a new {typename} object from a sequence or iterable' - result = new(cls, iterable) - if len(result) != {num_fields:d}: - raise TypeError('Expected {num_fields:d} arguments, got %d' % len(result)) - return result - - def _replace(_self, **kwds): - 'Return a new {typename} object replacing specified fields with new values' - result = _self._make(map(kwds.pop, {field_names!r}, _self)) - if kwds: - raise ValueError('Got unexpected field names: %r' % list(kwds)) - return result - - def __repr__(self): - 'Return a nicely formatted representation string' - return self.__class__.__name__ + '({repr_fmt})' % self - - def _asdict(self): - 'Return a new OrderedDict which maps field names to their values.' - return OrderedDict(zip(self._fields, self)) - - def __getnewargs__(self): - 'Return self as a plain tuple. Used by copy and pickle.' - return tuple(self) - -{field_defs} -""" - -_repr_template = '{name}=%r' - -_field_template = '''\ - {name} = _property(_itemgetter({index:d}), doc='Alias for field number {index:d}') -''' - -def namedtuple(typename, field_names, verbose=False, rename=False): - """Returns a new subclass of tuple with named fields. - - >>> Point = namedtuple('Point', ['x', 'y']) - >>> Point.__doc__ # docstring for the new class - 'Point(x, y)' - >>> p = Point(11, y=22) # instantiate with positional args or keywords - >>> p[0] + p[1] # indexable like a plain tuple - 33 - >>> x, y = p # unpack like a regular tuple - >>> x, y - (11, 22) - >>> p.x + p.y # fields also accessible by name - 33 - >>> d = p._asdict() # convert to a dictionary - >>> d['x'] - 11 - >>> Point(**d) # convert from a dictionary - Point(x=11, y=22) - >>> p._replace(x=100) # _replace() is like str.replace() but targets named fields - Point(x=100, y=22) - - """ - - # Validate the field names. At the user's option, either generate an error - # message or automatically replace the field name with a valid name. 
- if isinstance(field_names, str): - field_names = field_names.replace(',', ' ').split() - field_names = list(map(str, field_names)) - typename = str(typename) - if rename: - seen = set() - for index, name in enumerate(field_names): - if (not name.isidentifier() - or _iskeyword(name) - or name.startswith('_') - or name in seen): - field_names[index] = '_%d' % index - seen.add(name) - for name in [typename] + field_names: - if type(name) != str: - raise TypeError('Type names and field names must be strings') - if not name.isidentifier(): - raise ValueError('Type names and field names must be valid ' - 'identifiers: %r' % name) - if _iskeyword(name): - raise ValueError('Type names and field names cannot be a ' - 'keyword: %r' % name) - seen = set() - for name in field_names: - if name.startswith('_') and not rename: - raise ValueError('Field names cannot start with an underscore: ' - '%r' % name) - if name in seen: - raise ValueError('Encountered duplicate field name: %r' % name) - seen.add(name) - - # Fill-in the class template - class_definition = _class_template.format( - typename = typename, - field_names = tuple(field_names), - num_fields = len(field_names), - arg_list = repr(tuple(field_names)).replace("'", "")[1:-1], - repr_fmt = ', '.join(_repr_template.format(name=name) - for name in field_names), - field_defs = '\n'.join(_field_template.format(index=index, name=name) - for index, name in enumerate(field_names)) - ) - - # Execute the template string in a temporary namespace and support - # tracing utilities by setting a value for frame.f_globals['__name__'] - namespace = dict(__name__='namedtuple_%s' % typename) - exec(class_definition, namespace) - result = namespace[typename] - result._source = class_definition - if verbose: - print(result._source) - - # For pickling to work, the __module__ variable needs to be set to the frame - # where the named tuple is created. Bypass this step in environments where - # sys._getframe is not defined (Jython for example) or sys._getframe is not - # defined for arguments greater than 0 (IronPython). - try: - result.__module__ = _sys._getframe(1).f_globals.get('__name__', '__main__') - except (AttributeError, ValueError): - pass - - return result - - -######################################################################## -### Counter -######################################################################## - -def _count_elements(mapping, iterable): - 'Tally elements from the iterable.' - mapping_get = mapping.get - for elem in iterable: - mapping[elem] = mapping_get(elem, 0) + 1 - -try: # Load C helper function if available - from _collections import _count_elements -except ImportError: - pass - -class Counter(dict): - '''Dict subclass for counting hashable items. Sometimes called a bag - or multiset. Elements are stored as dictionary keys and their counts - are stored as dictionary values. - - >>> c = Counter('abcdeabcdabcaba') # count elements from a string - - >>> c.most_common(3) # three most common elements - [('a', 5), ('b', 4), ('c', 3)] - >>> sorted(c) # list all unique elements - ['a', 'b', 'c', 'd', 'e'] - >>> ''.join(sorted(c.elements())) # list elements with repetitions - 'aaaaabbbbcccdde' - >>> sum(c.values()) # total of all counts - 15 - - >>> c['a'] # count of letter 'a' - 5 - >>> for elem in 'shazam': # update counts from an iterable - ... 
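
namedtuple() is plain code generation: the validated names are interpolated into _class_template, the resulting source is exec()'d in a throwaway namespace, and the class is pulled back out (with the source kept on _source and printable via verbose=True). A small sketch of the validation and rename behavior implemented above, against the installed namedtuple:

    from collections import namedtuple

    # rename=True swaps invalid or duplicate field names for _0, _1, ...
    Row = namedtuple('Row', ['id', 'class', 'id'], rename=True)
    assert Row._fields == ('id', '_1', '_2')   # 'class' is a keyword, 'id' a dup

    # Without rename, the same input fails validation instead.
    try:
        namedtuple('Row', ['id', 'class'])
    except ValueError:
        pass   # Type names and field names cannot be a keyword: 'class'
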
c[elem] += 1 # by adding 1 to each element's count - >>> c['a'] # now there are seven 'a' - 7 - >>> del c['b'] # remove all 'b' - >>> c['b'] # now there are zero 'b' - 0 - - >>> d = Counter('simsalabim') # make another counter - >>> c.update(d) # add in the second counter - >>> c['a'] # now there are nine 'a' - 9 - - >>> c.clear() # empty the counter - >>> c - Counter() - - Note: If a count is set to zero or reduced to zero, it will remain - in the counter until the entry is deleted or the counter is cleared: - - >>> c = Counter('aaabbc') - >>> c['b'] -= 2 # reduce the count of 'b' by two - >>> c.most_common() # 'b' is still in, but its count is zero - [('a', 3), ('c', 1), ('b', 0)] - - ''' - # References: - # http://en.wikipedia.org/wiki/Multiset - # http://www.gnu.org/software/smalltalk/manual-base/html_node/Bag.html - # http://www.demo2s.com/Tutorial/Cpp/0380__set-multiset/Catalog0380__set-multiset.htm - # http://code.activestate.com/recipes/259174/ - # Knuth, TAOCP Vol. II section 4.6.3 - - def __init__(*args, **kwds): - '''Create a new, empty Counter object. And if given, count elements - from an input iterable. Or, initialize the count from another mapping - of elements to their counts. - - >>> c = Counter() # a new, empty counter - >>> c = Counter('gallahad') # a new counter from an iterable - >>> c = Counter({'a': 4, 'b': 2}) # a new counter from a mapping - >>> c = Counter(a=4, b=2) # a new counter from keyword args - - ''' - if not args: - raise TypeError("descriptor '__init__' of 'Counter' object " - "needs an argument") - self, *args = args - if len(args) > 1: - raise TypeError('expected at most 1 arguments, got %d' % len(args)) - super(Counter, self).__init__() - self.update(*args, **kwds) - - def __missing__(self, key): - 'The count of elements not in the Counter is zero.' - # Needed so that self[missing_item] does not raise KeyError - return 0 - - def most_common(self, n=None): - '''List the n most common elements and their counts from the most - common to the least. If n is None, then list all element counts. - - >>> Counter('abcdeabcdabcaba').most_common(3) - [('a', 5), ('b', 4), ('c', 3)] - - ''' - # Emulate Bag.sortedByCount from Smalltalk - if n is None: - return sorted(self.items(), key=_itemgetter(1), reverse=True) - return _heapq.nlargest(n, self.items(), key=_itemgetter(1)) - - def elements(self): - '''Iterator over elements repeating each as many times as its count. - - >>> c = Counter('ABCABC') - >>> sorted(c.elements()) - ['A', 'A', 'B', 'B', 'C', 'C'] - - # Knuth's example for prime factors of 1836: 2**2 * 3**3 * 17**1 - >>> prime_factors = Counter({2: 2, 3: 3, 17: 1}) - >>> product = 1 - >>> for factor in prime_factors.elements(): # loop over factors - ... product *= factor # and multiply them - >>> product - 1836 - - Note, if an element's count has been set to zero or is a negative - number, elements() will ignore it. - - ''' - # Emulate Bag.do from Smalltalk and Multiset.begin from C++. - return _chain.from_iterable(_starmap(_repeat, self.items())) - - # Override dict methods where necessary - - @classmethod - def fromkeys(cls, iterable, v=None): - # There is no equivalent method for counters because setting v=1 - # means that no element can have a count greater than one. - raise NotImplementedError( - 'Counter.fromkeys() is undefined. Use Counter(iterable) instead.') - - def update(*args, **kwds): - '''Like dict.update() but add counts instead of replacing them. - - Source can be an iterable, a dictionary, or another Counter instance. 
- - >>> c = Counter('which') - >>> c.update('witch') # add elements from another iterable - >>> d = Counter('watch') - >>> c.update(d) # add elements from another counter - >>> c['h'] # four 'h' in which, witch, and watch - 4 - - ''' - # The regular dict.update() operation makes no sense here because the - # replace behavior results in the some of original untouched counts - # being mixed-in with all of the other counts for a mismash that - # doesn't have a straight-forward interpretation in most counting - # contexts. Instead, we implement straight-addition. Both the inputs - # and outputs are allowed to contain zero and negative counts. - - if not args: - raise TypeError("descriptor 'update' of 'Counter' object " - "needs an argument") - self, *args = args - if len(args) > 1: - raise TypeError('expected at most 1 arguments, got %d' % len(args)) - iterable = args[0] if args else None - if iterable is not None: - if isinstance(iterable, Mapping): - if self: - self_get = self.get - for elem, count in iterable.items(): - self[elem] = count + self_get(elem, 0) - else: - super(Counter, self).update(iterable) # fast path when counter is empty - else: - _count_elements(self, iterable) - if kwds: - self.update(kwds) - - def subtract(*args, **kwds): - '''Like dict.update() but subtracts counts instead of replacing them. - Counts can be reduced below zero. Both the inputs and outputs are - allowed to contain zero and negative counts. - - Source can be an iterable, a dictionary, or another Counter instance. - - >>> c = Counter('which') - >>> c.subtract('witch') # subtract elements from another iterable - >>> c.subtract(Counter('watch')) # subtract elements from another counter - >>> c['h'] # 2 in which, minus 1 in witch, minus 1 in watch - 0 - >>> c['w'] # 1 in which, minus 1 in witch, minus 1 in watch - -1 - - ''' - if not args: - raise TypeError("descriptor 'subtract' of 'Counter' object " - "needs an argument") - self, *args = args - if len(args) > 1: - raise TypeError('expected at most 1 arguments, got %d' % len(args)) - iterable = args[0] if args else None - if iterable is not None: - self_get = self.get - if isinstance(iterable, Mapping): - for elem, count in iterable.items(): - self[elem] = self_get(elem, 0) - count - else: - for elem in iterable: - self[elem] = self_get(elem, 0) - 1 - if kwds: - self.subtract(kwds) - - def copy(self): - 'Return a shallow copy.' - return self.__class__(self) - - def __reduce__(self): - return self.__class__, (dict(self),) - - def __delitem__(self, elem): - 'Like dict.__delitem__() but does not raise KeyError for missing values.' - if elem in self: - super().__delitem__(elem) - - def __repr__(self): - if not self: - return '%s()' % self.__class__.__name__ - try: - items = ', '.join(map('%r: %r'.__mod__, self.most_common())) - return '%s({%s})' % (self.__class__.__name__, items) - except TypeError: - # handle case where values are not orderable - return '{0}({1!r})'.format(self.__class__.__name__, dict(self)) - - # Multiset-style mathematical operations discussed in: - # Knuth TAOCP Volume II section 4.6.3 exercise 19 - # and at http://en.wikipedia.org/wiki/Multiset - # - # Outputs guaranteed to only include positive counts. - # - # To strip negative and zero counts, add-in an empty counter: - # c += Counter() - - def __add__(self, other): - '''Add counts from two counters. 
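
A quick sketch of the counting API implemented above: most_common() takes the heapq.nlargest() path when n is given, elements() expands counts back into a stream, update() adds where dict.update() would overwrite, and subtract() is its mirror image, with negative counts allowed throughout:

    from collections import Counter

    c = Counter('abracadabra')
    assert c.most_common(1) == [('a', 5)]               # heapq.nlargest path
    assert sorted(c.elements()) == list('aaaaabbcdrr')  # counts expanded

    c = Counter(a=2)
    c.update('aab')                  # iterable: tally each element
    assert c == Counter(a=4, b=1)
    c.update({'b': 10})              # mapping: counts add, not replace
    assert c['b'] == 11
    c.subtract(b=20)                 # counts may go negative
    assert c['b'] == -9
    assert c['missing'] == 0         # __missing__ returns 0, no KeyError
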
- - >>> Counter('abbb') + Counter('bcc') - Counter({'b': 4, 'c': 2, 'a': 1}) - - ''' - if not isinstance(other, Counter): - return NotImplemented - result = Counter() - for elem, count in self.items(): - newcount = count + other[elem] - if newcount > 0: - result[elem] = newcount - for elem, count in other.items(): - if elem not in self and count > 0: - result[elem] = count - return result - - def __sub__(self, other): - ''' Subtract count, but keep only results with positive counts. - - >>> Counter('abbbc') - Counter('bccd') - Counter({'b': 2, 'a': 1}) - - ''' - if not isinstance(other, Counter): - return NotImplemented - result = Counter() - for elem, count in self.items(): - newcount = count - other[elem] - if newcount > 0: - result[elem] = newcount - for elem, count in other.items(): - if elem not in self and count < 0: - result[elem] = 0 - count - return result - - def __or__(self, other): - '''Union is the maximum of value in either of the input counters. - - >>> Counter('abbb') | Counter('bcc') - Counter({'b': 3, 'c': 2, 'a': 1}) - - ''' - if not isinstance(other, Counter): - return NotImplemented - result = Counter() - for elem, count in self.items(): - other_count = other[elem] - newcount = other_count if count < other_count else count - if newcount > 0: - result[elem] = newcount - for elem, count in other.items(): - if elem not in self and count > 0: - result[elem] = count - return result - - def __and__(self, other): - ''' Intersection is the minimum of corresponding counts. - - >>> Counter('abbb') & Counter('bcc') - Counter({'b': 1}) - - ''' - if not isinstance(other, Counter): - return NotImplemented - result = Counter() - for elem, count in self.items(): - other_count = other[elem] - newcount = count if count < other_count else other_count - if newcount > 0: - result[elem] = newcount - return result - - def __pos__(self): - 'Adds an empty counter, effectively stripping negative and zero counts' - result = Counter() - for elem, count in self.items(): - if count > 0: - result[elem] = count - return result - - def __neg__(self): - '''Subtracts from an empty counter. Strips positive and zero counts, - and flips the sign on negative counts. - - ''' - result = Counter() - for elem, count in self.items(): - if count < 0: - result[elem] = 0 - count - return result - - def _keep_positive(self): - '''Internal method to strip elements with a negative or zero count''' - nonpositive = [elem for elem, count in self.items() if not count > 0] - for elem in nonpositive: - del self[elem] - return self - - def __iadd__(self, other): - '''Inplace add from another counter, keeping only positive counts. - - >>> c = Counter('abbb') - >>> c += Counter('bcc') - >>> c - Counter({'b': 4, 'c': 2, 'a': 1}) - - ''' - for elem, count in other.items(): - self[elem] += count - return self._keep_positive() - - def __isub__(self, other): - '''Inplace subtract counter, but keep only results with positive counts. - - >>> c = Counter('abbbc') - >>> c -= Counter('bccd') - >>> c - Counter({'b': 2, 'a': 1}) - - ''' - for elem, count in other.items(): - self[elem] -= count - return self._keep_positive() - - def __ior__(self, other): - '''Inplace union is the maximum of value from either counter. 
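
The binary operators above build a fresh Counter and keep only positive results, while the in-place variants combine first and then strip non-positives through _keep_positive(); unary + and - are shorthand for that stripping. Sketch:

    from collections import Counter

    a, b = Counter('abbb'), Counter('bcc')
    assert a + b == Counter({'b': 4, 'c': 2, 'a': 1})    # addition
    assert a - b == Counter({'b': 2, 'a': 1})            # saturating subtraction
    assert (a | b) == Counter({'b': 3, 'c': 2, 'a': 1})  # max of counts
    assert (a & b) == Counter({'b': 1})                  # min of counts

    c = Counter(a=1, b=-2)
    assert +c == Counter(a=1)    # unary plus drops zero and negative counts
    assert -c == Counter(b=2)    # unary minus flips negatives, drops the rest
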
- - >>> c = Counter('abbb') - >>> c |= Counter('bcc') - >>> c - Counter({'b': 3, 'c': 2, 'a': 1}) - - ''' - for elem, other_count in other.items(): - count = self[elem] - if other_count > count: - self[elem] = other_count - return self._keep_positive() - - def __iand__(self, other): - '''Inplace intersection is the minimum of corresponding counts. - - >>> c = Counter('abbb') - >>> c &= Counter('bcc') - >>> c - Counter({'b': 1}) - - ''' - for elem, count in self.items(): - other_count = other[elem] - if other_count < count: - self[elem] = other_count - return self._keep_positive() - - -######################################################################## -### ChainMap (helper for configparser and string.Template) -######################################################################## - -class ChainMap(MutableMapping): - ''' A ChainMap groups multiple dicts (or other mappings) together - to create a single, updateable view. - - The underlying mappings are stored in a list. That list is public and can - be accessed or updated using the *maps* attribute. There is no other - state. - - Lookups search the underlying mappings successively until a key is found. - In contrast, writes, updates, and deletions only operate on the first - mapping. - - ''' - - def __init__(self, *maps): - '''Initialize a ChainMap by setting *maps* to the given mappings. - If no mappings are provided, a single empty dictionary is used. - - ''' - self.maps = list(maps) or [{}] # always at least one map - - def __missing__(self, key): - raise KeyError(key) - - def __getitem__(self, key): - for mapping in self.maps: - try: - return mapping[key] # can't use 'key in mapping' with defaultdict - except KeyError: - pass - return self.__missing__(key) # support subclasses that define __missing__ - - def get(self, key, default=None): - return self[key] if key in self else default - - def __len__(self): - return len(set().union(*self.maps)) # reuses stored hash values if possible - - def __iter__(self): - return iter(set().union(*self.maps)) - - def __contains__(self, key): - return any(key in m for m in self.maps) - - def __bool__(self): - return any(self.maps) - - @_recursive_repr() - def __repr__(self): - return '{0.__class__.__name__}({1})'.format( - self, ', '.join(map(repr, self.maps))) - - @classmethod - def fromkeys(cls, iterable, *args): - 'Create a ChainMap with a single dict created from the iterable.' - return cls(dict.fromkeys(iterable, *args)) - - def copy(self): - 'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]' - return self.__class__(self.maps[0].copy(), *self.maps[1:]) - - __copy__ = copy - - def new_child(self, m=None): # like Django's Context.push() - '''New ChainMap with a new map followed by all previous maps. - If no map is provided, an empty dict is used. - ''' - if m is None: - m = {} - return self.__class__(m, *self.maps) - - @property - def parents(self): # like Django's Context.pop() - 'New ChainMap from maps[1:].' - return self.__class__(*self.maps[1:]) - - def __setitem__(self, key, value): - self.maps[0][key] = value - - def __delitem__(self, key): - try: - del self.maps[0][key] - except KeyError: - raise KeyError('Key not found in the first mapping: {!r}'.format(key)) - - def popitem(self): - 'Remove and return an item pair from maps[0]. Raise KeyError is maps[0] is empty.' - try: - return self.maps[0].popitem() - except KeyError: - raise KeyError('No keys found in the first mapping.') - - def pop(self, key, *args): - 'Remove *key* from maps[0] and return its value. 
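
ChainMap resolves lookups left to right across self.maps but sends every write and delete to maps[0]; new_child() and parents give cheap scope push/pop, per the Django Context analogy in the comments. Sketch:

    from collections import ChainMap

    defaults = {'color': 'red', 'user': 'guest'}
    overrides = {'user': 'admin'}
    cm = ChainMap(overrides, defaults)
    assert cm['user'] == 'admin'        # first mapping wins
    assert cm['color'] == 'red'         # falls through to defaults
    cm['color'] = 'blue'                # writes land in maps[0] only
    assert overrides == {'user': 'admin', 'color': 'blue'}
    assert defaults['color'] == 'red'   # the fallback map is untouched

    child = cm.new_child()              # push a fresh scope in front
    child['user'] = 'temp'
    assert cm['user'] == 'admin'        # the parent chain is unaffected
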
Raise KeyError if *key* not in maps[0].' - try: - return self.maps[0].pop(key, *args) - except KeyError: - raise KeyError('Key not found in the first mapping: {!r}'.format(key)) - - def clear(self): - 'Clear maps[0], leaving maps[1:] intact.' - self.maps[0].clear() - - -################################################################################ -### UserDict -################################################################################ - -class UserDict(MutableMapping): - - # Start by filling-out the abstract methods - def __init__(*args, **kwargs): - if not args: - raise TypeError("descriptor '__init__' of 'UserDict' object " - "needs an argument") - self, *args = args - if len(args) > 1: - raise TypeError('expected at most 1 arguments, got %d' % len(args)) - if args: - dict = args[0] - elif 'dict' in kwargs: - dict = kwargs.pop('dict') - import warnings - warnings.warn("Passing 'dict' as keyword argument is deprecated", - PendingDeprecationWarning, stacklevel=2) - else: - dict = None - self.data = {} - if dict is not None: - self.update(dict) - if len(kwargs): - self.update(kwargs) - def __len__(self): return len(self.data) - def __getitem__(self, key): - if key in self.data: - return self.data[key] - if hasattr(self.__class__, "__missing__"): - return self.__class__.__missing__(self, key) - raise KeyError(key) - def __setitem__(self, key, item): self.data[key] = item - def __delitem__(self, key): del self.data[key] - def __iter__(self): - return iter(self.data) - - # Modify __contains__ to work correctly when __missing__ is present - def __contains__(self, key): - return key in self.data - - # Now, add the methods in dicts but not in MutableMapping - def __repr__(self): return repr(self.data) - def copy(self): - if self.__class__ is UserDict: - return UserDict(self.data.copy()) - import copy - data = self.data - try: - self.data = {} - c = copy.copy(self) - finally: - self.data = data - c.update(self) - return c - @classmethod - def fromkeys(cls, iterable, value=None): - d = cls() - for key in iterable: - d[key] = value - return d - - - -################################################################################ -### UserList -################################################################################ - -class UserList(MutableSequence): - """A more or less complete user-defined wrapper around list objects.""" - def __init__(self, initlist=None): - self.data = [] - if initlist is not None: - # XXX should this accept an arbitrary sequence? 
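
Because UserDict keeps its contents in self.data and routes everything through __getitem__/__setitem__, subclasses can hook those methods reliably, whereas subclassing dict directly lets C-level fast paths bypass the overrides. A minimal sketch (the LowerDict name is illustrative, not part of this patch):

    from collections import UserDict

    class LowerDict(UserDict):
        '''Illustrative subclass: case-insensitive string keys.'''
        def __setitem__(self, key, value):
            self.data[key.lower()] = value
        def __getitem__(self, key):
            return self.data[key.lower()]

    d = LowerDict(Host='example.com')   # __init__ funnels through update()...
    assert d['HOST'] == 'example.com'   # ...which calls our __setitem__
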
- if type(initlist) == type(self.data): - self.data[:] = initlist - elif isinstance(initlist, UserList): - self.data[:] = initlist.data[:] - else: - self.data = list(initlist) - def __repr__(self): return repr(self.data) - def __lt__(self, other): return self.data < self.__cast(other) - def __le__(self, other): return self.data <= self.__cast(other) - def __eq__(self, other): return self.data == self.__cast(other) - def __gt__(self, other): return self.data > self.__cast(other) - def __ge__(self, other): return self.data >= self.__cast(other) - def __cast(self, other): - return other.data if isinstance(other, UserList) else other - def __contains__(self, item): return item in self.data - def __len__(self): return len(self.data) - def __getitem__(self, i): return self.data[i] - def __setitem__(self, i, item): self.data[i] = item - def __delitem__(self, i): del self.data[i] - def __add__(self, other): - if isinstance(other, UserList): - return self.__class__(self.data + other.data) - elif isinstance(other, type(self.data)): - return self.__class__(self.data + other) - return self.__class__(self.data + list(other)) - def __radd__(self, other): - if isinstance(other, UserList): - return self.__class__(other.data + self.data) - elif isinstance(other, type(self.data)): - return self.__class__(other + self.data) - return self.__class__(list(other) + self.data) - def __iadd__(self, other): - if isinstance(other, UserList): - self.data += other.data - elif isinstance(other, type(self.data)): - self.data += other - else: - self.data += list(other) - return self - def __mul__(self, n): - return self.__class__(self.data*n) - __rmul__ = __mul__ - def __imul__(self, n): - self.data *= n - return self - def append(self, item): self.data.append(item) - def insert(self, i, item): self.data.insert(i, item) - def pop(self, i=-1): return self.data.pop(i) - def remove(self, item): self.data.remove(item) - def clear(self): self.data.clear() - def copy(self): return self.__class__(self) - def count(self, item): return self.data.count(item) - def index(self, item, *args): return self.data.index(item, *args) - def reverse(self): self.data.reverse() - def sort(self, *args, **kwds): self.data.sort(*args, **kwds) - def extend(self, other): - if isinstance(other, UserList): - self.data.extend(other.data) - else: - self.data.extend(other) - - - -################################################################################ -### UserString -################################################################################ - -class UserString(Sequence): - def __init__(self, seq): - if isinstance(seq, str): - self.data = seq - elif isinstance(seq, UserString): - self.data = seq.data[:] - else: - self.data = str(seq) - def __str__(self): return str(self.data) - def __repr__(self): return repr(self.data) - def __int__(self): return int(self.data) - def __float__(self): return float(self.data) - def __complex__(self): return complex(self.data) - def __hash__(self): return hash(self.data) - def __getnewargs__(self): - return (self.data[:],) - - def __eq__(self, string): - if isinstance(string, UserString): - return self.data == string.data - return self.data == string - def __lt__(self, string): - if isinstance(string, UserString): - return self.data < string.data - return self.data < string - def __le__(self, string): - if isinstance(string, UserString): - return self.data <= string.data - return self.data <= string - def __gt__(self, string): - if isinstance(string, UserString): - return self.data > string.data - return 
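
UserList mirrors the list API, and operations such as __add__ and copy() rebuild their result with self.__class__ so the subclass survives; note that __getitem__ in this version returns self.data[i] unwrapped, so slices come back as plain lists. Sketch (the Stack name is illustrative):

    from collections import UserList

    class Stack(UserList):
        '''Illustrative subclass: a list with stack-style helpers.'''
        push = UserList.append
        def top(self):
            return self.data[-1]

    s = Stack([1, 2])
    s.push(3)
    assert s.top() == 3
    assert isinstance(s + Stack([4]), Stack)   # __add__ re-wraps in self.__class__
    assert type(s[:2]) is list                 # __getitem__ here does not
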
self.data > string - def __ge__(self, string): - if isinstance(string, UserString): - return self.data >= string.data - return self.data >= string - - def __contains__(self, char): - if isinstance(char, UserString): - char = char.data - return char in self.data - - def __len__(self): return len(self.data) - def __getitem__(self, index): return self.__class__(self.data[index]) - def __add__(self, other): - if isinstance(other, UserString): - return self.__class__(self.data + other.data) - elif isinstance(other, str): - return self.__class__(self.data + other) - return self.__class__(self.data + str(other)) - def __radd__(self, other): - if isinstance(other, str): - return self.__class__(other + self.data) - return self.__class__(str(other) + self.data) - def __mul__(self, n): - return self.__class__(self.data*n) - __rmul__ = __mul__ - def __mod__(self, args): - return self.__class__(self.data % args) - def __rmod__(self, format): - return self.__class__(format % args) - - # the following methods are defined in alphabetical order: - def capitalize(self): return self.__class__(self.data.capitalize()) - def casefold(self): - return self.__class__(self.data.casefold()) - def center(self, width, *args): - return self.__class__(self.data.center(width, *args)) - def count(self, sub, start=0, end=_sys.maxsize): - if isinstance(sub, UserString): - sub = sub.data - return self.data.count(sub, start, end) - def encode(self, encoding=None, errors=None): # XXX improve this? - if encoding: - if errors: - return self.__class__(self.data.encode(encoding, errors)) - return self.__class__(self.data.encode(encoding)) - return self.__class__(self.data.encode()) - def endswith(self, suffix, start=0, end=_sys.maxsize): - return self.data.endswith(suffix, start, end) - def expandtabs(self, tabsize=8): - return self.__class__(self.data.expandtabs(tabsize)) - def find(self, sub, start=0, end=_sys.maxsize): - if isinstance(sub, UserString): - sub = sub.data - return self.data.find(sub, start, end) - def format(self, *args, **kwds): - return self.data.format(*args, **kwds) - def format_map(self, mapping): - return self.data.format_map(mapping) - def index(self, sub, start=0, end=_sys.maxsize): - return self.data.index(sub, start, end) - def isalpha(self): return self.data.isalpha() - def isalnum(self): return self.data.isalnum() - def isdecimal(self): return self.data.isdecimal() - def isdigit(self): return self.data.isdigit() - def isidentifier(self): return self.data.isidentifier() - def islower(self): return self.data.islower() - def isnumeric(self): return self.data.isnumeric() - def isprintable(self): return self.data.isprintable() - def isspace(self): return self.data.isspace() - def istitle(self): return self.data.istitle() - def isupper(self): return self.data.isupper() - def join(self, seq): return self.data.join(seq) - def ljust(self, width, *args): - return self.__class__(self.data.ljust(width, *args)) - def lower(self): return self.__class__(self.data.lower()) - def lstrip(self, chars=None): return self.__class__(self.data.lstrip(chars)) - maketrans = str.maketrans - def partition(self, sep): - return self.data.partition(sep) - def replace(self, old, new, maxsplit=-1): - if isinstance(old, UserString): - old = old.data - if isinstance(new, UserString): - new = new.data - return self.__class__(self.data.replace(old, new, maxsplit)) - def rfind(self, sub, start=0, end=_sys.maxsize): - if isinstance(sub, UserString): - sub = sub.data - return self.data.rfind(sub, start, end) - def rindex(self, sub, start=0, 
end=_sys.maxsize): - return self.data.rindex(sub, start, end) - def rjust(self, width, *args): - return self.__class__(self.data.rjust(width, *args)) - def rpartition(self, sep): - return self.data.rpartition(sep) - def rstrip(self, chars=None): - return self.__class__(self.data.rstrip(chars)) - def split(self, sep=None, maxsplit=-1): - return self.data.split(sep, maxsplit) - def rsplit(self, sep=None, maxsplit=-1): - return self.data.rsplit(sep, maxsplit) - def splitlines(self, keepends=False): return self.data.splitlines(keepends) - def startswith(self, prefix, start=0, end=_sys.maxsize): - return self.data.startswith(prefix, start, end) - def strip(self, chars=None): return self.__class__(self.data.strip(chars)) - def swapcase(self): return self.__class__(self.data.swapcase()) - def title(self): return self.__class__(self.data.title()) - def translate(self, *args): - return self.__class__(self.data.translate(*args)) - def upper(self): return self.__class__(self.data.upper()) - def zfill(self, width): return self.__class__(self.data.zfill(width)) diff --git a/typing_extensions/test_data/python-3.5.3/collections/__main__.py b/typing_extensions/test_data/python-3.5.3/collections/__main__.py deleted file mode 100644 index 763e38e0..00000000 --- a/typing_extensions/test_data/python-3.5.3/collections/__main__.py +++ /dev/null @@ -1,38 +0,0 @@ -################################################################################ -### Simple tests -################################################################################ - -# verify that instances can be pickled -from collections import namedtuple -from pickle import loads, dumps -Point = namedtuple('Point', 'x, y', True) -p = Point(x=10, y=20) -assert p == loads(dumps(p)) - -# test and demonstrate ability to override methods -class Point(namedtuple('Point', 'x y')): - __slots__ = () - @property - def hypot(self): - return (self.x ** 2 + self.y ** 2) ** 0.5 - def __str__(self): - return 'Point: x=%6.3f y=%6.3f hypot=%6.3f' % (self.x, self.y, self.hypot) - -for p in Point(3, 4), Point(14, 5/7.): - print (p) - -class Point(namedtuple('Point', 'x y')): - 'Point class with optimized _make() and _replace() without error-checking' - __slots__ = () - _make = classmethod(tuple.__new__) - def _replace(self, _map=map, **kwds): - return self._make(_map(kwds.get, ('x', 'y'), self)) - -print(Point(11, 22)._replace(x=100)) - -Point3D = namedtuple('Point3D', Point._fields + ('z',)) -print(Point3D.__doc__) - -import doctest, collections -TestResults = namedtuple('TestResults', 'failed attempted') -print(TestResults(*doctest.testmod(collections))) diff --git a/typing_extensions/test_data/python-3.5.3/collections/abc.py b/typing_extensions/test_data/python-3.5.3/collections/abc.py deleted file mode 100644 index 891600d1..00000000 --- a/typing_extensions/test_data/python-3.5.3/collections/abc.py +++ /dev/null @@ -1,2 +0,0 @@ -from _collections_abc import * -from _collections_abc import __all__ diff --git a/typing_extensions/test_data/python-3.5.3/typing.py b/typing_extensions/test_data/python-3.5.3/typing.py deleted file mode 100644 index 34845b74..00000000 --- a/typing_extensions/test_data/python-3.5.3/typing.py +++ /dev/null @@ -1,2160 +0,0 @@ -import abc -from abc import abstractmethod, abstractproperty -import collections -import contextlib -import functools -import re as stdlib_re # Avoid confusion with the re we export. 
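
UserString, by contrast, re-wraps nearly every result in self.__class__, slicing included, so transformed values stay instances of the subclass; note in passing that its __rmod__ above refers to an undefined name args, a bug present in the vendored file itself. Sketch (the Token name is illustrative):

    from collections import UserString

    class Token(UserString):
        '''Illustrative subclass: a str-like wrapper that survives transforms.'''

    t = Token('  Hello  ')
    assert isinstance(t.strip().lower(), Token)   # each step re-wraps the result
    assert t.strip().lower() == 'hello'           # __eq__ compares .data to str
    assert isinstance(t[2:7], Token)              # __getitem__ wraps slices too
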
-import sys -import types -try: - import collections.abc as collections_abc -except ImportError: - import collections as collections_abc # Fallback for PY3.2. - - -# Please keep __all__ alphabetized within each category. -__all__ = [ - # Super-special typing primitives. - 'Any', - 'Callable', - 'ClassVar', - 'Generic', - 'Optional', - 'Tuple', - 'Type', - 'TypeVar', - 'Union', - - # ABCs (from collections.abc). - 'AbstractSet', # collections.abc.Set. - 'ByteString', - 'Container', - 'Hashable', - 'ItemsView', - 'Iterable', - 'Iterator', - 'KeysView', - 'Mapping', - 'MappingView', - 'MutableMapping', - 'MutableSequence', - 'MutableSet', - 'Sequence', - 'Sized', - 'ValuesView', - # The following are added depending on presence - # of their non-generic counterparts in stdlib: - # Awaitable, - # AsyncIterator, - # AsyncIterable, - # Coroutine, - # Collection, - # ContextManager - - # Structural checks, a.k.a. protocols. - 'Reversible', - 'SupportsAbs', - 'SupportsFloat', - 'SupportsInt', - 'SupportsRound', - - # Concrete collection types. - 'Dict', - 'DefaultDict', - 'List', - 'Set', - 'FrozenSet', - 'NamedTuple', # Not really a type. - 'Generator', - - # One-off things. - 'AnyStr', - 'cast', - 'get_type_hints', - 'NewType', - 'no_type_check', - 'no_type_check_decorator', - 'overload', - 'Text', - 'TYPE_CHECKING', -] - -# The pseudo-submodules 're' and 'io' are part of the public -# namespace, but excluded from __all__ because they might stomp on -# legitimate imports of those modules. - - -def _qualname(x): - if sys.version_info[:2] >= (3, 3): - return x.__qualname__ - else: - # Fall back to just name. - return x.__name__ - - -def _trim_name(nm): - if nm.startswith('_') and nm not in ('_TypeAlias', - '_ForwardRef', '_TypingBase', '_FinalTypingBase'): - nm = nm[1:] - return nm - - -class TypingMeta(type): - """Metaclass for most types defined in typing module - (not a part of public API). - - This overrides __new__() to require an extra keyword parameter - '_root', which serves as a guard against naive subclassing of the - typing classes. Any legitimate class defined using a metaclass - derived from TypingMeta must pass _root=True. - - This also defines a dummy constructor (all the work for most typing - constructs is done in __new__) and a nicer repr(). - """ - - _is_protocol = False - - def __new__(cls, name, bases, namespace, *, _root=False): - if not _root: - raise TypeError("Cannot subclass %s" % - (', '.join(map(_type_repr, bases)) or '()')) - return super().__new__(cls, name, bases, namespace) - - def __init__(self, *args, **kwds): - pass - - def _eval_type(self, globalns, localns): - """Override this in subclasses to interpret forward references. - - For example, List['C'] is internally stored as - List[_ForwardRef('C')], which should evaluate to List[C], - where C is an object found in globalns or localns (searching - localns first, of course). - """ - return self - - def _get_type_vars(self, tvars): - pass - - def __repr__(self): - qname = _trim_name(_qualname(self)) - return '%s.%s' % (self.__module__, qname) - - -class _TypingBase(metaclass=TypingMeta, _root=True): - """Internal indicator of special typing constructs.""" - - __slots__ = () - - def __init__(self, *args, **kwds): - pass - - def __new__(cls, *args, **kwds): - """Constructor. - - This only exists to give a better error message in case - someone tries to subclass a special typing object (not a good idea). - """ - if (len(args) == 3 and - isinstance(args[0], str) and - isinstance(args[1], tuple)): - # Close enough. 
- raise TypeError("Cannot subclass %r" % cls) - return super().__new__(cls) - - # Things that are not classes also need these. - def _eval_type(self, globalns, localns): - return self - - def _get_type_vars(self, tvars): - pass - - def __repr__(self): - cls = type(self) - qname = _trim_name(_qualname(cls)) - return '%s.%s' % (cls.__module__, qname) - - def __call__(self, *args, **kwds): - raise TypeError("Cannot instantiate %r" % type(self)) - - -class _FinalTypingBase(_TypingBase, _root=True): - """Internal mix-in class to prevent instantiation. - - Prevents instantiation unless _root=True is given in class call. - It is used to create pseudo-singleton instances Any, Union, Optional, etc. - """ - - __slots__ = () - - def __new__(cls, *args, _root=False, **kwds): - self = super().__new__(cls, *args, **kwds) - if _root is True: - return self - raise TypeError("Cannot instantiate %r" % cls) - - def __reduce__(self): - return _trim_name(type(self).__name__) - - -class _ForwardRef(_TypingBase, _root=True): - """Internal wrapper to hold a forward reference.""" - - __slots__ = ('__forward_arg__', '__forward_code__', - '__forward_evaluated__', '__forward_value__') - - def __init__(self, arg): - super().__init__(arg) - if not isinstance(arg, str): - raise TypeError('Forward reference must be a string -- got %r' % (arg,)) - try: - code = compile(arg, '', 'eval') - except SyntaxError: - raise SyntaxError('Forward reference must be an expression -- got %r' % - (arg,)) - self.__forward_arg__ = arg - self.__forward_code__ = code - self.__forward_evaluated__ = False - self.__forward_value__ = None - - def _eval_type(self, globalns, localns): - if not self.__forward_evaluated__ or localns is not globalns: - if globalns is None and localns is None: - globalns = localns = {} - elif globalns is None: - globalns = localns - elif localns is None: - localns = globalns - self.__forward_value__ = _type_check( - eval(self.__forward_code__, globalns, localns), - "Forward references must evaluate to types.") - self.__forward_evaluated__ = True - return self.__forward_value__ - - def __eq__(self, other): - if not isinstance(other, _ForwardRef): - return NotImplemented - return (self.__forward_arg__ == other.__forward_arg__ and - self.__forward_value__ == other.__forward_value__) - - def __hash__(self): - return hash((self.__forward_arg__, self.__forward_value__)) - - def __instancecheck__(self, obj): - raise TypeError("Forward references cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("Forward references cannot be used with issubclass().") - - def __repr__(self): - return '_ForwardRef(%r)' % (self.__forward_arg__,) - - -class _TypeAlias(_TypingBase, _root=True): - """Internal helper class for defining generic variants of concrete types. - - Note that this is not a type; let's call it a pseudo-type. It cannot - be used in instance and subclass checks in parameterized form, i.e. - ``isinstance(42, Match[str])`` raises ``TypeError`` instead of returning - ``False``. - """ - - __slots__ = ('name', 'type_var', 'impl_type', 'type_checker') - - def __init__(self, name, type_var, impl_type, type_checker): - """Initializer. - - Args: - name: The name, e.g. 'Pattern'. - type_var: The type parameter, e.g. AnyStr, or the - specific type, e.g. str. - impl_type: The implementation type. - type_checker: Function that takes an impl_type instance. - and returns a value that should be a type_var instance. 
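
_ForwardRef compiles its string once in __init__ and defers the eval() until _eval_type() runs with real namespaces, caching the result afterwards. The usual trigger for that evaluation is get_type_hints(). Sketch, using the installed typing module (the Node class is illustrative):

    from typing import List, get_type_hints

    class Node:
        def link(self, other: 'Node') -> List['Node']:
            return [self, other]

    hints = get_type_hints(Node.link)
    assert hints['other'] is Node           # the string was a _ForwardRef until now
    assert hints['return'] == List[Node]    # nested forward references resolve too
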
- """ - assert isinstance(name, str), repr(name) - assert isinstance(impl_type, type), repr(impl_type) - assert not isinstance(impl_type, TypingMeta), repr(impl_type) - assert isinstance(type_var, (type, _TypingBase)), repr(type_var) - self.name = name - self.type_var = type_var - self.impl_type = impl_type - self.type_checker = type_checker - - def __repr__(self): - return "%s[%s]" % (self.name, _type_repr(self.type_var)) - - def __getitem__(self, parameter): - if not isinstance(self.type_var, TypeVar): - raise TypeError("%s cannot be further parameterized." % self) - if self.type_var.__constraints__ and isinstance(parameter, type): - if not issubclass(parameter, self.type_var.__constraints__): - raise TypeError("%s is not a valid substitution for %s." % - (parameter, self.type_var)) - if isinstance(parameter, TypeVar) and parameter is not self.type_var: - raise TypeError("%s cannot be re-parameterized." % self) - return self.__class__(self.name, parameter, - self.impl_type, self.type_checker) - - def __eq__(self, other): - if not isinstance(other, _TypeAlias): - return NotImplemented - return self.name == other.name and self.type_var == other.type_var - - def __hash__(self): - return hash((self.name, self.type_var)) - - def __instancecheck__(self, obj): - if not isinstance(self.type_var, TypeVar): - raise TypeError("Parameterized type aliases cannot be used " - "with isinstance().") - return isinstance(obj, self.impl_type) - - def __subclasscheck__(self, cls): - if not isinstance(self.type_var, TypeVar): - raise TypeError("Parameterized type aliases cannot be used " - "with issubclass().") - return issubclass(cls, self.impl_type) - - -def _get_type_vars(types, tvars): - for t in types: - if isinstance(t, TypingMeta) or isinstance(t, _TypingBase): - t._get_type_vars(tvars) - - -def _type_vars(types): - tvars = [] - _get_type_vars(types, tvars) - return tuple(tvars) - - -def _eval_type(t, globalns, localns): - if isinstance(t, TypingMeta) or isinstance(t, _TypingBase): - return t._eval_type(globalns, localns) - return t - - -def _type_check(arg, msg): - """Check that the argument is a type, and return it (internal helper). - - As a special case, accept None and return type(None) instead. - Also, _TypeAlias instances (e.g. Match, Pattern) are acceptable. - - The msg argument is a human-readable error message, e.g. - - "Union[arg, ...]: arg should be a type." - - We append the repr() of the actual value (truncated to 100 chars). - """ - if arg is None: - return type(None) - if isinstance(arg, str): - arg = _ForwardRef(arg) - if (isinstance(arg, _TypingBase) and type(arg).__name__ == '_ClassVar' or - not isinstance(arg, (type, _TypingBase)) and not callable(arg)): - raise TypeError(msg + " Got %.100r." % (arg,)) - # Bare Union etc. are not valid as type arguments - if (type(arg).__name__ in ('_Union', '_Optional') - and not getattr(arg, '__origin__', None) - or isinstance(arg, TypingMeta) and _gorg(arg) in (Generic, _Protocol)): - raise TypeError("Plain %s is not valid as type argument" % arg) - return arg - - -def _type_repr(obj): - """Return the repr() of an object, special-casing types (internal helper). - - If obj is a type, we return a shorter version than the default - type.__repr__, based on the module and qualified name, which is - typically enough to uniquely identify a type. For everything - else, we fall back on repr(obj). 
- """ - if isinstance(obj, type) and not isinstance(obj, TypingMeta): - if obj.__module__ == 'builtins': - return _qualname(obj) - return '%s.%s' % (obj.__module__, _qualname(obj)) - if obj is ...: - return('...') - if isinstance(obj, types.FunctionType): - return obj.__name__ - return repr(obj) - - -class _Any(_FinalTypingBase, _root=True): - """Special type indicating an unconstrained type. - - - Any is compatible with every type. - - Any assumed to have all methods. - - All values assumed to be instances of Any. - - Note that all the above statements are true from the point of view of - static type checkers. At runtime, Any should not be used with instance - or class checks. - """ - - __slots__ = () - - def __instancecheck__(self, obj): - raise TypeError("Any cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("Any cannot be used with issubclass().") - - -Any = _Any(_root=True) - - -class TypeVar(_TypingBase, _root=True): - """Type variable. - - Usage:: - - T = TypeVar('T') # Can be anything - A = TypeVar('A', str, bytes) # Must be str or bytes - - Type variables exist primarily for the benefit of static type - checkers. They serve as the parameters for generic types as well - as for generic function definitions. See class Generic for more - information on generic types. Generic functions work as follows: - - def repeat(x: T, n: int) -> List[T]: - '''Return a list containing n references to x.''' - return [x]*n - - def longest(x: A, y: A) -> A: - '''Return the longest of two strings.''' - return x if len(x) >= len(y) else y - - The latter example's signature is essentially the overloading - of (str, str) -> str and (bytes, bytes) -> bytes. Also note - that if the arguments are instances of some subclass of str, - the return type is still plain str. - - At runtime, isinstance(x, T) and issubclass(C, T) will raise TypeError. - - Type variables defined with covariant=True or contravariant=True - can be used do declare covariant or contravariant generic types. - See PEP 484 for more details. By default generic types are invariant - in all type variables. - - Type variables can be introspected. e.g.: - - T.__name__ == 'T' - T.__constraints__ == () - T.__covariant__ == False - T.__contravariant__ = False - A.__constraints__ == (str, bytes) - """ - - __slots__ = ('__name__', '__bound__', '__constraints__', - '__covariant__', '__contravariant__') - - def __init__(self, name, *constraints, bound=None, - covariant=False, contravariant=False): - super().__init__(name, *constraints, bound=bound, - covariant=covariant, contravariant=contravariant) - self.__name__ = name - if covariant and contravariant: - raise ValueError("Bivariant types are not supported.") - self.__covariant__ = bool(covariant) - self.__contravariant__ = bool(contravariant) - if constraints and bound is not None: - raise TypeError("Constraints cannot be combined with bound=...") - if constraints and len(constraints) == 1: - raise TypeError("A single constraint is not allowed") - msg = "TypeVar(name, constraint, ...): constraints must be types." 
- self.__constraints__ = tuple(_type_check(t, msg) for t in constraints) - if bound: - self.__bound__ = _type_check(bound, "Bound must be a type.") - else: - self.__bound__ = None - - def _get_type_vars(self, tvars): - if self not in tvars: - tvars.append(self) - - def __repr__(self): - if self.__covariant__: - prefix = '+' - elif self.__contravariant__: - prefix = '-' - else: - prefix = '~' - return prefix + self.__name__ - - def __instancecheck__(self, instance): - raise TypeError("Type variables cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("Type variables cannot be used with issubclass().") - - -# Some unconstrained type variables. These are used by the container types. -# (These are not for export.) -T = TypeVar('T') # Any type. -KT = TypeVar('KT') # Key type. -VT = TypeVar('VT') # Value type. -T_co = TypeVar('T_co', covariant=True) # Any type covariant containers. -V_co = TypeVar('V_co', covariant=True) # Any type covariant containers. -VT_co = TypeVar('VT_co', covariant=True) # Value type covariant containers. -T_contra = TypeVar('T_contra', contravariant=True) # Ditto contravariant. - -# A useful type variable with constraints. This represents string types. -# (This one *is* for export!) -AnyStr = TypeVar('AnyStr', bytes, str) - - -def _replace_arg(arg, tvars, args): - """An internal helper function: replace arg if it is a type variable - found in tvars with corresponding substitution from args or - with corresponding substitution sub-tree if arg is a generic type. - """ - - if tvars is None: - tvars = [] - if hasattr(arg, '_subs_tree'): - return arg._subs_tree(tvars, args) - if isinstance(arg, TypeVar): - for i, tvar in enumerate(tvars): - if arg == tvar: - return args[i] - return arg - - -def _subs_tree(cls, tvars=None, args=None): - """An internal helper function: calculate substitution tree - for generic cls after replacing its type parameters with - substitutions in tvars -> args (if any). - Repeat the same following __origin__'s. - - Return a list of arguments with all possible substitutions - performed. Arguments that are generic classes themselves are represented - as tuples (so that no new classes are created by this function). - For example: _subs_tree(List[Tuple[int, T]][str]) == [(Tuple, int, str)] - """ - - if cls.__origin__ is None: - return cls - # Make of chain of origins (i.e. cls -> cls.__origin__) - current = cls.__origin__ - orig_chain = [] - while current.__origin__ is not None: - orig_chain.append(current) - current = current.__origin__ - # Replace type variables in __args__ if asked ... - tree_args = [] - for arg in cls.__args__: - tree_args.append(_replace_arg(arg, tvars, args)) - # ... then continue replacing down the origin chain. - for ocls in orig_chain: - new_tree_args = [] - for i, arg in enumerate(ocls.__args__): - new_tree_args.append(_replace_arg(arg, ocls.__parameters__, tree_args)) - tree_args = new_tree_args - return tree_args - - -def _remove_dups_flatten(parameters): - """An internal helper for Union creation and substitution: flatten Union's - among parameters, then remove duplicates and strict subclasses. - """ - - # Flatten out Union[Union[...], ...]. - params = [] - for p in parameters: - if isinstance(p, _Union) and p.__origin__ is Union: - params.extend(p.__args__) - elif isinstance(p, tuple) and len(p) > 0 and p[0] is Union: - params.extend(p[1:]) - else: - params.append(p) - # Weed out strict duplicates, preserving the first of each occurrence. 
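
The predefined variables above encode variance in their repr prefix (+ for covariant, - for contravariant, ~ for invariant), and AnyStr is the constrained form: every use within one signature must resolve to the same choice of str or bytes. Sketch (concat is an illustrative function):

    from typing import AnyStr, TypeVar

    T_co = TypeVar('T_co', covariant=True)
    assert repr(T_co) == '+T_co'     # __repr__ encodes the variance

    def concat(a: AnyStr, b: AnyStr) -> AnyStr:
        return a + b

    concat('x', 'y')      # fine: both str
    concat(b'x', b'y')    # fine: both bytes
    # concat('x', b'y')   # a static checker rejects the mix; runtime does not

    try:
        isinstance('x', T_co)
    except TypeError:
        pass              # type variables refuse isinstance()/issubclass()
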
- all_params = set(params) - if len(all_params) < len(params): - new_params = [] - for t in params: - if t in all_params: - new_params.append(t) - all_params.remove(t) - params = new_params - assert not all_params, all_params - # Weed out subclasses. - # E.g. Union[int, Employee, Manager] == Union[int, Employee]. - # If object is present it will be sole survivor among proper classes. - # Never discard type variables. - # (In particular, Union[str, AnyStr] != AnyStr.) - all_params = set(params) - for t1 in params: - if not isinstance(t1, type): - continue - if any(isinstance(t2, type) and issubclass(t1, t2) - for t2 in all_params - {t1} - if not (isinstance(t2, GenericMeta) and - t2.__origin__ is not None)): - all_params.remove(t1) - return tuple(t for t in params if t in all_params) - - -def _check_generic(cls, parameters): - # Check correct count for parameters of a generic cls (internal helper). - if not cls.__parameters__: - raise TypeError("%s is not a generic class" % repr(cls)) - alen = len(parameters) - elen = len(cls.__parameters__) - if alen != elen: - raise TypeError("Too %s parameters for %s; actual %s, expected %s" % - ("many" if alen > elen else "few", repr(cls), alen, elen)) - - -_cleanups = [] - - -def _tp_cache(func): - """Internal wrapper caching __getitem__ of generic types with a fallback to - original function for non-hashable arguments. - """ - - cached = functools.lru_cache()(func) - _cleanups.append(cached.cache_clear) - @functools.wraps(func) - def inner(*args, **kwds): - try: - return cached(*args, **kwds) - except TypeError: - pass # All real errors (not unhashable args) are raised below. - return func(*args, **kwds) - return inner - - -class _Union(_FinalTypingBase, _root=True): - """Union type; Union[X, Y] means either X or Y. - - To define a union, use e.g. Union[int, str]. Details: - - - The arguments must be types and there must be at least one. - - - None as an argument is a special case and is replaced by - type(None). - - - Unions of unions are flattened, e.g.:: - - Union[Union[int, str], float] == Union[int, str, float] - - - Unions of a single argument vanish, e.g.:: - - Union[int] == int # The constructor actually returns int - - - Redundant arguments are skipped, e.g.:: - - Union[int, str, int] == Union[int, str] - - - When comparing unions, the argument order is ignored, e.g.:: - - Union[int, str] == Union[str, int] - - - When two arguments have a subclass relationship, the least - derived argument is kept, e.g.:: - - class Employee: pass - class Manager(Employee): pass - Union[int, Employee, Manager] == Union[int, Employee] - Union[Manager, int, Employee] == Union[int, Employee] - Union[Employee, Manager] == Employee - - - Similar for object:: - - Union[int, object] == object - - - You cannot subclass or instantiate a union. - - - You can use Optional[X] as a shorthand for Union[X, None]. - """ - - __slots__ = ('__parameters__', '__args__', '__origin__', '__tree_hash__') - - def __new__(cls, parameters=None, origin=None, *args, _root=False): - self = super().__new__(cls, parameters, origin, *args, _root=_root) - if origin is None: - self.__parameters__ = None - self.__args__ = None - self.__origin__ = None - self.__tree_hash__ = hash(frozenset(('Union',))) - return self - if not isinstance(parameters, tuple): - raise TypeError("Expected parameters=") - if origin is Union: - parameters = _remove_dups_flatten(parameters) - # It's not a union if there's only one type left. 
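
_tp_cache is a reusable pattern beyond typing: memoize with functools.lru_cache, but fall back to the undecorated function when lru_cache raises TypeError on unhashable arguments. A standalone sketch of the same idea (cached_with_fallback and describe are illustrative names):

    import functools

    def cached_with_fallback(func):
        '''Illustrative re-creation of the _tp_cache idea.'''
        cached = functools.lru_cache()(func)
        @functools.wraps(func)
        def inner(*args, **kwds):
            try:
                return cached(*args, **kwds)
            except TypeError:        # unhashable arguments: skip the cache
                pass
            return func(*args, **kwds)
        return inner

    @cached_with_fallback
    def describe(x):
        return 'len %d' % len(x)

    assert describe((1, 2)) == 'len 2'   # hashable: served via the cache
    assert describe([1, 2]) == 'len 2'   # unhashable: computed directly

The caveat mirrored in the original comment: a genuine TypeError raised inside func for hashable arguments is caught on the cached path and re-raised by the direct call below it, so func runs twice in that case.
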
- if len(parameters) == 1: - return parameters[0] - self.__parameters__ = _type_vars(parameters) - self.__args__ = parameters - self.__origin__ = origin - # Pre-calculate the __hash__ on instantiation. - # This improves speed for complex substitutions. - subs_tree = self._subs_tree() - if isinstance(subs_tree, tuple): - self.__tree_hash__ = hash(frozenset(subs_tree)) - else: - self.__tree_hash__ = hash(subs_tree) - return self - - def _eval_type(self, globalns, localns): - if self.__args__ is None: - return self - ev_args = tuple(_eval_type(t, globalns, localns) for t in self.__args__) - ev_origin = _eval_type(self.__origin__, globalns, localns) - if ev_args == self.__args__ and ev_origin == self.__origin__: - # Everything is already evaluated. - return self - return self.__class__(ev_args, ev_origin, _root=True) - - def _get_type_vars(self, tvars): - if self.__origin__ and self.__parameters__: - _get_type_vars(self.__parameters__, tvars) - - def __repr__(self): - if self.__origin__ is None: - return super().__repr__() - tree = self._subs_tree() - if not isinstance(tree, tuple): - return repr(tree) - return tree[0]._tree_repr(tree) - - def _tree_repr(self, tree): - arg_list = [] - for arg in tree[1:]: - if not isinstance(arg, tuple): - arg_list.append(_type_repr(arg)) - else: - arg_list.append(arg[0]._tree_repr(arg)) - return super().__repr__() + '[%s]' % ', '.join(arg_list) - - @_tp_cache - def __getitem__(self, parameters): - if parameters == (): - raise TypeError("Cannot take a Union of no types.") - if not isinstance(parameters, tuple): - parameters = (parameters,) - if self.__origin__ is None: - msg = "Union[arg, ...]: each arg must be a type." - else: - msg = "Parameters to generic types must be types." - parameters = tuple(_type_check(p, msg) for p in parameters) - if self is not Union: - _check_generic(self, parameters) - return self.__class__(parameters, origin=self, _root=True) - - def _subs_tree(self, tvars=None, args=None): - if self is Union: - return Union # Nothing to substitute - tree_args = _subs_tree(self, tvars, args) - tree_args = _remove_dups_flatten(tree_args) - if len(tree_args) == 1: - return tree_args[0] # Union of a single type is that type - return (Union,) + tree_args - - def __eq__(self, other): - if not isinstance(other, _Union): - return self._subs_tree() == other - return self.__tree_hash__ == other.__tree_hash__ - - def __hash__(self): - return self.__tree_hash__ - - def __instancecheck__(self, obj): - raise TypeError("Unions cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("Unions cannot be used with issubclass().") - - -Union = _Union(_root=True) - - -class _Optional(_FinalTypingBase, _root=True): - """Optional type. - - Optional[X] is equivalent to Union[X, None]. - """ - - __slots__ = () - - @_tp_cache - def __getitem__(self, arg): - arg = _type_check(arg, "Optional[t] requires a single type.") - return Union[arg, type(None)] - - -Optional = _Optional(_root=True) - - -def _gorg(a): - """Return the farthest origin of a generic class (internal helper).""" - assert isinstance(a, GenericMeta) - while a.__origin__ is not None: - a = a.__origin__ - return a - - -def _geqv(a, b): - """Return whether two generic classes are equivalent (internal helper). - - The intention is to consider generic class X and any of its - parameterized forms (X[T], X[int], etc.) as equivalent. - - However, X is not equivalent to a subclass of X. - - The relation is reflexive, symmetric and transitive. 
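
Every normalization rule from the _Union docstring is observable at runtime through subscription: flattening, duplicate removal, single-type collapse, subclass pruning, and Optional as sugar for a Union with None. Sketch:

    from typing import Optional, Union

    assert Union[int, str, int] == Union[int, str]                   # duplicates dropped
    assert Union[Union[int, str], float] == Union[int, str, float]  # flattened
    assert Union[int] is int                                         # single arg collapses

    class Employee: pass
    class Manager(Employee): pass
    assert Union[Manager, int, Employee] == Union[int, Employee]     # subclass pruned

    assert Optional[str] == Union[str, None]    # None becomes type(None)
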
- """ - assert isinstance(a, GenericMeta) and isinstance(b, GenericMeta) - # Reduce each to its origin. - return _gorg(a) is _gorg(b) - - -def _next_in_mro(cls): - """Helper for Generic.__new__. - - Returns the class after the last occurrence of Generic or - Generic[...] in cls.__mro__. - """ - next_in_mro = object - # Look for the last occurrence of Generic or Generic[...]. - for i, c in enumerate(cls.__mro__[:-1]): - if isinstance(c, GenericMeta) and _gorg(c) is Generic: - next_in_mro = cls.__mro__[i+1] - return next_in_mro - - -def _valid_for_check(cls): - """An internal helper to prohibit isinstance([1], List[str]) etc.""" - if cls is Generic: - raise TypeError("Class %r cannot be used with class " - "or instance checks" % cls) - if (cls.__origin__ is not None and - sys._getframe(3).f_globals['__name__'] not in ['abc', 'functools']): - raise TypeError("Parameterized generics cannot be used with class " - "or instance checks") - - -def _make_subclasshook(cls): - """Construct a __subclasshook__ callable that incorporates - the associated __extra__ class in subclass checks performed - against cls. - """ - if isinstance(cls.__extra__, abc.ABCMeta): - # The logic mirrors that of ABCMeta.__subclasscheck__. - # Registered classes need not be checked here because - # cls and its extra share the same _abc_registry. - def __extrahook__(subclass): - _valid_for_check(cls) - res = cls.__extra__.__subclasshook__(subclass) - if res is not NotImplemented: - return res - if cls.__extra__ in subclass.__mro__: - return True - for scls in cls.__extra__.__subclasses__(): - if isinstance(scls, GenericMeta): - continue - if issubclass(subclass, scls): - return True - return NotImplemented - else: - # For non-ABC extras we'll just call issubclass(). - def __extrahook__(subclass): - _valid_for_check(cls) - if cls.__extra__ and issubclass(subclass, cls.__extra__): - return True - return NotImplemented - return __extrahook__ - - -def _no_slots_copy(dct): - """Internal helper: copy class __dict__ and clean slots class variables. - (They will be re-created if necessary by normal class machinery.) - """ - dict_copy = dict(dct) - if '__slots__' in dict_copy: - for slot in dict_copy['__slots__']: - dict_copy.pop(slot, None) - return dict_copy - - -class GenericMeta(TypingMeta, abc.ABCMeta): - """Metaclass for generic types.""" - - def __new__(cls, name, bases, namespace, - tvars=None, args=None, origin=None, extra=None, orig_bases=None): - if tvars is not None: - # Called from __getitem__() below. - assert origin is not None - assert all(isinstance(t, TypeVar) for t in tvars), tvars - else: - # Called from class statement. - assert tvars is None, tvars - assert args is None, args - assert origin is None, origin - - # Get the full set of tvars from the bases. - tvars = _type_vars(bases) - # Look for Generic[T1, ..., Tn]. - # If found, tvars must be a subset of it. - # If not found, tvars is it. - # Also check for and reject plain Generic, - # and reject multiple Generic[...]. - gvars = None - for base in bases: - if base is Generic: - raise TypeError("Cannot inherit from plain Generic") - if (isinstance(base, GenericMeta) and - base.__origin__ is Generic): - if gvars is not None: - raise TypeError( - "Cannot inherit from Generic[...] 
multiple types.") - gvars = base.__parameters__ - if gvars is None: - gvars = tvars - else: - tvarset = set(tvars) - gvarset = set(gvars) - if not tvarset <= gvarset: - raise TypeError( - "Some type variables (%s) " - "are not listed in Generic[%s]" % - (", ".join(str(t) for t in tvars if t not in gvarset), - ", ".join(str(g) for g in gvars))) - tvars = gvars - - initial_bases = bases - if extra is not None and type(extra) is abc.ABCMeta and extra not in bases: - bases = (extra,) + bases - bases = tuple(_gorg(b) if isinstance(b, GenericMeta) else b for b in bases) - - # remove bare Generic from bases if there are other generic bases - if any(isinstance(b, GenericMeta) and b is not Generic for b in bases): - bases = tuple(b for b in bases if b is not Generic) - self = super().__new__(cls, name, bases, namespace, _root=True) - - self.__parameters__ = tvars - # Be prepared that GenericMeta will be subclassed by TupleMeta - # and CallableMeta, those two allow ..., (), or [] in __args___. - self.__args__ = tuple(... if a is _TypingEllipsis else - () if a is _TypingEmpty else - a for a in args) if args else None - self.__origin__ = origin - self.__extra__ = extra - # Speed hack (https://github.com/python/typing/issues/196). - self.__next_in_mro__ = _next_in_mro(self) - # Preserve base classes on subclassing (__bases__ are type erased now). - if orig_bases is None: - self.__orig_bases__ = initial_bases - - # This allows unparameterized generic collections to be used - # with issubclass() and isinstance() in the same way as their - # collections.abc counterparts (e.g., isinstance([], Iterable)). - if ('__subclasshook__' not in namespace and extra # allow overriding - or hasattr(self.__subclasshook__, '__name__') and - self.__subclasshook__.__name__ == '__extrahook__'): - self.__subclasshook__ = _make_subclasshook(self) - if isinstance(extra, abc.ABCMeta): - self._abc_registry = extra._abc_registry - - if origin and hasattr(origin, '__qualname__'): # Fix for Python 3.2. 
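[Editorial aside, not part of the patch: a sketch of the tvars/gvars bookkeeping GenericMeta.__new__ performs above, using illustrative class names. Modern Pythons raise the same errors from different machinery.

    from typing import Generic, Iterable, TypeVar

    T = TypeVar('T')
    S = TypeVar('S')

    class Pair(Generic[T, S]):
        pass

    # __parameters__ is derived from the Generic[...] base:
    assert Pair.__parameters__ == (T, S)

    # A type variable used in a base but missing from Generic[...]
    # triggers the "not listed in Generic[...]" TypeError above:
    try:
        class Bad(Iterable[T], Generic[S]):
            pass
    except TypeError:
        pass
]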
- self.__qualname__ = origin.__qualname__ - self.__tree_hash__ = hash(self._subs_tree()) if origin else hash((self.__name__,)) - return self - - def _get_type_vars(self, tvars): - if self.__origin__ and self.__parameters__: - _get_type_vars(self.__parameters__, tvars) - - def _eval_type(self, globalns, localns): - ev_origin = (self.__origin__._eval_type(globalns, localns) - if self.__origin__ else None) - ev_args = tuple(_eval_type(a, globalns, localns) for a - in self.__args__) if self.__args__ else None - if ev_origin == self.__origin__ and ev_args == self.__args__: - return self - return self.__class__(self.__name__, - self.__bases__, - _no_slots_copy(self.__dict__), - tvars=_type_vars(ev_args) if ev_args else None, - args=ev_args, - origin=ev_origin, - extra=self.__extra__, - orig_bases=self.__orig_bases__) - - def __repr__(self): - if self.__origin__ is None: - return super().__repr__() - return self._tree_repr(self._subs_tree()) - - def _tree_repr(self, tree): - arg_list = [] - for arg in tree[1:]: - if arg == (): - arg_list.append('()') - elif not isinstance(arg, tuple): - arg_list.append(_type_repr(arg)) - else: - arg_list.append(arg[0]._tree_repr(arg)) - return super().__repr__() + '[%s]' % ', '.join(arg_list) - - def _subs_tree(self, tvars=None, args=None): - if self.__origin__ is None: - return self - tree_args = _subs_tree(self, tvars, args) - return (_gorg(self),) + tuple(tree_args) - - def __eq__(self, other): - if not isinstance(other, GenericMeta): - return NotImplemented - if self.__origin__ is None or other.__origin__ is None: - return self is other - return self.__tree_hash__ == other.__tree_hash__ - - def __hash__(self): - return self.__tree_hash__ - - @_tp_cache - def __getitem__(self, params): - if not isinstance(params, tuple): - params = (params,) - if not params and not _gorg(self) is Tuple: - raise TypeError( - "Parameter list to %s[...] cannot be empty" % _qualname(self)) - msg = "Parameters to generic types must be types." - params = tuple(_type_check(p, msg) for p in params) - if self is Generic: - # Generic can only be subscripted with unique type variables. - if not all(isinstance(p, TypeVar) for p in params): - raise TypeError( - "Parameters to Generic[...] must all be type variables") - if len(set(params)) != len(params): - raise TypeError( - "Parameters to Generic[...] must all be unique") - tvars = params - args = params - elif self in (Tuple, Callable): - tvars = _type_vars(params) - args = params - elif self is _Protocol: - # _Protocol is internal, don't check anything. - tvars = params - args = params - elif self.__origin__ in (Generic, _Protocol): - # Can't subscript Generic[...] or _Protocol[...]. - raise TypeError("Cannot subscript already-subscripted %s" % - repr(self)) - else: - # Subscripting a regular Generic subclass. - _check_generic(self, params) - tvars = _type_vars(params) - args = params - return self.__class__(self.__name__, - self.__bases__, - _no_slots_copy(self.__dict__), - tvars=tvars, - args=args, - origin=self, - extra=self.__extra__, - orig_bases=self.__orig_bases__) - - def __instancecheck__(self, instance): - # Since we extend ABC.__subclasscheck__ and - # ABC.__instancecheck__ inlines the cache checking done by the - # latter, we must extend __instancecheck__ too. For simplicity - # we just skip the cache check -- instance checks for generic - # classes are supposed to be rare anyways. 
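[Editorial aside, not part of the patch: __eq__ and __hash__ above compare parameterized generics by their substitution tree (__tree_hash__), and _tp_cache memoizes subscriptions. A sketch of what that buys, valid on both this 3.6-era module and current Pythons:

    from typing import Dict, List, TypeVar

    T = TypeVar('T')

    # Equal subscriptions compare equal via the tree hash:
    assert List[int] == List[int]
    # Substituting a type variable yields the same tree as writing
    # the concrete form directly:
    assert Dict[str, T][int] == Dict[str, int]
    print(repr(Dict[str, int]))   # typing.Dict[str, int]
]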
- return issubclass(instance.__class__, self) - - def __copy__(self): - return self.__class__(self.__name__, self.__bases__, - _no_slots_copy(self.__dict__), - self.__parameters__, self.__args__, self.__origin__, - self.__extra__, self.__orig_bases__) - - -# Prevent checks for Generic to crash when defining Generic. -Generic = None - - -def _generic_new(base_cls, cls, *args, **kwds): - # Assure type is erased on instantiation, - # but attempt to store it in __orig_class__ - if cls.__origin__ is None: - return base_cls.__new__(cls) - else: - origin = _gorg(cls) - obj = base_cls.__new__(origin) - try: - obj.__orig_class__ = cls - except AttributeError: - pass - obj.__init__(*args, **kwds) - return obj - - -class Generic(metaclass=GenericMeta): - """Abstract base class for generic types. - - A generic type is typically declared by inheriting from - this class parameterized with one or more type variables. - For example, a generic mapping type might be defined as:: - - class Mapping(Generic[KT, VT]): - def __getitem__(self, key: KT) -> VT: - ... - # Etc. - - This class can then be used as follows:: - - def lookup_name(mapping: Mapping[KT, VT], key: KT, default: VT) -> VT: - try: - return mapping[key] - except KeyError: - return default - """ - - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, Generic): - raise TypeError("Type Generic cannot be instantiated; " - "it can be used only as a base class") - return _generic_new(cls.__next_in_mro__, cls, *args, **kwds) - - -class _TypingEmpty: - """Internal placeholder for () or []. Used by TupleMeta and CallableMeta - to allow empty list/tuple in specific places, without allowing them - to sneak in where prohibited. - """ - - -class _TypingEllipsis: - """Internal placeholder for ... (ellipsis).""" - - -class TupleMeta(GenericMeta): - """Metaclass for Tuple (internal).""" - - @_tp_cache - def __getitem__(self, parameters): - if self.__origin__ is not None or not _geqv(self, Tuple): - # Normal generic rules apply if this is not the first subscription - # or a subscription of a subclass. - return super().__getitem__(parameters) - if parameters == (): - return super().__getitem__((_TypingEmpty,)) - if not isinstance(parameters, tuple): - parameters = (parameters,) - if len(parameters) == 2 and parameters[1] is ...: - msg = "Tuple[t, ...]: t must be a type." - p = _type_check(parameters[0], msg) - return super().__getitem__((p, _TypingEllipsis)) - msg = "Tuple[t0, t1, ...]: each t must be a type." - parameters = tuple(_type_check(p, msg) for p in parameters) - return super().__getitem__(parameters) - - def __instancecheck__(self, obj): - if self.__args__ == None: - return isinstance(obj, tuple) - raise TypeError("Parameterized Tuple cannot be used " - "with isinstance().") - - def __subclasscheck__(self, cls): - if self.__args__ == None: - return issubclass(cls, tuple) - raise TypeError("Parameterized Tuple cannot be used " - "with issubclass().") - - -class Tuple(tuple, extra=tuple, metaclass=TupleMeta): - """Tuple type; Tuple[X, Y] is the cross-product type of X and Y. - - Example: Tuple[T1, T2] is a tuple of two elements corresponding - to type variables T1 and T2. Tuple[int, float, str] is a tuple - of an int, a float and a string. - - To specify a variable-length tuple of homogeneous type, use Tuple[T, ...]. 
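[Editorial aside, not part of the patch: _generic_new() above erases the subscription when constructing an instance but stashes it on the object. A minimal sketch with an illustrative Box class:

    from typing import Generic, TypeVar

    T = TypeVar('T')

    class Box(Generic[T]):
        def __init__(self, item):
            self.item = item

    # Type is erased for construction, but recorded when possible:
    b = Box[int](42)
    assert type(b) is Box
    assert b.__orig_class__ == Box[int]
]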
- """ - - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, Tuple): - raise TypeError("Type Tuple cannot be instantiated; " - "use tuple() instead") - return _generic_new(tuple, cls, *args, **kwds) - - -class CallableMeta(GenericMeta): - """Metaclass for Callable (internal).""" - - def __repr__(self): - if self.__origin__ is None: - return super().__repr__() - return self._tree_repr(self._subs_tree()) - - def _tree_repr(self, tree): - if _gorg(self) is not Callable: - return super()._tree_repr(tree) - # For actual Callable (not its subclass) we override - # super()._tree_repr() for nice formatting. - arg_list = [] - for arg in tree[1:]: - if not isinstance(arg, tuple): - arg_list.append(_type_repr(arg)) - else: - arg_list.append(arg[0]._tree_repr(arg)) - if arg_list[0] == '...': - return repr(tree[0]) + '[..., %s]' % arg_list[1] - return (repr(tree[0]) + - '[[%s], %s]' % (', '.join(arg_list[:-1]), arg_list[-1])) - - def __getitem__(self, parameters): - """A thin wrapper around __getitem_inner__ to provide the latter - with hashable arguments to improve speed. - """ - - if self.__origin__ is not None or not _geqv(self, Callable): - return super().__getitem__(parameters) - if not isinstance(parameters, tuple) or len(parameters) != 2: - raise TypeError("Callable must be used as " - "Callable[[arg, ...], result].") - args, result = parameters - if args is Ellipsis: - parameters = (Ellipsis, result) - else: - if not isinstance(args, list): - raise TypeError("Callable[args, result]: args must be a list." - " Got %.100r." % (args,)) - parameters = (tuple(args), result) - return self.__getitem_inner__(parameters) - - @_tp_cache - def __getitem_inner__(self, parameters): - args, result = parameters - msg = "Callable[args, result]: result must be a type." - result = _type_check(result, msg) - if args is Ellipsis: - return super().__getitem__((_TypingEllipsis, result)) - msg = "Callable[[arg, ...], result]: each arg must be a type." - args = tuple(_type_check(arg, msg) for arg in args) - parameters = args + (result,) - return super().__getitem__(parameters) - - -class Callable(extra=collections_abc.Callable, metaclass = CallableMeta): - """Callable type; Callable[[int], str] is a function of (int) -> str. - - The subscription syntax must always be used with exactly two - values: the argument list and the return type. The argument list - must be a list of types or ellipsis; the return type must be a single type. - - There is no syntax to indicate optional or keyword arguments, - such function types are rarely used as callback types. - """ - - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, Callable): - raise TypeError("Type Callable cannot be instantiated; " - "use a non-abstract subclass instead") - return _generic_new(cls.__next_in_mro__, cls, *args, **kwds) - - -class _ClassVar(_FinalTypingBase, _root=True): - """Special type construct to mark class variables. - - An annotation wrapped in ClassVar indicates that a given - attribute is intended to be used as a class variable and - should not be set on instances of that class. Usage:: - - class Starship: - stats: ClassVar[Dict[str, int]] = {} # class variable - damage: int = 10 # instance variable - - ClassVar accepts only types and cannot be further subscribed. - - Note that ClassVar is not a class itself, and should not - be used with isinstance() or issubclass(). 
- """ - - __slots__ = ('__type__',) - - def __init__(self, tp=None, **kwds): - self.__type__ = tp - - def __getitem__(self, item): - cls = type(self) - if self.__type__ is None: - return cls(_type_check(item, - '{} accepts only single type.'.format(cls.__name__[1:])), - _root=True) - raise TypeError('{} cannot be further subscripted' - .format(cls.__name__[1:])) - - def _eval_type(self, globalns, localns): - new_tp = _eval_type(self.__type__, globalns, localns) - if new_tp == self.__type__: - return self - return type(self)(new_tp, _root=True) - - def __repr__(self): - r = super().__repr__() - if self.__type__ is not None: - r += '[{}]'.format(_type_repr(self.__type__)) - return r - - def __hash__(self): - return hash((type(self).__name__, self.__type__)) - - def __eq__(self, other): - if not isinstance(other, _ClassVar): - return NotImplemented - if self.__type__ is not None: - return self.__type__ == other.__type__ - return self is other - - -ClassVar = _ClassVar(_root=True) - - -def cast(typ, val): - """Cast a value to a type. - - This returns the value unchanged. To the type checker this - signals that the return value has the designated type, but at - runtime we intentionally don't check anything (we want this - to be as fast as possible). - """ - return val - - -def _get_defaults(func): - """Internal helper to extract the default arguments, by name.""" - try: - code = func.__code__ - except AttributeError: - # Some built-in functions don't have __code__, __defaults__, etc. - return {} - pos_count = code.co_argcount - arg_names = code.co_varnames - arg_names = arg_names[:pos_count] - defaults = func.__defaults__ or () - kwdefaults = func.__kwdefaults__ - res = dict(kwdefaults) if kwdefaults else {} - pos_offset = pos_count - len(defaults) - for name, value in zip(arg_names[pos_offset:], defaults): - assert name not in res - res[name] = value - return res - - -def get_type_hints(obj, globalns=None, localns=None): - """Return type hints for an object. - - This is often the same as obj.__annotations__, but it handles - forward references encoded as string literals, and if necessary - adds Optional[t] if a default value equal to None is set. - - The argument may be a module, class, method, or function. The annotations - are returned as a dictionary. For classes, annotations include also - inherited members. - - TypeError is raised if the argument is not of a type that can contain - annotations, and an empty dictionary is returned if no annotations are - present. - - BEWARE -- the behavior of globalns and localns is counterintuitive - (unless you are familiar with how eval() and exec() work). The - search order is locals first, then globals. - - - If no dict arguments are passed, an attempt is made to use the - globals from obj, and these are also used as the locals. If the - object does not appear to have globals, an exception is raised. - - - If one dict argument is passed, it is used for both globals and - locals. - - - If two dict arguments are passed, they specify globals and - locals, respectively. - """ - - if getattr(obj, '__no_type_check__', None): - return {} - if globalns is None: - globalns = getattr(obj, '__globals__', {}) - if localns is None: - localns = globalns - elif localns is None: - localns = globalns - # Classes require a special treatment. 
- if isinstance(obj, type): - hints = {} - for base in reversed(obj.__mro__): - ann = base.__dict__.get('__annotations__', {}) - for name, value in ann.items(): - if value is None: - value = type(None) - if isinstance(value, str): - value = _ForwardRef(value) - value = _eval_type(value, globalns, localns) - hints[name] = value - return hints - hints = getattr(obj, '__annotations__', None) - if hints is None: - # Return empty annotations for something that _could_ have them. - if (isinstance(obj, types.FunctionType) or - isinstance(obj, types.BuiltinFunctionType) or - isinstance(obj, types.MethodType) or - isinstance(obj, types.ModuleType)): - return {} - else: - raise TypeError('{!r} is not a module, class, method, ' - 'or function.'.format(obj)) - defaults = _get_defaults(obj) - hints = dict(hints) - for name, value in hints.items(): - if value is None: - value = type(None) - if isinstance(value, str): - value = _ForwardRef(value) - value = _eval_type(value, globalns, localns) - if name in defaults and defaults[name] is None: - value = Optional[value] - hints[name] = value - return hints - - -def no_type_check(arg): - """Decorator to indicate that annotations are not type hints. - - The argument must be a class or function; if it is a class, it - applies recursively to all methods and classes defined in that class - (but not to methods defined in its superclasses or subclasses). - - This mutates the function(s) or class(es) in place. - """ - if isinstance(arg, type): - arg_attrs = arg.__dict__.copy() - for attr, val in arg.__dict__.items(): - if val in arg.__bases__: - arg_attrs.pop(attr) - for obj in arg_attrs.values(): - if isinstance(obj, types.FunctionType): - obj.__no_type_check__ = True - if isinstance(obj, type): - no_type_check(obj) - try: - arg.__no_type_check__ = True - except TypeError: # built-in classes - pass - return arg - - -def no_type_check_decorator(decorator): - """Decorator to give another decorator the @no_type_check effect. - - This wraps the decorator with something that wraps the decorated - function in @no_type_check. - """ - - @functools.wraps(decorator) - def wrapped_decorator(*args, **kwds): - func = decorator(*args, **kwds) - func = no_type_check(func) - return func - - return wrapped_decorator - - -def _overload_dummy(*args, **kwds): - """Helper for @overload to raise when called.""" - raise NotImplementedError( - "You should not call an overloaded function. " - "A series of @overload-decorated functions " - "outside a stub module should always be followed " - "by an implementation that is not @overload-ed.") - - -def overload(func): - """Decorator for overloaded functions/methods. - - In a stub file, place two or more stub definitions for the same - function in a row, each decorated with @overload. For example: - - @overload - def utf8(value: None) -> None: ... - @overload - def utf8(value: bytes) -> bytes: ... - @overload - def utf8(value: str) -> bytes: ... - - In a non-stub file (i.e. a regular .py file), do the same but - follow it with an implementation. The implementation should *not* - be decorated with @overload. For example: - - @overload - def utf8(value: None) -> None: ... - @overload - def utf8(value: bytes) -> bytes: ... - @overload - def utf8(value: str) -> bytes: ... - def utf8(value): - # implementation goes here - """ - return _overload_dummy - - -class _ProtocolMeta(GenericMeta): - """Internal metaclass for _Protocol. - - This exists so _Protocol classes can be generic without deriving - from Generic. 
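[Editorial aside, not part of the patch: a sketch of get_type_hints() above resolving a string-literal forward reference; the implicit Optional for None defaults, also implemented above, is noted in a comment because recent Pythons dropped that step.

    from typing import get_type_hints

    def greet(name: 'str', prefix: str = None) -> str:
        return (prefix or '') + name

    hints = get_type_hints(greet)
    # The string literal was evaluated via _ForwardRef/_eval_type:
    assert hints['name'] is str and hints['return'] is str
    # Under the module quoted above, the None default also wraps the
    # hint: hints['prefix'] == Optional[str]  (newer Pythons dropped this)
]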
- """ - - def __instancecheck__(self, obj): - if _Protocol not in self.__bases__: - return super().__instancecheck__(obj) - raise TypeError("Protocols cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - if not self._is_protocol: - # No structural checks since this isn't a protocol. - return NotImplemented - - if self is _Protocol: - # Every class is a subclass of the empty protocol. - return True - - # Find all attributes defined in the protocol. - attrs = self._get_protocol_attrs() - - for attr in attrs: - if not any(attr in d.__dict__ for d in cls.__mro__): - return False - return True - - def _get_protocol_attrs(self): - # Get all Protocol base classes. - protocol_bases = [] - for c in self.__mro__: - if getattr(c, '_is_protocol', False) and c.__name__ != '_Protocol': - protocol_bases.append(c) - - # Get attributes included in protocol. - attrs = set() - for base in protocol_bases: - for attr in base.__dict__.keys(): - # Include attributes not defined in any non-protocol bases. - for c in self.__mro__: - if (c is not base and attr in c.__dict__ and - not getattr(c, '_is_protocol', False)): - break - else: - if (not attr.startswith('_abc_') and - attr != '__abstractmethods__' and - attr != '__annotations__' and - attr != '__weakref__' and - attr != '_is_protocol' and - attr != '__dict__' and - attr != '__args__' and - attr != '__slots__' and - attr != '_get_protocol_attrs' and - attr != '__next_in_mro__' and - attr != '__parameters__' and - attr != '__origin__' and - attr != '__orig_bases__' and - attr != '__extra__' and - attr != '__tree_hash__' and - attr != '__module__'): - attrs.add(attr) - - return attrs - - -class _Protocol(metaclass=_ProtocolMeta): - """Internal base class for protocol classes. - - This implements a simple-minded structural issubclass check - (similar but more general than the one-offs in collections.abc - such as Hashable). - """ - - __slots__ = () - - _is_protocol = True - - -# Various ABCs mimicking those in collections.abc. -# A few are simply re-exported for completeness. - -Hashable = collections_abc.Hashable # Not generic. 
- - -if hasattr(collections_abc, 'Awaitable'): - class Awaitable(Generic[T_co], extra=collections_abc.Awaitable): - __slots__ = () - - __all__.append('Awaitable') - - -if hasattr(collections_abc, 'Coroutine'): - class Coroutine(Awaitable[V_co], Generic[T_co, T_contra, V_co], - extra=collections_abc.Coroutine): - __slots__ = () - - __all__.append('Coroutine') - - -if hasattr(collections_abc, 'AsyncIterable'): - - class AsyncIterable(Generic[T_co], extra=collections_abc.AsyncIterable): - __slots__ = () - - class AsyncIterator(AsyncIterable[T_co], - extra=collections_abc.AsyncIterator): - __slots__ = () - - __all__.append('AsyncIterable') - __all__.append('AsyncIterator') - - -class Iterable(Generic[T_co], extra=collections_abc.Iterable): - __slots__ = () - - -class Iterator(Iterable[T_co], extra=collections_abc.Iterator): - __slots__ = () - - -class SupportsInt(_Protocol): - __slots__ = () - - @abstractmethod - def __int__(self) -> int: - pass - - -class SupportsFloat(_Protocol): - __slots__ = () - - @abstractmethod - def __float__(self) -> float: - pass - - -class SupportsComplex(_Protocol): - __slots__ = () - - @abstractmethod - def __complex__(self) -> complex: - pass - - -class SupportsBytes(_Protocol): - __slots__ = () - - @abstractmethod - def __bytes__(self) -> bytes: - pass - - -class SupportsAbs(_Protocol[T_co]): - __slots__ = () - - @abstractmethod - def __abs__(self) -> T_co: - pass - - -class SupportsRound(_Protocol[T_co]): - __slots__ = () - - @abstractmethod - def __round__(self, ndigits: int = 0) -> T_co: - pass - - -if hasattr(collections_abc, 'Reversible'): - class Reversible(Iterable[T_co], extra=collections_abc.Reversible): - __slots__ = () -else: - class Reversible(_Protocol[T_co]): - __slots__ = () - - @abstractmethod - def __reversed__(self) -> 'Iterator[T_co]': - pass - - -Sized = collections_abc.Sized # Not generic. - - -class Container(Generic[T_co], extra=collections_abc.Container): - __slots__ = () - - -if hasattr(collections_abc, 'Collection'): - class Collection(Sized, Iterable[T_co], Container[T_co], - extra=collections_abc.Collection): - __slots__ = () - - __all__.append('Collection') - - -# Callable was defined earlier. - -if hasattr(collections_abc, 'Collection'): - class AbstractSet(Collection[T_co], - extra=collections_abc.Set): - __slots__ = () -else: - class AbstractSet(Sized, Iterable[T_co], Container[T_co], - extra=collections_abc.Set): - __slots__ = () - - -class MutableSet(AbstractSet[T], extra=collections_abc.MutableSet): - __slots__ = () - - -# NOTE: It is only covariant in the value type. 
-if hasattr(collections_abc, 'Collection'): - class Mapping(Collection[KT], Generic[KT, VT_co], - extra=collections_abc.Mapping): - __slots__ = () -else: - class Mapping(Sized, Iterable[KT], Container[KT], Generic[KT, VT_co], - extra=collections_abc.Mapping): - __slots__ = () - - -class MutableMapping(Mapping[KT, VT], extra=collections_abc.MutableMapping): - __slots__ = () - -if hasattr(collections_abc, 'Reversible'): - if hasattr(collections_abc, 'Collection'): - class Sequence(Reversible[T_co], Collection[T_co], - extra=collections_abc.Sequence): - __slots__ = () - else: - class Sequence(Sized, Reversible[T_co], Container[T_co], - extra=collections_abc.Sequence): - __slots__ = () -else: - class Sequence(Sized, Iterable[T_co], Container[T_co], - extra=collections_abc.Sequence): - __slots__ = () - - -class MutableSequence(Sequence[T], extra=collections_abc.MutableSequence): - __slots__ = () - - -class ByteString(Sequence[int], extra=collections_abc.ByteString): - __slots__ = () - - -class List(list, MutableSequence[T], extra=list): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, List): - raise TypeError("Type List cannot be instantiated; " - "use list() instead") - return _generic_new(list, cls, *args, **kwds) - - -class Set(set, MutableSet[T], extra=set): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, Set): - raise TypeError("Type Set cannot be instantiated; " - "use set() instead") - return _generic_new(set, cls, *args, **kwds) - - -class FrozenSet(frozenset, AbstractSet[T_co], extra=frozenset): - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, FrozenSet): - raise TypeError("Type FrozenSet cannot be instantiated; " - "use frozenset() instead") - return _generic_new(frozenset, cls, *args, **kwds) - - -class MappingView(Sized, Iterable[T_co], extra=collections_abc.MappingView): - __slots__ = () - - -class KeysView(MappingView[KT], AbstractSet[KT], - extra=collections_abc.KeysView): - __slots__ = () - - -class ItemsView(MappingView[Tuple[KT, VT_co]], - AbstractSet[Tuple[KT, VT_co]], - Generic[KT, VT_co], - extra=collections_abc.ItemsView): - __slots__ = () - - -class ValuesView(MappingView[VT_co], extra=collections_abc.ValuesView): - __slots__ = () - - -if hasattr(contextlib, 'AbstractContextManager'): - class ContextManager(Generic[T_co], extra=contextlib.AbstractContextManager): - __slots__ = () - __all__.append('ContextManager') - - -class Dict(dict, MutableMapping[KT, VT], extra=dict): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, Dict): - raise TypeError("Type Dict cannot be instantiated; " - "use dict() instead") - return _generic_new(dict, cls, *args, **kwds) - -class DefaultDict(collections.defaultdict, MutableMapping[KT, VT], - extra=collections.defaultdict): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, DefaultDict): - raise TypeError("Type DefaultDict cannot be instantiated; " - "use collections.defaultdict() instead") - return _generic_new(collections.defaultdict, cls, *args, **kwds) - -# Determine what base class to use for Generator. -if hasattr(collections_abc, 'Generator'): - # Sufficiently recent versions of 3.5 have a Generator ABC. - _G_base = collections_abc.Generator -else: - # Fall back on the exact type. 
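[Editorial aside, not part of the patch: the __new__ overrides above make the plain container aliases refuse instantiation on this 3.6-era module (later Pythons relaxed this to a plain list with a deprecation), while subclassing them produces real builtin subclasses. A sketch:

    from typing import List

    try:
        List[int]()        # "use list() instead" on the module above
    except TypeError:
        pass

    class IntStack(List[int]):   # subclassing is fine, and yields
        pass                     # an actual list subclass

    s = IntStack()
    s.append(3)
    assert isinstance(s, list)
]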
- _G_base = types.GeneratorType - - -class Generator(Iterator[T_co], Generic[T_co, T_contra, V_co], - extra=_G_base): - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, Generator): - raise TypeError("Type Generator cannot be instantiated; " - "create a subclass instead") - return _generic_new(_G_base, cls, *args, **kwds) - - -# Internal type variable used for Type[]. -CT_co = TypeVar('CT_co', covariant=True, bound=type) - - -# This is not a real generic class. Don't use outside annotations. -class Type(Generic[CT_co], extra=type): - """A special construct usable to annotate class objects. - - For example, suppose we have the following classes:: - - class User: ... # Abstract base for User classes - class BasicUser(User): ... - class ProUser(User): ... - class TeamUser(User): ... - - And a function that takes a class argument that's a subclass of - User and returns an instance of the corresponding class:: - - U = TypeVar('U', bound=User) - def new_user(user_class: Type[U]) -> U: - user = user_class() - # (Here we could write the user object to a database) - return user - - joe = new_user(BasicUser) - - At this point the type checker knows that joe has type BasicUser. - """ - - __slots__ = () - - -def _make_nmtuple(name, types): - msg = "NamedTuple('Name', [(f0, t0), (f1, t1), ...]); each t must be a type" - types = [(n, _type_check(t, msg)) for n, t in types] - nm_tpl = collections.namedtuple(name, [n for n, t in types]) - nm_tpl._field_types = dict(types) - try: - nm_tpl.__module__ = sys._getframe(2).f_globals.get('__name__', '__main__') - except (AttributeError, ValueError): - pass - return nm_tpl - - -_PY36 = sys.version_info[:2] >= (3, 6) - - -class NamedTupleMeta(type): - - def __new__(cls, typename, bases, ns): - if ns.get('_root', False): - return super().__new__(cls, typename, bases, ns) - if not _PY36: - raise TypeError("Class syntax for NamedTuple is only supported" - " in Python 3.6+") - types = ns.get('__annotations__', {}) - return _make_nmtuple(typename, types.items()) - -class NamedTuple(metaclass=NamedTupleMeta): - """Typed version of namedtuple. - - Usage in Python versions >= 3.6:: - - class Employee(NamedTuple): - name: str - id: int - - This is equivalent to:: - - Employee = collections.namedtuple('Employee', ['name', 'id']) - - The resulting class has one extra attribute: _field_types, - giving a dict mapping field names to types. (The field names - are in the _fields attribute, which is part of the namedtuple - API.) Alternative equivalent keyword syntax is also accepted:: - - Employee = NamedTuple('Employee', name=str, id=int) - - In Python versions <= 3.5 use:: - - Employee = NamedTuple('Employee', [('name', str), ('id', int)]) - """ - _root = True - - def __new__(self, typename, fields=None, **kwargs): - if kwargs and not _PY36: - raise TypeError("Keyword syntax for NamedTuple is only supported" - " in Python 3.6+") - if fields is None: - fields = kwargs.items() - elif kwargs: - raise TypeError("Either list of fields or keywords" - " can be provided to NamedTuple, not both") - return _make_nmtuple(typename, fields) - - -def NewType(name, tp): - """NewType creates simple unique types with almost zero - runtime overhead. NewType(name, tp) is considered a subtype of tp - by static type checkers. At runtime, NewType(name, tp) returns - a dummy function that simply returns its argument. Usage:: - - UserId = NewType('UserId', int) - - def name_by_id(user_id: UserId) -> str: - ... 
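[Editorial aside, not part of the patch: a sketch of the functional NamedTuple form that _make_nmtuple() above supports on every version this module targets; the _field_types attribute is shown as a comment because newer Pythons moved that data to __annotations__.

    from typing import NamedTuple

    Employee = NamedTuple('Employee', [('name', str), ('id', int)])
    assert Employee._fields == ('name', 'id')
    # On the 3.6-era module above, the types land in _field_types:
    # Employee._field_types == {'name': str, 'id': int}

    e = Employee('Guido', 1)
    assert e.name == 'Guido' and e.id == 1
]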
- - UserId('user') # Fails type check - - name_by_id(42) # Fails type check - name_by_id(UserId(42)) # OK - - num = UserId(5) + 1 # type: int - """ - - def new_type(x): - return x - - new_type.__name__ = name - new_type.__supertype__ = tp - return new_type - - -# Python-version-specific alias (Python 2: unicode; Python 3: str) -Text = str - - -# Constant that's True when type checking, but False here. -TYPE_CHECKING = False - - -class IO(Generic[AnyStr]): - """Generic base class for TextIO and BinaryIO. - - This is an abstract, generic version of the return of open(). - - NOTE: This does not distinguish between the different possible - classes (text vs. binary, read vs. write vs. read/write, - append-only, unbuffered). The TextIO and BinaryIO subclasses - below capture the distinctions between text vs. binary, which is - pervasive in the interface; however we currently do not offer a - way to track the other distinctions in the type system. - """ - - __slots__ = () - - @abstractproperty - def mode(self) -> str: - pass - - @abstractproperty - def name(self) -> str: - pass - - @abstractmethod - def close(self) -> None: - pass - - @abstractmethod - def closed(self) -> bool: - pass - - @abstractmethod - def fileno(self) -> int: - pass - - @abstractmethod - def flush(self) -> None: - pass - - @abstractmethod - def isatty(self) -> bool: - pass - - @abstractmethod - def read(self, n: int = -1) -> AnyStr: - pass - - @abstractmethod - def readable(self) -> bool: - pass - - @abstractmethod - def readline(self, limit: int = -1) -> AnyStr: - pass - - @abstractmethod - def readlines(self, hint: int = -1) -> List[AnyStr]: - pass - - @abstractmethod - def seek(self, offset: int, whence: int = 0) -> int: - pass - - @abstractmethod - def seekable(self) -> bool: - pass - - @abstractmethod - def tell(self) -> int: - pass - - @abstractmethod - def truncate(self, size: int = None) -> int: - pass - - @abstractmethod - def writable(self) -> bool: - pass - - @abstractmethod - def write(self, s: AnyStr) -> int: - pass - - @abstractmethod - def writelines(self, lines: List[AnyStr]) -> None: - pass - - @abstractmethod - def __enter__(self) -> 'IO[AnyStr]': - pass - - @abstractmethod - def __exit__(self, type, value, traceback) -> None: - pass - - -class BinaryIO(IO[bytes]): - """Typed version of the return of open() in binary mode.""" - - __slots__ = () - - @abstractmethod - def write(self, s: Union[bytes, bytearray]) -> int: - pass - - @abstractmethod - def __enter__(self) -> 'BinaryIO': - pass - - -class TextIO(IO[str]): - """Typed version of the return of open() in text mode.""" - - __slots__ = () - - @abstractproperty - def buffer(self) -> BinaryIO: - pass - - @abstractproperty - def encoding(self) -> str: - pass - - @abstractproperty - def errors(self) -> Optional[str]: - pass - - @abstractproperty - def line_buffering(self) -> bool: - pass - - @abstractproperty - def newlines(self) -> Any: - pass - - @abstractmethod - def __enter__(self) -> 'TextIO': - pass - - -class io: - """Wrapper namespace for IO generic classes.""" - - __all__ = ['IO', 'TextIO', 'BinaryIO'] - IO = IO - TextIO = TextIO - BinaryIO = BinaryIO - -io.__name__ = __name__ + '.io' -sys.modules[io.__name__] = io - - -Pattern = _TypeAlias('Pattern', AnyStr, type(stdlib_re.compile('')), - lambda p: p.pattern) -Match = _TypeAlias('Match', AnyStr, type(stdlib_re.match('', '')), - lambda m: m.re.pattern) - - -class re: - """Wrapper namespace for re type aliases.""" - - __all__ = ['Pattern', 'Match'] - Pattern = Pattern - Match = Match - -re.__name__ = 
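[Editorial aside, not part of the patch: a sketch of the runtime triviality of NewType described above; the distinct type exists only for static checkers, while at runtime the call is the identity function new_type() defined above.

    from typing import NewType

    UserId = NewType('UserId', int)

    uid = UserId(5)
    assert uid == 5 and type(uid) is int   # no wrapper object at runtime
    assert UserId.__supertype__ is int     # recorded for introspection
]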
__name__ + '.re' -sys.modules[re.__name__] = re diff --git a/typing_extensions/test_data/python-3.6.0/_collections_abc.py b/typing_extensions/test_data/python-3.6.0/_collections_abc.py deleted file mode 100644 index b172f3f3..00000000 --- a/typing_extensions/test_data/python-3.6.0/_collections_abc.py +++ /dev/null @@ -1,1007 +0,0 @@ -# Copyright 2007 Google, Inc. All Rights Reserved. -# Licensed to PSF under a Contributor Agreement. - -"""Abstract Base Classes (ABCs) for collections, according to PEP 3119. - -Unit tests are in test_collections. -""" - -from abc import ABCMeta, abstractmethod -import sys - -__all__ = ["Awaitable", "Coroutine", - "AsyncIterable", "AsyncIterator", "AsyncGenerator", - "Hashable", "Iterable", "Iterator", "Generator", "Reversible", - "Sized", "Container", "Callable", "Collection", - "Set", "MutableSet", - "Mapping", "MutableMapping", - "MappingView", "KeysView", "ItemsView", "ValuesView", - "Sequence", "MutableSequence", - "ByteString", - ] - -# This module has been renamed from collections.abc to _collections_abc to -# speed up interpreter startup. Some of the types such as MutableMapping are -# required early but collections module imports a lot of other modules. -# See issue #19218 -__name__ = "collections.abc" - -# Private list of types that we want to register with the various ABCs -# so that they will pass tests like: -# it = iter(somebytearray) -# assert isinstance(it, Iterable) -# Note: in other implementations, these types might not be distinct -# and they may have their own implementation specific types that -# are not included on this list. -bytes_iterator = type(iter(b'')) -bytearray_iterator = type(iter(bytearray())) -#callable_iterator = ??? -dict_keyiterator = type(iter({}.keys())) -dict_valueiterator = type(iter({}.values())) -dict_itemiterator = type(iter({}.items())) -list_iterator = type(iter([])) -list_reverseiterator = type(iter(reversed([]))) -range_iterator = type(iter(range(0))) -longrange_iterator = type(iter(range(1 << 1000))) -set_iterator = type(iter(set())) -str_iterator = type(iter("")) -tuple_iterator = type(iter(())) -zip_iterator = type(iter(zip())) -## views ## -dict_keys = type({}.keys()) -dict_values = type({}.values()) -dict_items = type({}.items()) -## misc ## -mappingproxy = type(type.__dict__) -generator = type((lambda: (yield))()) -## coroutine ## -async def _coro(): pass -_coro = _coro() -coroutine = type(_coro) -_coro.close() # Prevent ResourceWarning -del _coro -## asynchronous generator ## -async def _ag(): yield -_ag = _ag() -async_generator = type(_ag) -del _ag - - -### ONE-TRICK PONIES ### - -def _check_methods(C, *methods): - mro = C.__mro__ - for method in methods: - for B in mro: - if method in B.__dict__: - if B.__dict__[method] is None: - return NotImplemented - break - else: - return NotImplemented - return True - -class Hashable(metaclass=ABCMeta): - - __slots__ = () - - @abstractmethod - def __hash__(self): - return 0 - - @classmethod - def __subclasshook__(cls, C): - if cls is Hashable: - return _check_methods(C, "__hash__") - return NotImplemented - - -class Awaitable(metaclass=ABCMeta): - - __slots__ = () - - @abstractmethod - def __await__(self): - yield - - @classmethod - def __subclasshook__(cls, C): - if cls is Awaitable: - return _check_methods(C, "__await__") - return NotImplemented - - -class Coroutine(Awaitable): - - __slots__ = () - - @abstractmethod - def send(self, value): - """Send a value into the coroutine. - Return next yielded value or raise StopIteration. 
- """ - raise StopIteration - - @abstractmethod - def throw(self, typ, val=None, tb=None): - """Raise an exception in the coroutine. - Return next yielded value or raise StopIteration. - """ - if val is None: - if tb is None: - raise typ - val = typ() - if tb is not None: - val = val.with_traceback(tb) - raise val - - def close(self): - """Raise GeneratorExit inside coroutine. - """ - try: - self.throw(GeneratorExit) - except (GeneratorExit, StopIteration): - pass - else: - raise RuntimeError("coroutine ignored GeneratorExit") - - @classmethod - def __subclasshook__(cls, C): - if cls is Coroutine: - return _check_methods(C, '__await__', 'send', 'throw', 'close') - return NotImplemented - - -Coroutine.register(coroutine) - - -class AsyncIterable(metaclass=ABCMeta): - - __slots__ = () - - @abstractmethod - def __aiter__(self): - return AsyncIterator() - - @classmethod - def __subclasshook__(cls, C): - if cls is AsyncIterable: - return _check_methods(C, "__aiter__") - return NotImplemented - - -class AsyncIterator(AsyncIterable): - - __slots__ = () - - @abstractmethod - async def __anext__(self): - """Return the next item or raise StopAsyncIteration when exhausted.""" - raise StopAsyncIteration - - def __aiter__(self): - return self - - @classmethod - def __subclasshook__(cls, C): - if cls is AsyncIterator: - return _check_methods(C, "__anext__", "__aiter__") - return NotImplemented - - -class AsyncGenerator(AsyncIterator): - - __slots__ = () - - async def __anext__(self): - """Return the next item from the asynchronous generator. - When exhausted, raise StopAsyncIteration. - """ - return await self.asend(None) - - @abstractmethod - async def asend(self, value): - """Send a value into the asynchronous generator. - Return next yielded value or raise StopAsyncIteration. - """ - raise StopAsyncIteration - - @abstractmethod - async def athrow(self, typ, val=None, tb=None): - """Raise an exception in the asynchronous generator. - Return next yielded value or raise StopAsyncIteration. - """ - if val is None: - if tb is None: - raise typ - val = typ() - if tb is not None: - val = val.with_traceback(tb) - raise val - - async def aclose(self): - """Raise GeneratorExit inside coroutine. - """ - try: - await self.athrow(GeneratorExit) - except (GeneratorExit, StopAsyncIteration): - pass - else: - raise RuntimeError("asynchronous generator ignored GeneratorExit") - - @classmethod - def __subclasshook__(cls, C): - if cls is AsyncGenerator: - return _check_methods(C, '__aiter__', '__anext__', - 'asend', 'athrow', 'aclose') - return NotImplemented - - -AsyncGenerator.register(async_generator) - - -class Iterable(metaclass=ABCMeta): - - __slots__ = () - - @abstractmethod - def __iter__(self): - while False: - yield None - - @classmethod - def __subclasshook__(cls, C): - if cls is Iterable: - return _check_methods(C, "__iter__") - return NotImplemented - - -class Iterator(Iterable): - - __slots__ = () - - @abstractmethod - def __next__(self): - 'Return the next item from the iterator. 
When exhausted, raise StopIteration' - raise StopIteration - - def __iter__(self): - return self - - @classmethod - def __subclasshook__(cls, C): - if cls is Iterator: - return _check_methods(C, '__iter__', '__next__') - return NotImplemented - -Iterator.register(bytes_iterator) -Iterator.register(bytearray_iterator) -#Iterator.register(callable_iterator) -Iterator.register(dict_keyiterator) -Iterator.register(dict_valueiterator) -Iterator.register(dict_itemiterator) -Iterator.register(list_iterator) -Iterator.register(list_reverseiterator) -Iterator.register(range_iterator) -Iterator.register(longrange_iterator) -Iterator.register(set_iterator) -Iterator.register(str_iterator) -Iterator.register(tuple_iterator) -Iterator.register(zip_iterator) - - -class Reversible(Iterable): - - __slots__ = () - - @abstractmethod - def __reversed__(self): - while False: - yield None - - @classmethod - def __subclasshook__(cls, C): - if cls is Reversible: - return _check_methods(C, "__reversed__", "__iter__") - return NotImplemented - - -class Generator(Iterator): - - __slots__ = () - - def __next__(self): - """Return the next item from the generator. - When exhausted, raise StopIteration. - """ - return self.send(None) - - @abstractmethod - def send(self, value): - """Send a value into the generator. - Return next yielded value or raise StopIteration. - """ - raise StopIteration - - @abstractmethod - def throw(self, typ, val=None, tb=None): - """Raise an exception in the generator. - Return next yielded value or raise StopIteration. - """ - if val is None: - if tb is None: - raise typ - val = typ() - if tb is not None: - val = val.with_traceback(tb) - raise val - - def close(self): - """Raise GeneratorExit inside generator. - """ - try: - self.throw(GeneratorExit) - except (GeneratorExit, StopIteration): - pass - else: - raise RuntimeError("generator ignored GeneratorExit") - - @classmethod - def __subclasshook__(cls, C): - if cls is Generator: - return _check_methods(C, '__iter__', '__next__', - 'send', 'throw', 'close') - return NotImplemented - -Generator.register(generator) - - -class Sized(metaclass=ABCMeta): - - __slots__ = () - - @abstractmethod - def __len__(self): - return 0 - - @classmethod - def __subclasshook__(cls, C): - if cls is Sized: - return _check_methods(C, "__len__") - return NotImplemented - - -class Container(metaclass=ABCMeta): - - __slots__ = () - - @abstractmethod - def __contains__(self, x): - return False - - @classmethod - def __subclasshook__(cls, C): - if cls is Container: - return _check_methods(C, "__contains__") - return NotImplemented - -class Collection(Sized, Iterable, Container): - - __slots__ = () - - @classmethod - def __subclasshook__(cls, C): - if cls is Collection: - return _check_methods(C, "__len__", "__iter__", "__contains__") - return NotImplemented - -class Callable(metaclass=ABCMeta): - - __slots__ = () - - @abstractmethod - def __call__(self, *args, **kwds): - return False - - @classmethod - def __subclasshook__(cls, C): - if cls is Callable: - return _check_methods(C, "__call__") - return NotImplemented - - -### SETS ### - - -class Set(Collection): - - """A set is a finite, iterable container. - - This class provides concrete generic implementations of all - methods except for __contains__, __iter__ and __len__. - - To override the comparisons (presumably for speed, as the - semantics are fixed), redefine __le__ and __ge__, - then the other operations will automatically follow suit. 
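[Editorial aside, not part of the patch: the "one-trick pony" ABCs above (Iterable, Sized, Container, ...) all use _check_methods() in their __subclasshook__, so any class with the right methods passes isinstance()/issubclass() without registering. A sketch with an illustrative Bag class:

    from collections.abc import Iterable, Sized

    class Bag:
        def __init__(self, items):
            self._items = list(items)
        def __iter__(self):
            return iter(self._items)
        def __len__(self):
            return len(self._items)

    assert isinstance(Bag([1, 2]), Iterable)
    assert issubclass(Bag, Sized)
]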
- """ - - __slots__ = () - - def __le__(self, other): - if not isinstance(other, Set): - return NotImplemented - if len(self) > len(other): - return False - for elem in self: - if elem not in other: - return False - return True - - def __lt__(self, other): - if not isinstance(other, Set): - return NotImplemented - return len(self) < len(other) and self.__le__(other) - - def __gt__(self, other): - if not isinstance(other, Set): - return NotImplemented - return len(self) > len(other) and self.__ge__(other) - - def __ge__(self, other): - if not isinstance(other, Set): - return NotImplemented - if len(self) < len(other): - return False - for elem in other: - if elem not in self: - return False - return True - - def __eq__(self, other): - if not isinstance(other, Set): - return NotImplemented - return len(self) == len(other) and self.__le__(other) - - @classmethod - def _from_iterable(cls, it): - '''Construct an instance of the class from any iterable input. - - Must override this method if the class constructor signature - does not accept an iterable for an input. - ''' - return cls(it) - - def __and__(self, other): - if not isinstance(other, Iterable): - return NotImplemented - return self._from_iterable(value for value in other if value in self) - - __rand__ = __and__ - - def isdisjoint(self, other): - 'Return True if two sets have a null intersection.' - for value in other: - if value in self: - return False - return True - - def __or__(self, other): - if not isinstance(other, Iterable): - return NotImplemented - chain = (e for s in (self, other) for e in s) - return self._from_iterable(chain) - - __ror__ = __or__ - - def __sub__(self, other): - if not isinstance(other, Set): - if not isinstance(other, Iterable): - return NotImplemented - other = self._from_iterable(other) - return self._from_iterable(value for value in self - if value not in other) - - def __rsub__(self, other): - if not isinstance(other, Set): - if not isinstance(other, Iterable): - return NotImplemented - other = self._from_iterable(other) - return self._from_iterable(value for value in other - if value not in self) - - def __xor__(self, other): - if not isinstance(other, Set): - if not isinstance(other, Iterable): - return NotImplemented - other = self._from_iterable(other) - return (self - other) | (other - self) - - __rxor__ = __xor__ - - def _hash(self): - """Compute the hash value of a set. - - Note that we don't define __hash__: not all sets are hashable. - But if you define a hashable set type, its __hash__ should - call this function. - - This must be compatible __eq__. - - All sets ought to compare equal if they contain the same - elements, regardless of how they are implemented, and - regardless of the order of the elements; so there's not much - freedom for __eq__ or __hash__. We match the algorithm used - by the built-in frozenset type. - """ - MAX = sys.maxsize - MASK = 2 * MAX + 1 - n = len(self) - h = 1927868237 * (n + 1) - h &= MASK - for x in self: - hx = hash(x) - h ^= (hx ^ (hx << 16) ^ 89869747) * 3644798167 - h &= MASK - h = h * 69069 + 907133923 - h &= MASK - if h > MAX: - h -= MASK + 1 - if h == -1: - h = 590923713 - return h - -Set.register(frozenset) - - -class MutableSet(Set): - """A mutable set is a finite, iterable container. - - This class provides concrete generic implementations of all - methods except for __contains__, __iter__, __len__, - add(), and discard(). 
- - To override the comparisons (presumably for speed, as the - semantics are fixed), all you have to do is redefine __le__ and - then the other operations will automatically follow suit. - """ - - __slots__ = () - - @abstractmethod - def add(self, value): - """Add an element.""" - raise NotImplementedError - - @abstractmethod - def discard(self, value): - """Remove an element. Do not raise an exception if absent.""" - raise NotImplementedError - - def remove(self, value): - """Remove an element. If not a member, raise a KeyError.""" - if value not in self: - raise KeyError(value) - self.discard(value) - - def pop(self): - """Return the popped value. Raise KeyError if empty.""" - it = iter(self) - try: - value = next(it) - except StopIteration: - raise KeyError - self.discard(value) - return value - - def clear(self): - """This is slow (creates N new iterators!) but effective.""" - try: - while True: - self.pop() - except KeyError: - pass - - def __ior__(self, it): - for value in it: - self.add(value) - return self - - def __iand__(self, it): - for value in (self - it): - self.discard(value) - return self - - def __ixor__(self, it): - if it is self: - self.clear() - else: - if not isinstance(it, Set): - it = self._from_iterable(it) - for value in it: - if value in self: - self.discard(value) - else: - self.add(value) - return self - - def __isub__(self, it): - if it is self: - self.clear() - else: - for value in it: - self.discard(value) - return self - -MutableSet.register(set) - - -### MAPPINGS ### - - -class Mapping(Collection): - - __slots__ = () - - """A Mapping is a generic container for associating key/value - pairs. - - This class provides concrete generic implementations of all - methods except for __getitem__, __iter__, and __len__. - - """ - - @abstractmethod - def __getitem__(self, key): - raise KeyError - - def get(self, key, default=None): - 'D.get(k[,d]) -> D[k] if k in D, else d. d defaults to None.' 
- try: - return self[key] - except KeyError: - return default - - def __contains__(self, key): - try: - self[key] - except KeyError: - return False - else: - return True - - def keys(self): - "D.keys() -> a set-like object providing a view on D's keys" - return KeysView(self) - - def items(self): - "D.items() -> a set-like object providing a view on D's items" - return ItemsView(self) - - def values(self): - "D.values() -> an object providing a view on D's values" - return ValuesView(self) - - def __eq__(self, other): - if not isinstance(other, Mapping): - return NotImplemented - return dict(self.items()) == dict(other.items()) - - __reversed__ = None - -Mapping.register(mappingproxy) - - -class MappingView(Sized): - - __slots__ = '_mapping', - - def __init__(self, mapping): - self._mapping = mapping - - def __len__(self): - return len(self._mapping) - - def __repr__(self): - return '{0.__class__.__name__}({0._mapping!r})'.format(self) - - -class KeysView(MappingView, Set): - - __slots__ = () - - @classmethod - def _from_iterable(self, it): - return set(it) - - def __contains__(self, key): - return key in self._mapping - - def __iter__(self): - yield from self._mapping - -KeysView.register(dict_keys) - - -class ItemsView(MappingView, Set): - - __slots__ = () - - @classmethod - def _from_iterable(self, it): - return set(it) - - def __contains__(self, item): - key, value = item - try: - v = self._mapping[key] - except KeyError: - return False - else: - return v is value or v == value - - def __iter__(self): - for key in self._mapping: - yield (key, self._mapping[key]) - -ItemsView.register(dict_items) - - -class ValuesView(MappingView): - - __slots__ = () - - def __contains__(self, value): - for key in self._mapping: - v = self._mapping[key] - if v is value or v == value: - return True - return False - - def __iter__(self): - for key in self._mapping: - yield self._mapping[key] - -ValuesView.register(dict_values) - - -class MutableMapping(Mapping): - - __slots__ = () - - """A MutableMapping is a generic container for associating - key/value pairs. - - This class provides concrete generic implementations of all - methods except for __getitem__, __setitem__, __delitem__, - __iter__, and __len__. - - """ - - @abstractmethod - def __setitem__(self, key, value): - raise KeyError - - @abstractmethod - def __delitem__(self, key): - raise KeyError - - __marker = object() - - def pop(self, key, default=__marker): - '''D.pop(k[,d]) -> v, remove specified key and return the corresponding value. - If key is not found, d is returned if given, otherwise KeyError is raised. - ''' - try: - value = self[key] - except KeyError: - if default is self.__marker: - raise - return default - else: - del self[key] - return value - - def popitem(self): - '''D.popitem() -> (k, v), remove and return some (key, value) pair - as a 2-tuple; but raise KeyError if D is empty. - ''' - try: - key = next(iter(self)) - except StopIteration: - raise KeyError - value = self[key] - del self[key] - return key, value - - def clear(self): - 'D.clear() -> None. Remove all items from D.' - try: - while True: - self.popitem() - except KeyError: - pass - - def update(*args, **kwds): - ''' D.update([E, ]**F) -> None. Update D from mapping/iterable E and F. 
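[Editorial aside, not part of the patch: a sketch of a minimal MutableMapping built on the mixin above; only the five abstract methods are written, and get(), pop(), setdefault(), update() and the views come for free. AttrDict is an illustrative name.

    from collections.abc import MutableMapping

    class AttrDict(MutableMapping):
        def __init__(self, *args, **kwargs):
            self._data = {}
            self.update(*args, **kwargs)   # update() is the mixin above
        def __getitem__(self, key):
            return self._data[key]
        def __setitem__(self, key, value):
            self._data[key] = value
        def __delitem__(self, key):
            del self._data[key]
        def __iter__(self):
            return iter(self._data)
        def __len__(self):
            return len(self._data)

    d = AttrDict(a=1)
    d.setdefault('b', 2)      # from the mixin
    assert d.pop('a') == 1    # from the mixin
    assert dict(d.items()) == {'b': 2}
]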
- If E present and has a .keys() method, does: for k in E: D[k] = E[k] - If E present and lacks .keys() method, does: for (k, v) in E: D[k] = v - In either case, this is followed by: for k, v in F.items(): D[k] = v - ''' - if not args: - raise TypeError("descriptor 'update' of 'MutableMapping' object " - "needs an argument") - self, *args = args - if len(args) > 1: - raise TypeError('update expected at most 1 arguments, got %d' % - len(args)) - if args: - other = args[0] - if isinstance(other, Mapping): - for key in other: - self[key] = other[key] - elif hasattr(other, "keys"): - for key in other.keys(): - self[key] = other[key] - else: - for key, value in other: - self[key] = value - for key, value in kwds.items(): - self[key] = value - - def setdefault(self, key, default=None): - 'D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D' - try: - return self[key] - except KeyError: - self[key] = default - return default - -MutableMapping.register(dict) - - -### SEQUENCES ### - - -class Sequence(Reversible, Collection): - - """All the operations on a read-only sequence. - - Concrete subclasses must override __new__ or __init__, - __getitem__, and __len__. - """ - - __slots__ = () - - @abstractmethod - def __getitem__(self, index): - raise IndexError - - def __iter__(self): - i = 0 - try: - while True: - v = self[i] - yield v - i += 1 - except IndexError: - return - - def __contains__(self, value): - for v in self: - if v is value or v == value: - return True - return False - - def __reversed__(self): - for i in reversed(range(len(self))): - yield self[i] - - def index(self, value, start=0, stop=None): - '''S.index(value, [start, [stop]]) -> integer -- return first index of value. - Raises ValueError if the value is not present. - ''' - if start is not None and start < 0: - start = max(len(self) + start, 0) - if stop is not None and stop < 0: - stop += len(self) - - i = start - while stop is None or i < stop: - try: - if self[i] == value: - return i - except IndexError: - break - i += 1 - raise ValueError - - def count(self, value): - 'S.count(value) -> integer -- return number of occurrences of value' - return sum(1 for v in self if v == value) - -Sequence.register(tuple) -Sequence.register(str) -Sequence.register(range) -Sequence.register(memoryview) - - -class ByteString(Sequence): - - """This unifies bytes and bytearray. - - XXX Should add all their methods. - """ - - __slots__ = () - -ByteString.register(bytes) -ByteString.register(bytearray) - - -class MutableSequence(Sequence): - - __slots__ = () - - """All the operations on a read-write sequence. - - Concrete subclasses must provide __new__ or __init__, - __getitem__, __setitem__, __delitem__, __len__, and insert(). 
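[Editorial aside, not part of the patch: a sketch of the Sequence mixin above; writing only __getitem__ and __len__ yields iteration, membership, reversal, index() and count() automatically. Ring is an illustrative name.

    from collections.abc import Sequence

    class Ring(Sequence):
        def __init__(self, items):
            self._items = tuple(items)
        def __getitem__(self, index):
            return self._items[index]
        def __len__(self):
            return len(self._items)

    r = Ring('abc')
    assert list(r) == ['a', 'b', 'c']            # __iter__ from mixin
    assert 'b' in r                              # __contains__ from mixin
    assert list(reversed(r)) == ['c', 'b', 'a']  # __reversed__ from mixin
    assert r.index('c') == 2 and r.count('a') == 1
]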
- - """ - - @abstractmethod - def __setitem__(self, index, value): - raise IndexError - - @abstractmethod - def __delitem__(self, index): - raise IndexError - - @abstractmethod - def insert(self, index, value): - 'S.insert(index, value) -- insert value before index' - raise IndexError - - def append(self, value): - 'S.append(value) -- append value to the end of the sequence' - self.insert(len(self), value) - - def clear(self): - 'S.clear() -> None -- remove all items from S' - try: - while True: - self.pop() - except IndexError: - pass - - def reverse(self): - 'S.reverse() -- reverse *IN PLACE*' - n = len(self) - for i in range(n//2): - self[i], self[n-i-1] = self[n-i-1], self[i] - - def extend(self, values): - 'S.extend(iterable) -- extend sequence by appending elements from the iterable' - for v in values: - self.append(v) - - def pop(self, index=-1): - '''S.pop([index]) -> item -- remove and return item at index (default last). - Raise IndexError if list is empty or index is out of range. - ''' - v = self[index] - del self[index] - return v - - def remove(self, value): - '''S.remove(value) -- remove first occurrence of value. - Raise ValueError if the value is not present. - ''' - del self[self.index(value)] - - def __iadd__(self, values): - self.extend(values) - return self - -MutableSequence.register(list) -MutableSequence.register(bytearray) # Multiply inheriting, see ByteString diff --git a/typing_extensions/test_data/python-3.6.0/abc.py b/typing_extensions/test_data/python-3.6.0/abc.py deleted file mode 100644 index 1cbf96a6..00000000 --- a/typing_extensions/test_data/python-3.6.0/abc.py +++ /dev/null @@ -1,248 +0,0 @@ -# Copyright 2007 Google, Inc. All Rights Reserved. -# Licensed to PSF under a Contributor Agreement. - -"""Abstract Base Classes (ABCs) according to PEP 3119.""" - -from _weakrefset import WeakSet - - -def abstractmethod(funcobj): - """A decorator indicating abstract methods. - - Requires that the metaclass is ABCMeta or derived from it. A - class that has a metaclass derived from ABCMeta cannot be - instantiated unless all of its abstract methods are overridden. - The abstract methods can be called using any of the normal - 'super' call mechanisms. - - Usage: - - class C(metaclass=ABCMeta): - @abstractmethod - def my_abstract_method(self, ...): - ... - """ - funcobj.__isabstractmethod__ = True - return funcobj - - -class abstractclassmethod(classmethod): - """ - A decorator indicating abstract classmethods. - - Similar to abstractmethod. - - Usage: - - class C(metaclass=ABCMeta): - @abstractclassmethod - def my_abstract_classmethod(cls, ...): - ... - - 'abstractclassmethod' is deprecated. Use 'classmethod' with - 'abstractmethod' instead. - """ - - __isabstractmethod__ = True - - def __init__(self, callable): - callable.__isabstractmethod__ = True - super().__init__(callable) - - -class abstractstaticmethod(staticmethod): - """ - A decorator indicating abstract staticmethods. - - Similar to abstractmethod. - - Usage: - - class C(metaclass=ABCMeta): - @abstractstaticmethod - def my_abstract_staticmethod(...): - ... - - 'abstractstaticmethod' is deprecated. Use 'staticmethod' with - 'abstractmethod' instead. - """ - - __isabstractmethod__ = True - - def __init__(self, callable): - callable.__isabstractmethod__ = True - super().__init__(callable) - - -class abstractproperty(property): - """ - A decorator indicating abstract properties. - - Requires that the metaclass is ABCMeta or derived from it. 
A - class that has a metaclass derived from ABCMeta cannot be - instantiated unless all of its abstract properties are overridden. - The abstract properties can be called using any of the normal - 'super' call mechanisms. - - Usage: - - class C(metaclass=ABCMeta): - @abstractproperty - def my_abstract_property(self): - ... - - This defines a read-only property; you can also define a read-write - abstract property using the 'long' form of property declaration: - - class C(metaclass=ABCMeta): - def getx(self): ... - def setx(self, value): ... - x = abstractproperty(getx, setx) - - 'abstractproperty' is deprecated. Use 'property' with 'abstractmethod' - instead. - """ - - __isabstractmethod__ = True - - -class ABCMeta(type): - - """Metaclass for defining Abstract Base Classes (ABCs). - - Use this metaclass to create an ABC. An ABC can be subclassed - directly, and then acts as a mix-in class. You can also register - unrelated concrete classes (even built-in classes) and unrelated - ABCs as 'virtual subclasses' -- these and their descendants will - be considered subclasses of the registering ABC by the built-in - issubclass() function, but the registering ABC won't show up in - their MRO (Method Resolution Order) nor will method - implementations defined by the registering ABC be callable (not - even via super()). - - """ - - # A global counter that is incremented each time a class is - # registered as a virtual subclass of anything. It forces the - # negative cache to be cleared before its next use. - # Note: this counter is private. Use `abc.get_cache_token()` for - # external code. - _abc_invalidation_counter = 0 - - def __new__(mcls, name, bases, namespace): - cls = super().__new__(mcls, name, bases, namespace) - # Compute set of abstract method names - abstracts = {name - for name, value in namespace.items() - if getattr(value, "__isabstractmethod__", False)} - for base in bases: - for name in getattr(base, "__abstractmethods__", set()): - value = getattr(cls, name, None) - if getattr(value, "__isabstractmethod__", False): - abstracts.add(name) - cls.__abstractmethods__ = frozenset(abstracts) - # Set up inheritance registry - cls._abc_registry = WeakSet() - cls._abc_cache = WeakSet() - cls._abc_negative_cache = WeakSet() - cls._abc_negative_cache_version = ABCMeta._abc_invalidation_counter - return cls - - def register(cls, subclass): - """Register a virtual subclass of an ABC. - - Returns the subclass, to allow usage as a class decorator. - """ - if not isinstance(subclass, type): - raise TypeError("Can only register classes") - if issubclass(subclass, cls): - return subclass # Already a subclass - # Subtle: test for cycles *after* testing for "already a subclass"; - # this means we allow X.register(X) and interpret it as a no-op. 
- if issubclass(cls, subclass): - # This would create a cycle, which is bad for the algorithm below - raise RuntimeError("Refusing to create an inheritance cycle") - cls._abc_registry.add(subclass) - ABCMeta._abc_invalidation_counter += 1 # Invalidate negative cache - return subclass - - def _dump_registry(cls, file=None): - """Debug helper to print the ABC registry.""" - print("Class: %s.%s" % (cls.__module__, cls.__qualname__), file=file) - print("Inv.counter: %s" % ABCMeta._abc_invalidation_counter, file=file) - for name in sorted(cls.__dict__.keys()): - if name.startswith("_abc_"): - value = getattr(cls, name) - print("%s: %r" % (name, value), file=file) - - def __instancecheck__(cls, instance): - """Override for isinstance(instance, cls).""" - # Inline the cache checking - subclass = instance.__class__ - if subclass in cls._abc_cache: - return True - subtype = type(instance) - if subtype is subclass: - if (cls._abc_negative_cache_version == - ABCMeta._abc_invalidation_counter and - subclass in cls._abc_negative_cache): - return False - # Fall back to the subclass check. - return cls.__subclasscheck__(subclass) - return any(cls.__subclasscheck__(c) for c in {subclass, subtype}) - - def __subclasscheck__(cls, subclass): - """Override for issubclass(subclass, cls).""" - # Check cache - if subclass in cls._abc_cache: - return True - # Check negative cache; may have to invalidate - if cls._abc_negative_cache_version < ABCMeta._abc_invalidation_counter: - # Invalidate the negative cache - cls._abc_negative_cache = WeakSet() - cls._abc_negative_cache_version = ABCMeta._abc_invalidation_counter - elif subclass in cls._abc_negative_cache: - return False - # Check the subclass hook - ok = cls.__subclasshook__(subclass) - if ok is not NotImplemented: - assert isinstance(ok, bool) - if ok: - cls._abc_cache.add(subclass) - else: - cls._abc_negative_cache.add(subclass) - return ok - # Check if it's a direct subclass - if cls in getattr(subclass, '__mro__', ()): - cls._abc_cache.add(subclass) - return True - # Check if it's a subclass of a registered class (recursive) - for rcls in cls._abc_registry: - if issubclass(subclass, rcls): - cls._abc_cache.add(subclass) - return True - # Check if it's a subclass of a subclass (recursive) - for scls in cls.__subclasses__(): - if issubclass(subclass, scls): - cls._abc_cache.add(subclass) - return True - # No dice; update negative cache - cls._abc_negative_cache.add(subclass) - return False - - -class ABC(metaclass=ABCMeta): - """Helper class that provides a standard way to create an ABC using - inheritance. - """ - pass - - -def get_cache_token(): - """Returns the current ABC cache token. - - The token is an opaque object (supporting equality testing) identifying the - current version of the ABC cache for virtual subclasses. The token changes - with every call to ``register()`` on any ABC. - """ - return ABCMeta._abc_invalidation_counter diff --git a/typing_extensions/test_data/python-3.6.0/collections/__init__.py b/typing_extensions/test_data/python-3.6.0/collections/__init__.py deleted file mode 100644 index bcc42919..00000000 --- a/typing_extensions/test_data/python-3.6.0/collections/__init__.py +++ /dev/null @@ -1,1243 +0,0 @@ -'''This module implements specialized container datatypes providing -alternatives to Python's general purpose built-in containers, dict, -list, set, and tuple. 
- -* namedtuple factory function for creating tuple subclasses with named fields -* deque list-like container with fast appends and pops on either end -* ChainMap dict-like class for creating a single view of multiple mappings -* Counter dict subclass for counting hashable objects -* OrderedDict dict subclass that remembers the order entries were added -* defaultdict dict subclass that calls a factory function to supply missing values -* UserDict wrapper around dictionary objects for easier dict subclassing -* UserList wrapper around list objects for easier list subclassing -* UserString wrapper around string objects for easier string subclassing - -''' - -__all__ = ['deque', 'defaultdict', 'namedtuple', 'UserDict', 'UserList', - 'UserString', 'Counter', 'OrderedDict', 'ChainMap'] - -# For backwards compatibility, continue to make the collections ABCs -# available through the collections module. -from _collections_abc import * -import _collections_abc -__all__ += _collections_abc.__all__ - -from operator import itemgetter as _itemgetter, eq as _eq -from keyword import iskeyword as _iskeyword -import sys as _sys -import heapq as _heapq -from _weakref import proxy as _proxy -from itertools import repeat as _repeat, chain as _chain, starmap as _starmap -from reprlib import recursive_repr as _recursive_repr - -try: - from _collections import deque -except ImportError: - pass -else: - MutableSequence.register(deque) - -try: - from _collections import defaultdict -except ImportError: - pass - - -################################################################################ -### OrderedDict -################################################################################ - -class _OrderedDictKeysView(KeysView): - - def __reversed__(self): - yield from reversed(self._mapping) - -class _OrderedDictItemsView(ItemsView): - - def __reversed__(self): - for key in reversed(self._mapping): - yield (key, self._mapping[key]) - -class _OrderedDictValuesView(ValuesView): - - def __reversed__(self): - for key in reversed(self._mapping): - yield self._mapping[key] - -class _Link(object): - __slots__ = 'prev', 'next', 'key', '__weakref__' - -class OrderedDict(dict): - 'Dictionary that remembers insertion order' - # An inherited dict maps keys to values. - # The inherited dict provides __getitem__, __len__, __contains__, and get. - # The remaining methods are order-aware. - # Big-O running times for all methods are the same as regular dictionaries. - - # The internal self.__map dict maps keys to links in a doubly linked list. - # The circular doubly linked list starts and ends with a sentinel element. - # The sentinel element never gets deleted (this simplifies the algorithm). - # The sentinel is in self.__hardroot with a weakref proxy in self.__root. - # The prev links are weakref proxies (to prevent circular references). - # Individual links are kept alive by the hard reference in self.__map. - # Those hard references disappear when a key is deleted from an OrderedDict. - - def __init__(*args, **kwds): - '''Initialize an ordered dictionary. The signature is the same as - regular dictionaries, but keyword arguments are not recommended because - their insertion order is arbitrary. 
- - ''' - if not args: - raise TypeError("descriptor '__init__' of 'OrderedDict' object " - "needs an argument") - self, *args = args - if len(args) > 1: - raise TypeError('expected at most 1 arguments, got %d' % len(args)) - try: - self.__root - except AttributeError: - self.__hardroot = _Link() - self.__root = root = _proxy(self.__hardroot) - root.prev = root.next = root - self.__map = {} - self.__update(*args, **kwds) - - def __setitem__(self, key, value, - dict_setitem=dict.__setitem__, proxy=_proxy, Link=_Link): - 'od.__setitem__(i, y) <==> od[i]=y' - # Setting a new item creates a new link at the end of the linked list, - # and the inherited dictionary is updated with the new key/value pair. - if key not in self: - self.__map[key] = link = Link() - root = self.__root - last = root.prev - link.prev, link.next, link.key = last, root, key - last.next = link - root.prev = proxy(link) - dict_setitem(self, key, value) - - def __delitem__(self, key, dict_delitem=dict.__delitem__): - 'od.__delitem__(y) <==> del od[y]' - # Deleting an existing item uses self.__map to find the link which gets - # removed by updating the links in the predecessor and successor nodes. - dict_delitem(self, key) - link = self.__map.pop(key) - link_prev = link.prev - link_next = link.next - link_prev.next = link_next - link_next.prev = link_prev - link.prev = None - link.next = None - - def __iter__(self): - 'od.__iter__() <==> iter(od)' - # Traverse the linked list in order. - root = self.__root - curr = root.next - while curr is not root: - yield curr.key - curr = curr.next - - def __reversed__(self): - 'od.__reversed__() <==> reversed(od)' - # Traverse the linked list in reverse order. - root = self.__root - curr = root.prev - while curr is not root: - yield curr.key - curr = curr.prev - - def clear(self): - 'od.clear() -> None. Remove all items from od.' - root = self.__root - root.prev = root.next = root - self.__map.clear() - dict.clear(self) - - def popitem(self, last=True): - '''od.popitem() -> (k, v), return and remove a (key, value) pair. - Pairs are returned in LIFO order if last is true or FIFO order if false. - - ''' - if not self: - raise KeyError('dictionary is empty') - root = self.__root - if last: - link = root.prev - link_prev = link.prev - link_prev.next = root - root.prev = link_prev - else: - link = root.next - link_next = link.next - root.next = link_next - link_next.prev = root - key = link.key - del self.__map[key] - value = dict.pop(self, key) - return key, value - - def move_to_end(self, key, last=True): - '''Move an existing element to the end (or beginning if last==False). - - Raises KeyError if the element does not exist. - When last=True, acts like a fast version of self[key]=self.pop(key). 
- - ''' - link = self.__map[key] - link_prev = link.prev - link_next = link.next - link_prev.next = link_next - link_next.prev = link_prev - root = self.__root - if last: - last = root.prev - link.prev = last - link.next = root - last.next = root.prev = link - else: - first = root.next - link.prev = root - link.next = first - root.next = first.prev = link - - def __sizeof__(self): - sizeof = _sys.getsizeof - n = len(self) + 1 # number of links including root - size = sizeof(self.__dict__) # instance dictionary - size += sizeof(self.__map) * 2 # internal dict and inherited dict - size += sizeof(self.__hardroot) * n # link objects - size += sizeof(self.__root) * n # proxy objects - return size - - update = __update = MutableMapping.update - - def keys(self): - "D.keys() -> a set-like object providing a view on D's keys" - return _OrderedDictKeysView(self) - - def items(self): - "D.items() -> a set-like object providing a view on D's items" - return _OrderedDictItemsView(self) - - def values(self): - "D.values() -> an object providing a view on D's values" - return _OrderedDictValuesView(self) - - __ne__ = MutableMapping.__ne__ - - __marker = object() - - def pop(self, key, default=__marker): - '''od.pop(k[,d]) -> v, remove specified key and return the corresponding - value. If key is not found, d is returned if given, otherwise KeyError - is raised. - - ''' - if key in self: - result = self[key] - del self[key] - return result - if default is self.__marker: - raise KeyError(key) - return default - - def setdefault(self, key, default=None): - 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' - if key in self: - return self[key] - self[key] = default - return default - - @_recursive_repr() - def __repr__(self): - 'od.__repr__() <==> repr(od)' - if not self: - return '%s()' % (self.__class__.__name__,) - return '%s(%r)' % (self.__class__.__name__, list(self.items())) - - def __reduce__(self): - 'Return state information for pickling' - inst_dict = vars(self).copy() - for k in vars(OrderedDict()): - inst_dict.pop(k, None) - return self.__class__, (), inst_dict or None, None, iter(self.items()) - - def copy(self): - 'od.copy() -> a shallow copy of od' - return self.__class__(self) - - @classmethod - def fromkeys(cls, iterable, value=None): - '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S. - If not specified, the value defaults to None. - - ''' - self = cls() - for key in iterable: - self[key] = value - return self - - def __eq__(self, other): - '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive - while comparison to a regular mapping is order-insensitive. - - ''' - if isinstance(other, OrderedDict): - return dict.__eq__(self, other) and all(map(_eq, self, other)) - return dict.__eq__(self, other) - - -try: - from _collections import OrderedDict -except ImportError: - # Leave the pure Python version in place. 
- pass - - -################################################################################ -### namedtuple -################################################################################ - -_class_template = """\ -from builtins import property as _property, tuple as _tuple -from operator import itemgetter as _itemgetter -from collections import OrderedDict - -class {typename}(tuple): - '{typename}({arg_list})' - - __slots__ = () - - _fields = {field_names!r} - - def __new__(_cls, {arg_list}): - 'Create new instance of {typename}({arg_list})' - return _tuple.__new__(_cls, ({arg_list})) - - @classmethod - def _make(cls, iterable, new=tuple.__new__, len=len): - 'Make a new {typename} object from a sequence or iterable' - result = new(cls, iterable) - if len(result) != {num_fields:d}: - raise TypeError('Expected {num_fields:d} arguments, got %d' % len(result)) - return result - - def _replace(_self, **kwds): - 'Return a new {typename} object replacing specified fields with new values' - result = _self._make(map(kwds.pop, {field_names!r}, _self)) - if kwds: - raise ValueError('Got unexpected field names: %r' % list(kwds)) - return result - - def __repr__(self): - 'Return a nicely formatted representation string' - return self.__class__.__name__ + '({repr_fmt})' % self - - def _asdict(self): - 'Return a new OrderedDict which maps field names to their values.' - return OrderedDict(zip(self._fields, self)) - - def __getnewargs__(self): - 'Return self as a plain tuple. Used by copy and pickle.' - return tuple(self) - -{field_defs} -""" - -_repr_template = '{name}=%r' - -_field_template = '''\ - {name} = _property(_itemgetter({index:d}), doc='Alias for field number {index:d}') -''' - -def namedtuple(typename, field_names, *, verbose=False, rename=False, module=None): - """Returns a new subclass of tuple with named fields. - - >>> Point = namedtuple('Point', ['x', 'y']) - >>> Point.__doc__ # docstring for the new class - 'Point(x, y)' - >>> p = Point(11, y=22) # instantiate with positional args or keywords - >>> p[0] + p[1] # indexable like a plain tuple - 33 - >>> x, y = p # unpack like a regular tuple - >>> x, y - (11, 22) - >>> p.x + p.y # fields also accessible by name - 33 - >>> d = p._asdict() # convert to a dictionary - >>> d['x'] - 11 - >>> Point(**d) # convert from a dictionary - Point(x=11, y=22) - >>> p._replace(x=100) # _replace() is like str.replace() but targets named fields - Point(x=100, y=22) - - """ - - # Validate the field names. At the user's option, either generate an error - # message or automatically replace the field name with a valid name. 
- if isinstance(field_names, str): - field_names = field_names.replace(',', ' ').split() - field_names = list(map(str, field_names)) - typename = str(typename) - if rename: - seen = set() - for index, name in enumerate(field_names): - if (not name.isidentifier() - or _iskeyword(name) - or name.startswith('_') - or name in seen): - field_names[index] = '_%d' % index - seen.add(name) - for name in [typename] + field_names: - if type(name) is not str: - raise TypeError('Type names and field names must be strings') - if not name.isidentifier(): - raise ValueError('Type names and field names must be valid ' - 'identifiers: %r' % name) - if _iskeyword(name): - raise ValueError('Type names and field names cannot be a ' - 'keyword: %r' % name) - seen = set() - for name in field_names: - if name.startswith('_') and not rename: - raise ValueError('Field names cannot start with an underscore: ' - '%r' % name) - if name in seen: - raise ValueError('Encountered duplicate field name: %r' % name) - seen.add(name) - - # Fill-in the class template - class_definition = _class_template.format( - typename = typename, - field_names = tuple(field_names), - num_fields = len(field_names), - arg_list = repr(tuple(field_names)).replace("'", "")[1:-1], - repr_fmt = ', '.join(_repr_template.format(name=name) - for name in field_names), - field_defs = '\n'.join(_field_template.format(index=index, name=name) - for index, name in enumerate(field_names)) - ) - - # Execute the template string in a temporary namespace and support - # tracing utilities by setting a value for frame.f_globals['__name__'] - namespace = dict(__name__='namedtuple_%s' % typename) - exec(class_definition, namespace) - result = namespace[typename] - result._source = class_definition - if verbose: - print(result._source) - - # For pickling to work, the __module__ variable needs to be set to the frame - # where the named tuple is created. Bypass this step in environments where - # sys._getframe is not defined (Jython for example) or sys._getframe is not - # defined for arguments greater than 0 (IronPython), or where the user has - # specified a particular module. - if module is None: - try: - module = _sys._getframe(1).f_globals.get('__name__', '__main__') - except (AttributeError, ValueError): - pass - if module is not None: - result.__module__ = module - - return result - - -######################################################################## -### Counter -######################################################################## - -def _count_elements(mapping, iterable): - 'Tally elements from the iterable.' - mapping_get = mapping.get - for elem in iterable: - mapping[elem] = mapping_get(elem, 0) + 1 - -try: # Load C helper function if available - from _collections import _count_elements -except ImportError: - pass - -class Counter(dict): - '''Dict subclass for counting hashable items. Sometimes called a bag - or multiset. Elements are stored as dictionary keys and their counts - are stored as dictionary values. - - >>> c = Counter('abcdeabcdabcaba') # count elements from a string - - >>> c.most_common(3) # three most common elements - [('a', 5), ('b', 4), ('c', 3)] - >>> sorted(c) # list all unique elements - ['a', 'b', 'c', 'd', 'e'] - >>> ''.join(sorted(c.elements())) # list elements with repetitions - 'aaaaabbbbcccdde' - >>> sum(c.values()) # total of all counts - 15 - - >>> c['a'] # count of letter 'a' - 5 - >>> for elem in 'shazam': # update counts from an iterable - ... 
c[elem] += 1 # by adding 1 to each element's count - >>> c['a'] # now there are seven 'a' - 7 - >>> del c['b'] # remove all 'b' - >>> c['b'] # now there are zero 'b' - 0 - - >>> d = Counter('simsalabim') # make another counter - >>> c.update(d) # add in the second counter - >>> c['a'] # now there are nine 'a' - 9 - - >>> c.clear() # empty the counter - >>> c - Counter() - - Note: If a count is set to zero or reduced to zero, it will remain - in the counter until the entry is deleted or the counter is cleared: - - >>> c = Counter('aaabbc') - >>> c['b'] -= 2 # reduce the count of 'b' by two - >>> c.most_common() # 'b' is still in, but its count is zero - [('a', 3), ('c', 1), ('b', 0)] - - ''' - # References: - # http://en.wikipedia.org/wiki/Multiset - # http://www.gnu.org/software/smalltalk/manual-base/html_node/Bag.html - # http://www.demo2s.com/Tutorial/Cpp/0380__set-multiset/Catalog0380__set-multiset.htm - # http://code.activestate.com/recipes/259174/ - # Knuth, TAOCP Vol. II section 4.6.3 - - def __init__(*args, **kwds): - '''Create a new, empty Counter object. And if given, count elements - from an input iterable. Or, initialize the count from another mapping - of elements to their counts. - - >>> c = Counter() # a new, empty counter - >>> c = Counter('gallahad') # a new counter from an iterable - >>> c = Counter({'a': 4, 'b': 2}) # a new counter from a mapping - >>> c = Counter(a=4, b=2) # a new counter from keyword args - - ''' - if not args: - raise TypeError("descriptor '__init__' of 'Counter' object " - "needs an argument") - self, *args = args - if len(args) > 1: - raise TypeError('expected at most 1 arguments, got %d' % len(args)) - super(Counter, self).__init__() - self.update(*args, **kwds) - - def __missing__(self, key): - 'The count of elements not in the Counter is zero.' - # Needed so that self[missing_item] does not raise KeyError - return 0 - - def most_common(self, n=None): - '''List the n most common elements and their counts from the most - common to the least. If n is None, then list all element counts. - - >>> Counter('abcdeabcdabcaba').most_common(3) - [('a', 5), ('b', 4), ('c', 3)] - - ''' - # Emulate Bag.sortedByCount from Smalltalk - if n is None: - return sorted(self.items(), key=_itemgetter(1), reverse=True) - return _heapq.nlargest(n, self.items(), key=_itemgetter(1)) - - def elements(self): - '''Iterator over elements repeating each as many times as its count. - - >>> c = Counter('ABCABC') - >>> sorted(c.elements()) - ['A', 'A', 'B', 'B', 'C', 'C'] - - # Knuth's example for prime factors of 1836: 2**2 * 3**3 * 17**1 - >>> prime_factors = Counter({2: 2, 3: 3, 17: 1}) - >>> product = 1 - >>> for factor in prime_factors.elements(): # loop over factors - ... product *= factor # and multiply them - >>> product - 1836 - - Note, if an element's count has been set to zero or is a negative - number, elements() will ignore it. - - ''' - # Emulate Bag.do from Smalltalk and Multiset.begin from C++. - return _chain.from_iterable(_starmap(_repeat, self.items())) - - # Override dict methods where necessary - - @classmethod - def fromkeys(cls, iterable, v=None): - # There is no equivalent method for counters because setting v=1 - # means that no element can have a count greater than one. - raise NotImplementedError( - 'Counter.fromkeys() is undefined. Use Counter(iterable) instead.') - - def update(*args, **kwds): - '''Like dict.update() but add counts instead of replacing them. - - Source can be an iterable, a dictionary, or another Counter instance. 
- - >>> c = Counter('which') - >>> c.update('witch') # add elements from another iterable - >>> d = Counter('watch') - >>> c.update(d) # add elements from another counter - >>> c['h'] # four 'h' in which, witch, and watch - 4 - - ''' - # The regular dict.update() operation makes no sense here because the - # replace behavior results in the some of original untouched counts - # being mixed-in with all of the other counts for a mismash that - # doesn't have a straight-forward interpretation in most counting - # contexts. Instead, we implement straight-addition. Both the inputs - # and outputs are allowed to contain zero and negative counts. - - if not args: - raise TypeError("descriptor 'update' of 'Counter' object " - "needs an argument") - self, *args = args - if len(args) > 1: - raise TypeError('expected at most 1 arguments, got %d' % len(args)) - iterable = args[0] if args else None - if iterable is not None: - if isinstance(iterable, Mapping): - if self: - self_get = self.get - for elem, count in iterable.items(): - self[elem] = count + self_get(elem, 0) - else: - super(Counter, self).update(iterable) # fast path when counter is empty - else: - _count_elements(self, iterable) - if kwds: - self.update(kwds) - - def subtract(*args, **kwds): - '''Like dict.update() but subtracts counts instead of replacing them. - Counts can be reduced below zero. Both the inputs and outputs are - allowed to contain zero and negative counts. - - Source can be an iterable, a dictionary, or another Counter instance. - - >>> c = Counter('which') - >>> c.subtract('witch') # subtract elements from another iterable - >>> c.subtract(Counter('watch')) # subtract elements from another counter - >>> c['h'] # 2 in which, minus 1 in witch, minus 1 in watch - 0 - >>> c['w'] # 1 in which, minus 1 in witch, minus 1 in watch - -1 - - ''' - if not args: - raise TypeError("descriptor 'subtract' of 'Counter' object " - "needs an argument") - self, *args = args - if len(args) > 1: - raise TypeError('expected at most 1 arguments, got %d' % len(args)) - iterable = args[0] if args else None - if iterable is not None: - self_get = self.get - if isinstance(iterable, Mapping): - for elem, count in iterable.items(): - self[elem] = self_get(elem, 0) - count - else: - for elem in iterable: - self[elem] = self_get(elem, 0) - 1 - if kwds: - self.subtract(kwds) - - def copy(self): - 'Return a shallow copy.' - return self.__class__(self) - - def __reduce__(self): - return self.__class__, (dict(self),) - - def __delitem__(self, elem): - 'Like dict.__delitem__() but does not raise KeyError for missing values.' - if elem in self: - super().__delitem__(elem) - - def __repr__(self): - if not self: - return '%s()' % self.__class__.__name__ - try: - items = ', '.join(map('%r: %r'.__mod__, self.most_common())) - return '%s({%s})' % (self.__class__.__name__, items) - except TypeError: - # handle case where values are not orderable - return '{0}({1!r})'.format(self.__class__.__name__, dict(self)) - - # Multiset-style mathematical operations discussed in: - # Knuth TAOCP Volume II section 4.6.3 exercise 19 - # and at http://en.wikipedia.org/wiki/Multiset - # - # Outputs guaranteed to only include positive counts. - # - # To strip negative and zero counts, add-in an empty counter: - # c += Counter() - - def __add__(self, other): - '''Add counts from two counters. 
- - >>> Counter('abbb') + Counter('bcc') - Counter({'b': 4, 'c': 2, 'a': 1}) - - ''' - if not isinstance(other, Counter): - return NotImplemented - result = Counter() - for elem, count in self.items(): - newcount = count + other[elem] - if newcount > 0: - result[elem] = newcount - for elem, count in other.items(): - if elem not in self and count > 0: - result[elem] = count - return result - - def __sub__(self, other): - ''' Subtract count, but keep only results with positive counts. - - >>> Counter('abbbc') - Counter('bccd') - Counter({'b': 2, 'a': 1}) - - ''' - if not isinstance(other, Counter): - return NotImplemented - result = Counter() - for elem, count in self.items(): - newcount = count - other[elem] - if newcount > 0: - result[elem] = newcount - for elem, count in other.items(): - if elem not in self and count < 0: - result[elem] = 0 - count - return result - - def __or__(self, other): - '''Union is the maximum of value in either of the input counters. - - >>> Counter('abbb') | Counter('bcc') - Counter({'b': 3, 'c': 2, 'a': 1}) - - ''' - if not isinstance(other, Counter): - return NotImplemented - result = Counter() - for elem, count in self.items(): - other_count = other[elem] - newcount = other_count if count < other_count else count - if newcount > 0: - result[elem] = newcount - for elem, count in other.items(): - if elem not in self and count > 0: - result[elem] = count - return result - - def __and__(self, other): - ''' Intersection is the minimum of corresponding counts. - - >>> Counter('abbb') & Counter('bcc') - Counter({'b': 1}) - - ''' - if not isinstance(other, Counter): - return NotImplemented - result = Counter() - for elem, count in self.items(): - other_count = other[elem] - newcount = count if count < other_count else other_count - if newcount > 0: - result[elem] = newcount - return result - - def __pos__(self): - 'Adds an empty counter, effectively stripping negative and zero counts' - result = Counter() - for elem, count in self.items(): - if count > 0: - result[elem] = count - return result - - def __neg__(self): - '''Subtracts from an empty counter. Strips positive and zero counts, - and flips the sign on negative counts. - - ''' - result = Counter() - for elem, count in self.items(): - if count < 0: - result[elem] = 0 - count - return result - - def _keep_positive(self): - '''Internal method to strip elements with a negative or zero count''' - nonpositive = [elem for elem, count in self.items() if not count > 0] - for elem in nonpositive: - del self[elem] - return self - - def __iadd__(self, other): - '''Inplace add from another counter, keeping only positive counts. - - >>> c = Counter('abbb') - >>> c += Counter('bcc') - >>> c - Counter({'b': 4, 'c': 2, 'a': 1}) - - ''' - for elem, count in other.items(): - self[elem] += count - return self._keep_positive() - - def __isub__(self, other): - '''Inplace subtract counter, but keep only results with positive counts. - - >>> c = Counter('abbbc') - >>> c -= Counter('bccd') - >>> c - Counter({'b': 2, 'a': 1}) - - ''' - for elem, count in other.items(): - self[elem] -= count - return self._keep_positive() - - def __ior__(self, other): - '''Inplace union is the maximum of value from either counter. 
- - >>> c = Counter('abbb') - >>> c |= Counter('bcc') - >>> c - Counter({'b': 3, 'c': 2, 'a': 1}) - - ''' - for elem, other_count in other.items(): - count = self[elem] - if other_count > count: - self[elem] = other_count - return self._keep_positive() - - def __iand__(self, other): - '''Inplace intersection is the minimum of corresponding counts. - - >>> c = Counter('abbb') - >>> c &= Counter('bcc') - >>> c - Counter({'b': 1}) - - ''' - for elem, count in self.items(): - other_count = other[elem] - if other_count < count: - self[elem] = other_count - return self._keep_positive() - - -######################################################################## -### ChainMap -######################################################################## - -class ChainMap(MutableMapping): - ''' A ChainMap groups multiple dicts (or other mappings) together - to create a single, updateable view. - - The underlying mappings are stored in a list. That list is public and can - be accessed or updated using the *maps* attribute. There is no other - state. - - Lookups search the underlying mappings successively until a key is found. - In contrast, writes, updates, and deletions only operate on the first - mapping. - - ''' - - def __init__(self, *maps): - '''Initialize a ChainMap by setting *maps* to the given mappings. - If no mappings are provided, a single empty dictionary is used. - - ''' - self.maps = list(maps) or [{}] # always at least one map - - def __missing__(self, key): - raise KeyError(key) - - def __getitem__(self, key): - for mapping in self.maps: - try: - return mapping[key] # can't use 'key in mapping' with defaultdict - except KeyError: - pass - return self.__missing__(key) # support subclasses that define __missing__ - - def get(self, key, default=None): - return self[key] if key in self else default - - def __len__(self): - return len(set().union(*self.maps)) # reuses stored hash values if possible - - def __iter__(self): - return iter(set().union(*self.maps)) - - def __contains__(self, key): - return any(key in m for m in self.maps) - - def __bool__(self): - return any(self.maps) - - @_recursive_repr() - def __repr__(self): - return '{0.__class__.__name__}({1})'.format( - self, ', '.join(map(repr, self.maps))) - - @classmethod - def fromkeys(cls, iterable, *args): - 'Create a ChainMap with a single dict created from the iterable.' - return cls(dict.fromkeys(iterable, *args)) - - def copy(self): - 'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]' - return self.__class__(self.maps[0].copy(), *self.maps[1:]) - - __copy__ = copy - - def new_child(self, m=None): # like Django's Context.push() - '''New ChainMap with a new map followed by all previous maps. - If no map is provided, an empty dict is used. - ''' - if m is None: - m = {} - return self.__class__(m, *self.maps) - - @property - def parents(self): # like Django's Context.pop() - 'New ChainMap from maps[1:].' - return self.__class__(*self.maps[1:]) - - def __setitem__(self, key, value): - self.maps[0][key] = value - - def __delitem__(self, key): - try: - del self.maps[0][key] - except KeyError: - raise KeyError('Key not found in the first mapping: {!r}'.format(key)) - - def popitem(self): - 'Remove and return an item pair from maps[0]. Raise KeyError is maps[0] is empty.' - try: - return self.maps[0].popitem() - except KeyError: - raise KeyError('No keys found in the first mapping.') - - def pop(self, key, *args): - 'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].' 
- try: - return self.maps[0].pop(key, *args) - except KeyError: - raise KeyError('Key not found in the first mapping: {!r}'.format(key)) - - def clear(self): - 'Clear maps[0], leaving maps[1:] intact.' - self.maps[0].clear() - - -################################################################################ -### UserDict -################################################################################ - -class UserDict(MutableMapping): - - # Start by filling-out the abstract methods - def __init__(*args, **kwargs): - if not args: - raise TypeError("descriptor '__init__' of 'UserDict' object " - "needs an argument") - self, *args = args - if len(args) > 1: - raise TypeError('expected at most 1 arguments, got %d' % len(args)) - if args: - dict = args[0] - elif 'dict' in kwargs: - dict = kwargs.pop('dict') - import warnings - warnings.warn("Passing 'dict' as keyword argument is deprecated", - DeprecationWarning, stacklevel=2) - else: - dict = None - self.data = {} - if dict is not None: - self.update(dict) - if len(kwargs): - self.update(kwargs) - def __len__(self): return len(self.data) - def __getitem__(self, key): - if key in self.data: - return self.data[key] - if hasattr(self.__class__, "__missing__"): - return self.__class__.__missing__(self, key) - raise KeyError(key) - def __setitem__(self, key, item): self.data[key] = item - def __delitem__(self, key): del self.data[key] - def __iter__(self): - return iter(self.data) - - # Modify __contains__ to work correctly when __missing__ is present - def __contains__(self, key): - return key in self.data - - # Now, add the methods in dicts but not in MutableMapping - def __repr__(self): return repr(self.data) - def copy(self): - if self.__class__ is UserDict: - return UserDict(self.data.copy()) - import copy - data = self.data - try: - self.data = {} - c = copy.copy(self) - finally: - self.data = data - c.update(self) - return c - @classmethod - def fromkeys(cls, iterable, value=None): - d = cls() - for key in iterable: - d[key] = value - return d - - - -################################################################################ -### UserList -################################################################################ - -class UserList(MutableSequence): - """A more or less complete user-defined wrapper around list objects.""" - def __init__(self, initlist=None): - self.data = [] - if initlist is not None: - # XXX should this accept an arbitrary sequence? 
- if type(initlist) == type(self.data): - self.data[:] = initlist - elif isinstance(initlist, UserList): - self.data[:] = initlist.data[:] - else: - self.data = list(initlist) - def __repr__(self): return repr(self.data) - def __lt__(self, other): return self.data < self.__cast(other) - def __le__(self, other): return self.data <= self.__cast(other) - def __eq__(self, other): return self.data == self.__cast(other) - def __gt__(self, other): return self.data > self.__cast(other) - def __ge__(self, other): return self.data >= self.__cast(other) - def __cast(self, other): - return other.data if isinstance(other, UserList) else other - def __contains__(self, item): return item in self.data - def __len__(self): return len(self.data) - def __getitem__(self, i): return self.data[i] - def __setitem__(self, i, item): self.data[i] = item - def __delitem__(self, i): del self.data[i] - def __add__(self, other): - if isinstance(other, UserList): - return self.__class__(self.data + other.data) - elif isinstance(other, type(self.data)): - return self.__class__(self.data + other) - return self.__class__(self.data + list(other)) - def __radd__(self, other): - if isinstance(other, UserList): - return self.__class__(other.data + self.data) - elif isinstance(other, type(self.data)): - return self.__class__(other + self.data) - return self.__class__(list(other) + self.data) - def __iadd__(self, other): - if isinstance(other, UserList): - self.data += other.data - elif isinstance(other, type(self.data)): - self.data += other - else: - self.data += list(other) - return self - def __mul__(self, n): - return self.__class__(self.data*n) - __rmul__ = __mul__ - def __imul__(self, n): - self.data *= n - return self - def append(self, item): self.data.append(item) - def insert(self, i, item): self.data.insert(i, item) - def pop(self, i=-1): return self.data.pop(i) - def remove(self, item): self.data.remove(item) - def clear(self): self.data.clear() - def copy(self): return self.__class__(self) - def count(self, item): return self.data.count(item) - def index(self, item, *args): return self.data.index(item, *args) - def reverse(self): self.data.reverse() - def sort(self, *args, **kwds): self.data.sort(*args, **kwds) - def extend(self, other): - if isinstance(other, UserList): - self.data.extend(other.data) - else: - self.data.extend(other) - - - -################################################################################ -### UserString -################################################################################ - -class UserString(Sequence): - def __init__(self, seq): - if isinstance(seq, str): - self.data = seq - elif isinstance(seq, UserString): - self.data = seq.data[:] - else: - self.data = str(seq) - def __str__(self): return str(self.data) - def __repr__(self): return repr(self.data) - def __int__(self): return int(self.data) - def __float__(self): return float(self.data) - def __complex__(self): return complex(self.data) - def __hash__(self): return hash(self.data) - def __getnewargs__(self): - return (self.data[:],) - - def __eq__(self, string): - if isinstance(string, UserString): - return self.data == string.data - return self.data == string - def __lt__(self, string): - if isinstance(string, UserString): - return self.data < string.data - return self.data < string - def __le__(self, string): - if isinstance(string, UserString): - return self.data <= string.data - return self.data <= string - def __gt__(self, string): - if isinstance(string, UserString): - return self.data > string.data - return 
self.data > string - def __ge__(self, string): - if isinstance(string, UserString): - return self.data >= string.data - return self.data >= string - - def __contains__(self, char): - if isinstance(char, UserString): - char = char.data - return char in self.data - - def __len__(self): return len(self.data) - def __getitem__(self, index): return self.__class__(self.data[index]) - def __add__(self, other): - if isinstance(other, UserString): - return self.__class__(self.data + other.data) - elif isinstance(other, str): - return self.__class__(self.data + other) - return self.__class__(self.data + str(other)) - def __radd__(self, other): - if isinstance(other, str): - return self.__class__(other + self.data) - return self.__class__(str(other) + self.data) - def __mul__(self, n): - return self.__class__(self.data*n) - __rmul__ = __mul__ - def __mod__(self, args): - return self.__class__(self.data % args) - def __rmod__(self, format): - return self.__class__(format % args) - - # the following methods are defined in alphabetical order: - def capitalize(self): return self.__class__(self.data.capitalize()) - def casefold(self): - return self.__class__(self.data.casefold()) - def center(self, width, *args): - return self.__class__(self.data.center(width, *args)) - def count(self, sub, start=0, end=_sys.maxsize): - if isinstance(sub, UserString): - sub = sub.data - return self.data.count(sub, start, end) - def encode(self, encoding=None, errors=None): # XXX improve this? - if encoding: - if errors: - return self.__class__(self.data.encode(encoding, errors)) - return self.__class__(self.data.encode(encoding)) - return self.__class__(self.data.encode()) - def endswith(self, suffix, start=0, end=_sys.maxsize): - return self.data.endswith(suffix, start, end) - def expandtabs(self, tabsize=8): - return self.__class__(self.data.expandtabs(tabsize)) - def find(self, sub, start=0, end=_sys.maxsize): - if isinstance(sub, UserString): - sub = sub.data - return self.data.find(sub, start, end) - def format(self, *args, **kwds): - return self.data.format(*args, **kwds) - def format_map(self, mapping): - return self.data.format_map(mapping) - def index(self, sub, start=0, end=_sys.maxsize): - return self.data.index(sub, start, end) - def isalpha(self): return self.data.isalpha() - def isalnum(self): return self.data.isalnum() - def isdecimal(self): return self.data.isdecimal() - def isdigit(self): return self.data.isdigit() - def isidentifier(self): return self.data.isidentifier() - def islower(self): return self.data.islower() - def isnumeric(self): return self.data.isnumeric() - def isprintable(self): return self.data.isprintable() - def isspace(self): return self.data.isspace() - def istitle(self): return self.data.istitle() - def isupper(self): return self.data.isupper() - def join(self, seq): return self.data.join(seq) - def ljust(self, width, *args): - return self.__class__(self.data.ljust(width, *args)) - def lower(self): return self.__class__(self.data.lower()) - def lstrip(self, chars=None): return self.__class__(self.data.lstrip(chars)) - maketrans = str.maketrans - def partition(self, sep): - return self.data.partition(sep) - def replace(self, old, new, maxsplit=-1): - if isinstance(old, UserString): - old = old.data - if isinstance(new, UserString): - new = new.data - return self.__class__(self.data.replace(old, new, maxsplit)) - def rfind(self, sub, start=0, end=_sys.maxsize): - if isinstance(sub, UserString): - sub = sub.data - return self.data.rfind(sub, start, end) - def rindex(self, sub, start=0, 
end=_sys.maxsize): - return self.data.rindex(sub, start, end) - def rjust(self, width, *args): - return self.__class__(self.data.rjust(width, *args)) - def rpartition(self, sep): - return self.data.rpartition(sep) - def rstrip(self, chars=None): - return self.__class__(self.data.rstrip(chars)) - def split(self, sep=None, maxsplit=-1): - return self.data.split(sep, maxsplit) - def rsplit(self, sep=None, maxsplit=-1): - return self.data.rsplit(sep, maxsplit) - def splitlines(self, keepends=False): return self.data.splitlines(keepends) - def startswith(self, prefix, start=0, end=_sys.maxsize): - return self.data.startswith(prefix, start, end) - def strip(self, chars=None): return self.__class__(self.data.strip(chars)) - def swapcase(self): return self.__class__(self.data.swapcase()) - def title(self): return self.__class__(self.data.title()) - def translate(self, *args): - return self.__class__(self.data.translate(*args)) - def upper(self): return self.__class__(self.data.upper()) - def zfill(self, width): return self.__class__(self.data.zfill(width)) diff --git a/typing_extensions/test_data/python-3.6.0/collections/abc.py b/typing_extensions/test_data/python-3.6.0/collections/abc.py deleted file mode 100644 index 891600d1..00000000 --- a/typing_extensions/test_data/python-3.6.0/collections/abc.py +++ /dev/null @@ -1,2 +0,0 @@ -from _collections_abc import * -from _collections_abc import __all__ diff --git a/typing_extensions/test_data/python-3.6.0/typing.py b/typing_extensions/test_data/python-3.6.0/typing.py deleted file mode 100644 index 34845b74..00000000 --- a/typing_extensions/test_data/python-3.6.0/typing.py +++ /dev/null @@ -1,2160 +0,0 @@ -import abc -from abc import abstractmethod, abstractproperty -import collections -import contextlib -import functools -import re as stdlib_re # Avoid confusion with the re we export. -import sys -import types -try: - import collections.abc as collections_abc -except ImportError: - import collections as collections_abc # Fallback for PY3.2. - - -# Please keep __all__ alphabetized within each category. -__all__ = [ - # Super-special typing primitives. - 'Any', - 'Callable', - 'ClassVar', - 'Generic', - 'Optional', - 'Tuple', - 'Type', - 'TypeVar', - 'Union', - - # ABCs (from collections.abc). - 'AbstractSet', # collections.abc.Set. - 'ByteString', - 'Container', - 'Hashable', - 'ItemsView', - 'Iterable', - 'Iterator', - 'KeysView', - 'Mapping', - 'MappingView', - 'MutableMapping', - 'MutableSequence', - 'MutableSet', - 'Sequence', - 'Sized', - 'ValuesView', - # The following are added depending on presence - # of their non-generic counterparts in stdlib: - # Awaitable, - # AsyncIterator, - # AsyncIterable, - # Coroutine, - # Collection, - # ContextManager - - # Structural checks, a.k.a. protocols. - 'Reversible', - 'SupportsAbs', - 'SupportsFloat', - 'SupportsInt', - 'SupportsRound', - - # Concrete collection types. - 'Dict', - 'DefaultDict', - 'List', - 'Set', - 'FrozenSet', - 'NamedTuple', # Not really a type. - 'Generator', - - # One-off things. - 'AnyStr', - 'cast', - 'get_type_hints', - 'NewType', - 'no_type_check', - 'no_type_check_decorator', - 'overload', - 'Text', - 'TYPE_CHECKING', -] - -# The pseudo-submodules 're' and 'io' are part of the public -# namespace, but excluded from __all__ because they might stomp on -# legitimate imports of those modules. - - -def _qualname(x): - if sys.version_info[:2] >= (3, 3): - return x.__qualname__ - else: - # Fall back to just name. 
- return x.__name__ - - -def _trim_name(nm): - if nm.startswith('_') and nm not in ('_TypeAlias', - '_ForwardRef', '_TypingBase', '_FinalTypingBase'): - nm = nm[1:] - return nm - - -class TypingMeta(type): - """Metaclass for most types defined in typing module - (not a part of public API). - - This overrides __new__() to require an extra keyword parameter - '_root', which serves as a guard against naive subclassing of the - typing classes. Any legitimate class defined using a metaclass - derived from TypingMeta must pass _root=True. - - This also defines a dummy constructor (all the work for most typing - constructs is done in __new__) and a nicer repr(). - """ - - _is_protocol = False - - def __new__(cls, name, bases, namespace, *, _root=False): - if not _root: - raise TypeError("Cannot subclass %s" % - (', '.join(map(_type_repr, bases)) or '()')) - return super().__new__(cls, name, bases, namespace) - - def __init__(self, *args, **kwds): - pass - - def _eval_type(self, globalns, localns): - """Override this in subclasses to interpret forward references. - - For example, List['C'] is internally stored as - List[_ForwardRef('C')], which should evaluate to List[C], - where C is an object found in globalns or localns (searching - localns first, of course). - """ - return self - - def _get_type_vars(self, tvars): - pass - - def __repr__(self): - qname = _trim_name(_qualname(self)) - return '%s.%s' % (self.__module__, qname) - - -class _TypingBase(metaclass=TypingMeta, _root=True): - """Internal indicator of special typing constructs.""" - - __slots__ = () - - def __init__(self, *args, **kwds): - pass - - def __new__(cls, *args, **kwds): - """Constructor. - - This only exists to give a better error message in case - someone tries to subclass a special typing object (not a good idea). - """ - if (len(args) == 3 and - isinstance(args[0], str) and - isinstance(args[1], tuple)): - # Close enough. - raise TypeError("Cannot subclass %r" % cls) - return super().__new__(cls) - - # Things that are not classes also need these. - def _eval_type(self, globalns, localns): - return self - - def _get_type_vars(self, tvars): - pass - - def __repr__(self): - cls = type(self) - qname = _trim_name(_qualname(cls)) - return '%s.%s' % (cls.__module__, qname) - - def __call__(self, *args, **kwds): - raise TypeError("Cannot instantiate %r" % type(self)) - - -class _FinalTypingBase(_TypingBase, _root=True): - """Internal mix-in class to prevent instantiation. - - Prevents instantiation unless _root=True is given in class call. - It is used to create pseudo-singleton instances Any, Union, Optional, etc. 
- """ - - __slots__ = () - - def __new__(cls, *args, _root=False, **kwds): - self = super().__new__(cls, *args, **kwds) - if _root is True: - return self - raise TypeError("Cannot instantiate %r" % cls) - - def __reduce__(self): - return _trim_name(type(self).__name__) - - -class _ForwardRef(_TypingBase, _root=True): - """Internal wrapper to hold a forward reference.""" - - __slots__ = ('__forward_arg__', '__forward_code__', - '__forward_evaluated__', '__forward_value__') - - def __init__(self, arg): - super().__init__(arg) - if not isinstance(arg, str): - raise TypeError('Forward reference must be a string -- got %r' % (arg,)) - try: - code = compile(arg, '', 'eval') - except SyntaxError: - raise SyntaxError('Forward reference must be an expression -- got %r' % - (arg,)) - self.__forward_arg__ = arg - self.__forward_code__ = code - self.__forward_evaluated__ = False - self.__forward_value__ = None - - def _eval_type(self, globalns, localns): - if not self.__forward_evaluated__ or localns is not globalns: - if globalns is None and localns is None: - globalns = localns = {} - elif globalns is None: - globalns = localns - elif localns is None: - localns = globalns - self.__forward_value__ = _type_check( - eval(self.__forward_code__, globalns, localns), - "Forward references must evaluate to types.") - self.__forward_evaluated__ = True - return self.__forward_value__ - - def __eq__(self, other): - if not isinstance(other, _ForwardRef): - return NotImplemented - return (self.__forward_arg__ == other.__forward_arg__ and - self.__forward_value__ == other.__forward_value__) - - def __hash__(self): - return hash((self.__forward_arg__, self.__forward_value__)) - - def __instancecheck__(self, obj): - raise TypeError("Forward references cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("Forward references cannot be used with issubclass().") - - def __repr__(self): - return '_ForwardRef(%r)' % (self.__forward_arg__,) - - -class _TypeAlias(_TypingBase, _root=True): - """Internal helper class for defining generic variants of concrete types. - - Note that this is not a type; let's call it a pseudo-type. It cannot - be used in instance and subclass checks in parameterized form, i.e. - ``isinstance(42, Match[str])`` raises ``TypeError`` instead of returning - ``False``. - """ - - __slots__ = ('name', 'type_var', 'impl_type', 'type_checker') - - def __init__(self, name, type_var, impl_type, type_checker): - """Initializer. - - Args: - name: The name, e.g. 'Pattern'. - type_var: The type parameter, e.g. AnyStr, or the - specific type, e.g. str. - impl_type: The implementation type. - type_checker: Function that takes an impl_type instance. - and returns a value that should be a type_var instance. - """ - assert isinstance(name, str), repr(name) - assert isinstance(impl_type, type), repr(impl_type) - assert not isinstance(impl_type, TypingMeta), repr(impl_type) - assert isinstance(type_var, (type, _TypingBase)), repr(type_var) - self.name = name - self.type_var = type_var - self.impl_type = impl_type - self.type_checker = type_checker - - def __repr__(self): - return "%s[%s]" % (self.name, _type_repr(self.type_var)) - - def __getitem__(self, parameter): - if not isinstance(self.type_var, TypeVar): - raise TypeError("%s cannot be further parameterized." % self) - if self.type_var.__constraints__ and isinstance(parameter, type): - if not issubclass(parameter, self.type_var.__constraints__): - raise TypeError("%s is not a valid substitution for %s." 
% - (parameter, self.type_var)) - if isinstance(parameter, TypeVar) and parameter is not self.type_var: - raise TypeError("%s cannot be re-parameterized." % self) - return self.__class__(self.name, parameter, - self.impl_type, self.type_checker) - - def __eq__(self, other): - if not isinstance(other, _TypeAlias): - return NotImplemented - return self.name == other.name and self.type_var == other.type_var - - def __hash__(self): - return hash((self.name, self.type_var)) - - def __instancecheck__(self, obj): - if not isinstance(self.type_var, TypeVar): - raise TypeError("Parameterized type aliases cannot be used " - "with isinstance().") - return isinstance(obj, self.impl_type) - - def __subclasscheck__(self, cls): - if not isinstance(self.type_var, TypeVar): - raise TypeError("Parameterized type aliases cannot be used " - "with issubclass().") - return issubclass(cls, self.impl_type) - - -def _get_type_vars(types, tvars): - for t in types: - if isinstance(t, TypingMeta) or isinstance(t, _TypingBase): - t._get_type_vars(tvars) - - -def _type_vars(types): - tvars = [] - _get_type_vars(types, tvars) - return tuple(tvars) - - -def _eval_type(t, globalns, localns): - if isinstance(t, TypingMeta) or isinstance(t, _TypingBase): - return t._eval_type(globalns, localns) - return t - - -def _type_check(arg, msg): - """Check that the argument is a type, and return it (internal helper). - - As a special case, accept None and return type(None) instead. - Also, _TypeAlias instances (e.g. Match, Pattern) are acceptable. - - The msg argument is a human-readable error message, e.g. - - "Union[arg, ...]: arg should be a type." - - We append the repr() of the actual value (truncated to 100 chars). - """ - if arg is None: - return type(None) - if isinstance(arg, str): - arg = _ForwardRef(arg) - if (isinstance(arg, _TypingBase) and type(arg).__name__ == '_ClassVar' or - not isinstance(arg, (type, _TypingBase)) and not callable(arg)): - raise TypeError(msg + " Got %.100r." % (arg,)) - # Bare Union etc. are not valid as type arguments - if (type(arg).__name__ in ('_Union', '_Optional') - and not getattr(arg, '__origin__', None) - or isinstance(arg, TypingMeta) and _gorg(arg) in (Generic, _Protocol)): - raise TypeError("Plain %s is not valid as type argument" % arg) - return arg - - -def _type_repr(obj): - """Return the repr() of an object, special-casing types (internal helper). - - If obj is a type, we return a shorter version than the default - type.__repr__, based on the module and qualified name, which is - typically enough to uniquely identify a type. For everything - else, we fall back on repr(obj). - """ - if isinstance(obj, type) and not isinstance(obj, TypingMeta): - if obj.__module__ == 'builtins': - return _qualname(obj) - return '%s.%s' % (obj.__module__, _qualname(obj)) - if obj is ...: - return('...') - if isinstance(obj, types.FunctionType): - return obj.__name__ - return repr(obj) - - -class _Any(_FinalTypingBase, _root=True): - """Special type indicating an unconstrained type. - - - Any is compatible with every type. - - Any assumed to have all methods. - - All values assumed to be instances of Any. - - Note that all the above statements are true from the point of view of - static type checkers. At runtime, Any should not be used with instance - or class checks. 
- """ - - __slots__ = () - - def __instancecheck__(self, obj): - raise TypeError("Any cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("Any cannot be used with issubclass().") - - -Any = _Any(_root=True) - - -class TypeVar(_TypingBase, _root=True): - """Type variable. - - Usage:: - - T = TypeVar('T') # Can be anything - A = TypeVar('A', str, bytes) # Must be str or bytes - - Type variables exist primarily for the benefit of static type - checkers. They serve as the parameters for generic types as well - as for generic function definitions. See class Generic for more - information on generic types. Generic functions work as follows: - - def repeat(x: T, n: int) -> List[T]: - '''Return a list containing n references to x.''' - return [x]*n - - def longest(x: A, y: A) -> A: - '''Return the longest of two strings.''' - return x if len(x) >= len(y) else y - - The latter example's signature is essentially the overloading - of (str, str) -> str and (bytes, bytes) -> bytes. Also note - that if the arguments are instances of some subclass of str, - the return type is still plain str. - - At runtime, isinstance(x, T) and issubclass(C, T) will raise TypeError. - - Type variables defined with covariant=True or contravariant=True - can be used do declare covariant or contravariant generic types. - See PEP 484 for more details. By default generic types are invariant - in all type variables. - - Type variables can be introspected. e.g.: - - T.__name__ == 'T' - T.__constraints__ == () - T.__covariant__ == False - T.__contravariant__ = False - A.__constraints__ == (str, bytes) - """ - - __slots__ = ('__name__', '__bound__', '__constraints__', - '__covariant__', '__contravariant__') - - def __init__(self, name, *constraints, bound=None, - covariant=False, contravariant=False): - super().__init__(name, *constraints, bound=bound, - covariant=covariant, contravariant=contravariant) - self.__name__ = name - if covariant and contravariant: - raise ValueError("Bivariant types are not supported.") - self.__covariant__ = bool(covariant) - self.__contravariant__ = bool(contravariant) - if constraints and bound is not None: - raise TypeError("Constraints cannot be combined with bound=...") - if constraints and len(constraints) == 1: - raise TypeError("A single constraint is not allowed") - msg = "TypeVar(name, constraint, ...): constraints must be types." - self.__constraints__ = tuple(_type_check(t, msg) for t in constraints) - if bound: - self.__bound__ = _type_check(bound, "Bound must be a type.") - else: - self.__bound__ = None - - def _get_type_vars(self, tvars): - if self not in tvars: - tvars.append(self) - - def __repr__(self): - if self.__covariant__: - prefix = '+' - elif self.__contravariant__: - prefix = '-' - else: - prefix = '~' - return prefix + self.__name__ - - def __instancecheck__(self, instance): - raise TypeError("Type variables cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("Type variables cannot be used with issubclass().") - - -# Some unconstrained type variables. These are used by the container types. -# (These are not for export.) -T = TypeVar('T') # Any type. -KT = TypeVar('KT') # Key type. -VT = TypeVar('VT') # Value type. -T_co = TypeVar('T_co', covariant=True) # Any type covariant containers. -V_co = TypeVar('V_co', covariant=True) # Any type covariant containers. -VT_co = TypeVar('VT_co', covariant=True) # Value type covariant containers. 
-T_contra = TypeVar('T_contra', contravariant=True) # Ditto contravariant. - -# A useful type variable with constraints. This represents string types. -# (This one *is* for export!) -AnyStr = TypeVar('AnyStr', bytes, str) - - -def _replace_arg(arg, tvars, args): - """An internal helper function: replace arg if it is a type variable - found in tvars with corresponding substitution from args or - with corresponding substitution sub-tree if arg is a generic type. - """ - - if tvars is None: - tvars = [] - if hasattr(arg, '_subs_tree'): - return arg._subs_tree(tvars, args) - if isinstance(arg, TypeVar): - for i, tvar in enumerate(tvars): - if arg == tvar: - return args[i] - return arg - - -def _subs_tree(cls, tvars=None, args=None): - """An internal helper function: calculate substitution tree - for generic cls after replacing its type parameters with - substitutions in tvars -> args (if any). - Repeat the same following __origin__'s. - - Return a list of arguments with all possible substitutions - performed. Arguments that are generic classes themselves are represented - as tuples (so that no new classes are created by this function). - For example: _subs_tree(List[Tuple[int, T]][str]) == [(Tuple, int, str)] - """ - - if cls.__origin__ is None: - return cls - # Make of chain of origins (i.e. cls -> cls.__origin__) - current = cls.__origin__ - orig_chain = [] - while current.__origin__ is not None: - orig_chain.append(current) - current = current.__origin__ - # Replace type variables in __args__ if asked ... - tree_args = [] - for arg in cls.__args__: - tree_args.append(_replace_arg(arg, tvars, args)) - # ... then continue replacing down the origin chain. - for ocls in orig_chain: - new_tree_args = [] - for i, arg in enumerate(ocls.__args__): - new_tree_args.append(_replace_arg(arg, ocls.__parameters__, tree_args)) - tree_args = new_tree_args - return tree_args - - -def _remove_dups_flatten(parameters): - """An internal helper for Union creation and substitution: flatten Union's - among parameters, then remove duplicates and strict subclasses. - """ - - # Flatten out Union[Union[...], ...]. - params = [] - for p in parameters: - if isinstance(p, _Union) and p.__origin__ is Union: - params.extend(p.__args__) - elif isinstance(p, tuple) and len(p) > 0 and p[0] is Union: - params.extend(p[1:]) - else: - params.append(p) - # Weed out strict duplicates, preserving the first of each occurrence. - all_params = set(params) - if len(all_params) < len(params): - new_params = [] - for t in params: - if t in all_params: - new_params.append(t) - all_params.remove(t) - params = new_params - assert not all_params, all_params - # Weed out subclasses. - # E.g. Union[int, Employee, Manager] == Union[int, Employee]. - # If object is present it will be sole survivor among proper classes. - # Never discard type variables. - # (In particular, Union[str, AnyStr] != AnyStr.) - all_params = set(params) - for t1 in params: - if not isinstance(t1, type): - continue - if any(isinstance(t2, type) and issubclass(t1, t2) - for t2 in all_params - {t1} - if not (isinstance(t2, GenericMeta) and - t2.__origin__ is not None)): - all_params.remove(t1) - return tuple(t for t in params if t in all_params) - - -def _check_generic(cls, parameters): - # Check correct count for parameters of a generic cls (internal helper). 
- if not cls.__parameters__: - raise TypeError("%s is not a generic class" % repr(cls)) - alen = len(parameters) - elen = len(cls.__parameters__) - if alen != elen: - raise TypeError("Too %s parameters for %s; actual %s, expected %s" % - ("many" if alen > elen else "few", repr(cls), alen, elen)) - - -_cleanups = [] - - -def _tp_cache(func): - """Internal wrapper caching __getitem__ of generic types with a fallback to - original function for non-hashable arguments. - """ - - cached = functools.lru_cache()(func) - _cleanups.append(cached.cache_clear) - @functools.wraps(func) - def inner(*args, **kwds): - try: - return cached(*args, **kwds) - except TypeError: - pass # All real errors (not unhashable args) are raised below. - return func(*args, **kwds) - return inner - - -class _Union(_FinalTypingBase, _root=True): - """Union type; Union[X, Y] means either X or Y. - - To define a union, use e.g. Union[int, str]. Details: - - - The arguments must be types and there must be at least one. - - - None as an argument is a special case and is replaced by - type(None). - - - Unions of unions are flattened, e.g.:: - - Union[Union[int, str], float] == Union[int, str, float] - - - Unions of a single argument vanish, e.g.:: - - Union[int] == int # The constructor actually returns int - - - Redundant arguments are skipped, e.g.:: - - Union[int, str, int] == Union[int, str] - - - When comparing unions, the argument order is ignored, e.g.:: - - Union[int, str] == Union[str, int] - - - When two arguments have a subclass relationship, the least - derived argument is kept, e.g.:: - - class Employee: pass - class Manager(Employee): pass - Union[int, Employee, Manager] == Union[int, Employee] - Union[Manager, int, Employee] == Union[int, Employee] - Union[Employee, Manager] == Employee - - - Similar for object:: - - Union[int, object] == object - - - You cannot subclass or instantiate a union. - - - You can use Optional[X] as a shorthand for Union[X, None]. - """ - - __slots__ = ('__parameters__', '__args__', '__origin__', '__tree_hash__') - - def __new__(cls, parameters=None, origin=None, *args, _root=False): - self = super().__new__(cls, parameters, origin, *args, _root=_root) - if origin is None: - self.__parameters__ = None - self.__args__ = None - self.__origin__ = None - self.__tree_hash__ = hash(frozenset(('Union',))) - return self - if not isinstance(parameters, tuple): - raise TypeError("Expected parameters=") - if origin is Union: - parameters = _remove_dups_flatten(parameters) - # It's not a union if there's only one type left. - if len(parameters) == 1: - return parameters[0] - self.__parameters__ = _type_vars(parameters) - self.__args__ = parameters - self.__origin__ = origin - # Pre-calculate the __hash__ on instantiation. - # This improves speed for complex substitutions. - subs_tree = self._subs_tree() - if isinstance(subs_tree, tuple): - self.__tree_hash__ = hash(frozenset(subs_tree)) - else: - self.__tree_hash__ = hash(subs_tree) - return self - - def _eval_type(self, globalns, localns): - if self.__args__ is None: - return self - ev_args = tuple(_eval_type(t, globalns, localns) for t in self.__args__) - ev_origin = _eval_type(self.__origin__, globalns, localns) - if ev_args == self.__args__ and ev_origin == self.__origin__: - # Everything is already evaluated. 
- return self - return self.__class__(ev_args, ev_origin, _root=True) - - def _get_type_vars(self, tvars): - if self.__origin__ and self.__parameters__: - _get_type_vars(self.__parameters__, tvars) - - def __repr__(self): - if self.__origin__ is None: - return super().__repr__() - tree = self._subs_tree() - if not isinstance(tree, tuple): - return repr(tree) - return tree[0]._tree_repr(tree) - - def _tree_repr(self, tree): - arg_list = [] - for arg in tree[1:]: - if not isinstance(arg, tuple): - arg_list.append(_type_repr(arg)) - else: - arg_list.append(arg[0]._tree_repr(arg)) - return super().__repr__() + '[%s]' % ', '.join(arg_list) - - @_tp_cache - def __getitem__(self, parameters): - if parameters == (): - raise TypeError("Cannot take a Union of no types.") - if not isinstance(parameters, tuple): - parameters = (parameters,) - if self.__origin__ is None: - msg = "Union[arg, ...]: each arg must be a type." - else: - msg = "Parameters to generic types must be types." - parameters = tuple(_type_check(p, msg) for p in parameters) - if self is not Union: - _check_generic(self, parameters) - return self.__class__(parameters, origin=self, _root=True) - - def _subs_tree(self, tvars=None, args=None): - if self is Union: - return Union # Nothing to substitute - tree_args = _subs_tree(self, tvars, args) - tree_args = _remove_dups_flatten(tree_args) - if len(tree_args) == 1: - return tree_args[0] # Union of a single type is that type - return (Union,) + tree_args - - def __eq__(self, other): - if not isinstance(other, _Union): - return self._subs_tree() == other - return self.__tree_hash__ == other.__tree_hash__ - - def __hash__(self): - return self.__tree_hash__ - - def __instancecheck__(self, obj): - raise TypeError("Unions cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("Unions cannot be used with issubclass().") - - -Union = _Union(_root=True) - - -class _Optional(_FinalTypingBase, _root=True): - """Optional type. - - Optional[X] is equivalent to Union[X, None]. - """ - - __slots__ = () - - @_tp_cache - def __getitem__(self, arg): - arg = _type_check(arg, "Optional[t] requires a single type.") - return Union[arg, type(None)] - - -Optional = _Optional(_root=True) - - -def _gorg(a): - """Return the farthest origin of a generic class (internal helper).""" - assert isinstance(a, GenericMeta) - while a.__origin__ is not None: - a = a.__origin__ - return a - - -def _geqv(a, b): - """Return whether two generic classes are equivalent (internal helper). - - The intention is to consider generic class X and any of its - parameterized forms (X[T], X[int], etc.) as equivalent. - - However, X is not equivalent to a subclass of X. - - The relation is reflexive, symmetric and transitive. - """ - assert isinstance(a, GenericMeta) and isinstance(b, GenericMeta) - # Reduce each to its origin. - return _gorg(a) is _gorg(b) - - -def _next_in_mro(cls): - """Helper for Generic.__new__. - - Returns the class after the last occurrence of Generic or - Generic[...] in cls.__mro__. - """ - next_in_mro = object - # Look for the last occurrence of Generic or Generic[...]. 
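The Union invariants listed in the docstring above hold at runtime and can be verified directly; a sketch assuming the public typing module:

    from typing import Optional, Union

    assert Union[Union[int, str], float] == Union[int, str, float]  # flattened
    assert Union[int] is int                        # single-argument union vanishes
    assert Union[int, str, int] == Union[int, str]  # duplicates removed
    assert Union[int, str] == Union[str, int]       # argument order ignored
    assert Optional[int] == Union[int, None]        # Optional is shorthand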
- for i, c in enumerate(cls.__mro__[:-1]): - if isinstance(c, GenericMeta) and _gorg(c) is Generic: - next_in_mro = cls.__mro__[i+1] - return next_in_mro - - -def _valid_for_check(cls): - """An internal helper to prohibit isinstance([1], List[str]) etc.""" - if cls is Generic: - raise TypeError("Class %r cannot be used with class " - "or instance checks" % cls) - if (cls.__origin__ is not None and - sys._getframe(3).f_globals['__name__'] not in ['abc', 'functools']): - raise TypeError("Parameterized generics cannot be used with class " - "or instance checks") - - -def _make_subclasshook(cls): - """Construct a __subclasshook__ callable that incorporates - the associated __extra__ class in subclass checks performed - against cls. - """ - if isinstance(cls.__extra__, abc.ABCMeta): - # The logic mirrors that of ABCMeta.__subclasscheck__. - # Registered classes need not be checked here because - # cls and its extra share the same _abc_registry. - def __extrahook__(subclass): - _valid_for_check(cls) - res = cls.__extra__.__subclasshook__(subclass) - if res is not NotImplemented: - return res - if cls.__extra__ in subclass.__mro__: - return True - for scls in cls.__extra__.__subclasses__(): - if isinstance(scls, GenericMeta): - continue - if issubclass(subclass, scls): - return True - return NotImplemented - else: - # For non-ABC extras we'll just call issubclass(). - def __extrahook__(subclass): - _valid_for_check(cls) - if cls.__extra__ and issubclass(subclass, cls.__extra__): - return True - return NotImplemented - return __extrahook__ - - -def _no_slots_copy(dct): - """Internal helper: copy class __dict__ and clean slots class variables. - (They will be re-created if necessary by normal class machinery.) - """ - dict_copy = dict(dct) - if '__slots__' in dict_copy: - for slot in dict_copy['__slots__']: - dict_copy.pop(slot, None) - return dict_copy - - -class GenericMeta(TypingMeta, abc.ABCMeta): - """Metaclass for generic types.""" - - def __new__(cls, name, bases, namespace, - tvars=None, args=None, origin=None, extra=None, orig_bases=None): - if tvars is not None: - # Called from __getitem__() below. - assert origin is not None - assert all(isinstance(t, TypeVar) for t in tvars), tvars - else: - # Called from class statement. - assert tvars is None, tvars - assert args is None, args - assert origin is None, origin - - # Get the full set of tvars from the bases. - tvars = _type_vars(bases) - # Look for Generic[T1, ..., Tn]. - # If found, tvars must be a subset of it. - # If not found, tvars is it. - # Also check for and reject plain Generic, - # and reject multiple Generic[...]. - gvars = None - for base in bases: - if base is Generic: - raise TypeError("Cannot inherit from plain Generic") - if (isinstance(base, GenericMeta) and - base.__origin__ is Generic): - if gvars is not None: - raise TypeError( - "Cannot inherit from Generic[...] 
multiple types.") - gvars = base.__parameters__ - if gvars is None: - gvars = tvars - else: - tvarset = set(tvars) - gvarset = set(gvars) - if not tvarset <= gvarset: - raise TypeError( - "Some type variables (%s) " - "are not listed in Generic[%s]" % - (", ".join(str(t) for t in tvars if t not in gvarset), - ", ".join(str(g) for g in gvars))) - tvars = gvars - - initial_bases = bases - if extra is not None and type(extra) is abc.ABCMeta and extra not in bases: - bases = (extra,) + bases - bases = tuple(_gorg(b) if isinstance(b, GenericMeta) else b for b in bases) - - # remove bare Generic from bases if there are other generic bases - if any(isinstance(b, GenericMeta) and b is not Generic for b in bases): - bases = tuple(b for b in bases if b is not Generic) - self = super().__new__(cls, name, bases, namespace, _root=True) - - self.__parameters__ = tvars - # Be prepared that GenericMeta will be subclassed by TupleMeta - # and CallableMeta, those two allow ..., (), or [] in __args___. - self.__args__ = tuple(... if a is _TypingEllipsis else - () if a is _TypingEmpty else - a for a in args) if args else None - self.__origin__ = origin - self.__extra__ = extra - # Speed hack (https://github.com/python/typing/issues/196). - self.__next_in_mro__ = _next_in_mro(self) - # Preserve base classes on subclassing (__bases__ are type erased now). - if orig_bases is None: - self.__orig_bases__ = initial_bases - - # This allows unparameterized generic collections to be used - # with issubclass() and isinstance() in the same way as their - # collections.abc counterparts (e.g., isinstance([], Iterable)). - if ('__subclasshook__' not in namespace and extra # allow overriding - or hasattr(self.__subclasshook__, '__name__') and - self.__subclasshook__.__name__ == '__extrahook__'): - self.__subclasshook__ = _make_subclasshook(self) - if isinstance(extra, abc.ABCMeta): - self._abc_registry = extra._abc_registry - - if origin and hasattr(origin, '__qualname__'): # Fix for Python 3.2. 
- self.__qualname__ = origin.__qualname__ - self.__tree_hash__ = hash(self._subs_tree()) if origin else hash((self.__name__,)) - return self - - def _get_type_vars(self, tvars): - if self.__origin__ and self.__parameters__: - _get_type_vars(self.__parameters__, tvars) - - def _eval_type(self, globalns, localns): - ev_origin = (self.__origin__._eval_type(globalns, localns) - if self.__origin__ else None) - ev_args = tuple(_eval_type(a, globalns, localns) for a - in self.__args__) if self.__args__ else None - if ev_origin == self.__origin__ and ev_args == self.__args__: - return self - return self.__class__(self.__name__, - self.__bases__, - _no_slots_copy(self.__dict__), - tvars=_type_vars(ev_args) if ev_args else None, - args=ev_args, - origin=ev_origin, - extra=self.__extra__, - orig_bases=self.__orig_bases__) - - def __repr__(self): - if self.__origin__ is None: - return super().__repr__() - return self._tree_repr(self._subs_tree()) - - def _tree_repr(self, tree): - arg_list = [] - for arg in tree[1:]: - if arg == (): - arg_list.append('()') - elif not isinstance(arg, tuple): - arg_list.append(_type_repr(arg)) - else: - arg_list.append(arg[0]._tree_repr(arg)) - return super().__repr__() + '[%s]' % ', '.join(arg_list) - - def _subs_tree(self, tvars=None, args=None): - if self.__origin__ is None: - return self - tree_args = _subs_tree(self, tvars, args) - return (_gorg(self),) + tuple(tree_args) - - def __eq__(self, other): - if not isinstance(other, GenericMeta): - return NotImplemented - if self.__origin__ is None or other.__origin__ is None: - return self is other - return self.__tree_hash__ == other.__tree_hash__ - - def __hash__(self): - return self.__tree_hash__ - - @_tp_cache - def __getitem__(self, params): - if not isinstance(params, tuple): - params = (params,) - if not params and not _gorg(self) is Tuple: - raise TypeError( - "Parameter list to %s[...] cannot be empty" % _qualname(self)) - msg = "Parameters to generic types must be types." - params = tuple(_type_check(p, msg) for p in params) - if self is Generic: - # Generic can only be subscripted with unique type variables. - if not all(isinstance(p, TypeVar) for p in params): - raise TypeError( - "Parameters to Generic[...] must all be type variables") - if len(set(params)) != len(params): - raise TypeError( - "Parameters to Generic[...] must all be unique") - tvars = params - args = params - elif self in (Tuple, Callable): - tvars = _type_vars(params) - args = params - elif self is _Protocol: - # _Protocol is internal, don't check anything. - tvars = params - args = params - elif self.__origin__ in (Generic, _Protocol): - # Can't subscript Generic[...] or _Protocol[...]. - raise TypeError("Cannot subscript already-subscripted %s" % - repr(self)) - else: - # Subscripting a regular Generic subclass. - _check_generic(self, params) - tvars = _type_vars(params) - args = params - return self.__class__(self.__name__, - self.__bases__, - _no_slots_copy(self.__dict__), - tvars=tvars, - args=args, - origin=self, - extra=self.__extra__, - orig_bases=self.__orig_bases__) - - def __instancecheck__(self, instance): - # Since we extend ABC.__subclasscheck__ and - # ABC.__instancecheck__ inlines the cache checking done by the - # latter, we must extend __instancecheck__ too. For simplicity - # we just skip the cache check -- instance checks for generic - # classes are supposed to be rare anyways. 
- return issubclass(instance.__class__, self) - - def __copy__(self): - return self.__class__(self.__name__, self.__bases__, - _no_slots_copy(self.__dict__), - self.__parameters__, self.__args__, self.__origin__, - self.__extra__, self.__orig_bases__) - - -# Prevent checks for Generic to crash when defining Generic. -Generic = None - - -def _generic_new(base_cls, cls, *args, **kwds): - # Assure type is erased on instantiation, - # but attempt to store it in __orig_class__ - if cls.__origin__ is None: - return base_cls.__new__(cls) - else: - origin = _gorg(cls) - obj = base_cls.__new__(origin) - try: - obj.__orig_class__ = cls - except AttributeError: - pass - obj.__init__(*args, **kwds) - return obj - - -class Generic(metaclass=GenericMeta): - """Abstract base class for generic types. - - A generic type is typically declared by inheriting from - this class parameterized with one or more type variables. - For example, a generic mapping type might be defined as:: - - class Mapping(Generic[KT, VT]): - def __getitem__(self, key: KT) -> VT: - ... - # Etc. - - This class can then be used as follows:: - - def lookup_name(mapping: Mapping[KT, VT], key: KT, default: VT) -> VT: - try: - return mapping[key] - except KeyError: - return default - """ - - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, Generic): - raise TypeError("Type Generic cannot be instantiated; " - "it can be used only as a base class") - return _generic_new(cls.__next_in_mro__, cls, *args, **kwds) - - -class _TypingEmpty: - """Internal placeholder for () or []. Used by TupleMeta and CallableMeta - to allow empty list/tuple in specific places, without allowing them - to sneak in where prohibited. - """ - - -class _TypingEllipsis: - """Internal placeholder for ... (ellipsis).""" - - -class TupleMeta(GenericMeta): - """Metaclass for Tuple (internal).""" - - @_tp_cache - def __getitem__(self, parameters): - if self.__origin__ is not None or not _geqv(self, Tuple): - # Normal generic rules apply if this is not the first subscription - # or a subscription of a subclass. - return super().__getitem__(parameters) - if parameters == (): - return super().__getitem__((_TypingEmpty,)) - if not isinstance(parameters, tuple): - parameters = (parameters,) - if len(parameters) == 2 and parameters[1] is ...: - msg = "Tuple[t, ...]: t must be a type." - p = _type_check(parameters[0], msg) - return super().__getitem__((p, _TypingEllipsis)) - msg = "Tuple[t0, t1, ...]: each t must be a type." - parameters = tuple(_type_check(p, msg) for p in parameters) - return super().__getitem__(parameters) - - def __instancecheck__(self, obj): - if self.__args__ == None: - return isinstance(obj, tuple) - raise TypeError("Parameterized Tuple cannot be used " - "with isinstance().") - - def __subclasscheck__(self, cls): - if self.__args__ == None: - return issubclass(cls, tuple) - raise TypeError("Parameterized Tuple cannot be used " - "with issubclass().") - - -class Tuple(tuple, extra=tuple, metaclass=TupleMeta): - """Tuple type; Tuple[X, Y] is the cross-product type of X and Y. - - Example: Tuple[T1, T2] is a tuple of two elements corresponding - to type variables T1 and T2. Tuple[int, float, str] is a tuple - of an int, a float and a string. - - To specify a variable-length tuple of homogeneous type, use Tuple[T, ...]. 
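To see the GenericMeta and Generic machinery above in action, here is a sketch with a hypothetical user-defined generic class (Box is not part of this module):

    from typing import Generic, TypeVar

    T = TypeVar('T')

    class Box(Generic[T]):
        def __init__(self, item: T) -> None:
            self.item = item

    b = Box[int](42)             # subscription goes through GenericMeta.__getitem__
    assert type(b) is Box        # _generic_new erases the type parameter...
    assert b.__orig_class__ == Box[int]   # ...but records the subscripted form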
- """ - - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, Tuple): - raise TypeError("Type Tuple cannot be instantiated; " - "use tuple() instead") - return _generic_new(tuple, cls, *args, **kwds) - - -class CallableMeta(GenericMeta): - """Metaclass for Callable (internal).""" - - def __repr__(self): - if self.__origin__ is None: - return super().__repr__() - return self._tree_repr(self._subs_tree()) - - def _tree_repr(self, tree): - if _gorg(self) is not Callable: - return super()._tree_repr(tree) - # For actual Callable (not its subclass) we override - # super()._tree_repr() for nice formatting. - arg_list = [] - for arg in tree[1:]: - if not isinstance(arg, tuple): - arg_list.append(_type_repr(arg)) - else: - arg_list.append(arg[0]._tree_repr(arg)) - if arg_list[0] == '...': - return repr(tree[0]) + '[..., %s]' % arg_list[1] - return (repr(tree[0]) + - '[[%s], %s]' % (', '.join(arg_list[:-1]), arg_list[-1])) - - def __getitem__(self, parameters): - """A thin wrapper around __getitem_inner__ to provide the latter - with hashable arguments to improve speed. - """ - - if self.__origin__ is not None or not _geqv(self, Callable): - return super().__getitem__(parameters) - if not isinstance(parameters, tuple) or len(parameters) != 2: - raise TypeError("Callable must be used as " - "Callable[[arg, ...], result].") - args, result = parameters - if args is Ellipsis: - parameters = (Ellipsis, result) - else: - if not isinstance(args, list): - raise TypeError("Callable[args, result]: args must be a list." - " Got %.100r." % (args,)) - parameters = (tuple(args), result) - return self.__getitem_inner__(parameters) - - @_tp_cache - def __getitem_inner__(self, parameters): - args, result = parameters - msg = "Callable[args, result]: result must be a type." - result = _type_check(result, msg) - if args is Ellipsis: - return super().__getitem__((_TypingEllipsis, result)) - msg = "Callable[[arg, ...], result]: each arg must be a type." - args = tuple(_type_check(arg, msg) for arg in args) - parameters = args + (result,) - return super().__getitem__(parameters) - - -class Callable(extra=collections_abc.Callable, metaclass = CallableMeta): - """Callable type; Callable[[int], str] is a function of (int) -> str. - - The subscription syntax must always be used with exactly two - values: the argument list and the return type. The argument list - must be a list of types or ellipsis; the return type must be a single type. - - There is no syntax to indicate optional or keyword arguments, - such function types are rarely used as callback types. - """ - - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, Callable): - raise TypeError("Type Callable cannot be instantiated; " - "use a non-abstract subclass instead") - return _generic_new(cls.__next_in_mro__, cls, *args, **kwds) - - -class _ClassVar(_FinalTypingBase, _root=True): - """Special type construct to mark class variables. - - An annotation wrapped in ClassVar indicates that a given - attribute is intended to be used as a class variable and - should not be set on instances of that class. Usage:: - - class Starship: - stats: ClassVar[Dict[str, int]] = {} # class variable - damage: int = 10 # instance variable - - ClassVar accepts only types and cannot be further subscribed. - - Note that ClassVar is not a class itself, and should not - be used with isinstance() or issubclass(). 
- """ - - __slots__ = ('__type__',) - - def __init__(self, tp=None, **kwds): - self.__type__ = tp - - def __getitem__(self, item): - cls = type(self) - if self.__type__ is None: - return cls(_type_check(item, - '{} accepts only single type.'.format(cls.__name__[1:])), - _root=True) - raise TypeError('{} cannot be further subscripted' - .format(cls.__name__[1:])) - - def _eval_type(self, globalns, localns): - new_tp = _eval_type(self.__type__, globalns, localns) - if new_tp == self.__type__: - return self - return type(self)(new_tp, _root=True) - - def __repr__(self): - r = super().__repr__() - if self.__type__ is not None: - r += '[{}]'.format(_type_repr(self.__type__)) - return r - - def __hash__(self): - return hash((type(self).__name__, self.__type__)) - - def __eq__(self, other): - if not isinstance(other, _ClassVar): - return NotImplemented - if self.__type__ is not None: - return self.__type__ == other.__type__ - return self is other - - -ClassVar = _ClassVar(_root=True) - - -def cast(typ, val): - """Cast a value to a type. - - This returns the value unchanged. To the type checker this - signals that the return value has the designated type, but at - runtime we intentionally don't check anything (we want this - to be as fast as possible). - """ - return val - - -def _get_defaults(func): - """Internal helper to extract the default arguments, by name.""" - try: - code = func.__code__ - except AttributeError: - # Some built-in functions don't have __code__, __defaults__, etc. - return {} - pos_count = code.co_argcount - arg_names = code.co_varnames - arg_names = arg_names[:pos_count] - defaults = func.__defaults__ or () - kwdefaults = func.__kwdefaults__ - res = dict(kwdefaults) if kwdefaults else {} - pos_offset = pos_count - len(defaults) - for name, value in zip(arg_names[pos_offset:], defaults): - assert name not in res - res[name] = value - return res - - -def get_type_hints(obj, globalns=None, localns=None): - """Return type hints for an object. - - This is often the same as obj.__annotations__, but it handles - forward references encoded as string literals, and if necessary - adds Optional[t] if a default value equal to None is set. - - The argument may be a module, class, method, or function. The annotations - are returned as a dictionary. For classes, annotations include also - inherited members. - - TypeError is raised if the argument is not of a type that can contain - annotations, and an empty dictionary is returned if no annotations are - present. - - BEWARE -- the behavior of globalns and localns is counterintuitive - (unless you are familiar with how eval() and exec() work). The - search order is locals first, then globals. - - - If no dict arguments are passed, an attempt is made to use the - globals from obj, and these are also used as the locals. If the - object does not appear to have globals, an exception is raised. - - - If one dict argument is passed, it is used for both globals and - locals. - - - If two dict arguments are passed, they specify globals and - locals, respectively. - """ - - if getattr(obj, '__no_type_check__', None): - return {} - if globalns is None: - globalns = getattr(obj, '__globals__', {}) - if localns is None: - localns = globalns - elif localns is None: - localns = globalns - # Classes require a special treatment. 
- if isinstance(obj, type): - hints = {} - for base in reversed(obj.__mro__): - ann = base.__dict__.get('__annotations__', {}) - for name, value in ann.items(): - if value is None: - value = type(None) - if isinstance(value, str): - value = _ForwardRef(value) - value = _eval_type(value, globalns, localns) - hints[name] = value - return hints - hints = getattr(obj, '__annotations__', None) - if hints is None: - # Return empty annotations for something that _could_ have them. - if (isinstance(obj, types.FunctionType) or - isinstance(obj, types.BuiltinFunctionType) or - isinstance(obj, types.MethodType) or - isinstance(obj, types.ModuleType)): - return {} - else: - raise TypeError('{!r} is not a module, class, method, ' - 'or function.'.format(obj)) - defaults = _get_defaults(obj) - hints = dict(hints) - for name, value in hints.items(): - if value is None: - value = type(None) - if isinstance(value, str): - value = _ForwardRef(value) - value = _eval_type(value, globalns, localns) - if name in defaults and defaults[name] is None: - value = Optional[value] - hints[name] = value - return hints - - -def no_type_check(arg): - """Decorator to indicate that annotations are not type hints. - - The argument must be a class or function; if it is a class, it - applies recursively to all methods and classes defined in that class - (but not to methods defined in its superclasses or subclasses). - - This mutates the function(s) or class(es) in place. - """ - if isinstance(arg, type): - arg_attrs = arg.__dict__.copy() - for attr, val in arg.__dict__.items(): - if val in arg.__bases__: - arg_attrs.pop(attr) - for obj in arg_attrs.values(): - if isinstance(obj, types.FunctionType): - obj.__no_type_check__ = True - if isinstance(obj, type): - no_type_check(obj) - try: - arg.__no_type_check__ = True - except TypeError: # built-in classes - pass - return arg - - -def no_type_check_decorator(decorator): - """Decorator to give another decorator the @no_type_check effect. - - This wraps the decorator with something that wraps the decorated - function in @no_type_check. - """ - - @functools.wraps(decorator) - def wrapped_decorator(*args, **kwds): - func = decorator(*args, **kwds) - func = no_type_check(func) - return func - - return wrapped_decorator - - -def _overload_dummy(*args, **kwds): - """Helper for @overload to raise when called.""" - raise NotImplementedError( - "You should not call an overloaded function. " - "A series of @overload-decorated functions " - "outside a stub module should always be followed " - "by an implementation that is not @overload-ed.") - - -def overload(func): - """Decorator for overloaded functions/methods. - - In a stub file, place two or more stub definitions for the same - function in a row, each decorated with @overload. For example: - - @overload - def utf8(value: None) -> None: ... - @overload - def utf8(value: bytes) -> bytes: ... - @overload - def utf8(value: str) -> bytes: ... - - In a non-stub file (i.e. a regular .py file), do the same but - follow it with an implementation. The implementation should *not* - be decorated with @overload. For example: - - @overload - def utf8(value: None) -> None: ... - @overload - def utf8(value: bytes) -> bytes: ... - @overload - def utf8(value: str) -> bytes: ... - def utf8(value): - # implementation goes here - """ - return _overload_dummy - - -class _ProtocolMeta(GenericMeta): - """Internal metaclass for _Protocol. - - This exists so _Protocol classes can be generic without deriving - from Generic. 
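A sketch of get_type_hints resolving a string (forward-reference) annotation and upgrading a None default to Optional, per the rules above; greet is a hypothetical function:

    from typing import Optional, get_type_hints

    def greet(name: 'str', times: int = None) -> str:
        return name * (times or 1)

    assert get_type_hints(greet) == {
        'name': str,               # forward reference evaluated
        'times': Optional[int],    # None default implies Optional
        'return': str,
    }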
- """ - - def __instancecheck__(self, obj): - if _Protocol not in self.__bases__: - return super().__instancecheck__(obj) - raise TypeError("Protocols cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - if not self._is_protocol: - # No structural checks since this isn't a protocol. - return NotImplemented - - if self is _Protocol: - # Every class is a subclass of the empty protocol. - return True - - # Find all attributes defined in the protocol. - attrs = self._get_protocol_attrs() - - for attr in attrs: - if not any(attr in d.__dict__ for d in cls.__mro__): - return False - return True - - def _get_protocol_attrs(self): - # Get all Protocol base classes. - protocol_bases = [] - for c in self.__mro__: - if getattr(c, '_is_protocol', False) and c.__name__ != '_Protocol': - protocol_bases.append(c) - - # Get attributes included in protocol. - attrs = set() - for base in protocol_bases: - for attr in base.__dict__.keys(): - # Include attributes not defined in any non-protocol bases. - for c in self.__mro__: - if (c is not base and attr in c.__dict__ and - not getattr(c, '_is_protocol', False)): - break - else: - if (not attr.startswith('_abc_') and - attr != '__abstractmethods__' and - attr != '__annotations__' and - attr != '__weakref__' and - attr != '_is_protocol' and - attr != '__dict__' and - attr != '__args__' and - attr != '__slots__' and - attr != '_get_protocol_attrs' and - attr != '__next_in_mro__' and - attr != '__parameters__' and - attr != '__origin__' and - attr != '__orig_bases__' and - attr != '__extra__' and - attr != '__tree_hash__' and - attr != '__module__'): - attrs.add(attr) - - return attrs - - -class _Protocol(metaclass=_ProtocolMeta): - """Internal base class for protocol classes. - - This implements a simple-minded structural issubclass check - (similar but more general than the one-offs in collections.abc - such as Hashable). - """ - - __slots__ = () - - _is_protocol = True - - -# Various ABCs mimicking those in collections.abc. -# A few are simply re-exported for completeness. - -Hashable = collections_abc.Hashable # Not generic. 
- - -if hasattr(collections_abc, 'Awaitable'): - class Awaitable(Generic[T_co], extra=collections_abc.Awaitable): - __slots__ = () - - __all__.append('Awaitable') - - -if hasattr(collections_abc, 'Coroutine'): - class Coroutine(Awaitable[V_co], Generic[T_co, T_contra, V_co], - extra=collections_abc.Coroutine): - __slots__ = () - - __all__.append('Coroutine') - - -if hasattr(collections_abc, 'AsyncIterable'): - - class AsyncIterable(Generic[T_co], extra=collections_abc.AsyncIterable): - __slots__ = () - - class AsyncIterator(AsyncIterable[T_co], - extra=collections_abc.AsyncIterator): - __slots__ = () - - __all__.append('AsyncIterable') - __all__.append('AsyncIterator') - - -class Iterable(Generic[T_co], extra=collections_abc.Iterable): - __slots__ = () - - -class Iterator(Iterable[T_co], extra=collections_abc.Iterator): - __slots__ = () - - -class SupportsInt(_Protocol): - __slots__ = () - - @abstractmethod - def __int__(self) -> int: - pass - - -class SupportsFloat(_Protocol): - __slots__ = () - - @abstractmethod - def __float__(self) -> float: - pass - - -class SupportsComplex(_Protocol): - __slots__ = () - - @abstractmethod - def __complex__(self) -> complex: - pass - - -class SupportsBytes(_Protocol): - __slots__ = () - - @abstractmethod - def __bytes__(self) -> bytes: - pass - - -class SupportsAbs(_Protocol[T_co]): - __slots__ = () - - @abstractmethod - def __abs__(self) -> T_co: - pass - - -class SupportsRound(_Protocol[T_co]): - __slots__ = () - - @abstractmethod - def __round__(self, ndigits: int = 0) -> T_co: - pass - - -if hasattr(collections_abc, 'Reversible'): - class Reversible(Iterable[T_co], extra=collections_abc.Reversible): - __slots__ = () -else: - class Reversible(_Protocol[T_co]): - __slots__ = () - - @abstractmethod - def __reversed__(self) -> 'Iterator[T_co]': - pass - - -Sized = collections_abc.Sized # Not generic. - - -class Container(Generic[T_co], extra=collections_abc.Container): - __slots__ = () - - -if hasattr(collections_abc, 'Collection'): - class Collection(Sized, Iterable[T_co], Container[T_co], - extra=collections_abc.Collection): - __slots__ = () - - __all__.append('Collection') - - -# Callable was defined earlier. - -if hasattr(collections_abc, 'Collection'): - class AbstractSet(Collection[T_co], - extra=collections_abc.Set): - __slots__ = () -else: - class AbstractSet(Sized, Iterable[T_co], Container[T_co], - extra=collections_abc.Set): - __slots__ = () - - -class MutableSet(AbstractSet[T], extra=collections_abc.MutableSet): - __slots__ = () - - -# NOTE: It is only covariant in the value type. 
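The Supports* protocols defined above perform duck-typed issubclass checks at runtime; a quick sketch against the public typing module:

    from typing import SupportsAbs, SupportsFloat, SupportsInt

    assert issubclass(int, SupportsInt)        # int defines __int__
    assert issubclass(float, SupportsAbs)      # float defines __abs__
    assert not issubclass(str, SupportsFloat)  # str defines no __float__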
-if hasattr(collections_abc, 'Collection'): - class Mapping(Collection[KT], Generic[KT, VT_co], - extra=collections_abc.Mapping): - __slots__ = () -else: - class Mapping(Sized, Iterable[KT], Container[KT], Generic[KT, VT_co], - extra=collections_abc.Mapping): - __slots__ = () - - -class MutableMapping(Mapping[KT, VT], extra=collections_abc.MutableMapping): - __slots__ = () - -if hasattr(collections_abc, 'Reversible'): - if hasattr(collections_abc, 'Collection'): - class Sequence(Reversible[T_co], Collection[T_co], - extra=collections_abc.Sequence): - __slots__ = () - else: - class Sequence(Sized, Reversible[T_co], Container[T_co], - extra=collections_abc.Sequence): - __slots__ = () -else: - class Sequence(Sized, Iterable[T_co], Container[T_co], - extra=collections_abc.Sequence): - __slots__ = () - - -class MutableSequence(Sequence[T], extra=collections_abc.MutableSequence): - __slots__ = () - - -class ByteString(Sequence[int], extra=collections_abc.ByteString): - __slots__ = () - - -class List(list, MutableSequence[T], extra=list): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, List): - raise TypeError("Type List cannot be instantiated; " - "use list() instead") - return _generic_new(list, cls, *args, **kwds) - - -class Set(set, MutableSet[T], extra=set): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, Set): - raise TypeError("Type Set cannot be instantiated; " - "use set() instead") - return _generic_new(set, cls, *args, **kwds) - - -class FrozenSet(frozenset, AbstractSet[T_co], extra=frozenset): - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, FrozenSet): - raise TypeError("Type FrozenSet cannot be instantiated; " - "use frozenset() instead") - return _generic_new(frozenset, cls, *args, **kwds) - - -class MappingView(Sized, Iterable[T_co], extra=collections_abc.MappingView): - __slots__ = () - - -class KeysView(MappingView[KT], AbstractSet[KT], - extra=collections_abc.KeysView): - __slots__ = () - - -class ItemsView(MappingView[Tuple[KT, VT_co]], - AbstractSet[Tuple[KT, VT_co]], - Generic[KT, VT_co], - extra=collections_abc.ItemsView): - __slots__ = () - - -class ValuesView(MappingView[VT_co], extra=collections_abc.ValuesView): - __slots__ = () - - -if hasattr(contextlib, 'AbstractContextManager'): - class ContextManager(Generic[T_co], extra=contextlib.AbstractContextManager): - __slots__ = () - __all__.append('ContextManager') - - -class Dict(dict, MutableMapping[KT, VT], extra=dict): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, Dict): - raise TypeError("Type Dict cannot be instantiated; " - "use dict() instead") - return _generic_new(dict, cls, *args, **kwds) - -class DefaultDict(collections.defaultdict, MutableMapping[KT, VT], - extra=collections.defaultdict): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, DefaultDict): - raise TypeError("Type DefaultDict cannot be instantiated; " - "use collections.defaultdict() instead") - return _generic_new(collections.defaultdict, cls, *args, **kwds) - -# Determine what base class to use for Generator. -if hasattr(collections_abc, 'Generator'): - # Sufficiently recent versions of 3.5 have a Generator ABC. - _G_base = collections_abc.Generator -else: - # Fall back on the exact type. 
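The concrete container generics above (List, Set, Dict, and so on) subscribe like any generic type but refuse direct instantiation; a sketch, assuming the public typing module:

    from typing import List

    names = []                  # type: List[str]  -- fine as an annotation
    try:
        List()                  # the typing alias itself cannot be instantiated
    except TypeError as exc:
        assert 'use list() instead' in str(exc)

    class IntList(List[int]):   # subclasses, however, may be instantiated
        pass

    IntList([1, 2, 3])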
- _G_base = types.GeneratorType - - -class Generator(Iterator[T_co], Generic[T_co, T_contra, V_co], - extra=_G_base): - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, Generator): - raise TypeError("Type Generator cannot be instantiated; " - "create a subclass instead") - return _generic_new(_G_base, cls, *args, **kwds) - - -# Internal type variable used for Type[]. -CT_co = TypeVar('CT_co', covariant=True, bound=type) - - -# This is not a real generic class. Don't use outside annotations. -class Type(Generic[CT_co], extra=type): - """A special construct usable to annotate class objects. - - For example, suppose we have the following classes:: - - class User: ... # Abstract base for User classes - class BasicUser(User): ... - class ProUser(User): ... - class TeamUser(User): ... - - And a function that takes a class argument that's a subclass of - User and returns an instance of the corresponding class:: - - U = TypeVar('U', bound=User) - def new_user(user_class: Type[U]) -> U: - user = user_class() - # (Here we could write the user object to a database) - return user - - joe = new_user(BasicUser) - - At this point the type checker knows that joe has type BasicUser. - """ - - __slots__ = () - - -def _make_nmtuple(name, types): - msg = "NamedTuple('Name', [(f0, t0), (f1, t1), ...]); each t must be a type" - types = [(n, _type_check(t, msg)) for n, t in types] - nm_tpl = collections.namedtuple(name, [n for n, t in types]) - nm_tpl._field_types = dict(types) - try: - nm_tpl.__module__ = sys._getframe(2).f_globals.get('__name__', '__main__') - except (AttributeError, ValueError): - pass - return nm_tpl - - -_PY36 = sys.version_info[:2] >= (3, 6) - - -class NamedTupleMeta(type): - - def __new__(cls, typename, bases, ns): - if ns.get('_root', False): - return super().__new__(cls, typename, bases, ns) - if not _PY36: - raise TypeError("Class syntax for NamedTuple is only supported" - " in Python 3.6+") - types = ns.get('__annotations__', {}) - return _make_nmtuple(typename, types.items()) - -class NamedTuple(metaclass=NamedTupleMeta): - """Typed version of namedtuple. - - Usage in Python versions >= 3.6:: - - class Employee(NamedTuple): - name: str - id: int - - This is equivalent to:: - - Employee = collections.namedtuple('Employee', ['name', 'id']) - - The resulting class has one extra attribute: _field_types, - giving a dict mapping field names to types. (The field names - are in the _fields attribute, which is part of the namedtuple - API.) Alternative equivalent keyword syntax is also accepted:: - - Employee = NamedTuple('Employee', name=str, id=int) - - In Python versions <= 3.5 use:: - - Employee = NamedTuple('Employee', [('name', str), ('id', int)]) - """ - _root = True - - def __new__(self, typename, fields=None, **kwargs): - if kwargs and not _PY36: - raise TypeError("Keyword syntax for NamedTuple is only supported" - " in Python 3.6+") - if fields is None: - fields = kwargs.items() - elif kwargs: - raise TypeError("Either list of fields or keywords" - " can be provided to NamedTuple, not both") - return _make_nmtuple(typename, fields) - - -def NewType(name, tp): - """NewType creates simple unique types with almost zero - runtime overhead. NewType(name, tp) is considered a subtype of tp - by static type checkers. At runtime, NewType(name, tp) returns - a dummy function that simply returns its argument. Usage:: - - UserId = NewType('UserId', int) - - def name_by_id(user_id: UserId) -> str: - ... 
- - UserId('user') # Fails type check - - name_by_id(42) # Fails type check - name_by_id(UserId(42)) # OK - - num = UserId(5) + 1 # type: int - """ - - def new_type(x): - return x - - new_type.__name__ = name - new_type.__supertype__ = tp - return new_type - - -# Python-version-specific alias (Python 2: unicode; Python 3: str) -Text = str - - -# Constant that's True when type checking, but False here. -TYPE_CHECKING = False - - -class IO(Generic[AnyStr]): - """Generic base class for TextIO and BinaryIO. - - This is an abstract, generic version of the return of open(). - - NOTE: This does not distinguish between the different possible - classes (text vs. binary, read vs. write vs. read/write, - append-only, unbuffered). The TextIO and BinaryIO subclasses - below capture the distinctions between text vs. binary, which is - pervasive in the interface; however we currently do not offer a - way to track the other distinctions in the type system. - """ - - __slots__ = () - - @abstractproperty - def mode(self) -> str: - pass - - @abstractproperty - def name(self) -> str: - pass - - @abstractmethod - def close(self) -> None: - pass - - @abstractmethod - def closed(self) -> bool: - pass - - @abstractmethod - def fileno(self) -> int: - pass - - @abstractmethod - def flush(self) -> None: - pass - - @abstractmethod - def isatty(self) -> bool: - pass - - @abstractmethod - def read(self, n: int = -1) -> AnyStr: - pass - - @abstractmethod - def readable(self) -> bool: - pass - - @abstractmethod - def readline(self, limit: int = -1) -> AnyStr: - pass - - @abstractmethod - def readlines(self, hint: int = -1) -> List[AnyStr]: - pass - - @abstractmethod - def seek(self, offset: int, whence: int = 0) -> int: - pass - - @abstractmethod - def seekable(self) -> bool: - pass - - @abstractmethod - def tell(self) -> int: - pass - - @abstractmethod - def truncate(self, size: int = None) -> int: - pass - - @abstractmethod - def writable(self) -> bool: - pass - - @abstractmethod - def write(self, s: AnyStr) -> int: - pass - - @abstractmethod - def writelines(self, lines: List[AnyStr]) -> None: - pass - - @abstractmethod - def __enter__(self) -> 'IO[AnyStr]': - pass - - @abstractmethod - def __exit__(self, type, value, traceback) -> None: - pass - - -class BinaryIO(IO[bytes]): - """Typed version of the return of open() in binary mode.""" - - __slots__ = () - - @abstractmethod - def write(self, s: Union[bytes, bytearray]) -> int: - pass - - @abstractmethod - def __enter__(self) -> 'BinaryIO': - pass - - -class TextIO(IO[str]): - """Typed version of the return of open() in text mode.""" - - __slots__ = () - - @abstractproperty - def buffer(self) -> BinaryIO: - pass - - @abstractproperty - def encoding(self) -> str: - pass - - @abstractproperty - def errors(self) -> Optional[str]: - pass - - @abstractproperty - def line_buffering(self) -> bool: - pass - - @abstractproperty - def newlines(self) -> Any: - pass - - @abstractmethod - def __enter__(self) -> 'TextIO': - pass - - -class io: - """Wrapper namespace for IO generic classes.""" - - __all__ = ['IO', 'TextIO', 'BinaryIO'] - IO = IO - TextIO = TextIO - BinaryIO = BinaryIO - -io.__name__ = __name__ + '.io' -sys.modules[io.__name__] = io - - -Pattern = _TypeAlias('Pattern', AnyStr, type(stdlib_re.compile('')), - lambda p: p.pattern) -Match = _TypeAlias('Match', AnyStr, type(stdlib_re.match('', '')), - lambda m: m.re.pattern) - - -class re: - """Wrapper namespace for re type aliases.""" - - __all__ = ['Pattern', 'Match'] - Pattern = Pattern - Match = Match - -re.__name__ = 
__name__ + '.re' -sys.modules[re.__name__] = re diff --git a/typing_extensions/test_data/python-3.6.1/_collections_abc.py b/typing_extensions/test_data/python-3.6.1/_collections_abc.py deleted file mode 100644 index b172f3f3..00000000 --- a/typing_extensions/test_data/python-3.6.1/_collections_abc.py +++ /dev/null @@ -1,1007 +0,0 @@ -# Copyright 2007 Google, Inc. All Rights Reserved. -# Licensed to PSF under a Contributor Agreement. - -"""Abstract Base Classes (ABCs) for collections, according to PEP 3119. - -Unit tests are in test_collections. -""" - -from abc import ABCMeta, abstractmethod -import sys - -__all__ = ["Awaitable", "Coroutine", - "AsyncIterable", "AsyncIterator", "AsyncGenerator", - "Hashable", "Iterable", "Iterator", "Generator", "Reversible", - "Sized", "Container", "Callable", "Collection", - "Set", "MutableSet", - "Mapping", "MutableMapping", - "MappingView", "KeysView", "ItemsView", "ValuesView", - "Sequence", "MutableSequence", - "ByteString", - ] - -# This module has been renamed from collections.abc to _collections_abc to -# speed up interpreter startup. Some of the types such as MutableMapping are -# required early but collections module imports a lot of other modules. -# See issue #19218 -__name__ = "collections.abc" - -# Private list of types that we want to register with the various ABCs -# so that they will pass tests like: -# it = iter(somebytearray) -# assert isinstance(it, Iterable) -# Note: in other implementations, these types might not be distinct -# and they may have their own implementation specific types that -# are not included on this list. -bytes_iterator = type(iter(b'')) -bytearray_iterator = type(iter(bytearray())) -#callable_iterator = ??? -dict_keyiterator = type(iter({}.keys())) -dict_valueiterator = type(iter({}.values())) -dict_itemiterator = type(iter({}.items())) -list_iterator = type(iter([])) -list_reverseiterator = type(iter(reversed([]))) -range_iterator = type(iter(range(0))) -longrange_iterator = type(iter(range(1 << 1000))) -set_iterator = type(iter(set())) -str_iterator = type(iter("")) -tuple_iterator = type(iter(())) -zip_iterator = type(iter(zip())) -## views ## -dict_keys = type({}.keys()) -dict_values = type({}.values()) -dict_items = type({}.items()) -## misc ## -mappingproxy = type(type.__dict__) -generator = type((lambda: (yield))()) -## coroutine ## -async def _coro(): pass -_coro = _coro() -coroutine = type(_coro) -_coro.close() # Prevent ResourceWarning -del _coro -## asynchronous generator ## -async def _ag(): yield -_ag = _ag() -async_generator = type(_ag) -del _ag - - -### ONE-TRICK PONIES ### - -def _check_methods(C, *methods): - mro = C.__mro__ - for method in methods: - for B in mro: - if method in B.__dict__: - if B.__dict__[method] is None: - return NotImplemented - break - else: - return NotImplemented - return True - -class Hashable(metaclass=ABCMeta): - - __slots__ = () - - @abstractmethod - def __hash__(self): - return 0 - - @classmethod - def __subclasshook__(cls, C): - if cls is Hashable: - return _check_methods(C, "__hash__") - return NotImplemented - - -class Awaitable(metaclass=ABCMeta): - - __slots__ = () - - @abstractmethod - def __await__(self): - yield - - @classmethod - def __subclasshook__(cls, C): - if cls is Awaitable: - return _check_methods(C, "__await__") - return NotImplemented - - -class Coroutine(Awaitable): - - __slots__ = () - - @abstractmethod - def send(self, value): - """Send a value into the coroutine. - Return next yielded value or raise StopIteration. 
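In the _collections_abc module whose deletion begins above, _check_methods drives each ABC's __subclasshook__, so conformance is duck-typed rather than registered; a sketch with a hypothetical Point class, using the public collections.abc:

    from collections.abc import Hashable

    class Point:
        def __hash__(self):
            return 0

    assert issubclass(Point, Hashable)    # via Hashable.__subclasshook__
    assert isinstance(Point(), Hashable)  # no explicit registration needed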
- """ - raise StopIteration - - @abstractmethod - def throw(self, typ, val=None, tb=None): - """Raise an exception in the coroutine. - Return next yielded value or raise StopIteration. - """ - if val is None: - if tb is None: - raise typ - val = typ() - if tb is not None: - val = val.with_traceback(tb) - raise val - - def close(self): - """Raise GeneratorExit inside coroutine. - """ - try: - self.throw(GeneratorExit) - except (GeneratorExit, StopIteration): - pass - else: - raise RuntimeError("coroutine ignored GeneratorExit") - - @classmethod - def __subclasshook__(cls, C): - if cls is Coroutine: - return _check_methods(C, '__await__', 'send', 'throw', 'close') - return NotImplemented - - -Coroutine.register(coroutine) - - -class AsyncIterable(metaclass=ABCMeta): - - __slots__ = () - - @abstractmethod - def __aiter__(self): - return AsyncIterator() - - @classmethod - def __subclasshook__(cls, C): - if cls is AsyncIterable: - return _check_methods(C, "__aiter__") - return NotImplemented - - -class AsyncIterator(AsyncIterable): - - __slots__ = () - - @abstractmethod - async def __anext__(self): - """Return the next item or raise StopAsyncIteration when exhausted.""" - raise StopAsyncIteration - - def __aiter__(self): - return self - - @classmethod - def __subclasshook__(cls, C): - if cls is AsyncIterator: - return _check_methods(C, "__anext__", "__aiter__") - return NotImplemented - - -class AsyncGenerator(AsyncIterator): - - __slots__ = () - - async def __anext__(self): - """Return the next item from the asynchronous generator. - When exhausted, raise StopAsyncIteration. - """ - return await self.asend(None) - - @abstractmethod - async def asend(self, value): - """Send a value into the asynchronous generator. - Return next yielded value or raise StopAsyncIteration. - """ - raise StopAsyncIteration - - @abstractmethod - async def athrow(self, typ, val=None, tb=None): - """Raise an exception in the asynchronous generator. - Return next yielded value or raise StopAsyncIteration. - """ - if val is None: - if tb is None: - raise typ - val = typ() - if tb is not None: - val = val.with_traceback(tb) - raise val - - async def aclose(self): - """Raise GeneratorExit inside coroutine. - """ - try: - await self.athrow(GeneratorExit) - except (GeneratorExit, StopAsyncIteration): - pass - else: - raise RuntimeError("asynchronous generator ignored GeneratorExit") - - @classmethod - def __subclasshook__(cls, C): - if cls is AsyncGenerator: - return _check_methods(C, '__aiter__', '__anext__', - 'asend', 'athrow', 'aclose') - return NotImplemented - - -AsyncGenerator.register(async_generator) - - -class Iterable(metaclass=ABCMeta): - - __slots__ = () - - @abstractmethod - def __iter__(self): - while False: - yield None - - @classmethod - def __subclasshook__(cls, C): - if cls is Iterable: - return _check_methods(C, "__iter__") - return NotImplemented - - -class Iterator(Iterable): - - __slots__ = () - - @abstractmethod - def __next__(self): - 'Return the next item from the iterator. 
When exhausted, raise StopIteration' - raise StopIteration - - def __iter__(self): - return self - - @classmethod - def __subclasshook__(cls, C): - if cls is Iterator: - return _check_methods(C, '__iter__', '__next__') - return NotImplemented - -Iterator.register(bytes_iterator) -Iterator.register(bytearray_iterator) -#Iterator.register(callable_iterator) -Iterator.register(dict_keyiterator) -Iterator.register(dict_valueiterator) -Iterator.register(dict_itemiterator) -Iterator.register(list_iterator) -Iterator.register(list_reverseiterator) -Iterator.register(range_iterator) -Iterator.register(longrange_iterator) -Iterator.register(set_iterator) -Iterator.register(str_iterator) -Iterator.register(tuple_iterator) -Iterator.register(zip_iterator) - - -class Reversible(Iterable): - - __slots__ = () - - @abstractmethod - def __reversed__(self): - while False: - yield None - - @classmethod - def __subclasshook__(cls, C): - if cls is Reversible: - return _check_methods(C, "__reversed__", "__iter__") - return NotImplemented - - -class Generator(Iterator): - - __slots__ = () - - def __next__(self): - """Return the next item from the generator. - When exhausted, raise StopIteration. - """ - return self.send(None) - - @abstractmethod - def send(self, value): - """Send a value into the generator. - Return next yielded value or raise StopIteration. - """ - raise StopIteration - - @abstractmethod - def throw(self, typ, val=None, tb=None): - """Raise an exception in the generator. - Return next yielded value or raise StopIteration. - """ - if val is None: - if tb is None: - raise typ - val = typ() - if tb is not None: - val = val.with_traceback(tb) - raise val - - def close(self): - """Raise GeneratorExit inside generator. - """ - try: - self.throw(GeneratorExit) - except (GeneratorExit, StopIteration): - pass - else: - raise RuntimeError("generator ignored GeneratorExit") - - @classmethod - def __subclasshook__(cls, C): - if cls is Generator: - return _check_methods(C, '__iter__', '__next__', - 'send', 'throw', 'close') - return NotImplemented - -Generator.register(generator) - - -class Sized(metaclass=ABCMeta): - - __slots__ = () - - @abstractmethod - def __len__(self): - return 0 - - @classmethod - def __subclasshook__(cls, C): - if cls is Sized: - return _check_methods(C, "__len__") - return NotImplemented - - -class Container(metaclass=ABCMeta): - - __slots__ = () - - @abstractmethod - def __contains__(self, x): - return False - - @classmethod - def __subclasshook__(cls, C): - if cls is Container: - return _check_methods(C, "__contains__") - return NotImplemented - -class Collection(Sized, Iterable, Container): - - __slots__ = () - - @classmethod - def __subclasshook__(cls, C): - if cls is Collection: - return _check_methods(C, "__len__", "__iter__", "__contains__") - return NotImplemented - -class Callable(metaclass=ABCMeta): - - __slots__ = () - - @abstractmethod - def __call__(self, *args, **kwds): - return False - - @classmethod - def __subclasshook__(cls, C): - if cls is Callable: - return _check_methods(C, "__call__") - return NotImplemented - - -### SETS ### - - -class Set(Collection): - - """A set is a finite, iterable container. - - This class provides concrete generic implementations of all - methods except for __contains__, __iter__ and __len__. - - To override the comparisons (presumably for speed, as the - semantics are fixed), redefine __le__ and __ge__, - then the other operations will automatically follow suit. 
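As the Set docstring above says, only __contains__, __iter__, and __len__ are abstract; the comparison and algebra methods come from the mixin. A sketch with a hypothetical minimal implementation:

    from collections.abc import Set

    class ListSet(Set):
        def __init__(self, items=()):
            self._items = []
            for x in items:               # keep first occurrence of each element
                if x not in self._items:
                    self._items.append(x)

        def __contains__(self, x):
            return x in self._items

        def __iter__(self):
            return iter(self._items)

        def __len__(self):
            return len(self._items)

    assert ListSet('ab') <= ListSet('abc')                  # __le__ from the mixin
    assert (ListSet('ab') & ListSet('bc')) == ListSet('b')  # as are &, |, -, ^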
- """ - - __slots__ = () - - def __le__(self, other): - if not isinstance(other, Set): - return NotImplemented - if len(self) > len(other): - return False - for elem in self: - if elem not in other: - return False - return True - - def __lt__(self, other): - if not isinstance(other, Set): - return NotImplemented - return len(self) < len(other) and self.__le__(other) - - def __gt__(self, other): - if not isinstance(other, Set): - return NotImplemented - return len(self) > len(other) and self.__ge__(other) - - def __ge__(self, other): - if not isinstance(other, Set): - return NotImplemented - if len(self) < len(other): - return False - for elem in other: - if elem not in self: - return False - return True - - def __eq__(self, other): - if not isinstance(other, Set): - return NotImplemented - return len(self) == len(other) and self.__le__(other) - - @classmethod - def _from_iterable(cls, it): - '''Construct an instance of the class from any iterable input. - - Must override this method if the class constructor signature - does not accept an iterable for an input. - ''' - return cls(it) - - def __and__(self, other): - if not isinstance(other, Iterable): - return NotImplemented - return self._from_iterable(value for value in other if value in self) - - __rand__ = __and__ - - def isdisjoint(self, other): - 'Return True if two sets have a null intersection.' - for value in other: - if value in self: - return False - return True - - def __or__(self, other): - if not isinstance(other, Iterable): - return NotImplemented - chain = (e for s in (self, other) for e in s) - return self._from_iterable(chain) - - __ror__ = __or__ - - def __sub__(self, other): - if not isinstance(other, Set): - if not isinstance(other, Iterable): - return NotImplemented - other = self._from_iterable(other) - return self._from_iterable(value for value in self - if value not in other) - - def __rsub__(self, other): - if not isinstance(other, Set): - if not isinstance(other, Iterable): - return NotImplemented - other = self._from_iterable(other) - return self._from_iterable(value for value in other - if value not in self) - - def __xor__(self, other): - if not isinstance(other, Set): - if not isinstance(other, Iterable): - return NotImplemented - other = self._from_iterable(other) - return (self - other) | (other - self) - - __rxor__ = __xor__ - - def _hash(self): - """Compute the hash value of a set. - - Note that we don't define __hash__: not all sets are hashable. - But if you define a hashable set type, its __hash__ should - call this function. - - This must be compatible __eq__. - - All sets ought to compare equal if they contain the same - elements, regardless of how they are implemented, and - regardless of the order of the elements; so there's not much - freedom for __eq__ or __hash__. We match the algorithm used - by the built-in frozenset type. - """ - MAX = sys.maxsize - MASK = 2 * MAX + 1 - n = len(self) - h = 1927868237 * (n + 1) - h &= MASK - for x in self: - hx = hash(x) - h ^= (hx ^ (hx << 16) ^ 89869747) * 3644798167 - h &= MASK - h = h * 69069 + 907133923 - h &= MASK - if h > MAX: - h -= MASK + 1 - if h == -1: - h = 590923713 - return h - -Set.register(frozenset) - - -class MutableSet(Set): - """A mutable set is a finite, iterable container. - - This class provides concrete generic implementations of all - methods except for __contains__, __iter__, __len__, - add(), and discard(). 
- - To override the comparisons (presumably for speed, as the - semantics are fixed), all you have to do is redefine __le__ and - then the other operations will automatically follow suit. - """ - - __slots__ = () - - @abstractmethod - def add(self, value): - """Add an element.""" - raise NotImplementedError - - @abstractmethod - def discard(self, value): - """Remove an element. Do not raise an exception if absent.""" - raise NotImplementedError - - def remove(self, value): - """Remove an element. If not a member, raise a KeyError.""" - if value not in self: - raise KeyError(value) - self.discard(value) - - def pop(self): - """Return the popped value. Raise KeyError if empty.""" - it = iter(self) - try: - value = next(it) - except StopIteration: - raise KeyError - self.discard(value) - return value - - def clear(self): - """This is slow (creates N new iterators!) but effective.""" - try: - while True: - self.pop() - except KeyError: - pass - - def __ior__(self, it): - for value in it: - self.add(value) - return self - - def __iand__(self, it): - for value in (self - it): - self.discard(value) - return self - - def __ixor__(self, it): - if it is self: - self.clear() - else: - if not isinstance(it, Set): - it = self._from_iterable(it) - for value in it: - if value in self: - self.discard(value) - else: - self.add(value) - return self - - def __isub__(self, it): - if it is self: - self.clear() - else: - for value in it: - self.discard(value) - return self - -MutableSet.register(set) - - -### MAPPINGS ### - - -class Mapping(Collection): - - __slots__ = () - - """A Mapping is a generic container for associating key/value - pairs. - - This class provides concrete generic implementations of all - methods except for __getitem__, __iter__, and __len__. - - """ - - @abstractmethod - def __getitem__(self, key): - raise KeyError - - def get(self, key, default=None): - 'D.get(k[,d]) -> D[k] if k in D, else d. d defaults to None.' 
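# A minimal sketch (Env is a hypothetical class name) of how these Mapping
# mixins compose: a subclass supplies only __getitem__, __iter__ and
# __len__, and get(), __contains__, keys(), items(), values() and __eq__
# all follow from the generic implementations in this ABC.
#   >>> from collections.abc import Mapping
#   >>> class Env(Mapping):
#   ...     def __init__(self, pairs): self._d = dict(pairs)
#   ...     def __getitem__(self, key): return self._d[key]
#   ...     def __iter__(self): return iter(self._d)
#   ...     def __len__(self): return len(self._d)
#   >>> Env({'a': 1}).get('b', 0)   # falls back to the default via try/except
#   0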
- try: - return self[key] - except KeyError: - return default - - def __contains__(self, key): - try: - self[key] - except KeyError: - return False - else: - return True - - def keys(self): - "D.keys() -> a set-like object providing a view on D's keys" - return KeysView(self) - - def items(self): - "D.items() -> a set-like object providing a view on D's items" - return ItemsView(self) - - def values(self): - "D.values() -> an object providing a view on D's values" - return ValuesView(self) - - def __eq__(self, other): - if not isinstance(other, Mapping): - return NotImplemented - return dict(self.items()) == dict(other.items()) - - __reversed__ = None - -Mapping.register(mappingproxy) - - -class MappingView(Sized): - - __slots__ = '_mapping', - - def __init__(self, mapping): - self._mapping = mapping - - def __len__(self): - return len(self._mapping) - - def __repr__(self): - return '{0.__class__.__name__}({0._mapping!r})'.format(self) - - -class KeysView(MappingView, Set): - - __slots__ = () - - @classmethod - def _from_iterable(self, it): - return set(it) - - def __contains__(self, key): - return key in self._mapping - - def __iter__(self): - yield from self._mapping - -KeysView.register(dict_keys) - - -class ItemsView(MappingView, Set): - - __slots__ = () - - @classmethod - def _from_iterable(self, it): - return set(it) - - def __contains__(self, item): - key, value = item - try: - v = self._mapping[key] - except KeyError: - return False - else: - return v is value or v == value - - def __iter__(self): - for key in self._mapping: - yield (key, self._mapping[key]) - -ItemsView.register(dict_items) - - -class ValuesView(MappingView): - - __slots__ = () - - def __contains__(self, value): - for key in self._mapping: - v = self._mapping[key] - if v is value or v == value: - return True - return False - - def __iter__(self): - for key in self._mapping: - yield self._mapping[key] - -ValuesView.register(dict_values) - - -class MutableMapping(Mapping): - - __slots__ = () - - """A MutableMapping is a generic container for associating - key/value pairs. - - This class provides concrete generic implementations of all - methods except for __getitem__, __setitem__, __delitem__, - __iter__, and __len__. - - """ - - @abstractmethod - def __setitem__(self, key, value): - raise KeyError - - @abstractmethod - def __delitem__(self, key): - raise KeyError - - __marker = object() - - def pop(self, key, default=__marker): - '''D.pop(k[,d]) -> v, remove specified key and return the corresponding value. - If key is not found, d is returned if given, otherwise KeyError is raised. - ''' - try: - value = self[key] - except KeyError: - if default is self.__marker: - raise - return default - else: - del self[key] - return value - - def popitem(self): - '''D.popitem() -> (k, v), remove and return some (key, value) pair - as a 2-tuple; but raise KeyError if D is empty. - ''' - try: - key = next(iter(self)) - except StopIteration: - raise KeyError - value = self[key] - del self[key] - return key, value - - def clear(self): - 'D.clear() -> None. Remove all items from D.' - try: - while True: - self.popitem() - except KeyError: - pass - - def update(*args, **kwds): - ''' D.update([E, ]**F) -> None. Update D from mapping/iterable E and F. 
- If E present and has a .keys() method, does: for k in E: D[k] = E[k] - If E present and lacks .keys() method, does: for (k, v) in E: D[k] = v - In either case, this is followed by: for k, v in F.items(): D[k] = v - ''' - if not args: - raise TypeError("descriptor 'update' of 'MutableMapping' object " - "needs an argument") - self, *args = args - if len(args) > 1: - raise TypeError('update expected at most 1 arguments, got %d' % - len(args)) - if args: - other = args[0] - if isinstance(other, Mapping): - for key in other: - self[key] = other[key] - elif hasattr(other, "keys"): - for key in other.keys(): - self[key] = other[key] - else: - for key, value in other: - self[key] = value - for key, value in kwds.items(): - self[key] = value - - def setdefault(self, key, default=None): - 'D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D' - try: - return self[key] - except KeyError: - self[key] = default - return default - -MutableMapping.register(dict) - - -### SEQUENCES ### - - -class Sequence(Reversible, Collection): - - """All the operations on a read-only sequence. - - Concrete subclasses must override __new__ or __init__, - __getitem__, and __len__. - """ - - __slots__ = () - - @abstractmethod - def __getitem__(self, index): - raise IndexError - - def __iter__(self): - i = 0 - try: - while True: - v = self[i] - yield v - i += 1 - except IndexError: - return - - def __contains__(self, value): - for v in self: - if v is value or v == value: - return True - return False - - def __reversed__(self): - for i in reversed(range(len(self))): - yield self[i] - - def index(self, value, start=0, stop=None): - '''S.index(value, [start, [stop]]) -> integer -- return first index of value. - Raises ValueError if the value is not present. - ''' - if start is not None and start < 0: - start = max(len(self) + start, 0) - if stop is not None and stop < 0: - stop += len(self) - - i = start - while stop is None or i < stop: - try: - if self[i] == value: - return i - except IndexError: - break - i += 1 - raise ValueError - - def count(self, value): - 'S.count(value) -> integer -- return number of occurrences of value' - return sum(1 for v in self if v == value) - -Sequence.register(tuple) -Sequence.register(str) -Sequence.register(range) -Sequence.register(memoryview) - - -class ByteString(Sequence): - - """This unifies bytes and bytearray. - - XXX Should add all their methods. - """ - - __slots__ = () - -ByteString.register(bytes) -ByteString.register(bytearray) - - -class MutableSequence(Sequence): - - __slots__ = () - - """All the operations on a read-write sequence. - - Concrete subclasses must provide __new__ or __init__, - __getitem__, __setitem__, __delitem__, __len__, and insert(). 
- - """ - - @abstractmethod - def __setitem__(self, index, value): - raise IndexError - - @abstractmethod - def __delitem__(self, index): - raise IndexError - - @abstractmethod - def insert(self, index, value): - 'S.insert(index, value) -- insert value before index' - raise IndexError - - def append(self, value): - 'S.append(value) -- append value to the end of the sequence' - self.insert(len(self), value) - - def clear(self): - 'S.clear() -> None -- remove all items from S' - try: - while True: - self.pop() - except IndexError: - pass - - def reverse(self): - 'S.reverse() -- reverse *IN PLACE*' - n = len(self) - for i in range(n//2): - self[i], self[n-i-1] = self[n-i-1], self[i] - - def extend(self, values): - 'S.extend(iterable) -- extend sequence by appending elements from the iterable' - for v in values: - self.append(v) - - def pop(self, index=-1): - '''S.pop([index]) -> item -- remove and return item at index (default last). - Raise IndexError if list is empty or index is out of range. - ''' - v = self[index] - del self[index] - return v - - def remove(self, value): - '''S.remove(value) -- remove first occurrence of value. - Raise ValueError if the value is not present. - ''' - del self[self.index(value)] - - def __iadd__(self, values): - self.extend(values) - return self - -MutableSequence.register(list) -MutableSequence.register(bytearray) # Multiply inheriting, see ByteString diff --git a/typing_extensions/test_data/python-3.6.1/abc.py b/typing_extensions/test_data/python-3.6.1/abc.py deleted file mode 100644 index 1cbf96a6..00000000 --- a/typing_extensions/test_data/python-3.6.1/abc.py +++ /dev/null @@ -1,248 +0,0 @@ -# Copyright 2007 Google, Inc. All Rights Reserved. -# Licensed to PSF under a Contributor Agreement. - -"""Abstract Base Classes (ABCs) according to PEP 3119.""" - -from _weakrefset import WeakSet - - -def abstractmethod(funcobj): - """A decorator indicating abstract methods. - - Requires that the metaclass is ABCMeta or derived from it. A - class that has a metaclass derived from ABCMeta cannot be - instantiated unless all of its abstract methods are overridden. - The abstract methods can be called using any of the normal - 'super' call mechanisms. - - Usage: - - class C(metaclass=ABCMeta): - @abstractmethod - def my_abstract_method(self, ...): - ... - """ - funcobj.__isabstractmethod__ = True - return funcobj - - -class abstractclassmethod(classmethod): - """ - A decorator indicating abstract classmethods. - - Similar to abstractmethod. - - Usage: - - class C(metaclass=ABCMeta): - @abstractclassmethod - def my_abstract_classmethod(cls, ...): - ... - - 'abstractclassmethod' is deprecated. Use 'classmethod' with - 'abstractmethod' instead. - """ - - __isabstractmethod__ = True - - def __init__(self, callable): - callable.__isabstractmethod__ = True - super().__init__(callable) - - -class abstractstaticmethod(staticmethod): - """ - A decorator indicating abstract staticmethods. - - Similar to abstractmethod. - - Usage: - - class C(metaclass=ABCMeta): - @abstractstaticmethod - def my_abstract_staticmethod(...): - ... - - 'abstractstaticmethod' is deprecated. Use 'staticmethod' with - 'abstractmethod' instead. - """ - - __isabstractmethod__ = True - - def __init__(self, callable): - callable.__isabstractmethod__ = True - super().__init__(callable) - - -class abstractproperty(property): - """ - A decorator indicating abstract properties. - - Requires that the metaclass is ABCMeta or derived from it. 
A - class that has a metaclass derived from ABCMeta cannot be - instantiated unless all of its abstract properties are overridden. - The abstract properties can be called using any of the normal - 'super' call mechanisms. - - Usage: - - class C(metaclass=ABCMeta): - @abstractproperty - def my_abstract_property(self): - ... - - This defines a read-only property; you can also define a read-write - abstract property using the 'long' form of property declaration: - - class C(metaclass=ABCMeta): - def getx(self): ... - def setx(self, value): ... - x = abstractproperty(getx, setx) - - 'abstractproperty' is deprecated. Use 'property' with 'abstractmethod' - instead. - """ - - __isabstractmethod__ = True - - -class ABCMeta(type): - - """Metaclass for defining Abstract Base Classes (ABCs). - - Use this metaclass to create an ABC. An ABC can be subclassed - directly, and then acts as a mix-in class. You can also register - unrelated concrete classes (even built-in classes) and unrelated - ABCs as 'virtual subclasses' -- these and their descendants will - be considered subclasses of the registering ABC by the built-in - issubclass() function, but the registering ABC won't show up in - their MRO (Method Resolution Order) nor will method - implementations defined by the registering ABC be callable (not - even via super()). - - """ - - # A global counter that is incremented each time a class is - # registered as a virtual subclass of anything. It forces the - # negative cache to be cleared before its next use. - # Note: this counter is private. Use `abc.get_cache_token()` for - # external code. - _abc_invalidation_counter = 0 - - def __new__(mcls, name, bases, namespace): - cls = super().__new__(mcls, name, bases, namespace) - # Compute set of abstract method names - abstracts = {name - for name, value in namespace.items() - if getattr(value, "__isabstractmethod__", False)} - for base in bases: - for name in getattr(base, "__abstractmethods__", set()): - value = getattr(cls, name, None) - if getattr(value, "__isabstractmethod__", False): - abstracts.add(name) - cls.__abstractmethods__ = frozenset(abstracts) - # Set up inheritance registry - cls._abc_registry = WeakSet() - cls._abc_cache = WeakSet() - cls._abc_negative_cache = WeakSet() - cls._abc_negative_cache_version = ABCMeta._abc_invalidation_counter - return cls - - def register(cls, subclass): - """Register a virtual subclass of an ABC. - - Returns the subclass, to allow usage as a class decorator. - """ - if not isinstance(subclass, type): - raise TypeError("Can only register classes") - if issubclass(subclass, cls): - return subclass # Already a subclass - # Subtle: test for cycles *after* testing for "already a subclass"; - # this means we allow X.register(X) and interpret it as a no-op. 
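# A short doctest illustrating the registration machinery (Serializer and
# JSON are hypothetical names): register() returns the class, so it also
# works as a decorator, and each call advances the invalidation counter
# that get_cache_token() exposes.
#   >>> from abc import ABCMeta, get_cache_token
#   >>> class Serializer(metaclass=ABCMeta): pass
#   >>> class JSON: pass
#   >>> token = get_cache_token()
#   >>> Serializer.register(JSON) is JSON
#   True
#   >>> issubclass(JSON, Serializer), get_cache_token() != token
#   (True, True)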
- if issubclass(cls, subclass): - # This would create a cycle, which is bad for the algorithm below - raise RuntimeError("Refusing to create an inheritance cycle") - cls._abc_registry.add(subclass) - ABCMeta._abc_invalidation_counter += 1 # Invalidate negative cache - return subclass - - def _dump_registry(cls, file=None): - """Debug helper to print the ABC registry.""" - print("Class: %s.%s" % (cls.__module__, cls.__qualname__), file=file) - print("Inv.counter: %s" % ABCMeta._abc_invalidation_counter, file=file) - for name in sorted(cls.__dict__.keys()): - if name.startswith("_abc_"): - value = getattr(cls, name) - print("%s: %r" % (name, value), file=file) - - def __instancecheck__(cls, instance): - """Override for isinstance(instance, cls).""" - # Inline the cache checking - subclass = instance.__class__ - if subclass in cls._abc_cache: - return True - subtype = type(instance) - if subtype is subclass: - if (cls._abc_negative_cache_version == - ABCMeta._abc_invalidation_counter and - subclass in cls._abc_negative_cache): - return False - # Fall back to the subclass check. - return cls.__subclasscheck__(subclass) - return any(cls.__subclasscheck__(c) for c in {subclass, subtype}) - - def __subclasscheck__(cls, subclass): - """Override for issubclass(subclass, cls).""" - # Check cache - if subclass in cls._abc_cache: - return True - # Check negative cache; may have to invalidate - if cls._abc_negative_cache_version < ABCMeta._abc_invalidation_counter: - # Invalidate the negative cache - cls._abc_negative_cache = WeakSet() - cls._abc_negative_cache_version = ABCMeta._abc_invalidation_counter - elif subclass in cls._abc_negative_cache: - return False - # Check the subclass hook - ok = cls.__subclasshook__(subclass) - if ok is not NotImplemented: - assert isinstance(ok, bool) - if ok: - cls._abc_cache.add(subclass) - else: - cls._abc_negative_cache.add(subclass) - return ok - # Check if it's a direct subclass - if cls in getattr(subclass, '__mro__', ()): - cls._abc_cache.add(subclass) - return True - # Check if it's a subclass of a registered class (recursive) - for rcls in cls._abc_registry: - if issubclass(subclass, rcls): - cls._abc_cache.add(subclass) - return True - # Check if it's a subclass of a subclass (recursive) - for scls in cls.__subclasses__(): - if issubclass(subclass, scls): - cls._abc_cache.add(subclass) - return True - # No dice; update negative cache - cls._abc_negative_cache.add(subclass) - return False - - -class ABC(metaclass=ABCMeta): - """Helper class that provides a standard way to create an ABC using - inheritance. - """ - pass - - -def get_cache_token(): - """Returns the current ABC cache token. - - The token is an opaque object (supporting equality testing) identifying the - current version of the ABC cache for virtual subclasses. The token changes - with every call to ``register()`` on any ABC. - """ - return ABCMeta._abc_invalidation_counter diff --git a/typing_extensions/test_data/python-3.6.1/collections/__init__.py b/typing_extensions/test_data/python-3.6.1/collections/__init__.py deleted file mode 100644 index 85b4c3c1..00000000 --- a/typing_extensions/test_data/python-3.6.1/collections/__init__.py +++ /dev/null @@ -1,1246 +0,0 @@ -'''This module implements specialized container datatypes providing -alternatives to Python's general purpose built-in containers, dict, -list, set, and tuple. 
- -* namedtuple factory function for creating tuple subclasses with named fields -* deque list-like container with fast appends and pops on either end -* ChainMap dict-like class for creating a single view of multiple mappings -* Counter dict subclass for counting hashable objects -* OrderedDict dict subclass that remembers the order entries were added -* defaultdict dict subclass that calls a factory function to supply missing values -* UserDict wrapper around dictionary objects for easier dict subclassing -* UserList wrapper around list objects for easier list subclassing -* UserString wrapper around string objects for easier string subclassing - -''' - -__all__ = ['deque', 'defaultdict', 'namedtuple', 'UserDict', 'UserList', - 'UserString', 'Counter', 'OrderedDict', 'ChainMap'] - -# For backwards compatibility, continue to make the collections ABCs -# available through the collections module. -from _collections_abc import * -import _collections_abc -__all__ += _collections_abc.__all__ - -from operator import itemgetter as _itemgetter, eq as _eq -from keyword import iskeyword as _iskeyword -import sys as _sys -import heapq as _heapq -from _weakref import proxy as _proxy -from itertools import repeat as _repeat, chain as _chain, starmap as _starmap -from reprlib import recursive_repr as _recursive_repr - -try: - from _collections import deque -except ImportError: - pass -else: - MutableSequence.register(deque) - -try: - from _collections import defaultdict -except ImportError: - pass - - -################################################################################ -### OrderedDict -################################################################################ - -class _OrderedDictKeysView(KeysView): - - def __reversed__(self): - yield from reversed(self._mapping) - -class _OrderedDictItemsView(ItemsView): - - def __reversed__(self): - for key in reversed(self._mapping): - yield (key, self._mapping[key]) - -class _OrderedDictValuesView(ValuesView): - - def __reversed__(self): - for key in reversed(self._mapping): - yield self._mapping[key] - -class _Link(object): - __slots__ = 'prev', 'next', 'key', '__weakref__' - -class OrderedDict(dict): - 'Dictionary that remembers insertion order' - # An inherited dict maps keys to values. - # The inherited dict provides __getitem__, __len__, __contains__, and get. - # The remaining methods are order-aware. - # Big-O running times for all methods are the same as regular dictionaries. - - # The internal self.__map dict maps keys to links in a doubly linked list. - # The circular doubly linked list starts and ends with a sentinel element. - # The sentinel element never gets deleted (this simplifies the algorithm). - # The sentinel is in self.__hardroot with a weakref proxy in self.__root. - # The prev links are weakref proxies (to prevent circular references). - # Individual links are kept alive by the hard reference in self.__map. - # Those hard references disappear when a key is deleted from an OrderedDict. - - def __init__(*args, **kwds): - '''Initialize an ordered dictionary. The signature is the same as - regular dictionaries, but keyword arguments are not recommended because - their insertion order is arbitrary. 
- - ''' - if not args: - raise TypeError("descriptor '__init__' of 'OrderedDict' object " - "needs an argument") - self, *args = args - if len(args) > 1: - raise TypeError('expected at most 1 arguments, got %d' % len(args)) - try: - self.__root - except AttributeError: - self.__hardroot = _Link() - self.__root = root = _proxy(self.__hardroot) - root.prev = root.next = root - self.__map = {} - self.__update(*args, **kwds) - - def __setitem__(self, key, value, - dict_setitem=dict.__setitem__, proxy=_proxy, Link=_Link): - 'od.__setitem__(i, y) <==> od[i]=y' - # Setting a new item creates a new link at the end of the linked list, - # and the inherited dictionary is updated with the new key/value pair. - if key not in self: - self.__map[key] = link = Link() - root = self.__root - last = root.prev - link.prev, link.next, link.key = last, root, key - last.next = link - root.prev = proxy(link) - dict_setitem(self, key, value) - - def __delitem__(self, key, dict_delitem=dict.__delitem__): - 'od.__delitem__(y) <==> del od[y]' - # Deleting an existing item uses self.__map to find the link which gets - # removed by updating the links in the predecessor and successor nodes. - dict_delitem(self, key) - link = self.__map.pop(key) - link_prev = link.prev - link_next = link.next - link_prev.next = link_next - link_next.prev = link_prev - link.prev = None - link.next = None - - def __iter__(self): - 'od.__iter__() <==> iter(od)' - # Traverse the linked list in order. - root = self.__root - curr = root.next - while curr is not root: - yield curr.key - curr = curr.next - - def __reversed__(self): - 'od.__reversed__() <==> reversed(od)' - # Traverse the linked list in reverse order. - root = self.__root - curr = root.prev - while curr is not root: - yield curr.key - curr = curr.prev - - def clear(self): - 'od.clear() -> None. Remove all items from od.' - root = self.__root - root.prev = root.next = root - self.__map.clear() - dict.clear(self) - - def popitem(self, last=True): - '''od.popitem() -> (k, v), return and remove a (key, value) pair. - Pairs are returned in LIFO order if last is true or FIFO order if false. - - ''' - if not self: - raise KeyError('dictionary is empty') - root = self.__root - if last: - link = root.prev - link_prev = link.prev - link_prev.next = root - root.prev = link_prev - else: - link = root.next - link_next = link.next - root.next = link_next - link_next.prev = root - key = link.key - del self.__map[key] - value = dict.pop(self, key) - return key, value - - def move_to_end(self, key, last=True): - '''Move an existing element to the end (or beginning if last==False). - - Raises KeyError if the element does not exist. - When last=True, acts like a fast version of self[key]=self.pop(key). 
- - ''' - link = self.__map[key] - link_prev = link.prev - link_next = link.next - soft_link = link_next.prev - link_prev.next = link_next - link_next.prev = link_prev - root = self.__root - if last: - last = root.prev - link.prev = last - link.next = root - root.prev = soft_link - last.next = link - else: - first = root.next - link.prev = root - link.next = first - first.prev = soft_link - root.next = link - - def __sizeof__(self): - sizeof = _sys.getsizeof - n = len(self) + 1 # number of links including root - size = sizeof(self.__dict__) # instance dictionary - size += sizeof(self.__map) * 2 # internal dict and inherited dict - size += sizeof(self.__hardroot) * n # link objects - size += sizeof(self.__root) * n # proxy objects - return size - - update = __update = MutableMapping.update - - def keys(self): - "D.keys() -> a set-like object providing a view on D's keys" - return _OrderedDictKeysView(self) - - def items(self): - "D.items() -> a set-like object providing a view on D's items" - return _OrderedDictItemsView(self) - - def values(self): - "D.values() -> an object providing a view on D's values" - return _OrderedDictValuesView(self) - - __ne__ = MutableMapping.__ne__ - - __marker = object() - - def pop(self, key, default=__marker): - '''od.pop(k[,d]) -> v, remove specified key and return the corresponding - value. If key is not found, d is returned if given, otherwise KeyError - is raised. - - ''' - if key in self: - result = self[key] - del self[key] - return result - if default is self.__marker: - raise KeyError(key) - return default - - def setdefault(self, key, default=None): - 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' - if key in self: - return self[key] - self[key] = default - return default - - @_recursive_repr() - def __repr__(self): - 'od.__repr__() <==> repr(od)' - if not self: - return '%s()' % (self.__class__.__name__,) - return '%s(%r)' % (self.__class__.__name__, list(self.items())) - - def __reduce__(self): - 'Return state information for pickling' - inst_dict = vars(self).copy() - for k in vars(OrderedDict()): - inst_dict.pop(k, None) - return self.__class__, (), inst_dict or None, None, iter(self.items()) - - def copy(self): - 'od.copy() -> a shallow copy of od' - return self.__class__(self) - - @classmethod - def fromkeys(cls, iterable, value=None): - '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S. - If not specified, the value defaults to None. - - ''' - self = cls() - for key in iterable: - self[key] = value - return self - - def __eq__(self, other): - '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive - while comparison to a regular mapping is order-insensitive. - - ''' - if isinstance(other, OrderedDict): - return dict.__eq__(self, other) and all(map(_eq, self, other)) - return dict.__eq__(self, other) - - -try: - from _collections import OrderedDict -except ImportError: - # Leave the pure Python version in place. 
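# A brief doctest for the ordering behaviour implemented above; it holds
# identically for the pure-Python class and the C replacement imported here.
#   >>> from collections import OrderedDict
#   >>> od = OrderedDict([('a', 1), ('b', 2), ('c', 3)])
#   >>> od.move_to_end('a')          # relinks 'a' at the tail of the list
#   >>> list(od)
#   ['b', 'c', 'a']
#   >>> od.popitem(last=False)       # FIFO order when last is false
#   ('b', 2)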
- pass - - -################################################################################ -### namedtuple -################################################################################ - -_class_template = """\ -from builtins import property as _property, tuple as _tuple -from operator import itemgetter as _itemgetter -from collections import OrderedDict - -class {typename}(tuple): - '{typename}({arg_list})' - - __slots__ = () - - _fields = {field_names!r} - - def __new__(_cls, {arg_list}): - 'Create new instance of {typename}({arg_list})' - return _tuple.__new__(_cls, ({arg_list})) - - @classmethod - def _make(cls, iterable, new=tuple.__new__, len=len): - 'Make a new {typename} object from a sequence or iterable' - result = new(cls, iterable) - if len(result) != {num_fields:d}: - raise TypeError('Expected {num_fields:d} arguments, got %d' % len(result)) - return result - - def _replace(_self, **kwds): - 'Return a new {typename} object replacing specified fields with new values' - result = _self._make(map(kwds.pop, {field_names!r}, _self)) - if kwds: - raise ValueError('Got unexpected field names: %r' % list(kwds)) - return result - - def __repr__(self): - 'Return a nicely formatted representation string' - return self.__class__.__name__ + '({repr_fmt})' % self - - def _asdict(self): - 'Return a new OrderedDict which maps field names to their values.' - return OrderedDict(zip(self._fields, self)) - - def __getnewargs__(self): - 'Return self as a plain tuple. Used by copy and pickle.' - return tuple(self) - -{field_defs} -""" - -_repr_template = '{name}=%r' - -_field_template = '''\ - {name} = _property(_itemgetter({index:d}), doc='Alias for field number {index:d}') -''' - -def namedtuple(typename, field_names, *, verbose=False, rename=False, module=None): - """Returns a new subclass of tuple with named fields. - - >>> Point = namedtuple('Point', ['x', 'y']) - >>> Point.__doc__ # docstring for the new class - 'Point(x, y)' - >>> p = Point(11, y=22) # instantiate with positional args or keywords - >>> p[0] + p[1] # indexable like a plain tuple - 33 - >>> x, y = p # unpack like a regular tuple - >>> x, y - (11, 22) - >>> p.x + p.y # fields also accessible by name - 33 - >>> d = p._asdict() # convert to a dictionary - >>> d['x'] - 11 - >>> Point(**d) # convert from a dictionary - Point(x=11, y=22) - >>> p._replace(x=100) # _replace() is like str.replace() but targets named fields - Point(x=100, y=22) - - """ - - # Validate the field names. At the user's option, either generate an error - # message or automatically replace the field name with a valid name. 
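# A small doctest for the two validation modes described above (T is a
# throwaway name): rename=True substitutes _0, _1, ... for invalid or
# duplicate names, while the default mode raises instead.
#   >>> from collections import namedtuple
#   >>> namedtuple('T', ['abc', 'def', 'abc'], rename=True)._fields
#   ('abc', '_1', '_2')
#   >>> namedtuple('T', ['class'])
#   Traceback (most recent call last):
#       ...
#   ValueError: Type names and field names cannot be a keyword: 'class'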
- if isinstance(field_names, str): - field_names = field_names.replace(',', ' ').split() - field_names = list(map(str, field_names)) - typename = str(typename) - if rename: - seen = set() - for index, name in enumerate(field_names): - if (not name.isidentifier() - or _iskeyword(name) - or name.startswith('_') - or name in seen): - field_names[index] = '_%d' % index - seen.add(name) - for name in [typename] + field_names: - if type(name) is not str: - raise TypeError('Type names and field names must be strings') - if not name.isidentifier(): - raise ValueError('Type names and field names must be valid ' - 'identifiers: %r' % name) - if _iskeyword(name): - raise ValueError('Type names and field names cannot be a ' - 'keyword: %r' % name) - seen = set() - for name in field_names: - if name.startswith('_') and not rename: - raise ValueError('Field names cannot start with an underscore: ' - '%r' % name) - if name in seen: - raise ValueError('Encountered duplicate field name: %r' % name) - seen.add(name) - - # Fill-in the class template - class_definition = _class_template.format( - typename = typename, - field_names = tuple(field_names), - num_fields = len(field_names), - arg_list = repr(tuple(field_names)).replace("'", "")[1:-1], - repr_fmt = ', '.join(_repr_template.format(name=name) - for name in field_names), - field_defs = '\n'.join(_field_template.format(index=index, name=name) - for index, name in enumerate(field_names)) - ) - - # Execute the template string in a temporary namespace and support - # tracing utilities by setting a value for frame.f_globals['__name__'] - namespace = dict(__name__='namedtuple_%s' % typename) - exec(class_definition, namespace) - result = namespace[typename] - result._source = class_definition - if verbose: - print(result._source) - - # For pickling to work, the __module__ variable needs to be set to the frame - # where the named tuple is created. Bypass this step in environments where - # sys._getframe is not defined (Jython for example) or sys._getframe is not - # defined for arguments greater than 0 (IronPython), or where the user has - # specified a particular module. - if module is None: - try: - module = _sys._getframe(1).f_globals.get('__name__', '__main__') - except (AttributeError, ValueError): - pass - if module is not None: - result.__module__ = module - - return result - - -######################################################################## -### Counter -######################################################################## - -def _count_elements(mapping, iterable): - 'Tally elements from the iterable.' - mapping_get = mapping.get - for elem in iterable: - mapping[elem] = mapping_get(elem, 0) + 1 - -try: # Load C helper function if available - from _collections import _count_elements -except ImportError: - pass - -class Counter(dict): - '''Dict subclass for counting hashable items. Sometimes called a bag - or multiset. Elements are stored as dictionary keys and their counts - are stored as dictionary values. - - >>> c = Counter('abcdeabcdabcaba') # count elements from a string - - >>> c.most_common(3) # three most common elements - [('a', 5), ('b', 4), ('c', 3)] - >>> sorted(c) # list all unique elements - ['a', 'b', 'c', 'd', 'e'] - >>> ''.join(sorted(c.elements())) # list elements with repetitions - 'aaaaabbbbcccdde' - >>> sum(c.values()) # total of all counts - 15 - - >>> c['a'] # count of letter 'a' - 5 - >>> for elem in 'shazam': # update counts from an iterable - ... 
c[elem] += 1 # by adding 1 to each element's count - >>> c['a'] # now there are seven 'a' - 7 - >>> del c['b'] # remove all 'b' - >>> c['b'] # now there are zero 'b' - 0 - - >>> d = Counter('simsalabim') # make another counter - >>> c.update(d) # add in the second counter - >>> c['a'] # now there are nine 'a' - 9 - - >>> c.clear() # empty the counter - >>> c - Counter() - - Note: If a count is set to zero or reduced to zero, it will remain - in the counter until the entry is deleted or the counter is cleared: - - >>> c = Counter('aaabbc') - >>> c['b'] -= 2 # reduce the count of 'b' by two - >>> c.most_common() # 'b' is still in, but its count is zero - [('a', 3), ('c', 1), ('b', 0)] - - ''' - # References: - # http://en.wikipedia.org/wiki/Multiset - # http://www.gnu.org/software/smalltalk/manual-base/html_node/Bag.html - # http://www.demo2s.com/Tutorial/Cpp/0380__set-multiset/Catalog0380__set-multiset.htm - # http://code.activestate.com/recipes/259174/ - # Knuth, TAOCP Vol. II section 4.6.3 - - def __init__(*args, **kwds): - '''Create a new, empty Counter object. And if given, count elements - from an input iterable. Or, initialize the count from another mapping - of elements to their counts. - - >>> c = Counter() # a new, empty counter - >>> c = Counter('gallahad') # a new counter from an iterable - >>> c = Counter({'a': 4, 'b': 2}) # a new counter from a mapping - >>> c = Counter(a=4, b=2) # a new counter from keyword args - - ''' - if not args: - raise TypeError("descriptor '__init__' of 'Counter' object " - "needs an argument") - self, *args = args - if len(args) > 1: - raise TypeError('expected at most 1 arguments, got %d' % len(args)) - super(Counter, self).__init__() - self.update(*args, **kwds) - - def __missing__(self, key): - 'The count of elements not in the Counter is zero.' - # Needed so that self[missing_item] does not raise KeyError - return 0 - - def most_common(self, n=None): - '''List the n most common elements and their counts from the most - common to the least. If n is None, then list all element counts. - - >>> Counter('abcdeabcdabcaba').most_common(3) - [('a', 5), ('b', 4), ('c', 3)] - - ''' - # Emulate Bag.sortedByCount from Smalltalk - if n is None: - return sorted(self.items(), key=_itemgetter(1), reverse=True) - return _heapq.nlargest(n, self.items(), key=_itemgetter(1)) - - def elements(self): - '''Iterator over elements repeating each as many times as its count. - - >>> c = Counter('ABCABC') - >>> sorted(c.elements()) - ['A', 'A', 'B', 'B', 'C', 'C'] - - # Knuth's example for prime factors of 1836: 2**2 * 3**3 * 17**1 - >>> prime_factors = Counter({2: 2, 3: 3, 17: 1}) - >>> product = 1 - >>> for factor in prime_factors.elements(): # loop over factors - ... product *= factor # and multiply them - >>> product - 1836 - - Note, if an element's count has been set to zero or is a negative - number, elements() will ignore it. - - ''' - # Emulate Bag.do from Smalltalk and Multiset.begin from C++. - return _chain.from_iterable(_starmap(_repeat, self.items())) - - # Override dict methods where necessary - - @classmethod - def fromkeys(cls, iterable, v=None): - # There is no equivalent method for counters because setting v=1 - # means that no element can have a count greater than one. - raise NotImplementedError( - 'Counter.fromkeys() is undefined. Use Counter(iterable) instead.') - - def update(*args, **kwds): - '''Like dict.update() but add counts instead of replacing them. - - Source can be an iterable, a dictionary, or another Counter instance. 
- - >>> c = Counter('which') - >>> c.update('witch') # add elements from another iterable - >>> d = Counter('watch') - >>> c.update(d) # add elements from another counter - >>> c['h'] # four 'h' in which, witch, and watch - 4 - - ''' - # The regular dict.update() operation makes no sense here because the - # replace behavior results in the some of original untouched counts - # being mixed-in with all of the other counts for a mismash that - # doesn't have a straight-forward interpretation in most counting - # contexts. Instead, we implement straight-addition. Both the inputs - # and outputs are allowed to contain zero and negative counts. - - if not args: - raise TypeError("descriptor 'update' of 'Counter' object " - "needs an argument") - self, *args = args - if len(args) > 1: - raise TypeError('expected at most 1 arguments, got %d' % len(args)) - iterable = args[0] if args else None - if iterable is not None: - if isinstance(iterable, Mapping): - if self: - self_get = self.get - for elem, count in iterable.items(): - self[elem] = count + self_get(elem, 0) - else: - super(Counter, self).update(iterable) # fast path when counter is empty - else: - _count_elements(self, iterable) - if kwds: - self.update(kwds) - - def subtract(*args, **kwds): - '''Like dict.update() but subtracts counts instead of replacing them. - Counts can be reduced below zero. Both the inputs and outputs are - allowed to contain zero and negative counts. - - Source can be an iterable, a dictionary, or another Counter instance. - - >>> c = Counter('which') - >>> c.subtract('witch') # subtract elements from another iterable - >>> c.subtract(Counter('watch')) # subtract elements from another counter - >>> c['h'] # 2 in which, minus 1 in witch, minus 1 in watch - 0 - >>> c['w'] # 1 in which, minus 1 in witch, minus 1 in watch - -1 - - ''' - if not args: - raise TypeError("descriptor 'subtract' of 'Counter' object " - "needs an argument") - self, *args = args - if len(args) > 1: - raise TypeError('expected at most 1 arguments, got %d' % len(args)) - iterable = args[0] if args else None - if iterable is not None: - self_get = self.get - if isinstance(iterable, Mapping): - for elem, count in iterable.items(): - self[elem] = self_get(elem, 0) - count - else: - for elem in iterable: - self[elem] = self_get(elem, 0) - 1 - if kwds: - self.subtract(kwds) - - def copy(self): - 'Return a shallow copy.' - return self.__class__(self) - - def __reduce__(self): - return self.__class__, (dict(self),) - - def __delitem__(self, elem): - 'Like dict.__delitem__() but does not raise KeyError for missing values.' - if elem in self: - super().__delitem__(elem) - - def __repr__(self): - if not self: - return '%s()' % self.__class__.__name__ - try: - items = ', '.join(map('%r: %r'.__mod__, self.most_common())) - return '%s({%s})' % (self.__class__.__name__, items) - except TypeError: - # handle case where values are not orderable - return '{0}({1!r})'.format(self.__class__.__name__, dict(self)) - - # Multiset-style mathematical operations discussed in: - # Knuth TAOCP Volume II section 4.6.3 exercise 19 - # and at http://en.wikipedia.org/wiki/Multiset - # - # Outputs guaranteed to only include positive counts. - # - # To strip negative and zero counts, add-in an empty counter: - # c += Counter() - - def __add__(self, other): - '''Add counts from two counters. 
- - >>> Counter('abbb') + Counter('bcc') - Counter({'b': 4, 'c': 2, 'a': 1}) - - ''' - if not isinstance(other, Counter): - return NotImplemented - result = Counter() - for elem, count in self.items(): - newcount = count + other[elem] - if newcount > 0: - result[elem] = newcount - for elem, count in other.items(): - if elem not in self and count > 0: - result[elem] = count - return result - - def __sub__(self, other): - ''' Subtract count, but keep only results with positive counts. - - >>> Counter('abbbc') - Counter('bccd') - Counter({'b': 2, 'a': 1}) - - ''' - if not isinstance(other, Counter): - return NotImplemented - result = Counter() - for elem, count in self.items(): - newcount = count - other[elem] - if newcount > 0: - result[elem] = newcount - for elem, count in other.items(): - if elem not in self and count < 0: - result[elem] = 0 - count - return result - - def __or__(self, other): - '''Union is the maximum of value in either of the input counters. - - >>> Counter('abbb') | Counter('bcc') - Counter({'b': 3, 'c': 2, 'a': 1}) - - ''' - if not isinstance(other, Counter): - return NotImplemented - result = Counter() - for elem, count in self.items(): - other_count = other[elem] - newcount = other_count if count < other_count else count - if newcount > 0: - result[elem] = newcount - for elem, count in other.items(): - if elem not in self and count > 0: - result[elem] = count - return result - - def __and__(self, other): - ''' Intersection is the minimum of corresponding counts. - - >>> Counter('abbb') & Counter('bcc') - Counter({'b': 1}) - - ''' - if not isinstance(other, Counter): - return NotImplemented - result = Counter() - for elem, count in self.items(): - other_count = other[elem] - newcount = count if count < other_count else other_count - if newcount > 0: - result[elem] = newcount - return result - - def __pos__(self): - 'Adds an empty counter, effectively stripping negative and zero counts' - result = Counter() - for elem, count in self.items(): - if count > 0: - result[elem] = count - return result - - def __neg__(self): - '''Subtracts from an empty counter. Strips positive and zero counts, - and flips the sign on negative counts. - - ''' - result = Counter() - for elem, count in self.items(): - if count < 0: - result[elem] = 0 - count - return result - - def _keep_positive(self): - '''Internal method to strip elements with a negative or zero count''' - nonpositive = [elem for elem, count in self.items() if not count > 0] - for elem in nonpositive: - del self[elem] - return self - - def __iadd__(self, other): - '''Inplace add from another counter, keeping only positive counts. - - >>> c = Counter('abbb') - >>> c += Counter('bcc') - >>> c - Counter({'b': 4, 'c': 2, 'a': 1}) - - ''' - for elem, count in other.items(): - self[elem] += count - return self._keep_positive() - - def __isub__(self, other): - '''Inplace subtract counter, but keep only results with positive counts. - - >>> c = Counter('abbbc') - >>> c -= Counter('bccd') - >>> c - Counter({'b': 2, 'a': 1}) - - ''' - for elem, count in other.items(): - self[elem] -= count - return self._keep_positive() - - def __ior__(self, other): - '''Inplace union is the maximum of value from either counter. 
- - >>> c = Counter('abbb') - >>> c |= Counter('bcc') - >>> c - Counter({'b': 3, 'c': 2, 'a': 1}) - - ''' - for elem, other_count in other.items(): - count = self[elem] - if other_count > count: - self[elem] = other_count - return self._keep_positive() - - def __iand__(self, other): - '''Inplace intersection is the minimum of corresponding counts. - - >>> c = Counter('abbb') - >>> c &= Counter('bcc') - >>> c - Counter({'b': 1}) - - ''' - for elem, count in self.items(): - other_count = other[elem] - if other_count < count: - self[elem] = other_count - return self._keep_positive() - - -######################################################################## -### ChainMap -######################################################################## - -class ChainMap(MutableMapping): - ''' A ChainMap groups multiple dicts (or other mappings) together - to create a single, updateable view. - - The underlying mappings are stored in a list. That list is public and can - be accessed or updated using the *maps* attribute. There is no other - state. - - Lookups search the underlying mappings successively until a key is found. - In contrast, writes, updates, and deletions only operate on the first - mapping. - - ''' - - def __init__(self, *maps): - '''Initialize a ChainMap by setting *maps* to the given mappings. - If no mappings are provided, a single empty dictionary is used. - - ''' - self.maps = list(maps) or [{}] # always at least one map - - def __missing__(self, key): - raise KeyError(key) - - def __getitem__(self, key): - for mapping in self.maps: - try: - return mapping[key] # can't use 'key in mapping' with defaultdict - except KeyError: - pass - return self.__missing__(key) # support subclasses that define __missing__ - - def get(self, key, default=None): - return self[key] if key in self else default - - def __len__(self): - return len(set().union(*self.maps)) # reuses stored hash values if possible - - def __iter__(self): - return iter(set().union(*self.maps)) - - def __contains__(self, key): - return any(key in m for m in self.maps) - - def __bool__(self): - return any(self.maps) - - @_recursive_repr() - def __repr__(self): - return '{0.__class__.__name__}({1})'.format( - self, ', '.join(map(repr, self.maps))) - - @classmethod - def fromkeys(cls, iterable, *args): - 'Create a ChainMap with a single dict created from the iterable.' - return cls(dict.fromkeys(iterable, *args)) - - def copy(self): - 'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]' - return self.__class__(self.maps[0].copy(), *self.maps[1:]) - - __copy__ = copy - - def new_child(self, m=None): # like Django's Context.push() - '''New ChainMap with a new map followed by all previous maps. - If no map is provided, an empty dict is used. - ''' - if m is None: - m = {} - return self.__class__(m, *self.maps) - - @property - def parents(self): # like Django's Context.pop() - 'New ChainMap from maps[1:].' - return self.__class__(*self.maps[1:]) - - def __setitem__(self, key, value): - self.maps[0][key] = value - - def __delitem__(self, key): - try: - del self.maps[0][key] - except KeyError: - raise KeyError('Key not found in the first mapping: {!r}'.format(key)) - - def popitem(self): - 'Remove and return an item pair from maps[0]. Raise KeyError is maps[0] is empty.' - try: - return self.maps[0].popitem() - except KeyError: - raise KeyError('No keys found in the first mapping.') - - def pop(self, key, *args): - 'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].' 
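# A minimal sketch of the lookup/write split documented above: reads
# search every mapping in order, while writes, deletes and pops touch
# only maps[0].
#   >>> from collections import ChainMap
#   >>> child, parent = {}, {'x': 1}
#   >>> cm = ChainMap(child, parent)
#   >>> cm['x']                      # found in the second mapping
#   1
#   >>> cm['x'] = 2                  # but the write lands in maps[0]
#   >>> parent['x'], child['x']
#   (1, 2)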
- try: - return self.maps[0].pop(key, *args) - except KeyError: - raise KeyError('Key not found in the first mapping: {!r}'.format(key)) - - def clear(self): - 'Clear maps[0], leaving maps[1:] intact.' - self.maps[0].clear() - - -################################################################################ -### UserDict -################################################################################ - -class UserDict(MutableMapping): - - # Start by filling-out the abstract methods - def __init__(*args, **kwargs): - if not args: - raise TypeError("descriptor '__init__' of 'UserDict' object " - "needs an argument") - self, *args = args - if len(args) > 1: - raise TypeError('expected at most 1 arguments, got %d' % len(args)) - if args: - dict = args[0] - elif 'dict' in kwargs: - dict = kwargs.pop('dict') - import warnings - warnings.warn("Passing 'dict' as keyword argument is deprecated", - DeprecationWarning, stacklevel=2) - else: - dict = None - self.data = {} - if dict is not None: - self.update(dict) - if len(kwargs): - self.update(kwargs) - def __len__(self): return len(self.data) - def __getitem__(self, key): - if key in self.data: - return self.data[key] - if hasattr(self.__class__, "__missing__"): - return self.__class__.__missing__(self, key) - raise KeyError(key) - def __setitem__(self, key, item): self.data[key] = item - def __delitem__(self, key): del self.data[key] - def __iter__(self): - return iter(self.data) - - # Modify __contains__ to work correctly when __missing__ is present - def __contains__(self, key): - return key in self.data - - # Now, add the methods in dicts but not in MutableMapping - def __repr__(self): return repr(self.data) - def copy(self): - if self.__class__ is UserDict: - return UserDict(self.data.copy()) - import copy - data = self.data - try: - self.data = {} - c = copy.copy(self) - finally: - self.data = data - c.update(self) - return c - @classmethod - def fromkeys(cls, iterable, value=None): - d = cls() - for key in iterable: - d[key] = value - return d - - - -################################################################################ -### UserList -################################################################################ - -class UserList(MutableSequence): - """A more or less complete user-defined wrapper around list objects.""" - def __init__(self, initlist=None): - self.data = [] - if initlist is not None: - # XXX should this accept an arbitrary sequence? 
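# A tiny doctest for the wrapper idiom used by UserDict above and UserList
# here (Evens is a hypothetical subclass): the real container lives in
# .data, so subclasses can override behaviour without reimplementing the
# built-in type.
#   >>> from collections import UserList
#   >>> class Evens(UserList):
#   ...     def append(self, item):
#   ...         if item % 2 == 0: self.data.append(item)
#   >>> lst = Evens([2, 4]); lst.append(3); lst.append(6)
#   >>> lst.data
#   [2, 4, 6]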
- if type(initlist) == type(self.data): - self.data[:] = initlist - elif isinstance(initlist, UserList): - self.data[:] = initlist.data[:] - else: - self.data = list(initlist) - def __repr__(self): return repr(self.data) - def __lt__(self, other): return self.data < self.__cast(other) - def __le__(self, other): return self.data <= self.__cast(other) - def __eq__(self, other): return self.data == self.__cast(other) - def __gt__(self, other): return self.data > self.__cast(other) - def __ge__(self, other): return self.data >= self.__cast(other) - def __cast(self, other): - return other.data if isinstance(other, UserList) else other - def __contains__(self, item): return item in self.data - def __len__(self): return len(self.data) - def __getitem__(self, i): return self.data[i] - def __setitem__(self, i, item): self.data[i] = item - def __delitem__(self, i): del self.data[i] - def __add__(self, other): - if isinstance(other, UserList): - return self.__class__(self.data + other.data) - elif isinstance(other, type(self.data)): - return self.__class__(self.data + other) - return self.__class__(self.data + list(other)) - def __radd__(self, other): - if isinstance(other, UserList): - return self.__class__(other.data + self.data) - elif isinstance(other, type(self.data)): - return self.__class__(other + self.data) - return self.__class__(list(other) + self.data) - def __iadd__(self, other): - if isinstance(other, UserList): - self.data += other.data - elif isinstance(other, type(self.data)): - self.data += other - else: - self.data += list(other) - return self - def __mul__(self, n): - return self.__class__(self.data*n) - __rmul__ = __mul__ - def __imul__(self, n): - self.data *= n - return self - def append(self, item): self.data.append(item) - def insert(self, i, item): self.data.insert(i, item) - def pop(self, i=-1): return self.data.pop(i) - def remove(self, item): self.data.remove(item) - def clear(self): self.data.clear() - def copy(self): return self.__class__(self) - def count(self, item): return self.data.count(item) - def index(self, item, *args): return self.data.index(item, *args) - def reverse(self): self.data.reverse() - def sort(self, *args, **kwds): self.data.sort(*args, **kwds) - def extend(self, other): - if isinstance(other, UserList): - self.data.extend(other.data) - else: - self.data.extend(other) - - - -################################################################################ -### UserString -################################################################################ - -class UserString(Sequence): - def __init__(self, seq): - if isinstance(seq, str): - self.data = seq - elif isinstance(seq, UserString): - self.data = seq.data[:] - else: - self.data = str(seq) - def __str__(self): return str(self.data) - def __repr__(self): return repr(self.data) - def __int__(self): return int(self.data) - def __float__(self): return float(self.data) - def __complex__(self): return complex(self.data) - def __hash__(self): return hash(self.data) - def __getnewargs__(self): - return (self.data[:],) - - def __eq__(self, string): - if isinstance(string, UserString): - return self.data == string.data - return self.data == string - def __lt__(self, string): - if isinstance(string, UserString): - return self.data < string.data - return self.data < string - def __le__(self, string): - if isinstance(string, UserString): - return self.data <= string.data - return self.data <= string - def __gt__(self, string): - if isinstance(string, UserString): - return self.data > string.data - return 
self.data > string - def __ge__(self, string): - if isinstance(string, UserString): - return self.data >= string.data - return self.data >= string - - def __contains__(self, char): - if isinstance(char, UserString): - char = char.data - return char in self.data - - def __len__(self): return len(self.data) - def __getitem__(self, index): return self.__class__(self.data[index]) - def __add__(self, other): - if isinstance(other, UserString): - return self.__class__(self.data + other.data) - elif isinstance(other, str): - return self.__class__(self.data + other) - return self.__class__(self.data + str(other)) - def __radd__(self, other): - if isinstance(other, str): - return self.__class__(other + self.data) - return self.__class__(str(other) + self.data) - def __mul__(self, n): - return self.__class__(self.data*n) - __rmul__ = __mul__ - def __mod__(self, args): - return self.__class__(self.data % args) - def __rmod__(self, format): - return self.__class__(format % args) - - # the following methods are defined in alphabetical order: - def capitalize(self): return self.__class__(self.data.capitalize()) - def casefold(self): - return self.__class__(self.data.casefold()) - def center(self, width, *args): - return self.__class__(self.data.center(width, *args)) - def count(self, sub, start=0, end=_sys.maxsize): - if isinstance(sub, UserString): - sub = sub.data - return self.data.count(sub, start, end) - def encode(self, encoding=None, errors=None): # XXX improve this? - if encoding: - if errors: - return self.__class__(self.data.encode(encoding, errors)) - return self.__class__(self.data.encode(encoding)) - return self.__class__(self.data.encode()) - def endswith(self, suffix, start=0, end=_sys.maxsize): - return self.data.endswith(suffix, start, end) - def expandtabs(self, tabsize=8): - return self.__class__(self.data.expandtabs(tabsize)) - def find(self, sub, start=0, end=_sys.maxsize): - if isinstance(sub, UserString): - sub = sub.data - return self.data.find(sub, start, end) - def format(self, *args, **kwds): - return self.data.format(*args, **kwds) - def format_map(self, mapping): - return self.data.format_map(mapping) - def index(self, sub, start=0, end=_sys.maxsize): - return self.data.index(sub, start, end) - def isalpha(self): return self.data.isalpha() - def isalnum(self): return self.data.isalnum() - def isdecimal(self): return self.data.isdecimal() - def isdigit(self): return self.data.isdigit() - def isidentifier(self): return self.data.isidentifier() - def islower(self): return self.data.islower() - def isnumeric(self): return self.data.isnumeric() - def isprintable(self): return self.data.isprintable() - def isspace(self): return self.data.isspace() - def istitle(self): return self.data.istitle() - def isupper(self): return self.data.isupper() - def join(self, seq): return self.data.join(seq) - def ljust(self, width, *args): - return self.__class__(self.data.ljust(width, *args)) - def lower(self): return self.__class__(self.data.lower()) - def lstrip(self, chars=None): return self.__class__(self.data.lstrip(chars)) - maketrans = str.maketrans - def partition(self, sep): - return self.data.partition(sep) - def replace(self, old, new, maxsplit=-1): - if isinstance(old, UserString): - old = old.data - if isinstance(new, UserString): - new = new.data - return self.__class__(self.data.replace(old, new, maxsplit)) - def rfind(self, sub, start=0, end=_sys.maxsize): - if isinstance(sub, UserString): - sub = sub.data - return self.data.rfind(sub, start, end) - def rindex(self, sub, start=0, 
end=_sys.maxsize): - return self.data.rindex(sub, start, end) - def rjust(self, width, *args): - return self.__class__(self.data.rjust(width, *args)) - def rpartition(self, sep): - return self.data.rpartition(sep) - def rstrip(self, chars=None): - return self.__class__(self.data.rstrip(chars)) - def split(self, sep=None, maxsplit=-1): - return self.data.split(sep, maxsplit) - def rsplit(self, sep=None, maxsplit=-1): - return self.data.rsplit(sep, maxsplit) - def splitlines(self, keepends=False): return self.data.splitlines(keepends) - def startswith(self, prefix, start=0, end=_sys.maxsize): - return self.data.startswith(prefix, start, end) - def strip(self, chars=None): return self.__class__(self.data.strip(chars)) - def swapcase(self): return self.__class__(self.data.swapcase()) - def title(self): return self.__class__(self.data.title()) - def translate(self, *args): - return self.__class__(self.data.translate(*args)) - def upper(self): return self.__class__(self.data.upper()) - def zfill(self, width): return self.__class__(self.data.zfill(width)) diff --git a/typing_extensions/test_data/python-3.6.1/collections/abc.py b/typing_extensions/test_data/python-3.6.1/collections/abc.py deleted file mode 100644 index 891600d1..00000000 --- a/typing_extensions/test_data/python-3.6.1/collections/abc.py +++ /dev/null @@ -1,2 +0,0 @@ -from _collections_abc import * -from _collections_abc import __all__ diff --git a/typing_extensions/test_data/python-3.6.1/typing.py b/typing_extensions/test_data/python-3.6.1/typing.py deleted file mode 100644 index 9a0f4909..00000000 --- a/typing_extensions/test_data/python-3.6.1/typing.py +++ /dev/null @@ -1,2335 +0,0 @@ -import abc -from abc import abstractmethod, abstractproperty -import collections -import contextlib -import functools -import re as stdlib_re # Avoid confusion with the re we export. -import sys -import types -try: - import collections.abc as collections_abc -except ImportError: - import collections as collections_abc # Fallback for PY3.2. -try: - from types import SlotWrapperType, MethodWrapperType, MethodDescriptorType -except ImportError: - SlotWrapperType = type(object.__init__) - MethodWrapperType = type(object().__str__) - MethodDescriptorType = type(str.join) - - -# Please keep __all__ alphabetized within each category. -__all__ = [ - # Super-special typing primitives. - 'Any', - 'Callable', - 'ClassVar', - 'Generic', - 'Optional', - 'Tuple', - 'Type', - 'TypeVar', - 'Union', - - # ABCs (from collections.abc). - 'AbstractSet', # collections.abc.Set. - 'GenericMeta', # subclass of abc.ABCMeta and a metaclass - # for 'Generic' and ABCs below. - 'ByteString', - 'Container', - 'Hashable', - 'ItemsView', - 'Iterable', - 'Iterator', - 'KeysView', - 'Mapping', - 'MappingView', - 'MutableMapping', - 'MutableSequence', - 'MutableSet', - 'Sequence', - 'Sized', - 'ValuesView', - # The following are added depending on presence - # of their non-generic counterparts in stdlib: - # Awaitable, - # AsyncIterator, - # AsyncIterable, - # Coroutine, - # Collection, - # ContextManager, - # AsyncGenerator, - - # Structural checks, a.k.a. protocols. - 'Reversible', - 'SupportsAbs', - 'SupportsFloat', - 'SupportsInt', - 'SupportsRound', - - # Concrete collection types. - 'Counter', - 'Deque', - 'Dict', - 'DefaultDict', - 'List', - 'Set', - 'FrozenSet', - 'NamedTuple', # Not really a type. - 'Generator', - - # One-off things. 
- 'AnyStr', - 'cast', - 'get_type_hints', - 'NewType', - 'no_type_check', - 'no_type_check_decorator', - 'overload', - 'Text', - 'TYPE_CHECKING', -] - -# The pseudo-submodules 're' and 'io' are part of the public -# namespace, but excluded from __all__ because they might stomp on -# legitimate imports of those modules. - - -def _qualname(x): - if sys.version_info[:2] >= (3, 3): - return x.__qualname__ - else: - # Fall back to just name. - return x.__name__ - - -def _trim_name(nm): - whitelist = ('_TypeAlias', '_ForwardRef', '_TypingBase', '_FinalTypingBase') - if nm.startswith('_') and nm not in whitelist: - nm = nm[1:] - return nm - - -class TypingMeta(type): - """Metaclass for most types defined in typing module - (not a part of public API). - - This overrides __new__() to require an extra keyword parameter - '_root', which serves as a guard against naive subclassing of the - typing classes. Any legitimate class defined using a metaclass - derived from TypingMeta must pass _root=True. - - This also defines a dummy constructor (all the work for most typing - constructs is done in __new__) and a nicer repr(). - """ - - _is_protocol = False - - def __new__(cls, name, bases, namespace, *, _root=False): - if not _root: - raise TypeError("Cannot subclass %s" % - (', '.join(map(_type_repr, bases)) or '()')) - return super().__new__(cls, name, bases, namespace) - - def __init__(self, *args, **kwds): - pass - - def _eval_type(self, globalns, localns): - """Override this in subclasses to interpret forward references. - - For example, List['C'] is internally stored as - List[_ForwardRef('C')], which should evaluate to List[C], - where C is an object found in globalns or localns (searching - localns first, of course). - """ - return self - - def _get_type_vars(self, tvars): - pass - - def __repr__(self): - qname = _trim_name(_qualname(self)) - return '%s.%s' % (self.__module__, qname) - - -class _TypingBase(metaclass=TypingMeta, _root=True): - """Internal indicator of special typing constructs.""" - - __slots__ = ('__weakref__',) - - def __init__(self, *args, **kwds): - pass - - def __new__(cls, *args, **kwds): - """Constructor. - - This only exists to give a better error message in case - someone tries to subclass a special typing object (not a good idea). - """ - if (len(args) == 3 and - isinstance(args[0], str) and - isinstance(args[1], tuple)): - # Close enough. - raise TypeError("Cannot subclass %r" % cls) - return super().__new__(cls) - - # Things that are not classes also need these. - def _eval_type(self, globalns, localns): - return self - - def _get_type_vars(self, tvars): - pass - - def __repr__(self): - cls = type(self) - qname = _trim_name(_qualname(cls)) - return '%s.%s' % (cls.__module__, qname) - - def __call__(self, *args, **kwds): - raise TypeError("Cannot instantiate %r" % type(self)) - - -class _FinalTypingBase(_TypingBase, _root=True): - """Internal mix-in class to prevent instantiation. - - Prevents instantiation unless _root=True is given in class call. - It is used to create pseudo-singleton instances Any, Union, Optional, etc. 
- """ - - __slots__ = () - - def __new__(cls, *args, _root=False, **kwds): - self = super().__new__(cls, *args, **kwds) - if _root is True: - return self - raise TypeError("Cannot instantiate %r" % cls) - - def __reduce__(self): - return _trim_name(type(self).__name__) - - -class _ForwardRef(_TypingBase, _root=True): - """Internal wrapper to hold a forward reference.""" - - __slots__ = ('__forward_arg__', '__forward_code__', - '__forward_evaluated__', '__forward_value__') - - def __init__(self, arg): - super().__init__(arg) - if not isinstance(arg, str): - raise TypeError('Forward reference must be a string -- got %r' % (arg,)) - try: - code = compile(arg, '', 'eval') - except SyntaxError: - raise SyntaxError('Forward reference must be an expression -- got %r' % - (arg,)) - self.__forward_arg__ = arg - self.__forward_code__ = code - self.__forward_evaluated__ = False - self.__forward_value__ = None - - def _eval_type(self, globalns, localns): - if not self.__forward_evaluated__ or localns is not globalns: - if globalns is None and localns is None: - globalns = localns = {} - elif globalns is None: - globalns = localns - elif localns is None: - localns = globalns - self.__forward_value__ = _type_check( - eval(self.__forward_code__, globalns, localns), - "Forward references must evaluate to types.") - self.__forward_evaluated__ = True - return self.__forward_value__ - - def __eq__(self, other): - if not isinstance(other, _ForwardRef): - return NotImplemented - return (self.__forward_arg__ == other.__forward_arg__ and - self.__forward_value__ == other.__forward_value__) - - def __hash__(self): - return hash((self.__forward_arg__, self.__forward_value__)) - - def __instancecheck__(self, obj): - raise TypeError("Forward references cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("Forward references cannot be used with issubclass().") - - def __repr__(self): - return '_ForwardRef(%r)' % (self.__forward_arg__,) - - -class _TypeAlias(_TypingBase, _root=True): - """Internal helper class for defining generic variants of concrete types. - - Note that this is not a type; let's call it a pseudo-type. It cannot - be used in instance and subclass checks in parameterized form, i.e. - ``isinstance(42, Match[str])`` raises ``TypeError`` instead of returning - ``False``. - """ - - __slots__ = ('name', 'type_var', 'impl_type', 'type_checker') - - def __init__(self, name, type_var, impl_type, type_checker): - """Initializer. - - Args: - name: The name, e.g. 'Pattern'. - type_var: The type parameter, e.g. AnyStr, or the - specific type, e.g. str. - impl_type: The implementation type. - type_checker: Function that takes an impl_type instance. - and returns a value that should be a type_var instance. - """ - assert isinstance(name, str), repr(name) - assert isinstance(impl_type, type), repr(impl_type) - assert not isinstance(impl_type, TypingMeta), repr(impl_type) - assert isinstance(type_var, (type, _TypingBase)), repr(type_var) - self.name = name - self.type_var = type_var - self.impl_type = impl_type - self.type_checker = type_checker - - def __repr__(self): - return "%s[%s]" % (self.name, _type_repr(self.type_var)) - - def __getitem__(self, parameter): - if not isinstance(self.type_var, TypeVar): - raise TypeError("%s cannot be further parameterized." % self) - if self.type_var.__constraints__ and isinstance(parameter, type): - if not issubclass(parameter, self.type_var.__constraints__): - raise TypeError("%s is not a valid substitution for %s." 
% - (parameter, self.type_var)) - if isinstance(parameter, TypeVar) and parameter is not self.type_var: - raise TypeError("%s cannot be re-parameterized." % self) - return self.__class__(self.name, parameter, - self.impl_type, self.type_checker) - - def __eq__(self, other): - if not isinstance(other, _TypeAlias): - return NotImplemented - return self.name == other.name and self.type_var == other.type_var - - def __hash__(self): - return hash((self.name, self.type_var)) - - def __instancecheck__(self, obj): - if not isinstance(self.type_var, TypeVar): - raise TypeError("Parameterized type aliases cannot be used " - "with isinstance().") - return isinstance(obj, self.impl_type) - - def __subclasscheck__(self, cls): - if not isinstance(self.type_var, TypeVar): - raise TypeError("Parameterized type aliases cannot be used " - "with issubclass().") - return issubclass(cls, self.impl_type) - - -def _get_type_vars(types, tvars): - for t in types: - if isinstance(t, TypingMeta) or isinstance(t, _TypingBase): - t._get_type_vars(tvars) - - -def _type_vars(types): - tvars = [] - _get_type_vars(types, tvars) - return tuple(tvars) - - -def _eval_type(t, globalns, localns): - if isinstance(t, TypingMeta) or isinstance(t, _TypingBase): - return t._eval_type(globalns, localns) - return t - - -def _type_check(arg, msg): - """Check that the argument is a type, and return it (internal helper). - - As a special case, accept None and return type(None) instead. - Also, _TypeAlias instances (e.g. Match, Pattern) are acceptable. - - The msg argument is a human-readable error message, e.g. - - "Union[arg, ...]: arg should be a type." - - We append the repr() of the actual value (truncated to 100 chars). - """ - if arg is None: - return type(None) - if isinstance(arg, str): - arg = _ForwardRef(arg) - if ( - isinstance(arg, _TypingBase) and type(arg).__name__ == '_ClassVar' or - not isinstance(arg, (type, _TypingBase)) and not callable(arg) - ): - raise TypeError(msg + " Got %.100r." % (arg,)) - # Bare Union etc. are not valid as type arguments - if ( - type(arg).__name__ in ('_Union', '_Optional') and - not getattr(arg, '__origin__', None) or - isinstance(arg, TypingMeta) and _gorg(arg) in (Generic, _Protocol) - ): - raise TypeError("Plain %s is not valid as type argument" % arg) - return arg - - -def _type_repr(obj): - """Return the repr() of an object, special-casing types (internal helper). - - If obj is a type, we return a shorter version than the default - type.__repr__, based on the module and qualified name, which is - typically enough to uniquely identify a type. For everything - else, we fall back on repr(obj). - """ - if isinstance(obj, type) and not isinstance(obj, TypingMeta): - if obj.__module__ == 'builtins': - return _qualname(obj) - return '%s.%s' % (obj.__module__, _qualname(obj)) - if obj is ...: - return('...') - if isinstance(obj, types.FunctionType): - return obj.__name__ - return repr(obj) - - -class _Any(_FinalTypingBase, _root=True): - """Special type indicating an unconstrained type. - - - Any is compatible with every type. - - Any assumed to have all methods. - - All values assumed to be instances of Any. - - Note that all the above statements are true from the point of view of - static type checkers. At runtime, Any should not be used with instance - or class checks. 
- """ - - __slots__ = () - - def __instancecheck__(self, obj): - raise TypeError("Any cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("Any cannot be used with issubclass().") - - -Any = _Any(_root=True) - - -class TypeVar(_TypingBase, _root=True): - """Type variable. - - Usage:: - - T = TypeVar('T') # Can be anything - A = TypeVar('A', str, bytes) # Must be str or bytes - - Type variables exist primarily for the benefit of static type - checkers. They serve as the parameters for generic types as well - as for generic function definitions. See class Generic for more - information on generic types. Generic functions work as follows: - - def repeat(x: T, n: int) -> List[T]: - '''Return a list containing n references to x.''' - return [x]*n - - def longest(x: A, y: A) -> A: - '''Return the longest of two strings.''' - return x if len(x) >= len(y) else y - - The latter example's signature is essentially the overloading - of (str, str) -> str and (bytes, bytes) -> bytes. Also note - that if the arguments are instances of some subclass of str, - the return type is still plain str. - - At runtime, isinstance(x, T) and issubclass(C, T) will raise TypeError. - - Type variables defined with covariant=True or contravariant=True - can be used do declare covariant or contravariant generic types. - See PEP 484 for more details. By default generic types are invariant - in all type variables. - - Type variables can be introspected. e.g.: - - T.__name__ == 'T' - T.__constraints__ == () - T.__covariant__ == False - T.__contravariant__ = False - A.__constraints__ == (str, bytes) - """ - - __slots__ = ('__name__', '__bound__', '__constraints__', - '__covariant__', '__contravariant__') - - def __init__(self, name, *constraints, bound=None, - covariant=False, contravariant=False): - super().__init__(name, *constraints, bound=bound, - covariant=covariant, contravariant=contravariant) - self.__name__ = name - if covariant and contravariant: - raise ValueError("Bivariant types are not supported.") - self.__covariant__ = bool(covariant) - self.__contravariant__ = bool(contravariant) - if constraints and bound is not None: - raise TypeError("Constraints cannot be combined with bound=...") - if constraints and len(constraints) == 1: - raise TypeError("A single constraint is not allowed") - msg = "TypeVar(name, constraint, ...): constraints must be types." - self.__constraints__ = tuple(_type_check(t, msg) for t in constraints) - if bound: - self.__bound__ = _type_check(bound, "Bound must be a type.") - else: - self.__bound__ = None - - def _get_type_vars(self, tvars): - if self not in tvars: - tvars.append(self) - - def __repr__(self): - if self.__covariant__: - prefix = '+' - elif self.__contravariant__: - prefix = '-' - else: - prefix = '~' - return prefix + self.__name__ - - def __instancecheck__(self, instance): - raise TypeError("Type variables cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("Type variables cannot be used with issubclass().") - - -# Some unconstrained type variables. These are used by the container types. -# (These are not for export.) -T = TypeVar('T') # Any type. -KT = TypeVar('KT') # Key type. -VT = TypeVar('VT') # Value type. -T_co = TypeVar('T_co', covariant=True) # Any type covariant containers. -V_co = TypeVar('V_co', covariant=True) # Any type covariant containers. -VT_co = TypeVar('VT_co', covariant=True) # Value type covariant containers. 
-T_contra = TypeVar('T_contra', contravariant=True) # Ditto contravariant. - -# A useful type variable with constraints. This represents string types. -# (This one *is* for export!) -AnyStr = TypeVar('AnyStr', bytes, str) - - -def _replace_arg(arg, tvars, args): - """An internal helper function: replace arg if it is a type variable - found in tvars with corresponding substitution from args or - with corresponding substitution sub-tree if arg is a generic type. - """ - - if tvars is None: - tvars = [] - if hasattr(arg, '_subs_tree') and isinstance(arg, (GenericMeta, _TypingBase)): - return arg._subs_tree(tvars, args) - if isinstance(arg, TypeVar): - for i, tvar in enumerate(tvars): - if arg == tvar: - return args[i] - return arg - - -# Special typing constructs Union, Optional, Generic, Callable and Tuple -# use three special attributes for internal bookkeeping of generic types: -# * __parameters__ is a tuple of unique free type parameters of a generic -# type, for example, Dict[T, T].__parameters__ == (T,); -# * __origin__ keeps a reference to a type that was subscripted, -# e.g., Union[T, int].__origin__ == Union; -# * __args__ is a tuple of all arguments used in subscripting, -# e.g., Dict[T, int].__args__ == (T, int). - - -def _subs_tree(cls, tvars=None, args=None): - """An internal helper function: calculate substitution tree - for generic cls after replacing its type parameters with - substitutions in tvars -> args (if any). - Repeat the same following __origin__'s. - - Return a list of arguments with all possible substitutions - performed. Arguments that are generic classes themselves are represented - as tuples (so that no new classes are created by this function). - For example: _subs_tree(List[Tuple[int, T]][str]) == [(Tuple, int, str)] - """ - - if cls.__origin__ is None: - return cls - # Make of chain of origins (i.e. cls -> cls.__origin__) - current = cls.__origin__ - orig_chain = [] - while current.__origin__ is not None: - orig_chain.append(current) - current = current.__origin__ - # Replace type variables in __args__ if asked ... - tree_args = [] - for arg in cls.__args__: - tree_args.append(_replace_arg(arg, tvars, args)) - # ... then continue replacing down the origin chain. - for ocls in orig_chain: - new_tree_args = [] - for arg in ocls.__args__: - new_tree_args.append(_replace_arg(arg, ocls.__parameters__, tree_args)) - tree_args = new_tree_args - return tree_args - - -def _remove_dups_flatten(parameters): - """An internal helper for Union creation and substitution: flatten Union's - among parameters, then remove duplicates and strict subclasses. - """ - - # Flatten out Union[Union[...], ...]. - params = [] - for p in parameters: - if isinstance(p, _Union) and p.__origin__ is Union: - params.extend(p.__args__) - elif isinstance(p, tuple) and len(p) > 0 and p[0] is Union: - params.extend(p[1:]) - else: - params.append(p) - # Weed out strict duplicates, preserving the first of each occurrence. - all_params = set(params) - if len(all_params) < len(params): - new_params = [] - for t in params: - if t in all_params: - new_params.append(t) - all_params.remove(t) - params = new_params - assert not all_params, all_params - # Weed out subclasses. - # E.g. Union[int, Employee, Manager] == Union[int, Employee]. - # If object is present it will be sole survivor among proper classes. - # Never discard type variables. - # (In particular, Union[str, AnyStr] != AnyStr.) 
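The three bookkeeping attributes (__parameters__, __origin__, __args__) described in the comment above can be observed on any subscripted generic; a small sketch of the stated invariants::

    from typing import Dict, TypeVar, Union

    T = TypeVar('T')

    assert Dict[T, T].__parameters__ == (T,)   # unique free type variables
    assert Union[T, int].__origin__ is Union   # the type that was subscripted
    assert Dict[T, int].__args__ == (T, int)   # all subscription arguments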
- all_params = set(params) - for t1 in params: - if not isinstance(t1, type): - continue - if any(isinstance(t2, type) and issubclass(t1, t2) - for t2 in all_params - {t1} - if not (isinstance(t2, GenericMeta) and - t2.__origin__ is not None)): - all_params.remove(t1) - return tuple(t for t in params if t in all_params) - - -def _check_generic(cls, parameters): - # Check correct count for parameters of a generic cls (internal helper). - if not cls.__parameters__: - raise TypeError("%s is not a generic class" % repr(cls)) - alen = len(parameters) - elen = len(cls.__parameters__) - if alen != elen: - raise TypeError("Too %s parameters for %s; actual %s, expected %s" % - ("many" if alen > elen else "few", repr(cls), alen, elen)) - - -_cleanups = [] - - -def _tp_cache(func): - """Internal wrapper caching __getitem__ of generic types with a fallback to - original function for non-hashable arguments. - """ - - cached = functools.lru_cache()(func) - _cleanups.append(cached.cache_clear) - - @functools.wraps(func) - def inner(*args, **kwds): - try: - return cached(*args, **kwds) - except TypeError: - pass # All real errors (not unhashable args) are raised below. - return func(*args, **kwds) - return inner - - -class _Union(_FinalTypingBase, _root=True): - """Union type; Union[X, Y] means either X or Y. - - To define a union, use e.g. Union[int, str]. Details: - - - The arguments must be types and there must be at least one. - - - None as an argument is a special case and is replaced by - type(None). - - - Unions of unions are flattened, e.g.:: - - Union[Union[int, str], float] == Union[int, str, float] - - - Unions of a single argument vanish, e.g.:: - - Union[int] == int # The constructor actually returns int - - - Redundant arguments are skipped, e.g.:: - - Union[int, str, int] == Union[int, str] - - - When comparing unions, the argument order is ignored, e.g.:: - - Union[int, str] == Union[str, int] - - - When two arguments have a subclass relationship, the least - derived argument is kept, e.g.:: - - class Employee: pass - class Manager(Employee): pass - Union[int, Employee, Manager] == Union[int, Employee] - Union[Manager, int, Employee] == Union[int, Employee] - Union[Employee, Manager] == Employee - - - Similar for object:: - - Union[int, object] == object - - - You cannot subclass or instantiate a union. - - - You can use Optional[X] as a shorthand for Union[X, None]. - """ - - __slots__ = ('__parameters__', '__args__', '__origin__', '__tree_hash__') - - def __new__(cls, parameters=None, origin=None, *args, _root=False): - self = super().__new__(cls, parameters, origin, *args, _root=_root) - if origin is None: - self.__parameters__ = None - self.__args__ = None - self.__origin__ = None - self.__tree_hash__ = hash(frozenset(('Union',))) - return self - if not isinstance(parameters, tuple): - raise TypeError("Expected parameters=") - if origin is Union: - parameters = _remove_dups_flatten(parameters) - # It's not a union if there's only one type left. - if len(parameters) == 1: - return parameters[0] - self.__parameters__ = _type_vars(parameters) - self.__args__ = parameters - self.__origin__ = origin - # Pre-calculate the __hash__ on instantiation. - # This improves speed for complex substitutions. 
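Each identity listed in the _Union docstring above is observable at runtime; a minimal sketch::

    from typing import Union

    assert Union[int] is int                        # single argument vanishes
    assert Union[int, str, int] == Union[int, str]  # duplicates are skipped
    assert Union[int, str] == Union[str, int]       # argument order is ignored
    assert Union[Union[int, str], float] == Union[int, str, float]  # flattening
    assert Union[int, object] is object             # least-derived class survives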
- subs_tree = self._subs_tree() - if isinstance(subs_tree, tuple): - self.__tree_hash__ = hash(frozenset(subs_tree)) - else: - self.__tree_hash__ = hash(subs_tree) - return self - - def _eval_type(self, globalns, localns): - if self.__args__ is None: - return self - ev_args = tuple(_eval_type(t, globalns, localns) for t in self.__args__) - ev_origin = _eval_type(self.__origin__, globalns, localns) - if ev_args == self.__args__ and ev_origin == self.__origin__: - # Everything is already evaluated. - return self - return self.__class__(ev_args, ev_origin, _root=True) - - def _get_type_vars(self, tvars): - if self.__origin__ and self.__parameters__: - _get_type_vars(self.__parameters__, tvars) - - def __repr__(self): - if self.__origin__ is None: - return super().__repr__() - tree = self._subs_tree() - if not isinstance(tree, tuple): - return repr(tree) - return tree[0]._tree_repr(tree) - - def _tree_repr(self, tree): - arg_list = [] - for arg in tree[1:]: - if not isinstance(arg, tuple): - arg_list.append(_type_repr(arg)) - else: - arg_list.append(arg[0]._tree_repr(arg)) - return super().__repr__() + '[%s]' % ', '.join(arg_list) - - @_tp_cache - def __getitem__(self, parameters): - if parameters == (): - raise TypeError("Cannot take a Union of no types.") - if not isinstance(parameters, tuple): - parameters = (parameters,) - if self.__origin__ is None: - msg = "Union[arg, ...]: each arg must be a type." - else: - msg = "Parameters to generic types must be types." - parameters = tuple(_type_check(p, msg) for p in parameters) - if self is not Union: - _check_generic(self, parameters) - return self.__class__(parameters, origin=self, _root=True) - - def _subs_tree(self, tvars=None, args=None): - if self is Union: - return Union # Nothing to substitute - tree_args = _subs_tree(self, tvars, args) - tree_args = _remove_dups_flatten(tree_args) - if len(tree_args) == 1: - return tree_args[0] # Union of a single type is that type - return (Union,) + tree_args - - def __eq__(self, other): - if isinstance(other, _Union): - return self.__tree_hash__ == other.__tree_hash__ - elif self is not Union: - return self._subs_tree() == other - else: - return self is other - - def __hash__(self): - return self.__tree_hash__ - - def __instancecheck__(self, obj): - raise TypeError("Unions cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("Unions cannot be used with issubclass().") - - -Union = _Union(_root=True) - - -class _Optional(_FinalTypingBase, _root=True): - """Optional type. - - Optional[X] is equivalent to Union[X, None]. - """ - - __slots__ = () - - @_tp_cache - def __getitem__(self, arg): - arg = _type_check(arg, "Optional[t] requires a single type.") - return Union[arg, type(None)] - - -Optional = _Optional(_root=True) - - -def _gorg(a): - """Return the farthest origin of a generic class (internal helper).""" - assert isinstance(a, GenericMeta) - while a.__origin__ is not None: - a = a.__origin__ - return a - - -def _geqv(a, b): - """Return whether two generic classes are equivalent (internal helper). - - The intention is to consider generic class X and any of its - parameterized forms (X[T], X[int], etc.) as equivalent. - - However, X is not equivalent to a subclass of X. - - The relation is reflexive, symmetric and transitive. - """ - assert isinstance(a, GenericMeta) and isinstance(b, GenericMeta) - # Reduce each to its origin. - return _gorg(a) is _gorg(b) - - -def _next_in_mro(cls): - """Helper for Generic.__new__. 
- - Returns the class after the last occurrence of Generic or - Generic[...] in cls.__mro__. - """ - next_in_mro = object - # Look for the last occurrence of Generic or Generic[...]. - for i, c in enumerate(cls.__mro__[:-1]): - if isinstance(c, GenericMeta) and _gorg(c) is Generic: - next_in_mro = cls.__mro__[i + 1] - return next_in_mro - - -def _make_subclasshook(cls): - """Construct a __subclasshook__ callable that incorporates - the associated __extra__ class in subclass checks performed - against cls. - """ - if isinstance(cls.__extra__, abc.ABCMeta): - # The logic mirrors that of ABCMeta.__subclasscheck__. - # Registered classes need not be checked here because - # cls and its extra share the same _abc_registry. - def __extrahook__(subclass): - res = cls.__extra__.__subclasshook__(subclass) - if res is not NotImplemented: - return res - if cls.__extra__ in subclass.__mro__: - return True - for scls in cls.__extra__.__subclasses__(): - if isinstance(scls, GenericMeta): - continue - if issubclass(subclass, scls): - return True - return NotImplemented - else: - # For non-ABC extras we'll just call issubclass(). - def __extrahook__(subclass): - if cls.__extra__ and issubclass(subclass, cls.__extra__): - return True - return NotImplemented - return __extrahook__ - - -def _no_slots_copy(dct): - """Internal helper: copy class __dict__ and clean slots class variables. - (They will be re-created if necessary by normal class machinery.) - """ - dict_copy = dict(dct) - if '__slots__' in dict_copy: - for slot in dict_copy['__slots__']: - dict_copy.pop(slot, None) - return dict_copy - - -class GenericMeta(TypingMeta, abc.ABCMeta): - """Metaclass for generic types. - - This is a metaclass for typing.Generic and generic ABCs defined in - typing module. User defined subclasses of GenericMeta can override - __new__ and invoke super().__new__. Note that GenericMeta.__new__ - has strict rules on what is allowed in its bases argument: - * plain Generic is disallowed in bases; - * Generic[...] should appear in bases at most once; - * if Generic[...] is present, then it should list all type variables - that appear in other bases. - In addition, type of all generic bases is erased, e.g., C[int] is - stripped to plain C. - """ - - def __new__(cls, name, bases, namespace, - tvars=None, args=None, origin=None, extra=None, orig_bases=None): - """Create a new generic class. GenericMeta.__new__ accepts - keyword arguments that are used for internal bookkeeping, therefore - an override should pass unused keyword arguments to super(). - """ - if tvars is not None: - # Called from __getitem__() below. - assert origin is not None - assert all(isinstance(t, TypeVar) for t in tvars), tvars - else: - # Called from class statement. - assert tvars is None, tvars - assert args is None, args - assert origin is None, origin - - # Get the full set of tvars from the bases. - tvars = _type_vars(bases) - # Look for Generic[T1, ..., Tn]. - # If found, tvars must be a subset of it. - # If not found, tvars is it. - # Also check for and reject plain Generic, - # and reject multiple Generic[...]. - gvars = None - for base in bases: - if base is Generic: - raise TypeError("Cannot inherit from plain Generic") - if (isinstance(base, GenericMeta) and - base.__origin__ is Generic): - if gvars is not None: - raise TypeError( - "Cannot inherit from Generic[...] 
multiple types.") - gvars = base.__parameters__ - if gvars is None: - gvars = tvars - else: - tvarset = set(tvars) - gvarset = set(gvars) - if not tvarset <= gvarset: - raise TypeError( - "Some type variables (%s) " - "are not listed in Generic[%s]" % - (", ".join(str(t) for t in tvars if t not in gvarset), - ", ".join(str(g) for g in gvars))) - tvars = gvars - - initial_bases = bases - if extra is not None and type(extra) is abc.ABCMeta and extra not in bases: - bases = (extra,) + bases - bases = tuple(_gorg(b) if isinstance(b, GenericMeta) else b for b in bases) - - # remove bare Generic from bases if there are other generic bases - if any(isinstance(b, GenericMeta) and b is not Generic for b in bases): - bases = tuple(b for b in bases if b is not Generic) - namespace.update({'__origin__': origin, '__extra__': extra}) - self = super().__new__(cls, name, bases, namespace, _root=True) - - self.__parameters__ = tvars - # Be prepared that GenericMeta will be subclassed by TupleMeta - # and CallableMeta, those two allow ..., (), or [] in __args___. - self.__args__ = tuple(... if a is _TypingEllipsis else - () if a is _TypingEmpty else - a for a in args) if args else None - # Speed hack (https://github.com/python/typing/issues/196). - self.__next_in_mro__ = _next_in_mro(self) - # Preserve base classes on subclassing (__bases__ are type erased now). - if orig_bases is None: - self.__orig_bases__ = initial_bases - - # This allows unparameterized generic collections to be used - # with issubclass() and isinstance() in the same way as their - # collections.abc counterparts (e.g., isinstance([], Iterable)). - if ( - '__subclasshook__' not in namespace and extra or - # allow overriding - getattr(self.__subclasshook__, '__name__', '') == '__extrahook__' - ): - self.__subclasshook__ = _make_subclasshook(self) - if isinstance(extra, abc.ABCMeta): - self._abc_registry = extra._abc_registry - self._abc_cache = extra._abc_cache - elif origin is not None: - self._abc_registry = origin._abc_registry - self._abc_cache = origin._abc_cache - - if origin and hasattr(origin, '__qualname__'): # Fix for Python 3.2. - self.__qualname__ = origin.__qualname__ - self.__tree_hash__ = (hash(self._subs_tree()) if origin else - super(GenericMeta, self).__hash__()) - return self - - # _abc_negative_cache and _abc_negative_cache_version - # realised as descriptors, since GenClass[t1, t2, ...] always - # share subclass info with GenClass. - # This is an important memory optimization. 
- @property - def _abc_negative_cache(self): - if isinstance(self.__extra__, abc.ABCMeta): - return self.__extra__._abc_negative_cache - return _gorg(self)._abc_generic_negative_cache - - @_abc_negative_cache.setter - def _abc_negative_cache(self, value): - if self.__origin__ is None: - if isinstance(self.__extra__, abc.ABCMeta): - self.__extra__._abc_negative_cache = value - else: - self._abc_generic_negative_cache = value - - @property - def _abc_negative_cache_version(self): - if isinstance(self.__extra__, abc.ABCMeta): - return self.__extra__._abc_negative_cache_version - return _gorg(self)._abc_generic_negative_cache_version - - @_abc_negative_cache_version.setter - def _abc_negative_cache_version(self, value): - if self.__origin__ is None: - if isinstance(self.__extra__, abc.ABCMeta): - self.__extra__._abc_negative_cache_version = value - else: - self._abc_generic_negative_cache_version = value - - def _get_type_vars(self, tvars): - if self.__origin__ and self.__parameters__: - _get_type_vars(self.__parameters__, tvars) - - def _eval_type(self, globalns, localns): - ev_origin = (self.__origin__._eval_type(globalns, localns) - if self.__origin__ else None) - ev_args = tuple(_eval_type(a, globalns, localns) for a - in self.__args__) if self.__args__ else None - if ev_origin == self.__origin__ and ev_args == self.__args__: - return self - return self.__class__(self.__name__, - self.__bases__, - _no_slots_copy(self.__dict__), - tvars=_type_vars(ev_args) if ev_args else None, - args=ev_args, - origin=ev_origin, - extra=self.__extra__, - orig_bases=self.__orig_bases__) - - def __repr__(self): - if self.__origin__ is None: - return super().__repr__() - return self._tree_repr(self._subs_tree()) - - def _tree_repr(self, tree): - arg_list = [] - for arg in tree[1:]: - if arg == (): - arg_list.append('()') - elif not isinstance(arg, tuple): - arg_list.append(_type_repr(arg)) - else: - arg_list.append(arg[0]._tree_repr(arg)) - return super().__repr__() + '[%s]' % ', '.join(arg_list) - - def _subs_tree(self, tvars=None, args=None): - if self.__origin__ is None: - return self - tree_args = _subs_tree(self, tvars, args) - return (_gorg(self),) + tuple(tree_args) - - def __eq__(self, other): - if not isinstance(other, GenericMeta): - return NotImplemented - if self.__origin__ is None or other.__origin__ is None: - return self is other - return self.__tree_hash__ == other.__tree_hash__ - - def __hash__(self): - return self.__tree_hash__ - - @_tp_cache - def __getitem__(self, params): - if not isinstance(params, tuple): - params = (params,) - if not params and not _gorg(self) is Tuple: - raise TypeError( - "Parameter list to %s[...] cannot be empty" % _qualname(self)) - msg = "Parameters to generic types must be types." - params = tuple(_type_check(p, msg) for p in params) - if self is Generic: - # Generic can only be subscripted with unique type variables. - if not all(isinstance(p, TypeVar) for p in params): - raise TypeError( - "Parameters to Generic[...] must all be type variables") - if len(set(params)) != len(params): - raise TypeError( - "Parameters to Generic[...] must all be unique") - tvars = params - args = params - elif self in (Tuple, Callable): - tvars = _type_vars(params) - args = params - elif self is _Protocol: - # _Protocol is internal, don't check anything. - tvars = params - args = params - elif self.__origin__ in (Generic, _Protocol): - # Can't subscript Generic[...] or _Protocol[...]. 
- raise TypeError("Cannot subscript already-subscripted %s" % - repr(self)) - else: - # Subscripting a regular Generic subclass. - _check_generic(self, params) - tvars = _type_vars(params) - args = params - - prepend = (self,) if self.__origin__ is None else () - return self.__class__(self.__name__, - prepend + self.__bases__, - _no_slots_copy(self.__dict__), - tvars=tvars, - args=args, - origin=self, - extra=self.__extra__, - orig_bases=self.__orig_bases__) - - def __subclasscheck__(self, cls): - if self.__origin__ is not None: - if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools']: - raise TypeError("Parameterized generics cannot be used with class " - "or instance checks") - return False - if self is Generic: - raise TypeError("Class %r cannot be used with class " - "or instance checks" % self) - return super().__subclasscheck__(cls) - - def __instancecheck__(self, instance): - # Since we extend ABC.__subclasscheck__ and - # ABC.__instancecheck__ inlines the cache checking done by the - # latter, we must extend __instancecheck__ too. For simplicity - # we just skip the cache check -- instance checks for generic - # classes are supposed to be rare anyways. - return issubclass(instance.__class__, self) - - def __copy__(self): - return self.__class__(self.__name__, self.__bases__, - _no_slots_copy(self.__dict__), - self.__parameters__, self.__args__, self.__origin__, - self.__extra__, self.__orig_bases__) - - def __setattr__(self, attr, value): - # We consider all the subscripted genrics as proxies for original class - if ( - attr.startswith('__') and attr.endswith('__') or - attr.startswith('_abc_') - ): - super(GenericMeta, self).__setattr__(attr, value) - else: - super(GenericMeta, _gorg(self)).__setattr__(attr, value) - - -# Prevent checks for Generic to crash when defining Generic. -Generic = None - - -def _generic_new(base_cls, cls, *args, **kwds): - # Assure type is erased on instantiation, - # but attempt to store it in __orig_class__ - if cls.__origin__ is None: - return base_cls.__new__(cls) - else: - origin = _gorg(cls) - obj = base_cls.__new__(origin) - try: - obj.__orig_class__ = cls - except AttributeError: - pass - obj.__init__(*args, **kwds) - return obj - - -class Generic(metaclass=GenericMeta): - """Abstract base class for generic types. - - A generic type is typically declared by inheriting from - this class parameterized with one or more type variables. - For example, a generic mapping type might be defined as:: - - class Mapping(Generic[KT, VT]): - def __getitem__(self, key: KT) -> VT: - ... - # Etc. - - This class can then be used as follows:: - - def lookup_name(mapping: Mapping[KT, VT], key: KT, default: VT) -> VT: - try: - return mapping[key] - except KeyError: - return default - """ - - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, Generic): - raise TypeError("Type Generic cannot be instantiated; " - "it can be used only as a base class") - return _generic_new(cls.__next_in_mro__, cls, *args, **kwds) - - -class _TypingEmpty: - """Internal placeholder for () or []. Used by TupleMeta and CallableMeta - to allow empty list/tuple in specific places, without allowing them - to sneak in where prohibited. - """ - - -class _TypingEllipsis: - """Internal placeholder for ... 
(ellipsis).""" - - -class TupleMeta(GenericMeta): - """Metaclass for Tuple (internal).""" - - @_tp_cache - def __getitem__(self, parameters): - if self.__origin__ is not None or not _geqv(self, Tuple): - # Normal generic rules apply if this is not the first subscription - # or a subscription of a subclass. - return super().__getitem__(parameters) - if parameters == (): - return super().__getitem__((_TypingEmpty,)) - if not isinstance(parameters, tuple): - parameters = (parameters,) - if len(parameters) == 2 and parameters[1] is ...: - msg = "Tuple[t, ...]: t must be a type." - p = _type_check(parameters[0], msg) - return super().__getitem__((p, _TypingEllipsis)) - msg = "Tuple[t0, t1, ...]: each t must be a type." - parameters = tuple(_type_check(p, msg) for p in parameters) - return super().__getitem__(parameters) - - def __instancecheck__(self, obj): - if self.__args__ is None: - return isinstance(obj, tuple) - raise TypeError("Parameterized Tuple cannot be used " - "with isinstance().") - - def __subclasscheck__(self, cls): - if self.__args__ is None: - return issubclass(cls, tuple) - raise TypeError("Parameterized Tuple cannot be used " - "with issubclass().") - - -class Tuple(tuple, extra=tuple, metaclass=TupleMeta): - """Tuple type; Tuple[X, Y] is the cross-product type of X and Y. - - Example: Tuple[T1, T2] is a tuple of two elements corresponding - to type variables T1 and T2. Tuple[int, float, str] is a tuple - of an int, a float and a string. - - To specify a variable-length tuple of homogeneous type, use Tuple[T, ...]. - """ - - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, Tuple): - raise TypeError("Type Tuple cannot be instantiated; " - "use tuple() instead") - return _generic_new(tuple, cls, *args, **kwds) - - -class CallableMeta(GenericMeta): - """Metaclass for Callable (internal).""" - - def __repr__(self): - if self.__origin__ is None: - return super().__repr__() - return self._tree_repr(self._subs_tree()) - - def _tree_repr(self, tree): - if _gorg(self) is not Callable: - return super()._tree_repr(tree) - # For actual Callable (not its subclass) we override - # super()._tree_repr() for nice formatting. - arg_list = [] - for arg in tree[1:]: - if not isinstance(arg, tuple): - arg_list.append(_type_repr(arg)) - else: - arg_list.append(arg[0]._tree_repr(arg)) - if arg_list[0] == '...': - return repr(tree[0]) + '[..., %s]' % arg_list[1] - return (repr(tree[0]) + - '[[%s], %s]' % (', '.join(arg_list[:-1]), arg_list[-1])) - - def __getitem__(self, parameters): - """A thin wrapper around __getitem_inner__ to provide the latter - with hashable arguments to improve speed. - """ - - if self.__origin__ is not None or not _geqv(self, Callable): - return super().__getitem__(parameters) - if not isinstance(parameters, tuple) or len(parameters) != 2: - raise TypeError("Callable must be used as " - "Callable[[arg, ...], result].") - args, result = parameters - if args is Ellipsis: - parameters = (Ellipsis, result) - else: - if not isinstance(args, list): - raise TypeError("Callable[args, result]: args must be a list." - " Got %.100r." % (args,)) - parameters = (tuple(args), result) - return self.__getitem_inner__(parameters) - - @_tp_cache - def __getitem_inner__(self, parameters): - args, result = parameters - msg = "Callable[args, result]: result must be a type." - result = _type_check(result, msg) - if args is Ellipsis: - return super().__getitem__((_TypingEllipsis, result)) - msg = "Callable[[arg, ...], result]: each arg must be a type." 
- args = tuple(_type_check(arg, msg) for arg in args) - parameters = args + (result,) - return super().__getitem__(parameters) - - -class Callable(extra=collections_abc.Callable, metaclass=CallableMeta): - """Callable type; Callable[[int], str] is a function of (int) -> str. - - The subscription syntax must always be used with exactly two - values: the argument list and the return type. The argument list - must be a list of types or ellipsis; the return type must be a single type. - - There is no syntax to indicate optional or keyword arguments, - such function types are rarely used as callback types. - """ - - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, Callable): - raise TypeError("Type Callable cannot be instantiated; " - "use a non-abstract subclass instead") - return _generic_new(cls.__next_in_mro__, cls, *args, **kwds) - - -class _ClassVar(_FinalTypingBase, _root=True): - """Special type construct to mark class variables. - - An annotation wrapped in ClassVar indicates that a given - attribute is intended to be used as a class variable and - should not be set on instances of that class. Usage:: - - class Starship: - stats: ClassVar[Dict[str, int]] = {} # class variable - damage: int = 10 # instance variable - - ClassVar accepts only types and cannot be further subscribed. - - Note that ClassVar is not a class itself, and should not - be used with isinstance() or issubclass(). - """ - - __slots__ = ('__type__',) - - def __init__(self, tp=None, **kwds): - self.__type__ = tp - - def __getitem__(self, item): - cls = type(self) - if self.__type__ is None: - return cls(_type_check(item, - '{} accepts only single type.'.format(cls.__name__[1:])), - _root=True) - raise TypeError('{} cannot be further subscripted' - .format(cls.__name__[1:])) - - def _eval_type(self, globalns, localns): - new_tp = _eval_type(self.__type__, globalns, localns) - if new_tp == self.__type__: - return self - return type(self)(new_tp, _root=True) - - def __repr__(self): - r = super().__repr__() - if self.__type__ is not None: - r += '[{}]'.format(_type_repr(self.__type__)) - return r - - def __hash__(self): - return hash((type(self).__name__, self.__type__)) - - def __eq__(self, other): - if not isinstance(other, _ClassVar): - return NotImplemented - if self.__type__ is not None: - return self.__type__ == other.__type__ - return self is other - - -ClassVar = _ClassVar(_root=True) - - -def cast(typ, val): - """Cast a value to a type. - - This returns the value unchanged. To the type checker this - signals that the return value has the designated type, but at - runtime we intentionally don't check anything (we want this - to be as fast as possible). - """ - return val - - -def _get_defaults(func): - """Internal helper to extract the default arguments, by name.""" - try: - code = func.__code__ - except AttributeError: - # Some built-in functions don't have __code__, __defaults__, etc. 
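Because cast() above is a runtime no-op, it only signals a type to the checker; a minimal illustrative sketch::

    from typing import List, cast

    def first_str(items: List[object]) -> str:
        i = next(n for n, x in enumerate(items) if isinstance(x, str))
        return cast(str, items[i])   # unchanged at runtime; typed as str statically

    assert first_str([1, 'two', 3]) == 'two'
    assert cast(int, 'not checked') == 'not checked'   # nothing is verified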
- return {} - pos_count = code.co_argcount - arg_names = code.co_varnames - arg_names = arg_names[:pos_count] - defaults = func.__defaults__ or () - kwdefaults = func.__kwdefaults__ - res = dict(kwdefaults) if kwdefaults else {} - pos_offset = pos_count - len(defaults) - for name, value in zip(arg_names[pos_offset:], defaults): - assert name not in res - res[name] = value - return res - - -_allowed_types = (types.FunctionType, types.BuiltinFunctionType, - types.MethodType, types.ModuleType, - SlotWrapperType, MethodWrapperType, MethodDescriptorType) - - -def get_type_hints(obj, globalns=None, localns=None): - """Return type hints for an object. - - This is often the same as obj.__annotations__, but it handles - forward references encoded as string literals, and if necessary - adds Optional[t] if a default value equal to None is set. - - The argument may be a module, class, method, or function. The annotations - are returned as a dictionary. For classes, annotations include also - inherited members. - - TypeError is raised if the argument is not of a type that can contain - annotations, and an empty dictionary is returned if no annotations are - present. - - BEWARE -- the behavior of globalns and localns is counterintuitive - (unless you are familiar with how eval() and exec() work). The - search order is locals first, then globals. - - - If no dict arguments are passed, an attempt is made to use the - globals from obj, and these are also used as the locals. If the - object does not appear to have globals, an exception is raised. - - - If one dict argument is passed, it is used for both globals and - locals. - - - If two dict arguments are passed, they specify globals and - locals, respectively. - """ - - if getattr(obj, '__no_type_check__', None): - return {} - if globalns is None: - globalns = getattr(obj, '__globals__', {}) - if localns is None: - localns = globalns - elif localns is None: - localns = globalns - # Classes require a special treatment. - if isinstance(obj, type): - hints = {} - for base in reversed(obj.__mro__): - ann = base.__dict__.get('__annotations__', {}) - for name, value in ann.items(): - if value is None: - value = type(None) - if isinstance(value, str): - value = _ForwardRef(value) - value = _eval_type(value, globalns, localns) - hints[name] = value - return hints - hints = getattr(obj, '__annotations__', None) - if hints is None: - # Return empty annotations for something that _could_ have them. - if isinstance(obj, _allowed_types): - return {} - else: - raise TypeError('{!r} is not a module, class, method, ' - 'or function.'.format(obj)) - defaults = _get_defaults(obj) - hints = dict(hints) - for name, value in hints.items(): - if value is None: - value = type(None) - if isinstance(value, str): - value = _ForwardRef(value) - value = _eval_type(value, globalns, localns) - if name in defaults and defaults[name] is None: - value = Optional[value] - hints[name] = value - return hints - - -def no_type_check(arg): - """Decorator to indicate that annotations are not type hints. - - The argument must be a class or function; if it is a class, it - applies recursively to all methods and classes defined in that class - (but not to methods defined in its superclasses or subclasses). - - This mutates the function(s) or class(es) in place. 
- """ - if isinstance(arg, type): - arg_attrs = arg.__dict__.copy() - for attr, val in arg.__dict__.items(): - if val in arg.__bases__: - arg_attrs.pop(attr) - for obj in arg_attrs.values(): - if isinstance(obj, types.FunctionType): - obj.__no_type_check__ = True - if isinstance(obj, type): - no_type_check(obj) - try: - arg.__no_type_check__ = True - except TypeError: # built-in classes - pass - return arg - - -def no_type_check_decorator(decorator): - """Decorator to give another decorator the @no_type_check effect. - - This wraps the decorator with something that wraps the decorated - function in @no_type_check. - """ - - @functools.wraps(decorator) - def wrapped_decorator(*args, **kwds): - func = decorator(*args, **kwds) - func = no_type_check(func) - return func - - return wrapped_decorator - - -def _overload_dummy(*args, **kwds): - """Helper for @overload to raise when called.""" - raise NotImplementedError( - "You should not call an overloaded function. " - "A series of @overload-decorated functions " - "outside a stub module should always be followed " - "by an implementation that is not @overload-ed.") - - -def overload(func): - """Decorator for overloaded functions/methods. - - In a stub file, place two or more stub definitions for the same - function in a row, each decorated with @overload. For example: - - @overload - def utf8(value: None) -> None: ... - @overload - def utf8(value: bytes) -> bytes: ... - @overload - def utf8(value: str) -> bytes: ... - - In a non-stub file (i.e. a regular .py file), do the same but - follow it with an implementation. The implementation should *not* - be decorated with @overload. For example: - - @overload - def utf8(value: None) -> None: ... - @overload - def utf8(value: bytes) -> bytes: ... - @overload - def utf8(value: str) -> bytes: ... - def utf8(value): - # implementation goes here - """ - return _overload_dummy - - -class _ProtocolMeta(GenericMeta): - """Internal metaclass for _Protocol. - - This exists so _Protocol classes can be generic without deriving - from Generic. - """ - - def __instancecheck__(self, obj): - if _Protocol not in self.__bases__: - return super().__instancecheck__(obj) - raise TypeError("Protocols cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - if not self._is_protocol: - # No structural checks since this isn't a protocol. - return NotImplemented - - if self is _Protocol: - # Every class is a subclass of the empty protocol. - return True - - # Find all attributes defined in the protocol. - attrs = self._get_protocol_attrs() - - for attr in attrs: - if not any(attr in d.__dict__ for d in cls.__mro__): - return False - return True - - def _get_protocol_attrs(self): - # Get all Protocol base classes. - protocol_bases = [] - for c in self.__mro__: - if getattr(c, '_is_protocol', False) and c.__name__ != '_Protocol': - protocol_bases.append(c) - - # Get attributes included in protocol. - attrs = set() - for base in protocol_bases: - for attr in base.__dict__.keys(): - # Include attributes not defined in any non-protocol bases. 
- for c in self.__mro__: - if (c is not base and attr in c.__dict__ and - not getattr(c, '_is_protocol', False)): - break - else: - if (not attr.startswith('_abc_') and - attr != '__abstractmethods__' and - attr != '__annotations__' and - attr != '__weakref__' and - attr != '_is_protocol' and - attr != '__dict__' and - attr != '__args__' and - attr != '__slots__' and - attr != '_get_protocol_attrs' and - attr != '__next_in_mro__' and - attr != '__parameters__' and - attr != '__origin__' and - attr != '__orig_bases__' and - attr != '__extra__' and - attr != '__tree_hash__' and - attr != '__module__'): - attrs.add(attr) - - return attrs - - -class _Protocol(metaclass=_ProtocolMeta): - """Internal base class for protocol classes. - - This implements a simple-minded structural issubclass check - (similar but more general than the one-offs in collections.abc - such as Hashable). - """ - - __slots__ = () - - _is_protocol = True - - -# Various ABCs mimicking those in collections.abc. -# A few are simply re-exported for completeness. - -Hashable = collections_abc.Hashable # Not generic. - - -if hasattr(collections_abc, 'Awaitable'): - class Awaitable(Generic[T_co], extra=collections_abc.Awaitable): - __slots__ = () - - __all__.append('Awaitable') - - -if hasattr(collections_abc, 'Coroutine'): - class Coroutine(Awaitable[V_co], Generic[T_co, T_contra, V_co], - extra=collections_abc.Coroutine): - __slots__ = () - - __all__.append('Coroutine') - - -if hasattr(collections_abc, 'AsyncIterable'): - - class AsyncIterable(Generic[T_co], extra=collections_abc.AsyncIterable): - __slots__ = () - - class AsyncIterator(AsyncIterable[T_co], - extra=collections_abc.AsyncIterator): - __slots__ = () - - __all__.append('AsyncIterable') - __all__.append('AsyncIterator') - - -class Iterable(Generic[T_co], extra=collections_abc.Iterable): - __slots__ = () - - -class Iterator(Iterable[T_co], extra=collections_abc.Iterator): - __slots__ = () - - -class SupportsInt(_Protocol): - __slots__ = () - - @abstractmethod - def __int__(self) -> int: - pass - - -class SupportsFloat(_Protocol): - __slots__ = () - - @abstractmethod - def __float__(self) -> float: - pass - - -class SupportsComplex(_Protocol): - __slots__ = () - - @abstractmethod - def __complex__(self) -> complex: - pass - - -class SupportsBytes(_Protocol): - __slots__ = () - - @abstractmethod - def __bytes__(self) -> bytes: - pass - - -class SupportsAbs(_Protocol[T_co]): - __slots__ = () - - @abstractmethod - def __abs__(self) -> T_co: - pass - - -class SupportsRound(_Protocol[T_co]): - __slots__ = () - - @abstractmethod - def __round__(self, ndigits: int = 0) -> T_co: - pass - - -if hasattr(collections_abc, 'Reversible'): - class Reversible(Iterable[T_co], extra=collections_abc.Reversible): - __slots__ = () -else: - class Reversible(_Protocol[T_co]): - __slots__ = () - - @abstractmethod - def __reversed__(self) -> 'Iterator[T_co]': - pass - - -Sized = collections_abc.Sized # Not generic. - - -class Container(Generic[T_co], extra=collections_abc.Container): - __slots__ = () - - -if hasattr(collections_abc, 'Collection'): - class Collection(Sized, Iterable[T_co], Container[T_co], - extra=collections_abc.Collection): - __slots__ = () - - __all__.append('Collection') - - -# Callable was defined earlier. 
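The structural issubclass() check implemented by _ProtocolMeta above can be exercised directly against the Supports* protocols; a small sketch::

    from typing import SupportsAbs, SupportsFloat, SupportsInt

    assert issubclass(int, SupportsInt)        # int defines __int__
    assert issubclass(float, SupportsFloat)    # float defines __float__
    assert issubclass(complex, SupportsAbs)    # complex defines __abs__
    assert not issubclass(str, SupportsFloat)  # str defines no __float__

    # isinstance() with these protocol classes raises TypeError
    # (see _ProtocolMeta.__instancecheck__ above).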
- -if hasattr(collections_abc, 'Collection'): - class AbstractSet(Collection[T_co], - extra=collections_abc.Set): - __slots__ = () -else: - class AbstractSet(Sized, Iterable[T_co], Container[T_co], - extra=collections_abc.Set): - __slots__ = () - - -class MutableSet(AbstractSet[T], extra=collections_abc.MutableSet): - __slots__ = () - - -# NOTE: It is only covariant in the value type. -if hasattr(collections_abc, 'Collection'): - class Mapping(Collection[KT], Generic[KT, VT_co], - extra=collections_abc.Mapping): - __slots__ = () -else: - class Mapping(Sized, Iterable[KT], Container[KT], Generic[KT, VT_co], - extra=collections_abc.Mapping): - __slots__ = () - - -class MutableMapping(Mapping[KT, VT], extra=collections_abc.MutableMapping): - __slots__ = () - - -if hasattr(collections_abc, 'Reversible'): - if hasattr(collections_abc, 'Collection'): - class Sequence(Reversible[T_co], Collection[T_co], - extra=collections_abc.Sequence): - __slots__ = () - else: - class Sequence(Sized, Reversible[T_co], Container[T_co], - extra=collections_abc.Sequence): - __slots__ = () -else: - class Sequence(Sized, Iterable[T_co], Container[T_co], - extra=collections_abc.Sequence): - __slots__ = () - - -class MutableSequence(Sequence[T], extra=collections_abc.MutableSequence): - __slots__ = () - - -class ByteString(Sequence[int], extra=collections_abc.ByteString): - __slots__ = () - - -class List(list, MutableSequence[T], extra=list): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, List): - raise TypeError("Type List cannot be instantiated; " - "use list() instead") - return _generic_new(list, cls, *args, **kwds) - - -class Deque(collections.deque, MutableSequence[T], extra=collections.deque): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, Deque): - return collections.deque(*args, **kwds) - return _generic_new(collections.deque, cls, *args, **kwds) - - -class Set(set, MutableSet[T], extra=set): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, Set): - raise TypeError("Type Set cannot be instantiated; " - "use set() instead") - return _generic_new(set, cls, *args, **kwds) - - -class FrozenSet(frozenset, AbstractSet[T_co], extra=frozenset): - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, FrozenSet): - raise TypeError("Type FrozenSet cannot be instantiated; " - "use frozenset() instead") - return _generic_new(frozenset, cls, *args, **kwds) - - -class MappingView(Sized, Iterable[T_co], extra=collections_abc.MappingView): - __slots__ = () - - -class KeysView(MappingView[KT], AbstractSet[KT], - extra=collections_abc.KeysView): - __slots__ = () - - -class ItemsView(MappingView[Tuple[KT, VT_co]], - AbstractSet[Tuple[KT, VT_co]], - Generic[KT, VT_co], - extra=collections_abc.ItemsView): - __slots__ = () - - -class ValuesView(MappingView[VT_co], extra=collections_abc.ValuesView): - __slots__ = () - - -if hasattr(contextlib, 'AbstractContextManager'): - class ContextManager(Generic[T_co], extra=contextlib.AbstractContextManager): - __slots__ = () - __all__.append('ContextManager') - - -class Dict(dict, MutableMapping[KT, VT], extra=dict): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, Dict): - raise TypeError("Type Dict cannot be instantiated; " - "use dict() instead") - return _generic_new(dict, cls, *args, **kwds) - - -class DefaultDict(collections.defaultdict, MutableMapping[KT, VT], - extra=collections.defaultdict): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, DefaultDict): 
- return collections.defaultdict(*args, **kwds) - return _generic_new(collections.defaultdict, cls, *args, **kwds) - - -class Counter(collections.Counter, Dict[T, int], extra=collections.Counter): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, Counter): - return collections.Counter(*args, **kwds) - return _generic_new(collections.Counter, cls, *args, **kwds) - - -if hasattr(collections, 'ChainMap'): - # ChainMap only exists in 3.3+ - __all__.append('ChainMap') - - class ChainMap(collections.ChainMap, MutableMapping[KT, VT], - extra=collections.ChainMap): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, ChainMap): - return collections.ChainMap(*args, **kwds) - return _generic_new(collections.ChainMap, cls, *args, **kwds) - - -# Determine what base class to use for Generator. -if hasattr(collections_abc, 'Generator'): - # Sufficiently recent versions of 3.5 have a Generator ABC. - _G_base = collections_abc.Generator -else: - # Fall back on the exact type. - _G_base = types.GeneratorType - - -class Generator(Iterator[T_co], Generic[T_co, T_contra, V_co], - extra=_G_base): - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, Generator): - raise TypeError("Type Generator cannot be instantiated; " - "create a subclass instead") - return _generic_new(_G_base, cls, *args, **kwds) - - -if hasattr(collections_abc, 'AsyncGenerator'): - class AsyncGenerator(AsyncIterator[T_co], Generic[T_co, T_contra], - extra=collections_abc.AsyncGenerator): - __slots__ = () - - __all__.append('AsyncGenerator') - - -# Internal type variable used for Type[]. -CT_co = TypeVar('CT_co', covariant=True, bound=type) - - -# This is not a real generic class. Don't use outside annotations. -class Type(Generic[CT_co], extra=type): - """A special construct usable to annotate class objects. - - For example, suppose we have the following classes:: - - class User: ... # Abstract base for User classes - class BasicUser(User): ... - class ProUser(User): ... - class TeamUser(User): ... - - And a function that takes a class argument that's a subclass of - User and returns an instance of the corresponding class:: - - U = TypeVar('U', bound=User) - def new_user(user_class: Type[U]) -> U: - user = user_class() - # (Here we could write the user object to a database) - return user - - joe = new_user(BasicUser) - - At this point the type checker knows that joe has type BasicUser. - """ - - __slots__ = () - - -def _make_nmtuple(name, types): - msg = "NamedTuple('Name', [(f0, t0), (f1, t1), ...]); each t must be a type" - types = [(n, _type_check(t, msg)) for n, t in types] - nm_tpl = collections.namedtuple(name, [n for n, t in types]) - # Prior to PEP 526, only _field_types attribute was assigned. - # Now, both __annotations__ and _field_types are used to maintain compatibility. 
- nm_tpl.__annotations__ = nm_tpl._field_types = collections.OrderedDict(types) - try: - nm_tpl.__module__ = sys._getframe(2).f_globals.get('__name__', '__main__') - except (AttributeError, ValueError): - pass - return nm_tpl - - -_PY36 = sys.version_info[:2] >= (3, 6) - -# attributes prohibited to set in NamedTuple class syntax -_prohibited = ('__new__', '__init__', '__slots__', '__getnewargs__', - '_fields', '_field_defaults', '_field_types', - '_make', '_replace', '_asdict') - -_special = ('__module__', '__name__', '__qualname__', '__annotations__') - - -class NamedTupleMeta(type): - - def __new__(cls, typename, bases, ns): - if ns.get('_root', False): - return super().__new__(cls, typename, bases, ns) - if not _PY36: - raise TypeError("Class syntax for NamedTuple is only supported" - " in Python 3.6+") - types = ns.get('__annotations__', {}) - nm_tpl = _make_nmtuple(typename, types.items()) - defaults = [] - defaults_dict = {} - for field_name in types: - if field_name in ns: - default_value = ns[field_name] - defaults.append(default_value) - defaults_dict[field_name] = default_value - elif defaults: - raise TypeError("Non-default namedtuple field {field_name} cannot " - "follow default field(s) {default_names}" - .format(field_name=field_name, - default_names=', '.join(defaults_dict.keys()))) - nm_tpl.__new__.__defaults__ = tuple(defaults) - nm_tpl._field_defaults = defaults_dict - # update from user namespace without overriding special namedtuple attributes - for key in ns: - if key in _prohibited: - raise AttributeError("Cannot overwrite NamedTuple attribute " + key) - elif key not in _special and key not in nm_tpl._fields: - setattr(nm_tpl, key, ns[key]) - return nm_tpl - - -class NamedTuple(metaclass=NamedTupleMeta): - """Typed version of namedtuple. - - Usage in Python versions >= 3.6:: - - class Employee(NamedTuple): - name: str - id: int - - This is equivalent to:: - - Employee = collections.namedtuple('Employee', ['name', 'id']) - - The resulting class has extra __annotations__ and _field_types - attributes, giving an ordered dict mapping field names to types. - __annotations__ should be preferred, while _field_types - is kept to maintain pre PEP 526 compatibility. (The field names - are in the _fields attribute, which is part of the namedtuple - API.) Alternative equivalent keyword syntax is also accepted:: - - Employee = NamedTuple('Employee', name=str, id=int) - - In Python versions <= 3.5 use:: - - Employee = NamedTuple('Employee', [('name', str), ('id', int)]) - """ - _root = True - - def __new__(self, typename, fields=None, **kwargs): - if kwargs and not _PY36: - raise TypeError("Keyword syntax for NamedTuple is only supported" - " in Python 3.6+") - if fields is None: - fields = kwargs.items() - elif kwargs: - raise TypeError("Either list of fields or keywords" - " can be provided to NamedTuple, not both") - return _make_nmtuple(typename, fields) - - -def NewType(name, tp): - """NewType creates simple unique types with almost zero - runtime overhead. NewType(name, tp) is considered a subtype of tp - by static type checkers. At runtime, NewType(name, tp) returns - a dummy function that simply returns its argument. Usage:: - - UserId = NewType('UserId', int) - - def name_by_id(user_id: UserId) -> str: - ... 
- - UserId('user') # Fails type check - - name_by_id(42) # Fails type check - name_by_id(UserId(42)) # OK - - num = UserId(5) + 1 # type: int - """ - - def new_type(x): - return x - - new_type.__name__ = name - new_type.__supertype__ = tp - return new_type - - -# Python-version-specific alias (Python 2: unicode; Python 3: str) -Text = str - - -# Constant that's True when type checking, but False here. -TYPE_CHECKING = False - - -class IO(Generic[AnyStr]): - """Generic base class for TextIO and BinaryIO. - - This is an abstract, generic version of the return of open(). - - NOTE: This does not distinguish between the different possible - classes (text vs. binary, read vs. write vs. read/write, - append-only, unbuffered). The TextIO and BinaryIO subclasses - below capture the distinctions between text vs. binary, which is - pervasive in the interface; however we currently do not offer a - way to track the other distinctions in the type system. - """ - - __slots__ = () - - @abstractproperty - def mode(self) -> str: - pass - - @abstractproperty - def name(self) -> str: - pass - - @abstractmethod - def close(self) -> None: - pass - - @abstractmethod - def closed(self) -> bool: - pass - - @abstractmethod - def fileno(self) -> int: - pass - - @abstractmethod - def flush(self) -> None: - pass - - @abstractmethod - def isatty(self) -> bool: - pass - - @abstractmethod - def read(self, n: int = -1) -> AnyStr: - pass - - @abstractmethod - def readable(self) -> bool: - pass - - @abstractmethod - def readline(self, limit: int = -1) -> AnyStr: - pass - - @abstractmethod - def readlines(self, hint: int = -1) -> List[AnyStr]: - pass - - @abstractmethod - def seek(self, offset: int, whence: int = 0) -> int: - pass - - @abstractmethod - def seekable(self) -> bool: - pass - - @abstractmethod - def tell(self) -> int: - pass - - @abstractmethod - def truncate(self, size: int = None) -> int: - pass - - @abstractmethod - def writable(self) -> bool: - pass - - @abstractmethod - def write(self, s: AnyStr) -> int: - pass - - @abstractmethod - def writelines(self, lines: List[AnyStr]) -> None: - pass - - @abstractmethod - def __enter__(self) -> 'IO[AnyStr]': - pass - - @abstractmethod - def __exit__(self, type, value, traceback) -> None: - pass - - -class BinaryIO(IO[bytes]): - """Typed version of the return of open() in binary mode.""" - - __slots__ = () - - @abstractmethod - def write(self, s: Union[bytes, bytearray]) -> int: - pass - - @abstractmethod - def __enter__(self) -> 'BinaryIO': - pass - - -class TextIO(IO[str]): - """Typed version of the return of open() in text mode.""" - - __slots__ = () - - @abstractproperty - def buffer(self) -> BinaryIO: - pass - - @abstractproperty - def encoding(self) -> str: - pass - - @abstractproperty - def errors(self) -> Optional[str]: - pass - - @abstractproperty - def line_buffering(self) -> bool: - pass - - @abstractproperty - def newlines(self) -> Any: - pass - - @abstractmethod - def __enter__(self) -> 'TextIO': - pass - - -class io: - """Wrapper namespace for IO generic classes.""" - - __all__ = ['IO', 'TextIO', 'BinaryIO'] - IO = IO - TextIO = TextIO - BinaryIO = BinaryIO - - -io.__name__ = __name__ + '.io' -sys.modules[io.__name__] = io - - -Pattern = _TypeAlias('Pattern', AnyStr, type(stdlib_re.compile('')), - lambda p: p.pattern) -Match = _TypeAlias('Match', AnyStr, type(stdlib_re.match('', '')), - lambda m: m.re.pattern) - - -class re: - """Wrapper namespace for re type aliases.""" - - __all__ = ['Pattern', 'Match'] - Pattern = Pattern - Match = Match - - 
-re.__name__ = __name__ + '.re' -sys.modules[re.__name__] = re From 292deb91f4c4158c2eaaca3419a76617675689c2 Mon Sep 17 00:00:00 2001 From: Michael Lee Date: Fri, 7 Jul 2017 11:39:11 -0700 Subject: [PATCH 10/23] Address parts of code review --- .travis.yml | 4 +- test-requirements.txt | 1 + tox.ini | 3 - typing_extensions/setup.py | 19 +-- .../src_py2/test_typing_extensions.py | 160 +++++++++++++++--- .../src_py3/test_typing_extensions.py | 10 +- .../src_py3/typing_extensions.py | 5 +- 7 files changed, 153 insertions(+), 49 deletions(-) diff --git a/.travis.yml b/.travis.yml index aae74db4..0c5b465b 100644 --- a/.travis.yml +++ b/.travis.yml @@ -18,8 +18,8 @@ install: script: - export PYTHONPATH=`python -c "import sys; print('python2' if sys.version.startswith('2') else 'src')"`; py.test $PYTHONPATH - - python setup.py install; - export PYTHONPATH=`python -c "import sys; print('typing_extensions/src_py2' if sys.version.startswith('2') else 'typing_extensions/src_py3')"`; + - if [[ $TRAVIS_PYTHON_VERSION < '3.5' ]]; then python setup.py install; fi + - export PYTHONPATH=`python -c "import sys; print('typing_extensions/src_py2' if sys.version.startswith('2') else 'typing_extensions/src_py3')"`; py.test $PYTHONPATH; - if [[ $TRAVIS_PYTHON_VERSION == '3.6' ]]; then flake8; fi - if [[ $TRAVIS_PYTHON_VERSION == '3.6' ]]; then flake8 --config=.flake8-tests src/test_typing.py python2/test_typing.py typing_extensions/src_py2/test_typing_extensions.py typing_extensions/src_py3/test_typing_extensions.py; fi diff --git a/test-requirements.txt b/test-requirements.txt index f4566b45..9524d619 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -4,3 +4,4 @@ flake8-pyi; python_version >= '3.6' pytest>=2.8; python_version >= '3.3' pytest-xdist>=1.13; python_version >= '3.3' pytest-cov>=2.4.0; python_version >= '3.3' +typing>=3.6.1; python_version <= '3.4' diff --git a/tox.ini b/tox.ini index 4b029a40..5f92822c 100644 --- a/tox.ini +++ b/tox.ini @@ -25,6 +25,3 @@ exclude = src/test_typing.py, typing_extensions/src_py2/test_typing_extensions.py, typing_extensions/src_py3/test_typing_extensions.py, - # test_data should contain an exact snapshot of the standard - # library and so should be exempt from checks - typing_extensions/test_data, diff --git a/typing_extensions/setup.py b/typing_extensions/setup.py index d8fe490e..beeb187b 100644 --- a/typing_extensions/setup.py +++ b/typing_extensions/setup.py @@ -14,22 +14,22 @@ long_description = '''\ Typing -- Type Hints for Python -This is a backport of the standard library typing module to Python -versions 3.5+. The typing module has seen several changes since it was -first added in Python 3.5.0, which means people who are using 3.5.0+ -but are unable to upgrade to the latest version of Python are unable -to take advantage of some new features of the typing library, such as -typing.Type or typing.Coroutine. +This is a backport of the 'typing' module, which was provisionally added +to the standard library in Python 3.5. The typing module has seen +several changes since it was first added in Python 3.5.0, which means +people who are using 3.5+ but are unable to upgrade to the latest +version of Python cannot take advantage of some new features of the +typing library, such as typing.Type or typing.Coroutine. This module allows those users to use the latest additions to the typing module without worrying about naming conflicts with the standard library. 
Users of Python 2.7, 3.3, and 3.4 should install the typing module -from pypi and use that directly, except when writing code that needs to +from PyPi and use that directly, except when writing code that needs to be compatible across multiple versions of Python. ''' classifiers = [ - 'Development Status :: 5 - Production/Stable', + 'Development Status :: 3 - Alpha', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: Python Software Foundation License', @@ -59,8 +59,7 @@ long_description=long_description, author='Guido van Rossum, Jukka Lehtosalo, Lukasz Langa, Michael Lee', author_email='jukka.lehtosalo@iki.fi', - # TODO: Change URL - url='https://github.com/michael0x2a/typing_extensions', + url='https://github.com/python/typing', license='PSF', keywords='typing function annotations type hints hinting checking ' 'checker typehints typehinting typechecking backport', diff --git a/typing_extensions/src_py2/test_typing_extensions.py b/typing_extensions/src_py2/test_typing_extensions.py index c9f1be2a..8fc5dd84 100644 --- a/typing_extensions/src_py2/test_typing_extensions.py +++ b/typing_extensions/src_py2/test_typing_extensions.py @@ -5,12 +5,15 @@ import collections from unittest import TestCase, main, skipUnless -from typing_extensions import NoReturn +from typing_extensions import NoReturn, ClassVar +from typing_extensions import Collection, ContextManager, Counter, Deque, DefaultDict +from typing_extensions import NewType, overload import typing import typing_extensions class BaseTestCase(TestCase): + def assertIsSubclass(self, cls, class_or_tuple, msg=None): if not issubclass(cls, class_or_tuple): message = '%r is not a subclass of %r' % (cls, class_or_tuple) @@ -25,10 +28,6 @@ def assertNotIsSubclass(self, cls, class_or_tuple, msg=None): message += ' : %s' % msg raise self.failureException(message) - def clear_caches(self): - for f in typing._cleanups: - f() - class Employee: pass @@ -71,28 +70,64 @@ def test_cannot_instantiate(self): type(NoReturn)() +class ClassVarTests(BaseTestCase): + + def test_basics(self): + with self.assertRaises(TypeError): + ClassVar[1] + with self.assertRaises(TypeError): + ClassVar[int, str] + with self.assertRaises(TypeError): + ClassVar[int][str] + + def test_repr(self): + self.assertEqual(repr(ClassVar), 'typing.ClassVar') + cv = ClassVar[int] + self.assertEqual(repr(cv), 'typing.ClassVar[int]') + cv = ClassVar[Employee] + self.assertEqual(repr(cv), 'typing.ClassVar[%s.Employee]' % __name__) + + def test_cannot_subclass(self): + with self.assertRaises(TypeError): + class C(type(ClassVar)): + pass + with self.assertRaises(TypeError): + class C(type(ClassVar[int])): + pass + + def test_cannot_init(self): + with self.assertRaises(TypeError): + ClassVar() + with self.assertRaises(TypeError): + type(ClassVar)() + with self.assertRaises(TypeError): + type(ClassVar[Optional[int]])() + + def test_no_isinstance(self): + with self.assertRaises(TypeError): + isinstance(1, ClassVar[int]) + with self.assertRaises(TypeError): + issubclass(int, ClassVar) + + class CollectionsAbcTests(BaseTestCase): def test_collection(self): - self.assertIsInstance(tuple(), typing_extensions.Collection) - self.assertIsInstance(frozenset(), typing_extensions.Collection) - self.assertIsSubclass(dict, typing_extensions.Collection) - self.assertNotIsInstance(42, typing_extensions.Collection) + self.assertIsInstance(tuple(), Collection) + self.assertIsInstance(frozenset(), Collection) + self.assertIsSubclass(dict, Collection) + 
self.assertNotIsInstance(42, Collection) def test_collection_instantiation(self): - class MyAbstractCollection(typing_extensions.Collection[int]): + class MyAbstractCollection(Collection[int]): pass - class MyCollection(typing_extensions.Collection[int]): + class MyCollection(Collection[int]): def __contains__(self, item): pass def __iter__(self): pass def __len__(self): pass - self.assertIsSubclass( - type(MyCollection()), - typing_extensions.Collection) - self.assertIsSubclass( - MyCollection, - typing_extensions.Collection) + self.assertIsSubclass(type(MyCollection()), Collection) + self.assertIsSubclass(MyCollection, Collection) with self.assertRaises(TypeError): MyAbstractCollection() @@ -102,11 +137,95 @@ def manager(): yield 42 cm = manager() - self.assertIsInstance(cm, typing_extensions.ContextManager) - self.assertNotIsInstance(42, typing_extensions.ContextManager) + self.assertIsInstance(cm, ContextManager) + self.assertNotIsInstance(42, ContextManager) + + with self.assertRaises(TypeError): + isinstance(42, ContextManager[int]) + with self.assertRaises(TypeError): + isinstance(cm, ContextManager[int]) + with self.assertRaises(TypeError): + issubclass(type(cm), ContextManager[int]) + + def test_counter(self): + self.assertIsSubclass(collections.Counter, Counter) + self.assertIs(type(Counter()), collections.Counter) + self.assertIs(type(Counter[T]()), collections.Counter) + self.assertIs(type(Counter[int]()), collections.Counter) + + class A(Counter[int]): pass + class B(Counter[T]): pass + + self.assertIsInstance(A(), collections.deque) + self.assertIs(type(B[int]()), GenericSubclass) + + def test_deque(self): + self.assertIsSubclass(collections.deque, Deque) + self.assertIs(type(Deque()), collections.deque) + self.assertIs(type(Deque[T]()), collections.deque) + self.assertIs(type(Deque[int]()), collections.deque) + + class A(Deque[int]): pass + class B(Deque[T]): pass + + self.assertIsInstance(A(), collections.deque) + self.assertIs(type(B[int]()), GenericSubclass) + + def test_defaultdict_instantiation(self): + self.assertIsSubclass(collections.defaultdict, DefaultDict) + self.assertIs(type(DefaultDict()), collections.defaultdict) + self.assertIs(type(DefaultDict[KT, VT]()), collections.defaultdict) + self.assertIs(type(DefaultDict[str, int]()), collections.defaultdict) + + class A(DefaultDict[str, int]): pass + class B(DefaultDict[KT, VT])): pass + + self.assertIsInstance(A(), collections.defaultdict) + self.assertIs(type(B[str, int]()), collections.defaultdict) + + +class NewTypeTests(BaseTestCase): + + def test_basic(self): + UserId = NewType('UserId', int) + UserName = NewType('UserName', str) + self.assertIsInstance(UserId(5), int) + self.assertIsInstance(UserName('Joe'), type('Joe')) + self.assertEqual(UserId(5) + 1, 6) + + def test_errors(self): + UserId = NewType('UserId', int) + UserName = NewType('UserName', str) + with self.assertRaises(TypeError): + issubclass(UserId, int) + with self.assertRaises(TypeError): + class D(UserName): + pass + + +class OverloadTests(BaseTestCase): + + def test_overload_fails(self): + with self.assertRaises(RuntimeError): + @overload + def blah(): + pass + + blah() + + def test_overload_succeeds(self): + @overload + def blah(): + pass + + def blah(): + pass + + blah() class AllTests(BaseTestCase): + def test_typing_extensions_includes_standard(self): a = typing_extensions.__all__ self.assertIn('ClassVar', a) @@ -118,7 +237,6 @@ def test_typing_extensions_includes_standard(self): self.assertIn('overload', a) self.assertIn('Text', a) 
        self.assertIn('TYPE_CHECKING', a)
-        self.assertIn('Collection', a)
 
     def test_typing_extensions_defers_when_possible(self):
@@ -131,4 +249,4 @@
 
 
 if __name__ == '__main__':
-    main(argv=[sys.argv[0]] + sys.argv[2:])
+    main()
diff --git a/typing_extensions/src_py3/test_typing_extensions.py b/typing_extensions/src_py3/test_typing_extensions.py
index f0588fe9..56afaedf 100644
--- a/typing_extensions/src_py3/test_typing_extensions.py
+++ b/typing_extensions/src_py3/test_typing_extensions.py
@@ -13,10 +13,7 @@
 from typing_extensions import NoReturn, ClassVar, Type, NewType
 import typing
 import typing_extensions
-try:
-    import collections.abc as collections_abc
-except ImportError:
-    import collections as collections_abc  # Fallback for PY3.2.
+import collections.abc as collections_abc
 
 TYPING_V2 = sys.version_info >= (3, 5, 1)
 TYPING_V3 = sys.version_info >= (3, 5, 3)
@@ -38,10 +35,6 @@ def assertNotIsSubclass(self, cls, class_or_tuple, msg=None):
         message += ' : %s' % msg
         raise self.failureException(message)
 
-    def clear_caches(self):
-        for f in typing._cleanups:
-            f()
-
 
 class Employee:
     pass
@@ -632,5 +625,4 @@ def test_typing_extensions_defers_when_possible(self):
 
 
 if __name__ == '__main__':
-    # main(argv=[sys.argv[0]] + sys.argv[2:])
     main()
diff --git a/typing_extensions/src_py3/typing_extensions.py b/typing_extensions/src_py3/typing_extensions.py
index 32f6fc9f..3d077202 100644
--- a/typing_extensions/src_py3/typing_extensions.py
+++ b/typing_extensions/src_py3/typing_extensions.py
@@ -3,10 +3,7 @@
 import contextlib
 import sys
 import typing
-try:
-    import collections.abc as collections_abc
-except ImportError:
-    import collections as collections_abc  # Fallback for PY3.2.
+import collections.abc as collections_abc
 
 if hasattr(typing, '_generic_new'):
     _generic_new = typing._generic_new

From 11e98eeda279fa794a0407639c36f7b8717db4cd Mon Sep 17 00:00:00 2001
From: Michael Lee
Date: Fri, 7 Jul 2017 12:01:39 -0700
Subject: [PATCH 11/23] Fix tests; tweak setup.py text

---
 typing_extensions/setup.py                          | 9 ++++-----
 typing_extensions/src_py2/test_typing_extensions.py | 2 +-
 2 files changed, 5 insertions(+), 6 deletions(-)

diff --git a/typing_extensions/setup.py b/typing_extensions/setup.py
index beeb187b..3834c4a7 100644
--- a/typing_extensions/setup.py
+++ b/typing_extensions/setup.py
@@ -15,11 +15,10 @@
 Typing -- Type Hints for Python
 
 This is a backport of the 'typing' module, which was provisionally added
-to the standard library in Python 3.5. The typing module has seen
-several changes since it was first added in Python 3.5.0, which means
-people who are using 3.5+ but are unable to upgrade to the latest
-version of Python cannot take advantage of some new features of the
-typing library, such as typing.Type or typing.Coroutine.
+to the standard library in Python 3.5. The provisional status will be
+lifted in Python 3.7, but people who are using Python 3.5 - 3.6 and are
+unable to upgrade cannot take advantage of some new additions to the typing
+library, such as typing.Text or typing.Coroutine.
 
 This module allows those users to use the latest additions to the typing
 module without worrying about naming conflicts with the standard library.
diff --git a/typing_extensions/src_py2/test_typing_extensions.py b/typing_extensions/src_py2/test_typing_extensions.py index 8fc5dd84..efc39cbe 100644 --- a/typing_extensions/src_py2/test_typing_extensions.py +++ b/typing_extensions/src_py2/test_typing_extensions.py @@ -178,7 +178,7 @@ def test_defaultdict_instantiation(self): self.assertIs(type(DefaultDict[str, int]()), collections.defaultdict) class A(DefaultDict[str, int]): pass - class B(DefaultDict[KT, VT])): pass + class B(DefaultDict[KT, VT]): pass self.assertIsInstance(A(), collections.defaultdict) self.assertIs(type(B[str, int]()), collections.defaultdict) From 9cc623688a193eb40fc1beab80607d48a7a7ce5a Mon Sep 17 00:00:00 2001 From: Michael Lee Date: Fri, 7 Jul 2017 12:20:17 -0700 Subject: [PATCH 12/23] Add missing definitions in Python 2 tests --- .../src_py2/test_typing_extensions.py | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/typing_extensions/src_py2/test_typing_extensions.py b/typing_extensions/src_py2/test_typing_extensions.py index efc39cbe..b1226610 100644 --- a/typing_extensions/src_py2/test_typing_extensions.py +++ b/typing_extensions/src_py2/test_typing_extensions.py @@ -81,11 +81,15 @@ def test_basics(self): ClassVar[int][str] def test_repr(self): - self.assertEqual(repr(ClassVar), 'typing.ClassVar') + if hasattr(typing, 'ClassVar'): + mod_name = 'typing' + else: + mod_name = 'typing_extensions' + self.assertEqual(repr(ClassVar), mod_name + '.ClassVar') cv = ClassVar[int] - self.assertEqual(repr(cv), 'typing.ClassVar[int]') + self.assertEqual(repr(cv), mod_name + '.ClassVar[int]') cv = ClassVar[Employee] - self.assertEqual(repr(cv), 'typing.ClassVar[%s.Employee]' % __name__) + self.assertEqual(repr(cv), mod_name + '.ClassVar[%s.Employee]' % __name__) def test_cannot_subclass(self): with self.assertRaises(TypeError): @@ -110,6 +114,11 @@ def test_no_isinstance(self): issubclass(int, ClassVar) +T = typing.TypeVar('T') +KT = typing.TypeVar('KT') +VT = typing.TypeVar('VT') + + class CollectionsAbcTests(BaseTestCase): def test_collection(self): From ea77bf90c091209f03c14055d2b88aacee061fb1 Mon Sep 17 00:00:00 2001 From: Michael Lee Date: Fri, 7 Jul 2017 12:33:23 -0700 Subject: [PATCH 13/23] A little closer --- .../src_py2/test_typing_extensions.py | 20 +++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/typing_extensions/src_py2/test_typing_extensions.py b/typing_extensions/src_py2/test_typing_extensions.py index b1226610..f37799b4 100644 --- a/typing_extensions/src_py2/test_typing_extensions.py +++ b/typing_extensions/src_py2/test_typing_extensions.py @@ -12,6 +12,11 @@ import typing_extensions +T = typing.TypeVar('T') +KT = typing.TypeVar('KT') +VT = typing.TypeVar('VT') + + class BaseTestCase(TestCase): def assertIsSubclass(self, cls, class_or_tuple, msg=None): @@ -105,7 +110,7 @@ def test_cannot_init(self): with self.assertRaises(TypeError): type(ClassVar)() with self.assertRaises(TypeError): - type(ClassVar[Optional[int]])() + type(ClassVar[typing.Optional[int]])() def test_no_isinstance(self): with self.assertRaises(TypeError): @@ -114,11 +119,6 @@ def test_no_isinstance(self): issubclass(int, ClassVar) -T = typing.TypeVar('T') -KT = typing.TypeVar('KT') -VT = typing.TypeVar('VT') - - class CollectionsAbcTests(BaseTestCase): def test_collection(self): @@ -165,8 +165,8 @@ def test_counter(self): class A(Counter[int]): pass class B(Counter[T]): pass - self.assertIsInstance(A(), collections.deque) - self.assertIs(type(B[int]()), GenericSubclass) + 
self.assertIsInstance(A(), collections.Counter) + self.assertIs(type(B[int]()), B) def test_deque(self): self.assertIsSubclass(collections.deque, Deque) @@ -178,7 +178,7 @@ class A(Deque[int]): pass class B(Deque[T]): pass self.assertIsInstance(A(), collections.deque) - self.assertIs(type(B[int]()), GenericSubclass) + self.assertIs(type(B[int]()), B) def test_defaultdict_instantiation(self): self.assertIsSubclass(collections.defaultdict, DefaultDict) @@ -190,7 +190,7 @@ class A(DefaultDict[str, int]): pass class B(DefaultDict[KT, VT]): pass self.assertIsInstance(A(), collections.defaultdict) - self.assertIs(type(B[str, int]()), collections.defaultdict) + self.assertIs(type(B[str, int]()), B) class NewTypeTests(BaseTestCase): From 5e243edcd4ba3e55690abb82141ec2c9f9c8e3fc Mon Sep 17 00:00:00 2001 From: Michael Lee Date: Fri, 7 Jul 2017 13:02:25 -0700 Subject: [PATCH 14/23] Whoops, forgot old-style classes were a thing --- typing_extensions/src_py2/test_typing_extensions.py | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/typing_extensions/src_py2/test_typing_extensions.py b/typing_extensions/src_py2/test_typing_extensions.py index f37799b4..c8b3ed59 100644 --- a/typing_extensions/src_py2/test_typing_extensions.py +++ b/typing_extensions/src_py2/test_typing_extensions.py @@ -34,7 +34,7 @@ def assertNotIsSubclass(self, cls, class_or_tuple, msg=None): raise self.failureException(message) -class Employee: +class Employee(object): pass @@ -86,15 +86,11 @@ def test_basics(self): ClassVar[int][str] def test_repr(self): - if hasattr(typing, 'ClassVar'): - mod_name = 'typing' - else: - mod_name = 'typing_extensions' - self.assertEqual(repr(ClassVar), mod_name + '.ClassVar') + self.assertEqual(repr(ClassVar), 'typing.ClassVar') cv = ClassVar[int] - self.assertEqual(repr(cv), mod_name + '.ClassVar[int]') + self.assertEqual(repr(cv), 'typing.ClassVar[int]') cv = ClassVar[Employee] - self.assertEqual(repr(cv), mod_name + '.ClassVar[%s.Employee]' % __name__) + self.assertEqual(repr(cv), 'typing.ClassVar[%s.Employee]' % __name__) def test_cannot_subclass(self): with self.assertRaises(TypeError): From eac513cde2831255c5b99c82f168133369026224 Mon Sep 17 00:00:00 2001 From: Michael Lee Date: Fri, 7 Jul 2017 13:21:50 -0700 Subject: [PATCH 15/23] Add check to see if cls.__bases__ is erased --- typing_extensions/src_py3/test_typing_extensions.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/typing_extensions/src_py3/test_typing_extensions.py b/typing_extensions/src_py3/test_typing_extensions.py index 56afaedf..10d1b501 100644 --- a/typing_extensions/src_py3/test_typing_extensions.py +++ b/typing_extensions/src_py3/test_typing_extensions.py @@ -442,6 +442,8 @@ class C(typing_extensions.Counter[T]): ... 
if TYPING_V3: self.assertIs(type(C[int]()), C) + self.assertEqual(C.__bases__, (typing.Counter,)) + def test_counter_subclass_instantiation(self): class MyCounter(typing_extensions.Counter[int]): From ea52ad82b8417167bb03c834eb0c849051b08ae8 Mon Sep 17 00:00:00 2001 From: Michael Lee Date: Fri, 7 Jul 2017 13:28:42 -0700 Subject: [PATCH 16/23] Fix import --- typing_extensions/README.md | 2 +- typing_extensions/src_py3/test_typing_extensions.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/typing_extensions/README.md b/typing_extensions/README.md index 0360fe1a..ee61568a 100644 --- a/typing_extensions/README.md +++ b/typing_extensions/README.md @@ -1,7 +1,7 @@ # Typing Extensions The `typing_extensions` module contains backports of recent changes -to the `typing` module that were not present in older versions of +to the `typing` module that are not present in older versions of `typing`. This module is intended to be used mainly by people who are using diff --git a/typing_extensions/src_py3/test_typing_extensions.py b/typing_extensions/src_py3/test_typing_extensions.py index 10d1b501..cc2927c3 100644 --- a/typing_extensions/src_py3/test_typing_extensions.py +++ b/typing_extensions/src_py3/test_typing_extensions.py @@ -442,7 +442,7 @@ class C(typing_extensions.Counter[T]): ... if TYPING_V3: self.assertIs(type(C[int]()), C) - self.assertEqual(C.__bases__, (typing.Counter,)) + self.assertEqual(C.__bases__, (typing_extensions.Counter,)) def test_counter_subclass_instantiation(self): From dd9f31cd16230b7b8f4709bc45da41ef09ec5abc Mon Sep 17 00:00:00 2001 From: Michael Lee Date: Fri, 7 Jul 2017 14:33:48 -0700 Subject: [PATCH 17/23] Consolidate and document version checks in py3 tests --- .../src_py3/test_typing_extensions.py | 41 +++++++++++++------ 1 file changed, 28 insertions(+), 13 deletions(-) diff --git a/typing_extensions/src_py3/test_typing_extensions.py b/typing_extensions/src_py3/test_typing_extensions.py index cc2927c3..6ef52c25 100644 --- a/typing_extensions/src_py3/test_typing_extensions.py +++ b/typing_extensions/src_py3/test_typing_extensions.py @@ -15,9 +15,29 @@ import typing_extensions import collections.abc as collections_abc -TYPING_V2 = sys.version_info >= (3, 5, 1) -TYPING_V3 = sys.version_info >= (3, 5, 3) -TYPING_V4 = sys.version_info >= (3, 6, 1) +TYPING_LATEST = sys.version_info[:3] < (3, 5, 0) +TYPING_V2 = TYPING_LATEST or sys.version_info[:3] >= (3, 5, 1) +TYPING_V3 = TYPING_LATEST or sys.version_info[:3] >= (3, 5, 3) +TYPING_V4 = TYPING_LATEST or sys.version_info[:3] >= (3, 6, 1) + +# For typing versions where issubclass(...) and +# isinstance(...) checks are forbidden. +# +# See https://github.com/python/typing/issues/136 +# and https://github.com/python/typing/pull/283 +SUBCLASS_CHECK_FORBIDDEN = TYPING_V3 + +# For typing versions where instantiating collection +# types are allowed. +# +# See https://github.com/python/typing/issues/367 +CAN_INSTANTIATE_COLLECTIONS = TYPING_V4 + +# For Python versions supporting async/await and friends. +ASYNCIO = sys.version_info[:3] >= (3, 5, 0) + +# For checks reliant on Python 3.6 syntax changes (e.g. 
classvar) +PY36 = sys.version_info[:3] >= (3, 6, 0) class BaseTestCase(TestCase): @@ -50,7 +70,7 @@ def test_noreturn_subclass_type_error_1(self): with self.assertRaises(TypeError): issubclass(Employee, NoReturn) - @skipUnless(TYPING_V3, "Behavior added in typing v3") + @skipUnless(SUBCLASS_CHECK_FORBIDDEN, "Behavior added in typing v3") def test_noreturn_subclass_type_error_2(self): with self.assertRaises(TypeError): issubclass(NoReturn, Employee) @@ -69,7 +89,7 @@ def test_cannot_subclass(self): with self.assertRaises(TypeError): class A(NoReturn): pass - if TYPING_V3: + if SUBCLASS_CHECK_FORBIDDEN: with self.assertRaises(TypeError): class A(type(NoReturn)): pass @@ -102,7 +122,7 @@ def test_repr(self): cv = ClassVar[Employee] self.assertEqual(repr(cv), mod_name + '.ClassVar[%s.Employee]' % __name__) - @skipUnless(TYPING_V3, "Behavior added in typing v3") + @skipUnless(SUBCLASS_CHECK_FORBIDDEN, "Behavior added in typing v3") def test_cannot_subclass(self): with self.assertRaises(TypeError): class C(type(ClassVar)): @@ -152,8 +172,6 @@ def blah(): blah() -ASYNCIO = sys.version_info[:2] >= (3, 5, 0) - ASYNCIO_TESTS = """ import asyncio from typing import Iterable @@ -203,8 +221,6 @@ async def __aexit__(self, etype, eval, tb): asyncio = None AwaitableWrapper = AsyncIteratorWrapper = ACM = object -PY36 = sys.version_info[:2] >= (3, 6) - PY36_TESTS = """ from test import ann_module, ann_module2, ann_module3 from typing_extensions import AsyncContextManager @@ -382,7 +398,7 @@ class MyDeque(typing_extensions.Deque[int]): ... def test_counter(self): self.assertIsSubclass(collections.Counter, typing_extensions.Counter) - @skipUnless(TYPING_V4, "Behavior added in typing v4") + @skipUnless(CAN_INSTANTIATE_COLLECTIONS, "Behavior added in typing v4") def test_defaultdict_instantiation(self): self.assertIs( type(typing_extensions.DefaultDict()), @@ -441,8 +457,7 @@ def test_counter_instantiation(self): class C(typing_extensions.Counter[T]): ... if TYPING_V3: self.assertIs(type(C[int]()), C) - - self.assertEqual(C.__bases__, (typing_extensions.Counter,)) + self.assertEqual(C.__bases__, (typing_extensions.Counter,)) def test_counter_subclass_instantiation(self): From 31de56114afbdead76d3f18c230d121664afadb0 Mon Sep 17 00:00:00 2001 From: Michael Lee Date: Fri, 7 Jul 2017 14:51:25 -0700 Subject: [PATCH 18/23] Test version check pin --- typing_extensions/src_py3/test_typing_extensions.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/typing_extensions/src_py3/test_typing_extensions.py b/typing_extensions/src_py3/test_typing_extensions.py index 6ef52c25..1ee9c3fc 100644 --- a/typing_extensions/src_py3/test_typing_extensions.py +++ b/typing_extensions/src_py3/test_typing_extensions.py @@ -34,10 +34,10 @@ CAN_INSTANTIATE_COLLECTIONS = TYPING_V4 # For Python versions supporting async/await and friends. -ASYNCIO = sys.version_info[:3] >= (3, 5, 0) +ASYNCIO = sys.version_info[:2] >= (3, 5) # For checks reliant on Python 3.6 syntax changes (e.g. 
classvar) -PY36 = sys.version_info[:3] >= (3, 6, 0) +PY36 = sys.version_info[:2] >= (3, 6) class BaseTestCase(TestCase): From 67a20cb54f8b100fba6700a98c986819b6ade3a8 Mon Sep 17 00:00:00 2001 From: Michael Lee Date: Fri, 7 Jul 2017 15:01:52 -0700 Subject: [PATCH 19/23] Special-case quirk with AsyncIterator --- .../src_py3/test_typing_extensions.py | 37 ++++++++++--------- 1 file changed, 19 insertions(+), 18 deletions(-) diff --git a/typing_extensions/src_py3/test_typing_extensions.py b/typing_extensions/src_py3/test_typing_extensions.py index 1ee9c3fc..5b7c31af 100644 --- a/typing_extensions/src_py3/test_typing_extensions.py +++ b/typing_extensions/src_py3/test_typing_extensions.py @@ -16,22 +16,22 @@ import collections.abc as collections_abc TYPING_LATEST = sys.version_info[:3] < (3, 5, 0) -TYPING_V2 = TYPING_LATEST or sys.version_info[:3] >= (3, 5, 1) -TYPING_V3 = TYPING_LATEST or sys.version_info[:3] >= (3, 5, 3) -TYPING_V4 = TYPING_LATEST or sys.version_info[:3] >= (3, 6, 1) +TYPING_3_5_1 = TYPING_LATEST or sys.version_info[:3] >= (3, 5, 1) +TYPING_3_5_3 = TYPING_LATEST or sys.version_info[:3] >= (3, 5, 3) +TYPING_3_6_1 = TYPING_LATEST or sys.version_info[:3] >= (3, 6, 1) # For typing versions where issubclass(...) and # isinstance(...) checks are forbidden. # # See https://github.com/python/typing/issues/136 # and https://github.com/python/typing/pull/283 -SUBCLASS_CHECK_FORBIDDEN = TYPING_V3 +SUBCLASS_CHECK_FORBIDDEN = TYPING_3_5_3 # For typing versions where instantiating collection # types are allowed. # # See https://github.com/python/typing/issues/367 -CAN_INSTANTIATE_COLLECTIONS = TYPING_V4 +CAN_INSTANTIATE_COLLECTIONS = TYPING_3_6_1 # For Python versions supporting async/await and friends. ASYNCIO = sys.version_info[:2] >= (3, 5) @@ -70,7 +70,7 @@ def test_noreturn_subclass_type_error_1(self): with self.assertRaises(TypeError): issubclass(Employee, NoReturn) - @skipUnless(SUBCLASS_CHECK_FORBIDDEN, "Behavior added in typing v3") + @skipUnless(SUBCLASS_CHECK_FORBIDDEN, "Behavior added in typing 3.5.3") def test_noreturn_subclass_type_error_2(self): with self.assertRaises(TypeError): issubclass(NoReturn, Employee) @@ -122,7 +122,7 @@ def test_repr(self): cv = ClassVar[Employee] self.assertEqual(repr(cv), mod_name + '.ClassVar[%s.Employee]' % __name__) - @skipUnless(SUBCLASS_CHECK_FORBIDDEN, "Behavior added in typing v3") + @skipUnless(SUBCLASS_CHECK_FORBIDDEN, "Behavior added in typing 3.5.3") def test_cannot_subclass(self): with self.assertRaises(TypeError): class C(type(ClassVar)): @@ -367,7 +367,8 @@ def test_async_iterable(self): def test_async_iterator(self): base_it = range(10) # type: Iterator[int] it = AsyncIteratorWrapper(base_it) - self.assertIsInstance(it, typing_extensions.AsyncIterator) + if TYPING_3_5_1: + self.assertIsInstance(it, typing_extensions.AsyncIterator) self.assertNotIsInstance(42, typing_extensions.AsyncIterator) def test_collection(self): @@ -376,7 +377,7 @@ def test_collection(self): self.assertIsSubclass(dict, typing_extensions.Collection) self.assertNotIsInstance(42, typing_extensions.Collection) - @skipUnless(TYPING_V2, "Behavior added in typing v2") + @skipUnless(TYPING_3_5_1, "Behavior added in typing 3.5.1+") def test_collection_instantiation(self): class MyCollection(typing_extensions.Collection[int]): def __contains__(self, item): ... @@ -398,7 +399,7 @@ class MyDeque(typing_extensions.Deque[int]): ... 
def test_counter(self): self.assertIsSubclass(collections.Counter, typing_extensions.Counter) - @skipUnless(CAN_INSTANTIATE_COLLECTIONS, "Behavior added in typing v4") + @skipUnless(CAN_INSTANTIATE_COLLECTIONS, "Behavior added in typing 3.6.1") def test_defaultdict_instantiation(self): self.assertIs( type(typing_extensions.DefaultDict()), @@ -419,7 +420,7 @@ class MyDefDict(typing_extensions.DefaultDict[str, int]): self.assertIsInstance(dd, MyDefDict) self.assertIsSubclass(MyDefDict, collections.defaultdict) - if TYPING_V3: + if TYPING_3_5_3: self.assertNotIsSubclass(collections.defaultdict, MyDefDict) def test_chainmap_instantiation(self): @@ -427,7 +428,7 @@ def test_chainmap_instantiation(self): self.assertIs(type(typing_extensions.ChainMap[KT, VT]()), collections.ChainMap) self.assertIs(type(typing_extensions.ChainMap[str, int]()), collections.ChainMap) class CM(typing_extensions.ChainMap[KT, VT]): ... - if TYPING_V3: + if TYPING_3_5_3: self.assertIs(type(CM[int, str]()), CM) def test_chainmap_subclass(self): @@ -439,7 +440,7 @@ class MyChainMap(typing_extensions.ChainMap[str, int]): self.assertIsInstance(cm, MyChainMap) self.assertIsSubclass(MyChainMap, collections.ChainMap) - if TYPING_V3: + if TYPING_3_5_3: self.assertNotIsSubclass(collections.ChainMap, MyChainMap) def test_deque_instantiation(self): @@ -447,7 +448,7 @@ def test_deque_instantiation(self): self.assertIs(type(typing_extensions.Deque[T]()), collections.deque) self.assertIs(type(typing_extensions.Deque[int]()), collections.deque) class D(typing_extensions.Deque[T]): ... - if TYPING_V3: + if TYPING_3_5_3: self.assertIs(type(D[int]()), D) def test_counter_instantiation(self): @@ -455,7 +456,7 @@ def test_counter_instantiation(self): self.assertIs(type(typing_extensions.Counter[T]()), collections.Counter) self.assertIs(type(typing_extensions.Counter[int]()), collections.Counter) class C(typing_extensions.Counter[T]): ... 
- if TYPING_V3: + if TYPING_3_5_3: self.assertIs(type(C[int]()), C) self.assertEqual(C.__bases__, (typing_extensions.Counter,)) @@ -467,7 +468,7 @@ class MyCounter(typing_extensions.Counter[int]): d = MyCounter() self.assertIsInstance(d, MyCounter) self.assertIsInstance(d, collections.Counter) - if TYPING_V2: + if TYPING_3_5_1: self.assertIsInstance(d, typing_extensions.Counter) @skipUnless(PY36, 'Python 3.6 required') @@ -536,9 +537,9 @@ def manager(): cm = manager() self.assertNotIsInstance(cm, typing_extensions.AsyncContextManager) - if TYPING_V3: + if TYPING_3_5_3: self.assertEqual(typing_extensions.AsyncContextManager[int].__args__, (int,)) - if TYPING_V4: + if TYPING_3_6_1: with self.assertRaises(TypeError): isinstance(42, typing_extensions.AsyncContextManager[int]) with self.assertRaises(TypeError): From 69872471a961e122806ff2e6c57404cb6fab6aac Mon Sep 17 00:00:00 2001 From: Michael Lee Date: Fri, 7 Jul 2017 16:04:59 -0700 Subject: [PATCH 20/23] Convert readme to RST format; adjust text --- typing_extensions/README.md | 66 --------------------------- typing_extensions/README.rst | 86 ++++++++++++++++++++++++++++++++++++ typing_extensions/setup.py | 29 +++++++----- 3 files changed, 103 insertions(+), 78 deletions(-) delete mode 100644 typing_extensions/README.md create mode 100644 typing_extensions/README.rst diff --git a/typing_extensions/README.md b/typing_extensions/README.md deleted file mode 100644 index ee61568a..00000000 --- a/typing_extensions/README.md +++ /dev/null @@ -1,66 +0,0 @@ -# Typing Extensions - -The `typing_extensions` module contains backports of recent changes -to the `typing` module that are not present in older versions of -`typing`. - -This module is intended to be used mainly by people who are using -Python 3.5+, where the `typing` module is a part of the standard -library and cannot be updated to the latest version on PyPi. - -Users of other Python versions should continue to install and use -use the `typing` module from PyPi instead of using this one, unless -they are specifically writing code intended to be compatible with -multiple versions of Python. - -## Backported items - -This module contains the following backported items: - -### All Python versions: - -- `ClassVar` -- `Collection` -- `ContextManager` -- `Counter` -- `DefaultDict` -- `Deque` -- `NewType` -- `NoReturn` -- `overload` (note that older versions of `typing` only let you use `overload` in stubs) -- `Text` -- `Type` -- `TYPE_CHECKING` - -### Python 3.3+ only: - -- `ChainMap` - -### Python 3.5+ only: - -- `AsyncIterable` -- `AsyncIterator` -- `AsyncContextManager` -- `Awaitable` -- `Coroutine` - -### Python 3.6+ only: - -- `AsyncGenerator` - -## Other Notes and Limitations - -There are a few types who's interface was modified between different -versions of typing. For example, `typing.Sequence` was modified to -subclass `typing.Reversible` as of Python 3.5.3. - -These changes are _not_ backported to prevent subtle compatibility -issues when mixing the differing implementations of modified classes. - -## Running tests - -To run tests, navigate into the appropriate source directory and run -`test_typing_extensions.py`. You will also need to install the latest -version of `typing` if you are using a version of Python that does not -include `typing` as a part of the standard library. 
-
diff --git a/typing_extensions/README.rst b/typing_extensions/README.rst
new file mode 100644
index 00000000..15bbc3a7
--- /dev/null
+++ b/typing_extensions/README.rst
@@ -0,0 +1,86 @@
+=================
+Typing Extensions
+=================
+
+.. image:: https://badges.gitter.im/python/typing.svg
+   :alt: Chat at https://gitter.im/python/typing
+   :target: https://gitter.im/python/typing?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge
+
+Overview
+========
+
+The ``typing_extensions`` module contains both backports of changes made to
+the ``typing`` module since Python 3.5.0 as well as experimental types that
+will be eventually added to the ``typing`` module once stabilized.
+
+This module is intended to be used by people who:
+
+1. Are using Python 3.5+ and cannot upgrade to newer versions of Python.
+   Since the ``typing`` module was (provisionally) added to the Python standard
+   library in 3.5, users who are unable to upgrade cannot take advantage of
+   new additions to typing such as ``typing.Text`` or ``typing.Coroutine``.
+2. Are interested in using experimental additions to the ``typing`` module.
+
+Users of other Python versions should continue to install and
+use the ``typing`` module from PyPi instead of using this one unless
+specifically writing code that must be compatible with multiple Python
+versions or requires experimental types.
+
+Included items
+==============
+
+This module currently contains the following:
+
+All Python versions:
+--------------------
+
+- ``ClassVar``
+- ``Collection``
+- ``ContextManager``
+- ``Counter``
+- ``DefaultDict``
+- ``Deque``
+- ``NewType``
+- ``NoReturn``
+- ``overload`` (note that older versions of ``typing`` only let you use ``overload`` in stubs)
+- ``Text``
+- ``Type``
+- ``TYPE_CHECKING``
+
+Python 3.3+ only:
+-----------------
+
+- ``ChainMap``
+
+Python 3.5+ only:
+-----------------
+
+- ``AsyncIterable``
+- ``AsyncIterator``
+- ``AsyncContextManager``
+- ``Awaitable``
+- ``Coroutine``
+
+Python 3.6+ only:
+-----------------
+
+- ``AsyncGenerator``
+
+Other Notes and Limitations
+===========================
+
+There are a few types whose interface was modified between different
+versions of typing. For example, ``typing.Sequence`` was modified to
+subclass ``typing.Reversible`` as of Python 3.5.3.
+
+These changes are *not* backported to prevent subtle compatibility
+issues when mixing the differing implementations of modified classes.
+
+Running tests
+=============
+
+To run tests, navigate into the appropriate source directory and run
+``test_typing_extensions.py``. You will also need to install the latest
+version of ``typing`` if you are using a version of Python that does not
+include ``typing`` as a part of the standard library.
+
diff --git a/typing_extensions/setup.py b/typing_extensions/setup.py
index 3834c4a7..76b68fe0 100644
--- a/typing_extensions/setup.py
+++ b/typing_extensions/setup.py
@@ -10,21 +10,26 @@
     exit(1)
 
 version = '3.6.1'
-description = 'Type Hint backports for Python 3.5+'
+description = 'Backported and Experimental Type Hints for Python 3.5+'
 long_description = '''\
-Typing -- Type Hints for Python
+Typing Extensions -- Backported and Experimental Type Hints for Python
 
-This is a backport of the 'typing' module, which was provisionally added
-to the standard library in Python 3.5. The provisional status will be
-lifted in Python 3.7, but people who are using Python 3.5 - 3.6 and are
-unable to upgrade cannot take advantage of some new additions to the typing
-library, such as typing.Text or typing.Coroutine.
+This module contains both backports of changes made to the ``typing``
+module since Python 3.5.0 as well as experimental types that will be
+eventually added to the ``typing`` module once stabilized.
 
-This module allows those users to use the latest additions to the typing
-module without worrying about naming conflicts with the standard library.
-Users of Python 2.7, 3.3, and 3.4 should install the typing module
-from PyPi and use that directly, except when writing code that needs to
-be compatible across multiple versions of Python.
+This module is intended to be used by people who:
+
+1. Are using Python 3.5+ and cannot upgrade to newer versions of Python.
+   Since the ``typing`` module was (provisionally) added to the Python standard
+   library in 3.5, users who are unable to upgrade cannot take advantage of
+   new additions to typing such as ``typing.Text`` or ``typing.Coroutine``.
+2. Are interested in using experimental additions to the ``typing`` module.
+
+Users of other Python versions should continue to install and use
+use the ``typing`` module from PyPi instead of using this one unless
+specifically writing code that must be compatible with multiple Python
+versions or requires experimental types.
 '''

From eab214c19bb9f767b5e8e00c40c50eaac81806db Mon Sep 17 00:00:00 2001
From: Michael Lee
Date: Fri, 7 Jul 2017 21:58:45 -0700
Subject: [PATCH 21/23] Remove Collections backport

In retrospect, attempting to backport typing.Collection was a mistake.
It's difficult in practice to use it in a consistently meaningful way,
because the typing module deliberately does not make types like Sequence
or List subclass anything resembling the Collection type on older
versions of Python.

This means that the following code will not typecheck on Python 2 and
on older versions of Python 3:

    from typing_extensions import Collection

    def foo(x):
        # type: (Collection[int]) -> int
        return sum(x)

    foo([1, 2, 3])

...because `List` is not defined to be a subclass of `Collection` in
Python 2 and in Python 3 versions up to 3.5, either at runtime or
within the typing stubs in typeshed.

I think this is potentially repairable with some careful surgery on
the typing stubs, but doing that seems highly questionable to me, so
I'm thinking attempting to backport `Collection` is out-of-scope for
this pull request and should be handled later (or potentially never).
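As a concrete sketch of how this bites in practice (hypothetical file
and names; the error text is illustrative rather than verbatim mypy
output), the call below runs fine but is rejected statically:

    # demo.py -- assumes mypy with pre-3.6 typeshed stubs installed.
    from typing import List

    from typing_extensions import Collection

    def total(xs):
        # type: (Collection[int]) -> int
        return sum(xs)

    values = [1, 2, 3]  # type: List[int]

    # Executes without error, but the checker reports something like
    # "Argument 1 to 'total' has incompatible type List[int]", because
    # List is not registered as a subtype of the backported Collection.
    total(values)

The runtime behavior is unaffected; the gap exists purely in the
declared subtyping relationships, which is why backing the type out is
safer than patching around it.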
--- typing_extensions/README.rst | 1 - .../src_py2/test_typing_extensions.py | 22 +-------------- .../src_py2/typing_extensions.py | 22 --------------- .../src_py3/test_typing_extensions.py | 22 --------------- .../src_py3/typing_extensions.py | 28 ------------------- 5 files changed, 1 insertion(+), 94 deletions(-) diff --git a/typing_extensions/README.rst b/typing_extensions/README.rst index 15bbc3a7..ff89d24e 100644 --- a/typing_extensions/README.rst +++ b/typing_extensions/README.rst @@ -35,7 +35,6 @@ All Python versions: -------------------- - ``ClassVar`` -- ``Collection`` - ``ContextManager`` - ``Counter`` - ``DefaultDict`` diff --git a/typing_extensions/src_py2/test_typing_extensions.py b/typing_extensions/src_py2/test_typing_extensions.py index c8b3ed59..3f799c35 100644 --- a/typing_extensions/src_py2/test_typing_extensions.py +++ b/typing_extensions/src_py2/test_typing_extensions.py @@ -6,7 +6,7 @@ from unittest import TestCase, main, skipUnless from typing_extensions import NoReturn, ClassVar -from typing_extensions import Collection, ContextManager, Counter, Deque, DefaultDict +from typing_extensions import ContextManager, Counter, Deque, DefaultDict from typing_extensions import NewType, overload import typing import typing_extensions @@ -117,25 +117,6 @@ def test_no_isinstance(self): class CollectionsAbcTests(BaseTestCase): - def test_collection(self): - self.assertIsInstance(tuple(), Collection) - self.assertIsInstance(frozenset(), Collection) - self.assertIsSubclass(dict, Collection) - self.assertNotIsInstance(42, Collection) - - def test_collection_instantiation(self): - class MyAbstractCollection(Collection[int]): - pass - class MyCollection(Collection[int]): - def __contains__(self, item): pass - def __iter__(self): pass - def __len__(self): pass - - self.assertIsSubclass(type(MyCollection()), Collection) - self.assertIsSubclass(MyCollection, Collection) - with self.assertRaises(TypeError): - MyAbstractCollection() - def test_contextmanager(self): @contextlib.contextmanager def manager(): @@ -242,7 +223,6 @@ def test_typing_extensions_includes_standard(self): self.assertIn('overload', a) self.assertIn('Text', a) self.assertIn('TYPE_CHECKING', a) - self.assertIn('Collection', a) def test_typing_extensions_defers_when_possible(self): exclude = {'overload', 'Text', 'TYPE_CHECKING'} diff --git a/typing_extensions/src_py2/typing_extensions.py b/typing_extensions/src_py2/typing_extensions.py index 3ba9a651..29858ca5 100644 --- a/typing_extensions/src_py2/typing_extensions.py +++ b/typing_extensions/src_py2/typing_extensions.py @@ -14,7 +14,6 @@ 'Type', # Concrete collection types. 
- 'Collection', 'ContextManager', 'Counter', 'Deque', @@ -90,24 +89,3 @@ def __subclasshook__(cls, C): return True return NotImplemented - -# Backport collections.Collection -class _CollectionAbc(collections.Sized, - collections.Iterable, - collections.Container): - pass - - -_CollectionAbc.register(list) -_CollectionAbc.register(tuple) -_CollectionAbc.register(set) -_CollectionAbc.register(frozenset) -_CollectionAbc.register(basestring) -_CollectionAbc.register(dict) - - -class Collection(typing.Sized, - typing.Iterable[T_co], - typing.Container[T_co]): - __slots__ = () - __extra__ = _CollectionAbc diff --git a/typing_extensions/src_py3/test_typing_extensions.py b/typing_extensions/src_py3/test_typing_extensions.py index 5b7c31af..55baba54 100644 --- a/typing_extensions/src_py3/test_typing_extensions.py +++ b/typing_extensions/src_py3/test_typing_extensions.py @@ -371,26 +371,6 @@ def test_async_iterator(self): self.assertIsInstance(it, typing_extensions.AsyncIterator) self.assertNotIsInstance(42, typing_extensions.AsyncIterator) - def test_collection(self): - self.assertIsInstance(tuple(), typing_extensions.Collection) - self.assertIsInstance(frozenset(), typing_extensions.Collection) - self.assertIsSubclass(dict, typing_extensions.Collection) - self.assertNotIsInstance(42, typing_extensions.Collection) - - @skipUnless(TYPING_3_5_1, "Behavior added in typing 3.5.1+") - def test_collection_instantiation(self): - class MyCollection(typing_extensions.Collection[int]): - def __contains__(self, item): ... - def __iter__(self): ... - def __len__(self): ... - - self.assertIsSubclass( - type(MyCollection()), - typing_extensions.Collection) - self.assertIsSubclass( - MyCollection, - typing_extensions.Collection) - def test_deque(self): self.assertIsSubclass(collections.deque, typing_extensions.Deque) class MyDeque(typing_extensions.Deque[int]): ... @@ -630,8 +610,6 @@ def test_typing_extensions_includes_standard(self): if PY36: self.assertIn('AsyncGenerator', a) - if hasattr(collections_abc, 'Collection'): - self.assertIn('Collection', a) def test_typing_extensions_defers_when_possible(self): exclude = {'overload', 'Text', 'TYPE_CHECKING'} diff --git a/typing_extensions/src_py3/typing_extensions.py b/typing_extensions/src_py3/typing_extensions.py index 3d077202..6749402e 100644 --- a/typing_extensions/src_py3/typing_extensions.py +++ b/typing_extensions/src_py3/typing_extensions.py @@ -46,7 +46,6 @@ def _check_methods_in_mro(C, *methods): # 'ChainMap', # Concrete collection types. 
- 'Collection', 'ContextManager', 'Counter', 'Deque', @@ -391,33 +390,6 @@ class AsyncIterator(AsyncIterable[T_co], __slots__ = () -if hasattr(typing, 'Collection'): - Collection = typing.Collection -else: - __all__.append('Collection') - - # Backport collections.abc.Collections - class _CollectionAbc(collections.Sized, - collections.Iterable, - collections.Container): - __slots__ = () - - @classmethod - def __subclasshook__(cls, C): - if cls is _CollectionAbc: - return _check_methods_in_mro( - C, "__len__", "__iter__", "__contains__") - return NotImplemented - - extra = getattr(collections_abc, 'Collection', _CollectionAbc) - - class Collection(typing.Sized, - typing.Iterable[T_co], - typing.Container[T_co], - extra=extra): - __slots__ = () - - if hasattr(typing, 'Deque'): Deque = typing.Deque else: From 11c0fcfb9a193670b03fbc02b26923f67f06677d Mon Sep 17 00:00:00 2001 From: Michael Lee Date: Mon, 17 Jul 2017 08:35:13 -0700 Subject: [PATCH 22/23] Incorporate speed improvements for typing Incorporates change made by https://github.com/python/typing/pull/439, which was recently merged. --- .../src_py3/typing_extensions.py | 94 ++++++++++++------- 1 file changed, 61 insertions(+), 33 deletions(-) diff --git a/typing_extensions/src_py3/typing_extensions.py b/typing_extensions/src_py3/typing_extensions.py index 6749402e..82ce35a5 100644 --- a/typing_extensions/src_py3/typing_extensions.py +++ b/typing_extensions/src_py3/typing_extensions.py @@ -11,6 +11,14 @@ def _generic_new(base_cls, cls, *args, **kwargs): return base_cls.__new__(cls, *args, **kwargs) +# See https://github.com/python/typing/pull/439 +if hasattr(typing, '_geqv'): + from typing import _geqv + _geqv_defined = True +else: + _geqv = None + _geqv_defined = False + if sys.version_info[:2] >= (3, 6): import _collections_abc _check_methods_in_mro = _collections_abc._check_methods @@ -122,29 +130,6 @@ def stop() -> NoReturn: T_contra = typing.TypeVar('T_contra', contravariant=True) # Ditto contravariant. -def _gorg(a): - """Return the farthest origin of a generic class (internal helper).""" - assert isinstance(a, typing.GenericMeta) - while a.__origin__ is not None: - a = a.__origin__ - return a - - -def _geqv(a, b): - """Return whether two generic classes are equivalent (internal helper). - - The intention is to consider generic class X and any of its - parameterized forms (X[T], X[int], etc.) as equivalent. - - However, X is not equivalent to a subclass of X. - - The relation is reflexive, symmetric and transitive. - """ - assert isinstance(a, typing.GenericMeta) and isinstance(b, typing.GenericMeta) - # Reduce each to its origin. 
@@ -122,29 +130,6 @@ def stop() -> NoReturn:
 T_contra = typing.TypeVar('T_contra', contravariant=True)  # Ditto contravariant.
 
 
-def _gorg(a):
-    """Return the farthest origin of a generic class (internal helper)."""
-    assert isinstance(a, typing.GenericMeta)
-    while a.__origin__ is not None:
-        a = a.__origin__
-    return a
-
-
-def _geqv(a, b):
-    """Return whether two generic classes are equivalent (internal helper).
-
-    The intention is to consider generic class X and any of its
-    parameterized forms (X[T], X[int], etc.) as equivalent.
-
-    However, X is not equivalent to a subclass of X.
-
-    The relation is reflexive, symmetric and transitive.
-    """
-    assert isinstance(a, typing.GenericMeta) and isinstance(b, typing.GenericMeta)
-    # Reduce each to its origin.
-    return _gorg(a) is _gorg(b)
-
-
 if hasattr(typing, 'ClassVar'):
     ClassVar = typing.ClassVar
 elif hasattr(typing, '_FinalTypingBase'):
@@ -392,7 +377,7 @@ class AsyncIterator(AsyncIterable[T_co],
 
 if hasattr(typing, 'Deque'):
     Deque = typing.Deque
-else:
+elif _geqv_defined:
     class Deque(collections.deque, typing.MutableSequence[T],
                 extra=collections.deque):
         __slots__ = ()
@@ -401,6 +386,15 @@ def __new__(cls, *args, **kwds):
             if _geqv(cls, Deque):
                 return collections.deque(*args, **kwds)
             return _generic_new(collections.deque, cls, *args, **kwds)
+else:
+    class Deque(collections.deque, typing.MutableSequence[T],
+                extra=collections.deque):
+        __slots__ = ()
+
+        def __new__(cls, *args, **kwds):
+            if cls._gorg is Deque:
+                return collections.deque(*args, **kwds)
+            return _generic_new(collections.deque, cls, *args, **kwds)
 
 
 if hasattr(typing, 'ContextManager'):
@@ -467,7 +461,7 @@ def __subclasshook__(cls, C):
 
 if hasattr(typing, 'DefaultDict'):
     DefaultDict = typing.DefaultDict
-else:
+elif _geqv_defined:
     class DefaultDict(collections.defaultdict,
                       typing.MutableMapping[KT, VT],
                       extra=collections.defaultdict):
@@ -477,11 +471,22 @@ def __new__(cls, *args, **kwds):
             if _geqv(cls, DefaultDict):
                 return collections.defaultdict(*args, **kwds)
             return _generic_new(collections.defaultdict, cls, *args, **kwds)
+else:
+    class DefaultDict(collections.defaultdict, typing.MutableMapping[KT, VT],
+                      extra=collections.defaultdict):
+
+        __slots__ = ()
+
+        def __new__(cls, *args, **kwds):
+            if cls._gorg is DefaultDict:
+                return collections.defaultdict(*args, **kwds)
+            return _generic_new(collections.defaultdict, cls, *args, **kwds)
 
 
 if hasattr(typing, 'Counter'):
     Counter = typing.Counter
 elif (3, 5, 0) <= sys.version_info[:3] <= (3, 5, 1):
+    assert _geqv_defined
     _TInt = typing.TypeVar('_TInt')
 
     class CounterMeta(typing.GenericMeta):
@@ -501,7 +506,7 @@ def __new__(cls, *args, **kwds):
                 return collections.Counter(*args, **kwds)
             return _generic_new(collections.Counter, cls, *args, **kwds)
 
-else:
+elif _geqv_defined:
     class Counter(collections.Counter,
                   typing.Dict[T, int],
                   extra=collections.Counter):
@@ -513,21 +518,44 @@ def __new__(cls, *args, **kwds):
                 return collections.Counter(*args, **kwds)
             return _generic_new(collections.Counter, cls, *args, **kwds)
 
+else:
+    class Counter(collections.Counter,
+                  typing.Dict[T, int],
+                  extra=collections.Counter):
+
+        __slots__ = ()
+
+        def __new__(cls, *args, **kwds):
+            if cls._gorg is Counter:
+                return collections.Counter(*args, **kwds)
+            return _generic_new(collections.Counter, cls, *args, **kwds)
+
 
 if hasattr(typing, 'ChainMap'):
     ChainMap = typing.ChainMap
     __all__.append('ChainMap')
 elif hasattr(collections, 'ChainMap'):
     # ChainMap only exists in 3.3+
-    class ChainMap(collections.ChainMap, typing.MutableMapping[KT, VT],
-                   extra=collections.ChainMap):
+    if _geqv_defined:
+        class ChainMap(collections.ChainMap, typing.MutableMapping[KT, VT],
+                       extra=collections.ChainMap):
 
-        __slots__ = ()
+            __slots__ = ()
 
-        def __new__(cls, *args, **kwds):
-            if _geqv(cls, ChainMap):
-                return collections.ChainMap(*args, **kwds)
-            return _generic_new(collections.ChainMap, cls, *args, **kwds)
+            def __new__(cls, *args, **kwds):
+                if _geqv(cls, ChainMap):
+                    return collections.ChainMap(*args, **kwds)
+                return _generic_new(collections.ChainMap, cls, *args, **kwds)
+    else:
+        class ChainMap(collections.ChainMap, typing.MutableMapping[KT, VT],
+                       extra=collections.ChainMap):
+
+            __slots__ = ()
+
+            def __new__(cls, *args, **kwds):
+                if cls._gorg is ChainMap:
+                    return collections.ChainMap(*args, **kwds)
+                return _generic_new(collections.ChainMap, cls, *args, **kwds)
 
     __all__.append('ChainMap')
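Both branches of each definition above preserve the same observable behavior, which the package's tests pin down. A rough sketch of that behavior for Deque (assuming a typing/typing_extensions combination where Deque is available):

    import collections
    import typing_extensions

    # Instantiating the (possibly parameterized) generic itself returns a
    # plain collections.deque rather than an instance of the generic class.
    d = typing_extensions.Deque[int]()
    assert type(d) is collections.deque

    # A user-defined subclass, by contrast, is instantiated normally.
    class MyDeque(typing_extensions.Deque[int]):
        pass

    assert type(MyDeque()) is MyDeque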
From 76cc99d76b337b29ec42a2a2477e3df5cf0a5a5f Mon Sep 17 00:00:00 2001
From: Michael Lee
Date: Wed, 19 Jul 2017 23:18:07 -0700
Subject: [PATCH 23/23] Respond to 2nd review feedback

- Reworded README/description in setup.py to include a note about
  typing's provisional status ending in Python 3.7.
- Made the README/description in setup.py a little shorter in general.
- Added some explanatory comments.
- Added a test to check that Counter.__bases__ is erased in Python 2.7.
---
 typing_extensions/README.rst              | 20 +++++++++----------
 typing_extensions/setup.py                | 18 ++++++++---------
 .../src_py2/test_typing_extensions.py     |  1 +
 .../src_py3/test_typing_extensions.py     |  8 ++++++++
 .../src_py3/typing_extensions.py          | 13 ++++++++++++
 5 files changed, 39 insertions(+), 21 deletions(-)

diff --git a/typing_extensions/README.rst b/typing_extensions/README.rst
index ff89d24e..9090402e 100644
--- a/typing_extensions/README.rst
+++ b/typing_extensions/README.rst
@@ -9,17 +9,15 @@ Typing Extensions
 Overview
 ========
 
-The ``typing_extensions`` module contains both backports of changes made to
-the ``typing`` module since Python 3.5.0 as well as experimental types that
-will be eventually added to the ``typing`` module once stabilized.
-
-This module is intended to be used by people who:
-
-1. Are using Python 3.5+ and cannot upgrade to newer versions of Python.
-   Since the ``typing`` module was (provisionally) added to the Python standard
-   library in 3.5, users who are unable to upgrade cannot take advantage of
-   new additions to typing such as ``typing.Text`` or ``typing.Coroutine``.
-2. Are interested in using experimental additions to the ``typing`` module.
+The ``typing`` module was added to the standard library in Python 3.5 on
+a provisional basis and will no longer be provisional in Python 3.7. However,
+this means users of Python 3.5 - 3.6 who are unable to upgrade will not be
+able to take advantage of new types added to the ``typing`` module, such as
+``typing.Text`` or ``typing.Coroutine``.
+
+The ``typing_extensions`` module contains both backports of these changes
+and experimental types that will eventually be added to the ``typing``
+module.
 
 Users of other Python versions should continue to install and use
 the ``typing`` module from PyPI instead of using this one unless
diff --git a/typing_extensions/setup.py b/typing_extensions/setup.py
index 76b68fe0..2e9f1dbd 100644
--- a/typing_extensions/setup.py
+++ b/typing_extensions/setup.py
@@ -14,17 +14,15 @@
 long_description = '''\
 Typing Extensions -- Backported and Experimental Type Hints for Python
 
-This module contains both backports of changes made to the ``typing``
-module since Python 3.5.0 as well as experimental types that will be
-eventually added to the ``typing`` module once stabilized.
+The ``typing`` module was added to the standard library in Python 3.5 on
+a provisional basis and will no longer be provisional in Python 3.7. However,
+this means users of Python 3.5 - 3.6 who are unable to upgrade will not be
+able to take advantage of new types added to the ``typing`` module, such as
+``typing.Text`` or ``typing.Coroutine``.
 
-This module is intended to be used by people who:
-
-1. Are using Python 3.5+ and cannot upgrade to newer versions of Python.
-   Since the ``typing`` module was (provisionally) added to the Python standard
-   library in 3.5, users who are unable to upgrade cannot take advantage of
-   new additions to typing such as ``typing.Text`` or ``typing.Coroutine``.
-2. Are interested in using experimental additions to the ``typing`` module.
+The ``typing_extensions`` module contains both backports of these changes
+and experimental types that will eventually be added to the ``typing``
+module.
 
 Users of other Python versions should continue to install and use
 the ``typing`` module from PyPI instead of using this one unless
diff --git a/typing_extensions/src_py2/test_typing_extensions.py b/typing_extensions/src_py2/test_typing_extensions.py
index 3f799c35..1597d47f 100644
--- a/typing_extensions/src_py2/test_typing_extensions.py
+++ b/typing_extensions/src_py2/test_typing_extensions.py
@@ -144,6 +144,7 @@ class B(Counter[T]): pass
         self.assertIsInstance(A(), collections.Counter)
         self.assertIs(type(B[int]()), B)
+        self.assertEqual(B.__bases__, (typing_extensions.Counter,))
 
     def test_deque(self):
         self.assertIsSubclass(collections.deque, Deque)
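The new assertion documents the erasure behavior of the Python 2 backport. Roughly, the guarantee is the following (a Python 2 sketch mirroring the test; B and T are the test's own names):

    # Runs under the src_py2 typing_extensions module. Parameterizing the
    # generic base is erased at class-creation time, so the subclass's
    # __bases__ names only the unparameterized backport class.
    import typing_extensions
    from typing import TypeVar

    T = TypeVar('T')

    class B(typing_extensions.Counter[T]):
        pass

    assert B.__bases__ == (typing_extensions.Counter,)
    # Instantiating a parameterized form of the subclass still yields B:
    assert type(B[int]()) is B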
diff --git a/typing_extensions/src_py3/test_typing_extensions.py b/typing_extensions/src_py3/test_typing_extensions.py
index 55baba54..4e6f724f 100644
--- a/typing_extensions/src_py3/test_typing_extensions.py
+++ b/typing_extensions/src_py3/test_typing_extensions.py
@@ -15,7 +15,15 @@ import typing_extensions
 import collections.abc as collections_abc
 
+# We assume Python versions *below* 3.5.0 will have the most
+# up-to-date version of the typing module installed. Since
+# the typing module isn't a part of the standard library in older
+# versions of Python, those users are likely to have a reasonably
+# modern version of `typing` installed from PyPI.
 TYPING_LATEST = sys.version_info[:3] < (3, 5, 0)
+
+# Flags used to mark tests that apply only at or after a specific
+# version of the typing module.
 TYPING_3_5_1 = TYPING_LATEST or sys.version_info[:3] >= (3, 5, 1)
 TYPING_3_5_3 = TYPING_LATEST or sys.version_info[:3] >= (3, 5, 3)
 TYPING_3_6_1 = TYPING_LATEST or sys.version_info[:3] >= (3, 6, 1)
 
diff --git a/typing_extensions/src_py3/typing_extensions.py b/typing_extensions/src_py3/typing_extensions.py
index 82ce35a5..53e2def2 100644
--- a/typing_extensions/src_py3/typing_extensions.py
+++ b/typing_extensions/src_py3/typing_extensions.py
@@ -8,6 +8,19 @@
 
 if hasattr(typing, '_generic_new'):
     _generic_new = typing._generic_new
 else:
+    # Note: The '_generic_new(...)' function is used as a part of the
+    # process of creating a generic type and was added to the typing module
+    # as of Python 3.5.3.
+    #
+    # We've defined '_generic_new(...)' below to exactly match the behavior
+    # implemented in older versions of 'typing' bundled with Python 3.5.0 to
+    # 3.5.2. This helps eliminate redundancy when defining collection types
+    # like 'Deque' later.
+    #
+    # See https://github.com/python/typing/pull/308 for more details -- in
+    # particular, compare and contrast the definition of types like
+    # 'typing.List' before and after the merge.
+
     def _generic_new(base_cls, cls, *args, **kwargs):
         return base_cls.__new__(cls, *args, **kwargs)
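To make the fallback's role concrete, here is a standalone sketch (MyDeque is a hypothetical subclass, not part of the patch):

    import collections

    def _generic_new(base_cls, cls, *args, **kwargs):
        return base_cls.__new__(cls, *args, **kwargs)

    class MyDeque(collections.deque):
        pass

    # The instance is allocated by the concrete base class's __new__, but
    # its type is the user's subclass; when this helper is called from a
    # __new__ method such as Deque.__new__ above, __init__ then runs as
    # usual to populate the instance.
    d = _generic_new(collections.deque, MyDeque)
    assert type(d) is MyDeque
    assert isinstance(d, collections.deque)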