ZTWHHH committed on
Commit
accc4fe
·
verified ·
1 Parent(s): 5952604

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .gitattributes +2 -0
  2. evalkit_llava/bin/lzcat +3 -0
  3. evalkit_llava/lib/libtinfow.a +3 -0
  4. evalkit_llava/lib/python3.10/encodings/__pycache__/cp874.cpython-310.pyc +0 -0
  5. evalkit_llava/lib/python3.10/importlib/_abc.py +54 -0
  6. evalkit_llava/lib/python3.10/importlib/_adapters.py +83 -0
  7. evalkit_llava/lib/python3.10/importlib/readers.py +123 -0
  8. evalkit_llava/lib/python3.10/logging/__init__.py +2261 -0
  9. evalkit_llava/lib/python3.10/logging/__pycache__/__init__.cpython-310.pyc +0 -0
  10. evalkit_llava/lib/python3.10/logging/__pycache__/config.cpython-310.pyc +0 -0
  11. evalkit_llava/lib/python3.10/logging/__pycache__/handlers.cpython-310.pyc +0 -0
  12. evalkit_llava/lib/python3.10/site-packages/pip/_internal/locations/__init__.py +456 -0
  13. evalkit_llava/lib/python3.10/site-packages/pip/_internal/locations/__pycache__/__init__.cpython-310.pyc +0 -0
  14. evalkit_llava/lib/python3.10/site-packages/pip/_internal/locations/__pycache__/_distutils.cpython-310.pyc +0 -0
  15. evalkit_llava/lib/python3.10/site-packages/pip/_internal/locations/__pycache__/_sysconfig.cpython-310.pyc +0 -0
  16. evalkit_llava/lib/python3.10/site-packages/pip/_internal/locations/__pycache__/base.cpython-310.pyc +0 -0
  17. evalkit_llava/lib/python3.10/site-packages/pip/_internal/locations/_distutils.py +172 -0
  18. evalkit_llava/lib/python3.10/site-packages/pip/_internal/locations/_sysconfig.py +214 -0
  19. evalkit_llava/lib/python3.10/site-packages/pip/_internal/locations/base.py +81 -0
  20. evalkit_llava/lib/python3.10/site-packages/pip/_internal/models/__pycache__/__init__.cpython-310.pyc +0 -0
  21. evalkit_llava/lib/python3.10/site-packages/pip/_internal/models/__pycache__/candidate.cpython-310.pyc +0 -0
  22. evalkit_llava/lib/python3.10/site-packages/pip/_internal/models/__pycache__/format_control.cpython-310.pyc +0 -0
  23. evalkit_llava/lib/python3.10/site-packages/pip/_internal/models/__pycache__/index.cpython-310.pyc +0 -0
  24. evalkit_llava/lib/python3.10/site-packages/pip/_internal/models/__pycache__/installation_report.cpython-310.pyc +0 -0
  25. evalkit_llava/lib/python3.10/site-packages/pip/_internal/models/__pycache__/target_python.cpython-310.pyc +0 -0
  26. evalkit_llava/lib/python3.10/site-packages/pip/_internal/utils/__init__.py +0 -0
  27. evalkit_llava/lib/python3.10/site-packages/pip/_internal/utils/_jaraco_text.py +109 -0
  28. evalkit_llava/lib/python3.10/site-packages/pip/_internal/utils/_log.py +38 -0
  29. evalkit_llava/lib/python3.10/site-packages/pip/_internal/utils/appdirs.py +52 -0
  30. evalkit_llava/lib/python3.10/site-packages/pip/_internal/utils/compat.py +79 -0
  31. evalkit_llava/lib/python3.10/site-packages/pip/_internal/utils/datetime.py +11 -0
  32. evalkit_llava/lib/python3.10/site-packages/pip/_internal/utils/deprecation.py +124 -0
  33. evalkit_llava/lib/python3.10/site-packages/pip/_internal/utils/hashes.py +147 -0
  34. evalkit_llava/lib/python3.10/site-packages/pip/_internal/utils/packaging.py +58 -0
  35. evalkit_llava/lib/python3.10/site-packages/pip/_internal/utils/retry.py +42 -0
  36. evalkit_llava/lib/python3.10/site-packages/pip/_internal/utils/setuptools_build.py +146 -0
  37. evalkit_llava/lib/python3.10/turtledemo/__pycache__/chaos.cpython-310.pyc +0 -0
  38. evalkit_llava/lib/python3.10/turtledemo/__pycache__/colormixer.cpython-310.pyc +0 -0
  39. evalkit_llava/lib/python3.10/turtledemo/__pycache__/forest.cpython-310.pyc +0 -0
  40. evalkit_llava/lib/python3.10/turtledemo/__pycache__/fractalcurves.cpython-310.pyc +0 -0
  41. evalkit_llava/lib/python3.10/turtledemo/__pycache__/lindenmayer.cpython-310.pyc +0 -0
  42. evalkit_llava/lib/python3.10/turtledemo/__pycache__/minimal_hanoi.cpython-310.pyc +0 -0
  43. evalkit_llava/lib/python3.10/turtledemo/__pycache__/nim.cpython-310.pyc +0 -0
  44. evalkit_llava/lib/python3.10/turtledemo/__pycache__/paint.cpython-310.pyc +0 -0
  45. evalkit_llava/lib/python3.10/turtledemo/__pycache__/penrose.cpython-310.pyc +0 -0
  46. evalkit_llava/lib/python3.10/turtledemo/__pycache__/planet_and_moon.cpython-310.pyc +0 -0
  47. evalkit_llava/lib/python3.10/turtledemo/__pycache__/rosette.cpython-310.pyc +0 -0
  48. evalkit_llava/lib/python3.10/turtledemo/__pycache__/sorting_animate.cpython-310.pyc +0 -0
  49. evalkit_llava/lib/python3.10/turtledemo/__pycache__/tree.cpython-310.pyc +0 -0
  50. evalkit_llava/lib/python3.10/turtledemo/__pycache__/two_canvases.cpython-310.pyc +0 -0
.gitattributes CHANGED
@@ -61,3 +61,5 @@ evalkit_llava/bin/bzip2 filter=lfs diff=lfs merge=lfs -text
61
  evalkit_llava/bin/xz filter=lfs diff=lfs merge=lfs -text
62
  evalkit_llava/lib/libncurses.so filter=lfs diff=lfs merge=lfs -text
63
  evalkit_llava/lib/libatomic.so filter=lfs diff=lfs merge=lfs -text
 
 
 
61
  evalkit_llava/bin/xz filter=lfs diff=lfs merge=lfs -text
62
  evalkit_llava/lib/libncurses.so filter=lfs diff=lfs merge=lfs -text
63
  evalkit_llava/lib/libatomic.so filter=lfs diff=lfs merge=lfs -text
64
+ evalkit_llava/bin/lzcat filter=lfs diff=lfs merge=lfs -text
65
+ evalkit_llava/lib/libtinfow.a filter=lfs diff=lfs merge=lfs -text
evalkit_llava/bin/lzcat ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:5cc86d36933372b94af4bd9ed22ad711f57b4e16175675627edcd4cb9ea46a61
3
+ size 108336
evalkit_llava/lib/libtinfow.a ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0fbc52e45ca37cfc852e5dad032291d0008db33fe9f61795c2e7c1e2d0ece7b3
3
+ size 489850
evalkit_llava/lib/python3.10/encodings/__pycache__/cp874.cpython-310.pyc ADDED
Binary file (2.73 kB). View file
 
evalkit_llava/lib/python3.10/importlib/_abc.py ADDED
@@ -0,0 +1,54 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Subset of importlib.abc used to reduce importlib.util imports."""
2
+ from . import _bootstrap
3
+ import abc
4
+ import warnings
5
+
6
+
7
class Loader(metaclass=abc.ABCMeta):

    """Abstract base class for import loaders."""

    def create_module(self, spec):
        """Return a module to initialize and into which to load.

        Raise ImportError when the module cannot be created.  Returning
        None tells the import machinery to create the module from the
        spec using the default semantics.
        """
        # Defer to the import system's default module creation.
        return None

    # exec_module() is deliberately NOT defined on this base class:
    # backward-compatibility code does hasattr(loader, 'exec_module')
    # checks and must keep seeing it as absent here.

    def load_module(self, fullname):
        """Return the loaded module (deprecated API).

        The module must be added to sys.modules and have import-related
        attributes set properly.  The fullname is a str.  ImportError is
        raised on failure.

        Deprecated in favor of loader.exec_module(); when exec_module()
        exists it is used to provide backwards-compatible behavior.
        """
        if hasattr(self, 'exec_module'):
            # The deprecation warning is emitted in _load_module_shim().
            return _bootstrap._load_module_shim(self, fullname)
        raise ImportError

    def module_repr(self, module):
        """Return a module's repr (deprecated API).

        Used by the module type when the method does not raise
        NotImplementedError.
        """
        warnings.warn("importlib.abc.Loader.module_repr() is deprecated and "
                      "slated for removal in Python 3.12", DeprecationWarning)
        # Raising here makes ModuleType.__repr__ ignore this method.
        raise NotImplementedError
evalkit_llava/lib/python3.10/importlib/_adapters.py ADDED
@@ -0,0 +1,83 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from contextlib import suppress
2
+
3
+ from . import abc
4
+
5
+
6
class SpecLoaderAdapter:
    """
    Wrap a module spec so its loader can be replaced via *adapter*
    while every other spec attribute is proxied through unchanged.
    """

    def __init__(self, spec, adapter=lambda spec: spec.loader):
        # Resolve the (possibly substituted) loader up front.
        self.loader = adapter(spec)
        self.spec = spec

    def __getattr__(self, name):
        # Anything not set on the adapter is looked up on the real spec.
        return getattr(self.spec, name)
17
+
18
+
19
class TraversableResourcesLoader:
    """
    Adapt a loader to provide TraversableResources.
    """

    def __init__(self, spec):
        # Keep the spec; the real loader is reached through it lazily.
        self.spec = spec

    def get_resource_reader(self, name):
        # DegenerateFiles._native() returns the spec's own reader when it
        # supports files(), otherwise a degenerate (empty) substitute.
        return DegenerateFiles(self.spec)._native()
29
+
30
+
31
class DegenerateFiles:
    """
    Adapter for an existing or non-existent resource reader
    to provide a degenerate .files().
    """

    class Path(abc.Traversable):
        # An always-empty, unopenable traversable: the stand-in used when
        # no real resource container exists for a module.
        def iterdir(self):
            return iter(())

        def is_dir(self):
            return False

        is_file = exists = is_dir  # type: ignore

        def joinpath(self, other):
            # Any child of a degenerate path is itself degenerate.
            return DegenerateFiles.Path()

        @property
        def name(self):
            return ''

        def open(self, mode='rb', *args, **kwargs):
            # Degenerate paths hold no data; opening them is an error.
            raise ValueError()

    def __init__(self, spec):
        self.spec = spec

    @property
    def _reader(self):
        # Yields None (falling out of the suppress block) when the loader
        # has no get_resource_reader attribute.
        with suppress(AttributeError):
            return self.spec.loader.get_resource_reader(self.spec.name)

    def _native(self):
        """
        Return the native reader if it supports files().
        """
        reader = self._reader
        return reader if hasattr(reader, 'files') else self

    def __getattr__(self, attr):
        # Everything not defined here is delegated to the native reader.
        return getattr(self._reader, attr)

    def files(self):
        return DegenerateFiles.Path()
76
+
77
+
78
def wrap_spec(package):
    """
    Construct a package spec with traversable compatibility
    on the spec/loader/reader.
    """
    spec = package.__spec__
    return SpecLoaderAdapter(spec, TraversableResourcesLoader)
evalkit_llava/lib/python3.10/importlib/readers.py ADDED
@@ -0,0 +1,123 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import collections
2
+ import zipfile
3
+ import pathlib
4
+ from . import abc
5
+
6
+
7
def remove_duplicates(items):
    """Yield *items* in order, keeping only the first occurrence of each."""
    # Plain dicts preserve insertion order (Python 3.7+), so fromkeys
    # performs an order-preserving dedup.
    return iter(dict.fromkeys(items))
9
+
10
+
11
class FileReader(abc.TraversableResources):
    """Resource reader backed by the directory containing a module file."""

    def __init__(self, loader):
        # Resources live alongside the module, i.e. in its parent dir.
        self.path = pathlib.Path(loader.path).parent

    def resource_path(self, resource):
        """
        Return the file system path to prevent
        `resources.path()` from creating a temporary
        copy.
        """
        return str(self.path / resource)

    def files(self):
        return self.path
25
+
26
+
27
class ZipReader(abc.TraversableResources):
    # Resource reader for packages imported from a zip archive.
    def __init__(self, loader, module):
        # Only the final component of a dotted module name contributes to
        # the in-archive directory prefix.
        _, _, name = module.rpartition('.')
        self.prefix = loader.prefix.replace('\\', '/') + name + '/'
        self.archive = loader.archive

    def open_resource(self, resource):
        try:
            return super().open_resource(resource)
        except KeyError as exc:
            # zipfile raises KeyError for missing members; translate to
            # the FileNotFoundError promised by the resources API.
            raise FileNotFoundError(exc.args[0])

    def is_resource(self, path):
        # workaround for `zipfile.Path.is_file` returning true
        # for non-existent paths.
        target = self.files().joinpath(path)
        return target.is_file() and target.exists()

    def files(self):
        return zipfile.Path(self.archive, self.prefix)
47
+
48
+
49
class MultiplexedPath(abc.Traversable):
    """
    Given a series of Traversable objects, implement a merged
    version of the interface across all objects. Useful for
    namespace packages which may be multihomed at a single
    name.
    """

    def __init__(self, *paths):
        # Deduplicate while preserving order; earlier paths win lookups.
        self._paths = list(map(pathlib.Path, remove_duplicates(paths)))
        if not self._paths:
            message = 'MultiplexedPath must contain at least one path'
            raise FileNotFoundError(message)
        if not all(path.is_dir() for path in self._paths):
            raise NotADirectoryError('MultiplexedPath only supports directories')

    def iterdir(self):
        # Yield each child name at most once; the first path that
        # contains a given name shadows later ones.
        visited = []
        for path in self._paths:
            for file in path.iterdir():
                if file.name in visited:
                    continue
                visited.append(file.name)
                yield file

    def read_bytes(self):
        raise FileNotFoundError(f'{self} is not a file')

    def read_text(self, *args, **kwargs):
        raise FileNotFoundError(f'{self} is not a file')

    def is_dir(self):
        # Always a directory: __init__ rejects non-directory members.
        return True

    def is_file(self):
        return False

    def joinpath(self, child):
        # first try to find child in current paths
        for file in self.iterdir():
            if file.name == child:
                return file
        # if it does not exist, construct it with the first path
        return self._paths[0] / child

    __truediv__ = joinpath

    def open(self, *args, **kwargs):
        raise FileNotFoundError(f'{self} is not a file')

    @property
    def name(self):
        return self._paths[0].name

    def __repr__(self):
        paths = ', '.join(f"'{path}'" for path in self._paths)
        return f'MultiplexedPath({paths})'
106
+
107
+
108
class NamespaceReader(abc.TraversableResources):
    # Resource reader for namespace packages (PEP 420), merging the
    # package's multiple directories via MultiplexedPath.
    def __init__(self, namespace_path):
        # Cheap sanity check that we were handed a _NamespacePath-like
        # object rather than an ordinary path.
        if 'NamespacePath' not in str(namespace_path):
            raise ValueError('Invalid path')
        self.path = MultiplexedPath(*list(namespace_path))

    def resource_path(self, resource):
        """
        Return the file system path to prevent
        `resources.path()` from creating a temporary
        copy.
        """
        return str(self.path.joinpath(resource))

    def files(self):
        return self.path
evalkit_llava/lib/python3.10/logging/__init__.py ADDED
@@ -0,0 +1,2261 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2001-2019 by Vinay Sajip. All Rights Reserved.
2
+ #
3
+ # Permission to use, copy, modify, and distribute this software and its
4
+ # documentation for any purpose and without fee is hereby granted,
5
+ # provided that the above copyright notice appear in all copies and that
6
+ # both that copyright notice and this permission notice appear in
7
+ # supporting documentation, and that the name of Vinay Sajip
8
+ # not be used in advertising or publicity pertaining to distribution
9
+ # of the software without specific, written prior permission.
10
+ # VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
11
+ # ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
12
+ # VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR
13
+ # ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
14
+ # IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
15
+ # OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
16
+
17
+ """
18
+ Logging package for Python. Based on PEP 282 and comments thereto in
19
+ comp.lang.python.
20
+
21
+ Copyright (C) 2001-2019 Vinay Sajip. All Rights Reserved.
22
+
23
+ To use, simply 'import logging' and log away!
24
+ """
25
+
26
+ import sys, os, time, io, re, traceback, warnings, weakref, collections.abc
27
+
28
+ from string import Template
29
+ from string import Formatter as StrFormatter
30
+
31
+
32
+ __all__ = ['BASIC_FORMAT', 'BufferingFormatter', 'CRITICAL', 'DEBUG', 'ERROR',
33
+ 'FATAL', 'FileHandler', 'Filter', 'Formatter', 'Handler', 'INFO',
34
+ 'LogRecord', 'Logger', 'LoggerAdapter', 'NOTSET', 'NullHandler',
35
+ 'StreamHandler', 'WARN', 'WARNING', 'addLevelName', 'basicConfig',
36
+ 'captureWarnings', 'critical', 'debug', 'disable', 'error',
37
+ 'exception', 'fatal', 'getLevelName', 'getLogger', 'getLoggerClass',
38
+ 'info', 'log', 'makeLogRecord', 'setLoggerClass', 'shutdown',
39
+ 'warn', 'warning', 'getLogRecordFactory', 'setLogRecordFactory',
40
+ 'lastResort', 'raiseExceptions']
41
+
42
+ import threading
43
+
44
+ __author__ = "Vinay Sajip <vinay_sajip@red-dove.com>"
45
+ __status__ = "production"
46
+ # The following module attributes are no longer updated.
47
+ __version__ = "0.5.1.2"
48
+ __date__ = "07 February 2010"
49
+
50
+ #---------------------------------------------------------------------------
51
+ # Miscellaneous module data
52
+ #---------------------------------------------------------------------------
53
+
54
+ #
55
+ #_startTime is used as the base when calculating the relative time of events
56
+ #
57
+ _startTime = time.time()
58
+
59
+ #
60
+ #raiseExceptions is used to see if exceptions during handling should be
61
+ #propagated
62
+ #
63
+ raiseExceptions = True
64
+
65
+ #
66
+ # If you don't want threading information in the log, set this to zero
67
+ #
68
+ logThreads = True
69
+
70
+ #
71
+ # If you don't want multiprocessing information in the log, set this to zero
72
+ #
73
+ logMultiprocessing = True
74
+
75
+ #
76
+ # If you don't want process information in the log, set this to zero
77
+ #
78
+ logProcesses = True
79
+
80
+ #---------------------------------------------------------------------------
81
+ # Level related stuff
82
+ #---------------------------------------------------------------------------
83
+ #
84
+ # Default levels and level names, these can be replaced with any positive set
85
+ # of values having corresponding names. There is a pseudo-level, NOTSET, which
86
+ # is only really there as a lower limit for user-defined levels. Handlers and
87
+ # loggers are initialized with NOTSET so that they will log all messages, even
88
+ # at user-defined levels.
89
+ #
90
+
91
# Numeric severity values: higher means more severe.  FATAL and WARN are
# retained purely as backward-compatible aliases.
CRITICAL = 50
FATAL = CRITICAL
ERROR = 40
WARNING = 30
WARN = WARNING
INFO = 20
DEBUG = 10
NOTSET = 0

# Forward map (number -> name) used when formatting records.  The aliases
# (FATAL, WARN) are deliberately absent so canonical names get rendered.
_levelToName = {
    CRITICAL: 'CRITICAL',
    ERROR: 'ERROR',
    WARNING: 'WARNING',
    INFO: 'INFO',
    DEBUG: 'DEBUG',
    NOTSET: 'NOTSET',
}
# Reverse map (name -> number); accepts the aliases too, for use by
# getLevelName() and _checkLevel().
_nameToLevel = {
    'CRITICAL': CRITICAL,
    'FATAL': FATAL,
    'ERROR': ERROR,
    'WARN': WARNING,
    'WARNING': WARNING,
    'INFO': INFO,
    'DEBUG': DEBUG,
    'NOTSET': NOTSET,
}
118
+
119
def getLevelName(level):
    """
    Return the textual or numeric representation of logging level 'level'.

    A numeric level that is predefined (or registered via addLevelName)
    maps to its name; a known level name maps back to its numeric value.
    Anything unrecognized yields the string 'Level %s' % level.
    """
    # See Issues #22386, #27937 and #29220 for why it's this way
    for table in (_levelToName, _nameToLevel):
        found = table.get(level)
        if found is not None:
            return found
    return "Level %s" % level
145
+
146
def addLevelName(level, levelName):
    """
    Associate 'levelName' with 'level'.

    This is used when converting levels to text during message formatting.
    """
    _acquireLock()
    try: #unlikely to cause an exception, but you never know...
        # Keep both direction maps in sync while holding the module lock.
        _levelToName[level] = levelName
        _nameToLevel[levelName] = level
    finally:
        _releaseLock()
158
+
159
if hasattr(sys, '_getframe'):
    # Fast path: frame 3 is the caller of the public logging API entry.
    currentframe = lambda: sys._getframe(3)
else: #pragma: no cover
    def currentframe():
        """Return the frame object for the caller's stack frame."""
        try:
            # Raising and catching is the portable way to get at a
            # traceback (and hence a frame) without sys._getframe.
            raise Exception
        except Exception:
            return sys.exc_info()[2].tb_frame.f_back
168
+
169
+ #
170
+ # _srcfile is used when walking the stack to check when we've got the first
171
+ # caller stack frame, by skipping frames whose filename is that of this
172
+ # module's source. It therefore should contain the filename of this module's
173
+ # source file.
174
+ #
175
+ # Ordinarily we would use __file__ for this, but frozen modules don't always
176
+ # have __file__ set, for some reason (see Issue #21736). Thus, we get the
177
+ # filename from a handy code object from a function defined in this module.
178
+ # (There's no particular reason for picking addLevelName.)
179
+ #
180
+
181
+ _srcfile = os.path.normcase(addLevelName.__code__.co_filename)
182
+
183
+ # _srcfile is only used in conjunction with sys._getframe().
184
+ # To provide compatibility with older versions of Python, set _srcfile
185
+ # to None if _getframe() is not available; this value will prevent
186
+ # findCaller() from being called. You can also do this if you want to avoid
187
+ # the overhead of fetching caller information, even when _getframe() is
188
+ # available.
189
+ #if not hasattr(sys, '_getframe'):
190
+ # _srcfile = None
191
+
192
+
193
+ def _checkLevel(level):
194
+ if isinstance(level, int):
195
+ rv = level
196
+ elif str(level) == level:
197
+ if level not in _nameToLevel:
198
+ raise ValueError("Unknown level: %r" % level)
199
+ rv = _nameToLevel[level]
200
+ else:
201
+ raise TypeError("Level not an integer or a valid string: %r"
202
+ % (level,))
203
+ return rv
204
+
205
+ #---------------------------------------------------------------------------
206
+ # Thread-related stuff
207
+ #---------------------------------------------------------------------------
208
+
209
+ #
210
+ #_lock is used to serialize access to shared data structures in this module.
211
+ #This needs to be an RLock because fileConfig() creates and configures
212
+ #Handlers, and so might arbitrary user threads. Since Handler code updates the
213
+ #shared dictionary _handlers, it needs to acquire the lock. But if configuring,
214
+ #the lock would already have been acquired - so we need an RLock.
215
+ #The same argument applies to Loggers and Manager.loggerDict.
216
+ #
217
+ _lock = threading.RLock()
218
+
219
def _acquireLock():
    """
    Acquire the module-level lock for serializing access to shared data.

    This should be released with _releaseLock().
    """
    # _lock may be gone during interpreter shutdown; be defensive.
    if _lock:
        _lock.acquire()
227
+
228
def _releaseLock():
    """
    Release the module-level lock acquired by calling _acquireLock().
    """
    # Mirror of _acquireLock(): tolerate a missing lock at shutdown.
    if _lock:
        _lock.release()
234
+
235
+
236
# Prevent a held logging lock from blocking a child from logging.

if not hasattr(os, 'register_at_fork'):  # Windows and friends.
    def _register_at_fork_reinit_lock(instance):
        pass  # no-op when os.register_at_fork does not exist.
else:
    # A collection of instances with a _at_fork_reinit method (logging.Handler)
    # to be called in the child after forking. The weakref avoids us keeping
    # discarded Handler instances alive.
    _at_fork_reinit_lock_weakset = weakref.WeakSet()

    def _register_at_fork_reinit_lock(instance):
        # Registration is guarded by the module lock since WeakSet
        # mutation is shared state.
        _acquireLock()
        try:
            _at_fork_reinit_lock_weakset.add(instance)
        finally:
            _releaseLock()

    def _after_at_fork_child_reinit_locks():
        # In the child: every registered handler reinitializes its lock.
        for handler in _at_fork_reinit_lock_weakset:
            handler._at_fork_reinit()

        # _acquireLock() was called in the parent before forking.
        # The lock is reinitialized to unlocked state.
        _lock._at_fork_reinit()

    os.register_at_fork(before=_acquireLock,
                        after_in_child=_after_at_fork_child_reinit_locks,
                        after_in_parent=_releaseLock)
265
+
266
+
267
+ #---------------------------------------------------------------------------
268
+ # The logging record
269
+ #---------------------------------------------------------------------------
270
+
271
+ class LogRecord(object):
272
+ """
273
+ A LogRecord instance represents an event being logged.
274
+
275
+ LogRecord instances are created every time something is logged. They
276
+ contain all the information pertinent to the event being logged. The
277
+ main information passed in is in msg and args, which are combined
278
+ using str(msg) % args to create the message field of the record. The
279
+ record also includes information such as when the record was created,
280
+ the source line where the logging call was made, and any exception
281
+ information to be logged.
282
+ """
283
+ def __init__(self, name, level, pathname, lineno,
284
+ msg, args, exc_info, func=None, sinfo=None, **kwargs):
285
+ """
286
+ Initialize a logging record with interesting information.
287
+ """
288
+ ct = time.time()
289
+ self.name = name
290
+ self.msg = msg
291
+ #
292
+ # The following statement allows passing of a dictionary as a sole
293
+ # argument, so that you can do something like
294
+ # logging.debug("a %(a)d b %(b)s", {'a':1, 'b':2})
295
+ # Suggested by Stefan Behnel.
296
+ # Note that without the test for args[0], we get a problem because
297
+ # during formatting, we test to see if the arg is present using
298
+ # 'if self.args:'. If the event being logged is e.g. 'Value is %d'
299
+ # and if the passed arg fails 'if self.args:' then no formatting
300
+ # is done. For example, logger.warning('Value is %d', 0) would log
301
+ # 'Value is %d' instead of 'Value is 0'.
302
+ # For the use case of passing a dictionary, this should not be a
303
+ # problem.
304
+ # Issue #21172: a request was made to relax the isinstance check
305
+ # to hasattr(args[0], '__getitem__'). However, the docs on string
306
+ # formatting still seem to suggest a mapping object is required.
307
+ # Thus, while not removing the isinstance check, it does now look
308
+ # for collections.abc.Mapping rather than, as before, dict.
309
+ if (args and len(args) == 1 and isinstance(args[0], collections.abc.Mapping)
310
+ and args[0]):
311
+ args = args[0]
312
+ self.args = args
313
+ self.levelname = getLevelName(level)
314
+ self.levelno = level
315
+ self.pathname = pathname
316
+ try:
317
+ self.filename = os.path.basename(pathname)
318
+ self.module = os.path.splitext(self.filename)[0]
319
+ except (TypeError, ValueError, AttributeError):
320
+ self.filename = pathname
321
+ self.module = "Unknown module"
322
+ self.exc_info = exc_info
323
+ self.exc_text = None # used to cache the traceback text
324
+ self.stack_info = sinfo
325
+ self.lineno = lineno
326
+ self.funcName = func
327
+ self.created = ct
328
+ self.msecs = int((ct - int(ct)) * 1000) + 0.0 # see gh-89047
329
+ self.relativeCreated = (self.created - _startTime) * 1000
330
+ if logThreads:
331
+ self.thread = threading.get_ident()
332
+ self.threadName = threading.current_thread().name
333
+ else: # pragma: no cover
334
+ self.thread = None
335
+ self.threadName = None
336
+ if not logMultiprocessing: # pragma: no cover
337
+ self.processName = None
338
+ else:
339
+ self.processName = 'MainProcess'
340
+ mp = sys.modules.get('multiprocessing')
341
+ if mp is not None:
342
+ # Errors may occur if multiprocessing has not finished loading
343
+ # yet - e.g. if a custom import hook causes third-party code
344
+ # to run when multiprocessing calls import. See issue 8200
345
+ # for an example
346
+ try:
347
+ self.processName = mp.current_process().name
348
+ except Exception: #pragma: no cover
349
+ pass
350
+ if logProcesses and hasattr(os, 'getpid'):
351
+ self.process = os.getpid()
352
+ else:
353
+ self.process = None
354
+
355
    def __repr__(self):
        # Summarise the record's identifying fields.  Note this shows the raw
        # (unmerged) msg, not the result of getMessage().
        return '<LogRecord: %s, %s, %s, %s, "%s">'%(self.name, self.levelno,
            self.pathname, self.lineno, self.msg)
358
+
359
    def getMessage(self):
        """
        Return the message for this LogRecord.

        Return the message for this LogRecord after merging any user-supplied
        arguments with the message.
        """
        # msg may be any object (e.g. an exception instance); coerce first.
        msg = str(self.msg)
        if self.args:
            # %-merge is deferred until the record is actually formatted;
            # args is either a tuple or a single mapping (see __init__).
            msg = msg % self.args
        return msg
370
+
371
#
# Determine which class to use when instantiating log records.
#
# Module-level default; replaceable via setLogRecordFactory() below.
_logRecordFactory = LogRecord
375
+
376
def setLogRecordFactory(factory):
    """
    Set the factory to be used when instantiating a log record.

    :param factory: A callable which will be called to instantiate
    a log record.  It is invoked with LogRecord's constructor arguments
    (see makeLogRecord for an example call).
    """
    global _logRecordFactory
    _logRecordFactory = factory
385
+
386
def getLogRecordFactory():
    """
    Return the factory to be used when instantiating a log record.
    """

    return _logRecordFactory
392
+
393
def makeLogRecord(dict):
    """
    Make a LogRecord whose attributes are defined by the specified dictionary.
    This function is useful for converting a logging event received over
    a socket connection (which is sent as a dictionary) into a LogRecord
    instance.

    NOTE: the parameter name shadows the builtin ``dict``; it is kept for
    backward compatibility with callers passing it by keyword.
    """
    # Build a minimal empty record, then overwrite its attributes wholesale.
    rv = _logRecordFactory(None, None, "", 0, "", (), None, None)
    rv.__dict__.update(dict)
    return rv
403
+
404
+
405
#---------------------------------------------------------------------------
#   Formatter classes and functions
#---------------------------------------------------------------------------
# Shared formatter instance used by StrFormatStyle.validate() to parse
# {}-style format strings (StrFormatter is presumably an alias of
# string.Formatter imported above -- not visible here).  The class name is
# deleted so only the instance remains at module level.
_str_formatter = StrFormatter()
del StrFormatter
410
+
411
+
412
class PercentStyle(object):
    """%-style format string handling: resolves '%(name)s' references
    against a LogRecord's attribute dictionary, optionally merged over a
    dict of default values."""

    default_format = '%(message)s'
    asctime_format = '%(asctime)s'
    asctime_search = '%(asctime)'
    validation_pattern = re.compile(r'%\(\w+\)[#0+ -]*(\*|\d+)?(\.(\*|\d+))?[diouxefgcrsa%]', re.I)

    def __init__(self, fmt, *, defaults=None):
        # Fall back to this style's default format when none is supplied.
        self._fmt = fmt if fmt else self.default_format
        self._defaults = defaults

    def usesTime(self):
        # True when the format string references the asctime field.
        return self.asctime_search in self._fmt

    def validate(self):
        """Validate the input format, ensure it matches the correct style"""
        if self.validation_pattern.search(self._fmt) is None:
            raise ValueError("Invalid format '%s' for '%s' style" % (self._fmt, self.default_format[0]))

    def _format(self, record):
        # Record attributes take precedence over supplied defaults.
        defaults = self._defaults
        values = record.__dict__ if not defaults else defaults | record.__dict__
        return self._fmt % values

    def format(self, record):
        try:
            return self._format(record)
        except KeyError as e:
            # Translate a missing field into the documented error type.
            raise ValueError('Formatting field not found in record: %s' % e)
443
+
444
+
445
class StrFormatStyle(PercentStyle):
    """{}-style (str.format) counterpart of PercentStyle."""
    default_format = '{message}'
    asctime_format = '{asctime}'
    asctime_search = '{asctime'

    # Grammars for format specs and field names, used by validate().
    fmt_spec = re.compile(r'^(.?[<>=^])?[+ -]?#?0?(\d+|{\w+})?[,_]?(\.(\d+|{\w+}))?[bcdefgnosx%]?$', re.I)
    field_spec = re.compile(r'^(\d+|\w+)(\.\w+|\[[^]]+\])*$')

    def _format(self, record):
        # Record attributes take precedence over supplied defaults.
        defaults = self._defaults
        values = record.__dict__ if not defaults else defaults | record.__dict__
        return self._fmt.format(**values)

    def validate(self):
        """Validate the input format, ensure it is the correct string formatting style"""
        fields = set()
        try:
            for _, fieldname, spec, conversion in _str_formatter.parse(self._fmt):
                if not fieldname:
                    # Literal text segment (or auto-numbered field): no checks.
                    continue
                if not self.field_spec.match(fieldname):
                    raise ValueError('invalid field name/expression: %r' % fieldname)
                fields.add(fieldname)
                if conversion and conversion not in 'rsa':
                    raise ValueError('invalid conversion: %r' % conversion)
                if spec and not self.fmt_spec.match(spec):
                    raise ValueError('bad specifier: %r' % spec)
        except ValueError as e:
            # Wrap both our own checks and parse() errors uniformly.
            raise ValueError('invalid format: %s' % e)
        if not fields:
            raise ValueError('invalid format: no fields')
477
+
478
+
479
class StringTemplateStyle(PercentStyle):
    """$-style (string.Template) counterpart of PercentStyle."""
    default_format = '${message}'
    asctime_format = '${asctime}'
    asctime_search = '${asctime}'

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Pre-compile the template once; substitution happens per record.
        self._tpl = Template(self._fmt)

    def usesTime(self):
        # Both the bare ($asctime) and braced (${asctime}) spellings count.
        fmt = self._fmt
        return '$asctime' in fmt or self.asctime_search in fmt

    def validate(self):
        fields = set()
        for m in Template.pattern.finditer(self._fmt):
            d = m.groupdict()
            if d['named']:
                fields.add(d['named'])
            elif d['braced']:
                fields.add(d['braced'])
            elif m.group(0) == '$':
                # An unescaped, unnamed '$' is invalid in a format string.
                raise ValueError('invalid format: bare \'$\' not allowed')
        if not fields:
            raise ValueError('invalid format: no fields')

    def _format(self, record):
        # Record attributes take precedence over supplied defaults.
        defaults = self._defaults
        values = record.__dict__ if not defaults else defaults | record.__dict__
        return self._tpl.substitute(**values)
512
+
513
+
514
# Default %-style format string (used where no explicit format is supplied).
BASIC_FORMAT = "%(levelname)s:%(name)s:%(message)s"

# Maps each style character to (style class, default format for that style).
_STYLES = {
    '%': (PercentStyle, BASIC_FORMAT),
    '{': (StrFormatStyle, '{levelname}:{name}:{message}'),
    '$': (StringTemplateStyle, '${levelname}:${name}:${message}'),
}
521
+
522
class Formatter(object):
    """
    Formatter instances are used to convert a LogRecord to text.

    Formatters need to know how a LogRecord is constructed. They are
    responsible for converting a LogRecord to (usually) a string which can
    be interpreted by either a human or an external system. The base Formatter
    allows a formatting string to be specified. If none is supplied, the
    style-dependent default value, "%(message)s", "{message}", or
    "${message}", is used.

    The Formatter can be initialized with a format string which makes use of
    knowledge of the LogRecord attributes - e.g. the default value mentioned
    above makes use of the fact that the user's message and arguments are pre-
    formatted into a LogRecord's message attribute. Currently, the useful
    attributes in a LogRecord are described by:

    %(name)s            Name of the logger (logging channel)
    %(levelno)s         Numeric logging level for the message (DEBUG, INFO,
                        WARNING, ERROR, CRITICAL)
    %(levelname)s       Text logging level for the message ("DEBUG", "INFO",
                        "WARNING", "ERROR", "CRITICAL")
    %(pathname)s        Full pathname of the source file where the logging
                        call was issued (if available)
    %(filename)s        Filename portion of pathname
    %(module)s          Module (name portion of filename)
    %(lineno)d          Source line number where the logging call was issued
                        (if available)
    %(funcName)s        Function name
    %(created)f         Time when the LogRecord was created (time.time()
                        return value)
    %(asctime)s         Textual time when the LogRecord was created
    %(msecs)d           Millisecond portion of the creation time
    %(relativeCreated)d Time in milliseconds when the LogRecord was created,
                        relative to the time the logging module was loaded
                        (typically at application startup time)
    %(thread)d          Thread ID (if available)
    %(threadName)s      Thread name (if available)
    %(process)d         Process ID (if available)
    %(message)s         The result of record.getMessage(), computed just as
                        the record is emitted
    """

    # Class-level hook: set to time.gmtime (on the class or an instance) to
    # report times in UTC instead of local time (see formatTime docstring).
    converter = time.localtime

    def __init__(self, fmt=None, datefmt=None, style='%', validate=True, *,
                 defaults=None):
        """
        Initialize the formatter with specified format strings.

        Initialize the formatter either with the specified format string, or a
        default as described above. Allow for specialized date formatting with
        the optional datefmt argument. If datefmt is omitted, you get an
        ISO8601-like (or RFC 3339-like) format.

        Use a style parameter of '%', '{' or '$' to specify that you want to
        use one of %-formatting, :meth:`str.format` (``{}``) formatting or
        :class:`string.Template` formatting in your format string.

        :param fmt: format string (interpretation depends on ``style``)
        :param datefmt: time.strftime-compatible date format for %(asctime)s
        :param style: one of the keys of _STYLES ('%', '{', '$')
        :param validate: if true, reject malformed format strings eagerly
        :param defaults: mapping of default values merged under the record's
            attributes by the style's _format()
        :raises ValueError: for an unknown style, or (when validate is true)
            a format string that does not match the chosen style

        .. versionchanged:: 3.2
           Added the ``style`` parameter.
        """
        if style not in _STYLES:
            raise ValueError('Style must be one of: %s' % ','.join(
                             _STYLES.keys()))
        self._style = _STYLES[style][0](fmt, defaults=defaults)
        if validate:
            self._style.validate()

        self._fmt = self._style._fmt
        self.datefmt = datefmt

    # Class-level hooks overridable by subclasses: the base time format,
    # and how milliseconds are appended (set default_msec_format to a falsy
    # value to suppress milliseconds entirely).
    default_time_format = '%Y-%m-%d %H:%M:%S'
    default_msec_format = '%s,%03d'

    def formatTime(self, record, datefmt=None):
        """
        Return the creation time of the specified LogRecord as formatted text.

        This method should be called from format() by a formatter which
        wants to make use of a formatted time. This method can be overridden
        in formatters to provide for any specific requirement, but the
        basic behaviour is as follows: if datefmt (a string) is specified,
        it is used with time.strftime() to format the creation time of the
        record. Otherwise, an ISO8601-like (or RFC 3339-like) format is used.
        The resulting string is returned. This function uses a user-configurable
        function to convert the creation time to a tuple. By default,
        time.localtime() is used; to change this for a particular formatter
        instance, set the 'converter' attribute to a function with the same
        signature as time.localtime() or time.gmtime(). To change it for all
        formatters, for example if you want all logging times to be shown in GMT,
        set the 'converter' attribute in the Formatter class.
        """
        ct = self.converter(record.created)
        if datefmt:
            s = time.strftime(datefmt, ct)
        else:
            # No explicit date format: use the default, then append
            # milliseconds (strftime itself has no sub-second directive here).
            s = time.strftime(self.default_time_format, ct)
            if self.default_msec_format:
                s = self.default_msec_format % (s, record.msecs)
        return s

    def formatException(self, ei):
        """
        Format and return the specified exception information as a string.

        This default implementation just uses
        traceback.print_exception()

        :param ei: an (exc_type, exc_value, traceback) triple as returned
            by sys.exc_info()
        """
        sio = io.StringIO()
        tb = ei[2]
        # See issues #9427, #1553375. Commented out for now.
        #if getattr(self, 'fullstack', False):
        #    traceback.print_stack(tb.tb_frame.f_back, file=sio)
        traceback.print_exception(ei[0], ei[1], tb, None, sio)
        s = sio.getvalue()
        sio.close()
        # Drop the trailing newline so format() controls line joining.
        if s[-1:] == "\n":
            s = s[:-1]
        return s

    def usesTime(self):
        """
        Check if the format uses the creation time of the record.
        """
        return self._style.usesTime()

    def formatMessage(self, record):
        # Delegate the actual string merge to the configured style object.
        return self._style.format(record)

    def formatStack(self, stack_info):
        """
        This method is provided as an extension point for specialized
        formatting of stack information.

        The input data is a string as returned from a call to
        :func:`traceback.print_stack`, but with the last trailing newline
        removed.

        The base implementation just returns the value passed in.
        """
        return stack_info

    def format(self, record):
        """
        Format the specified record as text.

        The record's attribute dictionary is used as the operand to a
        string formatting operation which yields the returned string.
        Before formatting the dictionary, a couple of preparatory steps
        are carried out. The message attribute of the record is computed
        using LogRecord.getMessage(). If the formatting string uses the
        time (as determined by a call to usesTime(), formatTime() is
        called to format the event time. If there is exception information,
        it is formatted using formatException() and appended to the message.
        """
        record.message = record.getMessage()
        if self.usesTime():
            record.asctime = self.formatTime(record, self.datefmt)
        s = self.formatMessage(record)
        if record.exc_info:
            # Cache the traceback text to avoid converting it multiple times
            # (it's constant anyway)
            if not record.exc_text:
                record.exc_text = self.formatException(record.exc_info)
        if record.exc_text:
            if s[-1:] != "\n":
                s = s + "\n"
            s = s + record.exc_text
        if record.stack_info:
            if s[-1:] != "\n":
                s = s + "\n"
            s = s + self.formatStack(record.stack_info)
        return s
696
+
697
#
# The default formatter to use when no other is specified
#
# Uses the '%' style with its default format '%(message)s'.
_defaultFormatter = Formatter()
701
+
702
class BufferingFormatter(object):
    """
    A formatter suitable for formatting a number of records.
    """
    def __init__(self, linefmt=None):
        """
        Optionally specify a formatter which will be used to format each
        individual record.
        """
        # Fall back to the module's default formatter when none is supplied.
        self.linefmt = linefmt if linefmt else _defaultFormatter

    def formatHeader(self, records):
        """
        Return the header string for the specified records.
        """
        return ""

    def formatFooter(self, records):
        """
        Return the footer string for the specified records.
        """
        return ""

    def format(self, records):
        """
        Format the specified records and return the result as a string.
        """
        # An empty batch produces an empty string: no header, no footer.
        if not len(records):
            return ""
        parts = [self.formatHeader(records)]
        parts.extend(self.linefmt.format(record) for record in records)
        parts.append(self.formatFooter(records))
        return "".join(parts)
739
+
740
+ #---------------------------------------------------------------------------
741
+ # Filter classes and functions
742
+ #---------------------------------------------------------------------------
743
+
744
class Filter(object):
    """
    Filter instances are used to perform arbitrary filtering of LogRecords.

    Loggers and Handlers can optionally use Filter instances to filter
    records as desired. The base filter class only allows events which are
    below a certain point in the logger hierarchy. For example, a filter
    initialized with "A.B" will allow events logged by loggers "A.B",
    "A.B.C", "A.B.C.D", "A.B.D" etc. but not "A.BB", "B.A.B" etc. If
    initialized with the empty string, all events are passed.
    """
    def __init__(self, name=''):
        """
        Initialize a filter.

        Initialize with the name of the logger which, together with its
        children, will have its events allowed through the filter. If no
        name is specified, allow every event.
        """
        self.name = name
        self.nlen = len(name)

    def filter(self, record):
        """
        Determine if the specified record is to be logged.

        Returns True if the record should be logged, or False otherwise.
        If deemed appropriate, the record may be modified in-place.
        """
        # Empty filter name: everything passes.
        if not self.nlen:
            return True
        # Exact match on the filter's own logger.
        if record.name == self.name:
            return True
        # Otherwise the record must come from a descendant logger:
        # prefix match plus a '.' boundary right after the prefix.
        if not record.name.startswith(self.name):
            return False
        return record.name[self.nlen] == "."
780
+
781
class Filterer(object):
    """
    A base class for loggers and handlers which allows them to share
    common code.
    """
    def __init__(self):
        """
        Initialize the list of filters to be an empty list.
        """
        self.filters = []

    def addFilter(self, filter):
        """
        Add the specified filter to this handler.
        """
        # Each filter is stored at most once.
        if filter not in self.filters:
            self.filters.append(filter)

    def removeFilter(self, filter):
        """
        Remove the specified filter from this handler.
        """
        if filter in self.filters:
            self.filters.remove(filter)

    def filter(self, record):
        """
        Determine if a record is loggable by consulting all the filters.

        The default is to allow the record to be logged; any filter can veto
        this and the record is then dropped. Returns a zero value if a record
        is to be dropped, else non-zero.

        .. versionchanged:: 3.2

           Allow filters to be just callables.
        """
        for f in self.filters:
            # A filter is either an object with a filter() method or a
            # bare callable; the latter raises naturally if uncallable.
            outcome = f.filter(record) if hasattr(f, 'filter') else f(record)
            if not outcome:
                # First veto wins; remaining filters are not consulted.
                return False
        return True
828
+
829
+ #---------------------------------------------------------------------------
830
+ # Handler classes and functions
831
+ #---------------------------------------------------------------------------
832
+
833
# Module-level registries; both are mutated only under the module lock.
_handlers = weakref.WeakValueDictionary()  #map of handler names to handlers
_handlerList = [] # added to allow handlers to be removed in reverse of order initialized
835
+
836
def _removeHandlerRef(wr):
    """
    Remove a handler reference from the internal cleanup list.

    :param wr: the dead weakref previously stored in _handlerList; passed in
        automatically by the weakref callback mechanism (see _addHandlerRef).
    """
    # This function can be called during module teardown, when globals are
    # set to None. It can also be called from another thread. So we need to
    # pre-emptively grab the necessary globals and check if they're None,
    # to prevent race conditions and failures during interpreter shutdown.
    acquire, release, handlers = _acquireLock, _releaseLock, _handlerList
    if acquire and release and handlers:
        acquire()
        try:
            if wr in handlers:
                handlers.remove(wr)
        finally:
            release()
852
+
853
def _addHandlerRef(handler):
    """
    Add a handler to the internal cleanup list using a weak reference.
    """
    _acquireLock()
    try:
        # _removeHandlerRef runs automatically when the handler is
        # garbage-collected, keeping _handlerList free of dead refs.
        _handlerList.append(weakref.ref(handler, _removeHandlerRef))
    finally:
        _releaseLock()
862
+
863
class Handler(Filterer):
    """
    Handler instances dispatch logging events to specific destinations.

    The base handler class. Acts as a placeholder which defines the Handler
    interface. Handlers can optionally use Formatter instances to format
    records as desired. By default, no formatter is specified; in this case,
    the 'raw' message as determined by record.message is logged.
    """
    def __init__(self, level=NOTSET):
        """
        Initializes the instance - basically setting the formatter to None
        and the filter list to empty.
        """
        Filterer.__init__(self)
        self._name = None
        self.level = _checkLevel(level)
        self.formatter = None
        self._closed = False
        # Add the handler to the global _handlerList (for cleanup on shutdown)
        _addHandlerRef(self)
        self.createLock()

    def get_name(self):
        return self._name

    def set_name(self, name):
        # Maintains the module-level name -> handler map (_handlers) under
        # the module lock: the old name is unregistered, the new registered.
        _acquireLock()
        try:
            if self._name in _handlers:
                del _handlers[self._name]
            self._name = name
            if name:
                _handlers[name] = self
        finally:
            _releaseLock()

    name = property(get_name, set_name)

    def createLock(self):
        """
        Acquire a thread lock for serializing access to the underlying I/O.
        """
        self.lock = threading.RLock()
        # Ensure the lock is re-initialized in the child after os.fork().
        _register_at_fork_reinit_lock(self)

    def _at_fork_reinit(self):
        self.lock._at_fork_reinit()

    def acquire(self):
        """
        Acquire the I/O thread lock.
        """
        # self.lock may be falsy (e.g. set to None externally); guard it.
        if self.lock:
            self.lock.acquire()

    def release(self):
        """
        Release the I/O thread lock.
        """
        if self.lock:
            self.lock.release()

    def setLevel(self, level):
        """
        Set the logging level of this handler.  level must be an int or a str.
        """
        self.level = _checkLevel(level)

    def format(self, record):
        """
        Format the specified record.

        If a formatter is set, use it. Otherwise, use the default formatter
        for the module.
        """
        if self.formatter:
            fmt = self.formatter
        else:
            fmt = _defaultFormatter
        return fmt.format(record)

    def emit(self, record):
        """
        Do whatever it takes to actually log the specified logging record.

        This version is intended to be implemented by subclasses and so
        raises a NotImplementedError.
        """
        raise NotImplementedError('emit must be implemented '
                                  'by Handler subclasses')

    def handle(self, record):
        """
        Conditionally emit the specified logging record.

        Emission depends on filters which may have been added to the handler.
        Wrap the actual emission of the record with acquisition/release of
        the I/O thread lock. Returns whether the filter passed the record for
        emission.
        """
        rv = self.filter(record)
        if rv:
            self.acquire()
            try:
                self.emit(record)
            finally:
                self.release()
        return rv

    def setFormatter(self, fmt):
        """
        Set the formatter for this handler.
        """
        self.formatter = fmt

    def flush(self):
        """
        Ensure all logging output has been flushed.

        This version does nothing and is intended to be implemented by
        subclasses.
        """
        pass

    def close(self):
        """
        Tidy up any resources used by the handler.

        This version removes the handler from an internal map of handlers,
        _handlers, which is used for handler lookup by name. Subclasses
        should ensure that this gets called from overridden close()
        methods.
        """
        #get the module data lock, as we're updating a shared structure.
        _acquireLock()
        try:    #unlikely to raise an exception, but you never know...
            self._closed = True
            if self._name and self._name in _handlers:
                del _handlers[self._name]
        finally:
            _releaseLock()

    def handleError(self, record):
        """
        Handle errors which occur during an emit() call.

        This method should be called from handlers when an exception is
        encountered during an emit() call. If raiseExceptions is false,
        exceptions get silently ignored. This is what is mostly wanted
        for a logging system - most users will not care about errors in
        the logging system, they are more interested in application errors.
        You could, however, replace this with a custom handler if you wish.
        The record which was being processed is passed in to this method.
        """
        if raiseExceptions and sys.stderr:  # see issue 13807
            t, v, tb = sys.exc_info()
            try:
                sys.stderr.write('--- Logging error ---\n')
                traceback.print_exception(t, v, tb, None, sys.stderr)
                sys.stderr.write('Call stack:\n')
                # Walk the stack frame up until we're out of logging,
                # so as to print the calling context.
                frame = tb.tb_frame
                while (frame and os.path.dirname(frame.f_code.co_filename) ==
                       __path__[0]):
                    frame = frame.f_back
                if frame:
                    traceback.print_stack(frame, file=sys.stderr)
                else:
                    # couldn't find the right stack frame, for some reason
                    sys.stderr.write('Logged from file %s, line %s\n' % (
                                     record.filename, record.lineno))
                # Issue 18671: output logging message and arguments
                try:
                    sys.stderr.write('Message: %r\n'
                                     'Arguments: %s\n' % (record.msg,
                                                          record.args))
                except RecursionError:  # See issue 36272
                    raise
                except Exception:
                    sys.stderr.write('Unable to print the message and arguments'
                                     ' - possible formatting error.\nUse the'
                                     ' traceback above to help find the error.\n'
                                    )
            except OSError: #pragma: no cover
                pass    # see issue 5971
            finally:
                # Break reference cycles through the traceback object.
                del t, v, tb

    def __repr__(self):
        level = getLevelName(self.level)
        return '<%s (%s)>' % (self.__class__.__name__, level)
1056
+
1057
class StreamHandler(Handler):
    """
    A handler class which writes logging records, appropriately formatted,
    to a stream. Note that this class does not close the stream, as
    sys.stdout or sys.stderr may be used.
    """

    # Appended to every formatted record written by emit().
    terminator = '\n'

    def __init__(self, stream=None):
        """
        Initialize the handler.

        If stream is not specified, sys.stderr is used.
        """
        Handler.__init__(self)
        if stream is None:
            stream = sys.stderr
        self.stream = stream

    def flush(self):
        """
        Flushes the stream.
        """
        self.acquire()
        try:
            # The stream may be absent (e.g. a delayed FileHandler) or may
            # not support flushing; guard both cases.
            if self.stream and hasattr(self.stream, "flush"):
                self.stream.flush()
        finally:
            self.release()

    def emit(self, record):
        """
        Emit a record.

        If a formatter is specified, it is used to format the record.
        The record is then written to the stream with a trailing newline.  If
        exception information is present, it is formatted using
        traceback.print_exception and appended to the stream.  If the stream
        has an 'encoding' attribute, it is used to determine how to do the
        output to the stream.
        """
        try:
            msg = self.format(record)
            stream = self.stream
            # issue 35046: merged two stream.writes into one.
            stream.write(msg + self.terminator)
            self.flush()
        except RecursionError:  # See issue 36272
            raise
        except Exception:
            self.handleError(record)

    def setStream(self, stream):
        """
        Sets the StreamHandler's stream to the specified value,
        if it is different.

        Returns the old stream, if the stream was changed, or None
        if it wasn't.
        """
        if stream is self.stream:
            result = None
        else:
            result = self.stream
            self.acquire()
            try:
                # Flush pending output to the old stream before swapping.
                self.flush()
                self.stream = stream
            finally:
                self.release()
        return result

    def __repr__(self):
        level = getLevelName(self.level)
        name = getattr(self.stream, 'name', '')
        # bpo-36015: name can be an int
        name = str(name)
        if name:
            name += ' '
        return '<%s %s(%s)>' % (self.__class__.__name__, name, level)
1138
+
1139
+
1140
class FileHandler(StreamHandler):
    """
    A handler class which writes formatted logging records to disk files.
    """
    def __init__(self, filename, mode='a', encoding=None, delay=False, errors=None):
        """
        Open the specified file and use it as the stream for logging.

        :param filename: path (str or os.PathLike) of the log file
        :param mode: mode passed to open() (default append)
        :param encoding: text encoding, resolved via io.text_encoding for
            text modes
        :param delay: if true, defer opening the file until the first emit()
        :param errors: encoding error handler passed to open()
        """
        # Issue #27493: add support for Path objects to be passed in
        filename = os.fspath(filename)
        #keep the absolute path, otherwise derived classes which use this
        #may come a cropper when the current directory changes
        self.baseFilename = os.path.abspath(filename)
        self.mode = mode
        self.encoding = encoding
        if "b" not in mode:
            # Text mode: resolve the default text encoding (this is the
            # helper that emits EncodingWarning under -X warn_default_encoding).
            self.encoding = io.text_encoding(encoding)
        self.errors = errors
        self.delay = delay
        # bpo-26789: FileHandler keeps a reference to the builtin open()
        # function to be able to open or reopen the file during Python
        # finalization.
        self._builtin_open = open
        if delay:
            #We don't open the stream, but we still need to call the
            #Handler constructor to set level, formatter, lock etc.
            Handler.__init__(self)
            self.stream = None
        else:
            StreamHandler.__init__(self, self._open())

    def close(self):
        """
        Closes the stream.
        """
        self.acquire()
        try:
            try:
                if self.stream:
                    try:
                        self.flush()
                    finally:
                        # Null out self.stream before closing so emit() can
                        # never write to a closed stream.
                        stream = self.stream
                        self.stream = None
                        if hasattr(stream, "close"):
                            stream.close()
            finally:
                # Issue #19523: call unconditionally to
                # prevent a handler leak when delay is set
                # Also see Issue #42378: we also rely on
                # self._closed being set to True there
                StreamHandler.close(self)
        finally:
            self.release()

    def _open(self):
        """
        Open the current base file with the (original) mode and encoding.
        Return the resulting stream.
        """
        open_func = self._builtin_open
        return open_func(self.baseFilename, self.mode,
                         encoding=self.encoding, errors=self.errors)

    def emit(self, record):
        """
        Emit a record.

        If the stream was not opened because 'delay' was specified in the
        constructor, open it before calling the superclass's emit.

        If stream is not open, current mode is 'w' and `_closed=True`, record
        will not be emitted (see Issue #42378).
        """
        if self.stream is None:
            # Don't reopen a 'w'-mode file after close(): that would
            # truncate output already written (Issue #42378).
            if self.mode != 'w' or not self._closed:
                self.stream = self._open()
        if self.stream:
            StreamHandler.emit(self, record)

    def __repr__(self):
        level = getLevelName(self.level)
        return '<%s %s (%s)>' % (self.__class__.__name__, self.baseFilename, level)
1223
+
1224
+
1225
class _StderrHandler(StreamHandler):
    """
    This class is like a StreamHandler using sys.stderr, but always uses
    whatever sys.stderr is currently set to rather than the value of
    sys.stderr at handler construction time.
    """
    def __init__(self, level=NOTSET):
        """
        Initialize the handler.
        """
        # Deliberately calls Handler.__init__, not StreamHandler.__init__,
        # so no stream is captured; the read-only 'stream' property below
        # resolves sys.stderr afresh on every access.
        Handler.__init__(self, level)

    @property
    def stream(self):
        return sys.stderr
1240
+
1241
+
1242
# Fallback handler (WARNING -> current sys.stderr).  _defaultLastResort keeps
# the original instance, presumably so lastResort can be restored after being
# reassigned elsewhere -- TODO confirm against the rest of this module.
_defaultLastResort = _StderrHandler(WARNING)
lastResort = _defaultLastResort
1244
+
1245
+ #---------------------------------------------------------------------------
1246
+ # Manager classes and functions
1247
+ #---------------------------------------------------------------------------
1248
+
1249
class PlaceHolder(object):
    """
    PlaceHolder instances are used in the Manager logger hierarchy to take
    the place of nodes for which no loggers have been defined. This class is
    intended for internal use only and not as part of the public API.
    """
    def __init__(self, alogger):
        """
        Initialize with the specified logger being a child of this placeholder.
        """
        # A dict keyed by logger serves as an insertion-ordered set.
        self.loggerMap = {alogger: None}

    def append(self, alogger):
        """
        Add the specified logger as a child of this placeholder.
        """
        # No-op when the logger is already registered.
        self.loggerMap.setdefault(alogger, None)
1267
+
1268
+ #
1269
+ # Determine which class to use when instantiating loggers.
1270
+ #
1271
+
1272
def setLoggerClass(klass):
    """
    Set the class to be used when instantiating a logger. The class should
    define __init__() such that only a name argument is required, and the
    __init__() should call Logger.__init__()

    :raises TypeError: if klass is not Logger itself nor a subclass of it.
    """
    global _loggerClass
    # Logger itself is always acceptable; anything else must derive from it.
    if klass != Logger and not issubclass(klass, Logger):
        raise TypeError("logger not derived from logging.Logger: "
                        + klass.__name__)
    _loggerClass = klass
1284
+
1285
def getLoggerClass():
    """
    Return the class to be used when instantiating a logger.
    """
    return _loggerClass
1290
+
1291
+ class Manager(object):
1292
+ """
1293
+ There is [under normal circumstances] just one Manager instance, which
1294
+ holds the hierarchy of loggers.
1295
+ """
1296
    def __init__(self, rootnode):
        """
        Initialize the manager with the root node of the logger hierarchy.
        """
        self.root = rootnode
        # Goes through the 'disable' property below, so the value is
        # normalised via _checkLevel().
        self.disable = 0
        self.emittedNoHandlerWarning = False
        # name -> Logger or PlaceHolder (see getLogger below)
        self.loggerDict = {}
        # Per-manager override of the module-level _loggerClass, when set.
        self.loggerClass = None
        self.logRecordFactory = None
1306
+
1307
+ @property
1308
+ def disable(self):
1309
+ return self._disable
1310
+
1311
+ @disable.setter
1312
+ def disable(self, value):
1313
+ self._disable = _checkLevel(value)
1314
+
1315
+ def getLogger(self, name):
1316
+ """
1317
+ Get a logger with the specified name (channel name), creating it
1318
+ if it doesn't yet exist. This name is a dot-separated hierarchical
1319
+ name, such as "a", "a.b", "a.b.c" or similar.
1320
+
1321
+ If a PlaceHolder existed for the specified name [i.e. the logger
1322
+ didn't exist but a child of it did], replace it with the created
1323
+ logger and fix up the parent/child references which pointed to the
1324
+ placeholder to now point to the logger.
1325
+ """
1326
+ rv = None
1327
+ if not isinstance(name, str):
1328
+ raise TypeError('A logger name must be a string')
1329
+ _acquireLock()
1330
+ try:
1331
+ if name in self.loggerDict:
1332
+ rv = self.loggerDict[name]
1333
+ if isinstance(rv, PlaceHolder):
1334
+ ph = rv
1335
+ rv = (self.loggerClass or _loggerClass)(name)
1336
+ rv.manager = self
1337
+ self.loggerDict[name] = rv
1338
+ self._fixupChildren(ph, rv)
1339
+ self._fixupParents(rv)
1340
+ else:
1341
+ rv = (self.loggerClass or _loggerClass)(name)
1342
+ rv.manager = self
1343
+ self.loggerDict[name] = rv
1344
+ self._fixupParents(rv)
1345
+ finally:
1346
+ _releaseLock()
1347
+ return rv
1348
+
1349
+ def setLoggerClass(self, klass):
1350
+ """
1351
+ Set the class to be used when instantiating a logger with this Manager.
1352
+ """
1353
+ if klass != Logger:
1354
+ if not issubclass(klass, Logger):
1355
+ raise TypeError("logger not derived from logging.Logger: "
1356
+ + klass.__name__)
1357
+ self.loggerClass = klass
1358
+
1359
+ def setLogRecordFactory(self, factory):
1360
+ """
1361
+ Set the factory to be used when instantiating a log record with this
1362
+ Manager.
1363
+ """
1364
+ self.logRecordFactory = factory
1365
+
1366
+ def _fixupParents(self, alogger):
1367
+ """
1368
+ Ensure that there are either loggers or placeholders all the way
1369
+ from the specified logger to the root of the logger hierarchy.
1370
+ """
1371
+ name = alogger.name
1372
+ i = name.rfind(".")
1373
+ rv = None
1374
+ while (i > 0) and not rv:
1375
+ substr = name[:i]
1376
+ if substr not in self.loggerDict:
1377
+ self.loggerDict[substr] = PlaceHolder(alogger)
1378
+ else:
1379
+ obj = self.loggerDict[substr]
1380
+ if isinstance(obj, Logger):
1381
+ rv = obj
1382
+ else:
1383
+ assert isinstance(obj, PlaceHolder)
1384
+ obj.append(alogger)
1385
+ i = name.rfind(".", 0, i - 1)
1386
+ if not rv:
1387
+ rv = self.root
1388
+ alogger.parent = rv
1389
+
1390
+ def _fixupChildren(self, ph, alogger):
1391
+ """
1392
+ Ensure that children of the placeholder ph are connected to the
1393
+ specified logger.
1394
+ """
1395
+ name = alogger.name
1396
+ namelen = len(name)
1397
+ for c in ph.loggerMap.keys():
1398
+ #The if means ... if not c.parent.name.startswith(nm)
1399
+ if c.parent.name[:namelen] != name:
1400
+ alogger.parent = c.parent
1401
+ c.parent = alogger
1402
+
1403
+ def _clear_cache(self):
1404
+ """
1405
+ Clear the cache for all loggers in loggerDict
1406
+ Called when level changes are made
1407
+ """
1408
+
1409
+ _acquireLock()
1410
+ for logger in self.loggerDict.values():
1411
+ if isinstance(logger, Logger):
1412
+ logger._cache.clear()
1413
+ self.root._cache.clear()
1414
+ _releaseLock()
1415
+
1416
+ #---------------------------------------------------------------------------
1417
+ # Logger classes and functions
1418
+ #---------------------------------------------------------------------------
1419
+
1420
class Logger(Filterer):
    """
    Instances of the Logger class represent a single logging channel. A
    "logging channel" indicates an area of an application. Exactly how an
    "area" is defined is up to the application developer. Since an
    application can have any number of areas, logging channels are identified
    by a unique string. Application areas can be nested (e.g. an area
    of "input processing" might include sub-areas "read CSV files", "read
    XLS files" and "read Gnumeric files"). To cater for this natural nesting,
    channel names are organized into a namespace hierarchy where levels are
    separated by periods, much like the Java or Python package namespace. So
    in the instance given above, channel names might be "input" for the upper
    level, and "input.csv", "input.xls" and "input.gnu" for the sub-levels.
    There is no arbitrary limit to the depth of nesting.
    """
    def __init__(self, name, level=NOTSET):
        """
        Initialize the logger with a name and an optional level.
        """
        Filterer.__init__(self)
        self.name = name
        self.level = _checkLevel(level)
        # parent is filled in by Manager._fixupParents when the logger is
        # inserted into the hierarchy.
        self.parent = None
        self.propagate = True
        self.handlers = []
        self.disabled = False
        # Per-level cache of isEnabledFor() results; invalidated via
        # Manager._clear_cache whenever a level changes.
        self._cache = {}

    def setLevel(self, level):
        """
        Set the logging level of this logger. level must be an int or a str.
        """
        self.level = _checkLevel(level)
        # Level changes can flip cached enabled/disabled decisions.
        self.manager._clear_cache()

    def debug(self, msg, *args, **kwargs):
        """
        Log 'msg % args' with severity 'DEBUG'.

        To pass exception information, use the keyword argument exc_info with
        a true value, e.g.

        logger.debug("Houston, we have a %s", "thorny problem", exc_info=1)
        """
        if self.isEnabledFor(DEBUG):
            self._log(DEBUG, msg, args, **kwargs)

    def info(self, msg, *args, **kwargs):
        """
        Log 'msg % args' with severity 'INFO'.

        To pass exception information, use the keyword argument exc_info with
        a true value, e.g.

        logger.info("Houston, we have a %s", "interesting problem", exc_info=1)
        """
        if self.isEnabledFor(INFO):
            self._log(INFO, msg, args, **kwargs)

    def warning(self, msg, *args, **kwargs):
        """
        Log 'msg % args' with severity 'WARNING'.

        To pass exception information, use the keyword argument exc_info with
        a true value, e.g.

        logger.warning("Houston, we have a %s", "bit of a problem", exc_info=1)
        """
        if self.isEnabledFor(WARNING):
            self._log(WARNING, msg, args, **kwargs)

    def warn(self, msg, *args, **kwargs):
        """Deprecated alias for warning()."""
        warnings.warn("The 'warn' method is deprecated, "
            "use 'warning' instead", DeprecationWarning, 2)
        self.warning(msg, *args, **kwargs)

    def error(self, msg, *args, **kwargs):
        """
        Log 'msg % args' with severity 'ERROR'.

        To pass exception information, use the keyword argument exc_info with
        a true value, e.g.

        logger.error("Houston, we have a %s", "major problem", exc_info=1)
        """
        if self.isEnabledFor(ERROR):
            self._log(ERROR, msg, args, **kwargs)

    def exception(self, msg, *args, exc_info=True, **kwargs):
        """
        Convenience method for logging an ERROR with exception information.
        """
        self.error(msg, *args, exc_info=exc_info, **kwargs)

    def critical(self, msg, *args, **kwargs):
        """
        Log 'msg % args' with severity 'CRITICAL'.

        To pass exception information, use the keyword argument exc_info with
        a true value, e.g.

        logger.critical("Houston, we have a %s", "major disaster", exc_info=1)
        """
        if self.isEnabledFor(CRITICAL):
            self._log(CRITICAL, msg, args, **kwargs)

    def fatal(self, msg, *args, **kwargs):
        """
        Don't use this method, use critical() instead.
        """
        self.critical(msg, *args, **kwargs)

    def log(self, level, msg, *args, **kwargs):
        """
        Log 'msg % args' with the integer severity 'level'.

        To pass exception information, use the keyword argument exc_info with
        a true value, e.g.

        logger.log(level, "We have a %s", "mysterious problem", exc_info=1)
        """
        if not isinstance(level, int):
            # Unlike the convenience methods, log() requires a numeric
            # level; whether this is an error depends on raiseExceptions.
            if raiseExceptions:
                raise TypeError("level must be an integer")
            else:
                return
        if self.isEnabledFor(level):
            self._log(level, msg, args, **kwargs)

    def findCaller(self, stack_info=False, stacklevel=1):
        """
        Find the stack frame of the caller so that we can note the source
        file name, line number and function name.

        Returns a 4-tuple (filename, lineno, funcname, stackinfo); the
        last element is None unless stack_info is true.
        """
        f = currentframe()
        #On some versions of IronPython, currentframe() returns None if
        #IronPython isn't run with -X:Frames.
        if f is not None:
            f = f.f_back
        orig_f = f
        # Honour stacklevel by walking further up the stack, but fall back
        # to the original frame if the stack is shallower than requested.
        while f and stacklevel > 1:
            f = f.f_back
            stacklevel -= 1
        if not f:
            f = orig_f
        rv = "(unknown file)", 0, "(unknown function)", None
        while hasattr(f, "f_code"):
            co = f.f_code
            filename = os.path.normcase(co.co_filename)
            # Skip frames that belong to this logging module itself.
            if filename == _srcfile:
                f = f.f_back
                continue
            sinfo = None
            if stack_info:
                sio = io.StringIO()
                sio.write('Stack (most recent call last):\n')
                traceback.print_stack(f, file=sio)
                sinfo = sio.getvalue()
                # Strip the trailing newline added by print_stack.
                if sinfo[-1] == '\n':
                    sinfo = sinfo[:-1]
                sio.close()
            rv = (co.co_filename, f.f_lineno, co.co_name, sinfo)
            break
        return rv

    def makeRecord(self, name, level, fn, lno, msg, args, exc_info,
                   func=None, extra=None, sinfo=None):
        """
        A factory method which can be overridden in subclasses to create
        specialized LogRecords.

        Raises KeyError if *extra* would shadow a reserved LogRecord
        attribute ("message", "asctime" or any existing attribute).
        """
        rv = _logRecordFactory(name, level, fn, lno, msg, args, exc_info, func,
                             sinfo)
        if extra is not None:
            for key in extra:
                if (key in ["message", "asctime"]) or (key in rv.__dict__):
                    raise KeyError("Attempt to overwrite %r in LogRecord" % key)
                rv.__dict__[key] = extra[key]
        return rv

    def _log(self, level, msg, args, exc_info=None, extra=None, stack_info=False,
             stacklevel=1):
        """
        Low-level logging routine which creates a LogRecord and then calls
        all the handlers of this logger to handle the record.
        """
        sinfo = None
        if _srcfile:
            #IronPython doesn't track Python frames, so findCaller raises an
            #exception on some versions of IronPython. We trap it here so that
            #IronPython can use logging.
            try:
                fn, lno, func, sinfo = self.findCaller(stack_info, stacklevel)
            except ValueError: # pragma: no cover
                fn, lno, func = "(unknown file)", 0, "(unknown function)"
        else: # pragma: no cover
            fn, lno, func = "(unknown file)", 0, "(unknown function)"
        if exc_info:
            # Normalize exc_info: allow an exception instance or any truthy
            # non-tuple value (meaning "use the current exception").
            if isinstance(exc_info, BaseException):
                exc_info = (type(exc_info), exc_info, exc_info.__traceback__)
            elif not isinstance(exc_info, tuple):
                exc_info = sys.exc_info()
        record = self.makeRecord(self.name, level, fn, lno, msg, args,
                                 exc_info, func, extra, sinfo)
        self.handle(record)

    def handle(self, record):
        """
        Call the handlers for the specified record.

        This method is used for unpickled records received from a socket, as
        well as those created locally. Logger-level filtering is applied.
        """
        if (not self.disabled) and self.filter(record):
            self.callHandlers(record)

    def addHandler(self, hdlr):
        """
        Add the specified handler to this logger.
        """
        _acquireLock()
        try:
            if not (hdlr in self.handlers):
                self.handlers.append(hdlr)
        finally:
            _releaseLock()

    def removeHandler(self, hdlr):
        """
        Remove the specified handler from this logger.
        """
        _acquireLock()
        try:
            if hdlr in self.handlers:
                self.handlers.remove(hdlr)
        finally:
            _releaseLock()

    def hasHandlers(self):
        """
        See if this logger has any handlers configured.

        Loop through all handlers for this logger and its parents in the
        logger hierarchy. Return True if a handler was found, else False.
        Stop searching up the hierarchy whenever a logger with the "propagate"
        attribute set to zero is found - that will be the last logger which
        is checked for the existence of handlers.
        """
        c = self
        rv = False
        while c:
            if c.handlers:
                rv = True
                break
            if not c.propagate:
                break
            else:
                c = c.parent
        return rv

    def callHandlers(self, record):
        """
        Pass a record to all relevant handlers.

        Loop through all handlers for this logger and its parents in the
        logger hierarchy. If no handler was found, output a one-off error
        message to sys.stderr. Stop searching up the hierarchy whenever a
        logger with the "propagate" attribute set to zero is found - that
        will be the last logger whose handlers are called.
        """
        c = self
        found = 0
        while c:
            for hdlr in c.handlers:
                found = found + 1
                if record.levelno >= hdlr.level:
                    hdlr.handle(record)
            if not c.propagate:
                c = None    #break out
            else:
                c = c.parent
        if (found == 0):
            # No handler anywhere in the chain: fall back to lastResort,
            # or emit a one-off warning if that has been disabled.
            if lastResort:
                if record.levelno >= lastResort.level:
                    lastResort.handle(record)
            elif raiseExceptions and not self.manager.emittedNoHandlerWarning:
                sys.stderr.write("No handlers could be found for logger"
                                 " \"%s\"\n" % self.name)
                self.manager.emittedNoHandlerWarning = True

    def getEffectiveLevel(self):
        """
        Get the effective level for this logger.

        Loop through this logger and its parents in the logger hierarchy,
        looking for a non-zero logging level. Return the first one found.
        """
        logger = self
        while logger:
            if logger.level:
                return logger.level
            logger = logger.parent
        return NOTSET

    def isEnabledFor(self, level):
        """
        Is this logger enabled for level 'level'?
        """
        if self.disabled:
            return False

        try:
            # Fast path: the decision for this level has been cached.
            return self._cache[level]
        except KeyError:
            _acquireLock()
            try:
                if self.manager.disable >= level:
                    is_enabled = self._cache[level] = False
                else:
                    is_enabled = self._cache[level] = (
                        level >= self.getEffectiveLevel()
                    )
            finally:
                _releaseLock()
            return is_enabled

    def getChild(self, suffix):
        """
        Get a logger which is a descendant to this one.

        This is a convenience method, such that

        logging.getLogger('abc').getChild('def.ghi')

        is the same as

        logging.getLogger('abc.def.ghi')

        It's useful, for example, when the parent logger is named using
        __name__ rather than a literal string.
        """
        if self.root is not self:
            suffix = '.'.join((self.name, suffix))
        return self.manager.getLogger(suffix)

    def __repr__(self):
        level = getLevelName(self.getEffectiveLevel())
        return '<%s %s (%s)>' % (self.__class__.__name__, self.name, level)

    def __reduce__(self):
        # In general, only the root logger will not be accessible via its name.
        # However, the root logger's class has its own __reduce__ method.
        if getLogger(self.name) is not self:
            import pickle
            raise pickle.PicklingError('logger cannot be pickled')
        return getLogger, (self.name,)
1776
+
1777
+
1778
class RootLogger(Logger):
    """
    The logger at the top of the hierarchy.

    A root logger behaves like any other logger except that it always has a
    concrete level (never NOTSET) and exactly one instance exists.
    """

    def __init__(self, level):
        """Initialize the logger with the name "root"."""
        super().__init__("root", level)

    def __reduce__(self):
        # Unpickling resolves straight back to the module-level root logger.
        return getLogger, ()

# Default class used by Manager.getLogger; overridable via setLoggerClass().
_loggerClass = Logger
1794
+
1795
class LoggerAdapter(object):
    """
    An adapter for loggers which makes it easier to specify contextual
    information in logging output.
    """

    def __init__(self, logger, extra=None):
        """
        Initialize the adapter with a logger and a dict-like object which
        provides contextual information. This constructor signature allows
        easy stacking of LoggerAdapters, if so desired.

        You can effectively pass keyword arguments as shown in the
        following example:

        adapter = LoggerAdapter(someLogger, dict(p1=v1, p2="v2"))
        """
        self.logger = logger
        self.extra = extra

    def process(self, msg, kwargs):
        """
        Process the logging message and keyword arguments passed in to
        a logging call to insert contextual information. You can either
        manipulate the message itself, the keyword args or both. Return
        the message and kwargs modified (or not) to suit your needs.

        Normally, you'll only need to override this one method in a
        LoggerAdapter subclass for your specific needs.
        """
        # Note: this replaces any 'extra' the caller supplied.
        kwargs["extra"] = self.extra
        return msg, kwargs

    #
    # Boilerplate convenience methods
    #
    def debug(self, msg, *args, **kwargs):
        """
        Delegate a debug call to the underlying logger.
        """
        self.log(DEBUG, msg, *args, **kwargs)

    def info(self, msg, *args, **kwargs):
        """
        Delegate an info call to the underlying logger.
        """
        self.log(INFO, msg, *args, **kwargs)

    def warning(self, msg, *args, **kwargs):
        """
        Delegate a warning call to the underlying logger.
        """
        self.log(WARNING, msg, *args, **kwargs)

    def warn(self, msg, *args, **kwargs):
        """Deprecated alias for warning()."""
        warnings.warn("The 'warn' method is deprecated, "
            "use 'warning' instead", DeprecationWarning, 2)
        self.warning(msg, *args, **kwargs)

    def error(self, msg, *args, **kwargs):
        """
        Delegate an error call to the underlying logger.
        """
        self.log(ERROR, msg, *args, **kwargs)

    def exception(self, msg, *args, exc_info=True, **kwargs):
        """
        Delegate an exception call to the underlying logger.
        """
        self.log(ERROR, msg, *args, exc_info=exc_info, **kwargs)

    def critical(self, msg, *args, **kwargs):
        """
        Delegate a critical call to the underlying logger.
        """
        self.log(CRITICAL, msg, *args, **kwargs)

    def log(self, level, msg, *args, **kwargs):
        """
        Delegate a log call to the underlying logger, after adding
        contextual information from this adapter instance.
        """
        if self.isEnabledFor(level):
            msg, kwargs = self.process(msg, kwargs)
            self.logger.log(level, msg, *args, **kwargs)

    def isEnabledFor(self, level):
        """
        Is this logger enabled for level 'level'?
        """
        return self.logger.isEnabledFor(level)

    def setLevel(self, level):
        """
        Set the specified level on the underlying logger.
        """
        self.logger.setLevel(level)

    def getEffectiveLevel(self):
        """
        Get the effective level for the underlying logger.
        """
        return self.logger.getEffectiveLevel()

    def hasHandlers(self):
        """
        See if the underlying logger has any handlers.
        """
        return self.logger.hasHandlers()

    def _log(self, level, msg, args, exc_info=None, extra=None, stack_info=False):
        """
        Low-level log implementation, proxied to allow nested logger adapters.
        """
        return self.logger._log(
            level,
            msg,
            args,
            exc_info=exc_info,
            extra=extra,
            stack_info=stack_info,
        )

    # The manager and name attributes are proxied to the wrapped logger so
    # an adapter can be used anywhere a Logger is expected.
    @property
    def manager(self):
        return self.logger.manager

    @manager.setter
    def manager(self, value):
        self.logger.manager = value

    @property
    def name(self):
        return self.logger.name

    def __repr__(self):
        logger = self.logger
        level = getLevelName(logger.getEffectiveLevel())
        return '<%s %s (%s)>' % (self.__class__.__name__, logger.name, level)
1934
+
1935
# Create the singleton root logger and wire up the hierarchy: every Logger
# instance shares this root and this manager via class attributes.
root = RootLogger(WARNING)
Logger.root = root
Logger.manager = Manager(Logger.root)
1938
+
1939
+ #---------------------------------------------------------------------------
1940
+ # Configuration classes and functions
1941
+ #---------------------------------------------------------------------------
1942
+
1943
def basicConfig(**kwargs):
    """
    Do basic configuration for the logging system.

    This function does nothing if the root logger already has handlers
    configured, unless the keyword argument *force* is set to ``True``.
    It is a convenience method intended for use by simple scripts
    to do one-shot configuration of the logging package.

    The default behaviour is to create a StreamHandler which writes to
    sys.stderr, set a formatter using the BASIC_FORMAT format string, and
    add the handler to the root logger.

    A number of optional keyword arguments may be specified, which can alter
    the default behaviour.

    filename  Specifies that a FileHandler be created, using the specified
              filename, rather than a StreamHandler.
    filemode  Specifies the mode to open the file, if filename is specified
              (if filemode is unspecified, it defaults to 'a').
    format    Use the specified format string for the handler.
    datefmt   Use the specified date/time format.
    style     If a format string is specified, use this to specify the
              type of format string (possible values '%', '{', '$', for
              %-formatting, :meth:`str.format` and :class:`string.Template`
              - defaults to '%').
    level     Set the root logger level to the specified level.
    stream    Use the specified stream to initialize the StreamHandler. Note
              that this argument is incompatible with 'filename' - if both
              are present, 'stream' is ignored.
    handlers  If specified, this should be an iterable of already created
              handlers, which will be added to the root handler. Any handler
              in the list which does not have a formatter assigned will be
              assigned the formatter created in this function.
    force     If this keyword is specified as true, any existing handlers
              attached to the root logger are removed and closed, before
              carrying out the configuration as specified by the other
              arguments.
    encoding  If specified together with a filename, this encoding is passed to
              the created FileHandler, causing it to be used when the file is
              opened.
    errors    If specified together with a filename, this value is passed to the
              created FileHandler, causing it to be used when the file is
              opened in text mode. If not specified, the default value is
              `backslashreplace`.

    Note that you could specify a stream created using open(filename, mode)
    rather than passing the filename and mode in. However, it should be
    remembered that StreamHandler does not close its stream (since it may be
    using sys.stdout or sys.stderr), whereas FileHandler closes its stream
    when the handler is closed.

    .. versionchanged:: 3.2
       Added the ``style`` parameter.

    .. versionchanged:: 3.3
       Added the ``handlers`` parameter. A ``ValueError`` is now thrown for
       incompatible arguments (e.g. ``handlers`` specified together with
       ``filename``/``filemode``, or ``filename``/``filemode`` specified
       together with ``stream``, or ``handlers`` specified together with
       ``stream``.

    .. versionchanged:: 3.8
       Added the ``force`` parameter.

    .. versionchanged:: 3.9
       Added the ``encoding`` and ``errors`` parameters.
    """
    # Add thread safety in case someone mistakenly calls
    # basicConfig() from multiple threads
    _acquireLock()
    try:
        force = kwargs.pop('force', False)
        encoding = kwargs.pop('encoding', None)
        errors = kwargs.pop('errors', 'backslashreplace')
        if force:
            # Detach and close every existing root handler before
            # reconfiguring.
            for h in root.handlers[:]:
                root.removeHandler(h)
                h.close()
        if len(root.handlers) == 0:
            handlers = kwargs.pop("handlers", None)
            if handlers is None:
                if "stream" in kwargs and "filename" in kwargs:
                    raise ValueError("'stream' and 'filename' should not be "
                                     "specified together")
            else:
                if "stream" in kwargs or "filename" in kwargs:
                    raise ValueError("'stream' or 'filename' should not be "
                                     "specified together with 'handlers'")
            if handlers is None:
                filename = kwargs.pop("filename", None)
                mode = kwargs.pop("filemode", 'a')
                if filename:
                    if 'b' in mode:
                        # Binary mode: text-only options don't apply.
                        errors = None
                    else:
                        encoding = io.text_encoding(encoding)
                    h = FileHandler(filename, mode,
                                    encoding=encoding, errors=errors)
                else:
                    stream = kwargs.pop("stream", None)
                    h = StreamHandler(stream)
                handlers = [h]
            dfs = kwargs.pop("datefmt", None)
            style = kwargs.pop("style", '%')
            if style not in _STYLES:
                raise ValueError('Style must be one of: %s' % ','.join(
                                 _STYLES.keys()))
            fs = kwargs.pop("format", _STYLES[style][1])
            fmt = Formatter(fs, dfs, style)
            # Only handlers without a formatter get the one built here.
            for h in handlers:
                if h.formatter is None:
                    h.setFormatter(fmt)
                root.addHandler(h)
            level = kwargs.pop("level", None)
            if level is not None:
                root.setLevel(level)
            if kwargs:
                # Anything left over was not a recognised keyword.
                keys = ', '.join(kwargs.keys())
                raise ValueError('Unrecognised argument(s): %s' % keys)
    finally:
        _releaseLock()
2065
+
2066
+ #---------------------------------------------------------------------------
2067
+ # Utility functions at module level.
2068
+ # Basically delegate everything to the root logger.
2069
+ #---------------------------------------------------------------------------
2070
+
2071
def getLogger(name=None):
    """
    Return a logger with the specified name, creating it if necessary.

    If no name is specified, return the root logger.
    """
    # An empty/None name, or the root logger's own name, means "the root".
    if not name:
        return root
    if isinstance(name, str) and name == root.name:
        return root
    return Logger.manager.getLogger(name)
2080
+
2081
def critical(msg, *args, **kwargs):
    """
    Log 'msg % args' with severity 'CRITICAL' on the root logger.

    If the root logger has no handlers, basicConfig() is called first to
    add a console handler with a pre-defined format.
    """
    if not root.handlers:
        basicConfig()
    root.critical(msg, *args, **kwargs)
2090
+
2091
def fatal(msg, *args, **kwargs):
    """
    Don't use this function, use critical() instead.
    """
    # Kept only for backward compatibility.
    critical(msg, *args, **kwargs)
2096
+
2097
def error(msg, *args, **kwargs):
    """
    Log 'msg % args' with severity 'ERROR' on the root logger.

    If the root logger has no handlers, basicConfig() is called first to
    add a console handler with a pre-defined format.
    """
    if not root.handlers:
        basicConfig()
    root.error(msg, *args, **kwargs)
2106
+
2107
def exception(msg, *args, exc_info=True, **kwargs):
    """
    Log a message with severity 'ERROR' on the root logger, with exception
    information attached.

    Delegates to error(), which calls basicConfig() if the root logger has
    no handlers yet.
    """
    error(msg, *args, exc_info=exc_info, **kwargs)
2114
+
2115
def warning(msg, *args, **kwargs):
    """
    Log 'msg % args' with severity 'WARNING' on the root logger.

    If the root logger has no handlers, basicConfig() is called first to
    add a console handler with a pre-defined format.
    """
    if not root.handlers:
        basicConfig()
    root.warning(msg, *args, **kwargs)
2124
+
2125
def warn(msg, *args, **kwargs):
    """Deprecated alias for warning()."""
    warnings.warn("The 'warn' function is deprecated, "
        "use 'warning' instead", DeprecationWarning, 2)
    warning(msg, *args, **kwargs)
2129
+
2130
def info(msg, *args, **kwargs):
    """
    Log 'msg % args' with severity 'INFO' on the root logger.

    If the root logger has no handlers, basicConfig() is called first to
    add a console handler with a pre-defined format.
    """
    if not root.handlers:
        basicConfig()
    root.info(msg, *args, **kwargs)
2139
+
2140
def debug(msg, *args, **kwargs):
    """
    Log 'msg % args' with severity 'DEBUG' on the root logger.

    If the root logger has no handlers, basicConfig() is called first to
    add a console handler with a pre-defined format.
    """
    if not root.handlers:
        basicConfig()
    root.debug(msg, *args, **kwargs)
2149
+
2150
def log(level, msg, *args, **kwargs):
    """
    Log 'msg % args' with the integer severity 'level' on the root logger.

    If the root logger has no handlers, basicConfig() is called first to
    add a console handler with a pre-defined format.
    """
    if not root.handlers:
        basicConfig()
    root.log(level, msg, *args, **kwargs)
2159
+
2160
def disable(level=CRITICAL):
    """
    Disable all logging calls of severity 'level' and below.
    """
    manager = root.manager
    manager.disable = level
    # Cached isEnabledFor() decisions may now be stale.
    manager._clear_cache()
2166
+
2167
def shutdown(handlerList=_handlerList):
    """
    Perform any cleanup actions in the logging system (e.g. flushing
    buffers).

    Should be called at application exit.

    The mutable default argument is deliberate: it binds the module-level
    _handlerList at definition time so the atexit hook still works even if
    module globals have already been torn down.
    """
    # handlerList contains weak references, flushed/closed in reverse order
    # of creation; copy it first since close() mutates the list.
    for wr in reversed(handlerList[:]):
        #errors might occur, for example, if files are locked
        #we just ignore them if raiseExceptions is not set
        try:
            h = wr()
            if h:
                # The weakref may have died already; h is None in that case.
                try:
                    h.acquire()
                    h.flush()
                    h.close()
                except (OSError, ValueError):
                    # Ignore errors which might be caused
                    # because handlers have been closed but
                    # references to them are still around at
                    # application exit.
                    pass
                finally:
                    h.release()
        except: # ignore everything, as we're shutting down
            if raiseExceptions:
                raise
            #else, swallow
2196
+
2197
# Let's try and shutdown automatically on application exit, so handlers are
# flushed and closed without the application having to call shutdown().
import atexit
atexit.register(shutdown)
2200
+
2201
+ # Null handler
2202
+
2203
class NullHandler(Handler):
    """
    A handler that silently discards every record.

    Library code which logs events should attach a NullHandler to its
    top-level logger; then, if the application using the library never
    configures logging, the one-off "No handlers could be found for logger
    XXX" warning is suppressed instead of being printed.
    """

    def createLock(self):
        # Nothing is ever emitted, so no serialization is needed.
        self.lock = None

    def _at_fork_reinit(self):
        # No lock to re-initialize in the child process.
        pass

    def handle(self, record):
        """Stub."""

    def emit(self, record):
        """Stub."""
2224
+
2225
# Warnings integration

# Holds the original warnings.showwarning callable while captureWarnings(True)
# is in effect; None means capture is currently off.
_warnings_showwarning = None
2228
+
2229
def _showwarning(message, category, filename, lineno, file=None, line=None):
    """
    Implementation of showwarnings which redirects to logging.

    If *file* is None, the formatted warning is logged to the "py.warnings"
    logger at WARNING level.  Otherwise the call is delegated to the saved
    original warnings implementation (if any).
    """
    if file is None:
        s = warnings.formatwarning(message, category, filename, lineno, line)
        logger = getLogger("py.warnings")
        if not logger.handlers:
            # Avoid the "no handlers" complaint for library-only setups.
            logger.addHandler(NullHandler())
        logger.warning("%s", s)
    elif _warnings_showwarning is not None:
        _warnings_showwarning(message, category, filename, lineno, file, line)
2246
+
2247
def captureWarnings(capture):
    """
    If capture is true, redirect all warnings to the logging package.
    If capture is False, ensure that warnings are not redirected to logging
    but to their original destinations.

    Toggling is idempotent: enabling twice or disabling twice is a no-op.
    """
    global _warnings_showwarning
    if capture and _warnings_showwarning is None:
        # Remember the original hook so it can be restored later.
        _warnings_showwarning = warnings.showwarning
        warnings.showwarning = _showwarning
    elif not capture and _warnings_showwarning is not None:
        warnings.showwarning = _warnings_showwarning
        _warnings_showwarning = None
evalkit_llava/lib/python3.10/logging/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (66.9 kB). View file
 
evalkit_llava/lib/python3.10/logging/__pycache__/config.cpython-310.pyc ADDED
Binary file (23.4 kB). View file
 
evalkit_llava/lib/python3.10/logging/__pycache__/handlers.cpython-310.pyc ADDED
Binary file (44.9 kB). View file
 
evalkit_llava/lib/python3.10/site-packages/pip/_internal/locations/__init__.py ADDED
@@ -0,0 +1,456 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import functools
2
+ import logging
3
+ import os
4
+ import pathlib
5
+ import sys
6
+ import sysconfig
7
+ from typing import Any, Dict, Generator, Optional, Tuple
8
+
9
+ from pip._internal.models.scheme import SCHEME_KEYS, Scheme
10
+ from pip._internal.utils.compat import WINDOWS
11
+ from pip._internal.utils.deprecation import deprecated
12
+ from pip._internal.utils.virtualenv import running_under_virtualenv
13
+
14
+ from . import _sysconfig
15
+ from .base import (
16
+ USER_CACHE_DIR,
17
+ get_major_minor_version,
18
+ get_src_prefix,
19
+ is_osx_framework,
20
+ site_packages,
21
+ user_site,
22
+ )
23
+
24
+ __all__ = [
25
+ "USER_CACHE_DIR",
26
+ "get_bin_prefix",
27
+ "get_bin_user",
28
+ "get_major_minor_version",
29
+ "get_platlib",
30
+ "get_purelib",
31
+ "get_scheme",
32
+ "get_src_prefix",
33
+ "site_packages",
34
+ "user_site",
35
+ ]
36
+
37
+
38
+ logger = logging.getLogger(__name__)
39
+
40
+
41
+ _PLATLIBDIR: str = getattr(sys, "platlibdir", "lib")
42
+
43
+ _USE_SYSCONFIG_DEFAULT = sys.version_info >= (3, 10)
44
+
45
+
46
+ def _should_use_sysconfig() -> bool:
47
+ """This function determines the value of _USE_SYSCONFIG.
48
+
49
+ By default, pip uses sysconfig on Python 3.10+.
50
+ But Python distributors can override this decision by setting:
51
+ sysconfig._PIP_USE_SYSCONFIG = True / False
52
+ Rationale in https://github.com/pypa/pip/issues/10647
53
+
54
+ This is a function for testability, but should be constant during any one
55
+ run.
56
+ """
57
+ return bool(getattr(sysconfig, "_PIP_USE_SYSCONFIG", _USE_SYSCONFIG_DEFAULT))
58
+
59
+
60
+ _USE_SYSCONFIG = _should_use_sysconfig()
61
+
62
+ if not _USE_SYSCONFIG:
63
+ # Import distutils lazily to avoid deprecation warnings,
64
+ # but import it soon enough that it is in memory and available during
65
+ # a pip reinstall.
66
+ from . import _distutils
67
+
68
+ # Be noisy about incompatibilities if this platforms "should" be using
69
+ # sysconfig, but is explicitly opting out and using distutils instead.
70
+ if _USE_SYSCONFIG_DEFAULT and not _USE_SYSCONFIG:
71
+ _MISMATCH_LEVEL = logging.WARNING
72
+ else:
73
+ _MISMATCH_LEVEL = logging.DEBUG
74
+
75
+
76
def _looks_like_bpo_44860() -> bool:
    """The resolution to bpo-44860 will change this incorrect platlib.

    See <https://bugs.python.org/issue44860>.
    """
    from distutils.command.install import INSTALL_SCHEMES

    # The buggy unix_user scheme points platlib at the purelib template.
    user_scheme = INSTALL_SCHEMES.get("unix_user", {})
    return user_scheme.get("platlib") == "$usersite"
88
+
89
+
90
def _looks_like_red_hat_patched_platlib_purelib(scheme: Dict[str, str]) -> bool:
    """Return True when *scheme* shows Red Hat's lib64 platlib patch.

    The patched platlib template uses ``lib64`` where purelib keeps ``lib``;
    undoing that substitution must yield exactly the purelib template.
    """
    platlib = scheme["platlib"]
    if "/$platlibdir/" in platlib:
        # Expand the template variable the patch introduces.
        platlib = platlib.replace("/$platlibdir/", f"/{_PLATLIBDIR}/")
    if "/lib64/" not in platlib:
        return False
    # Undo the patch and compare against the purelib template.
    candidate = platlib.replace("/lib64/", "/lib/").replace("$platbase/", "$base/")
    return candidate == scheme["purelib"]
98
+
99
+
100
@functools.lru_cache(maxsize=None)
def _looks_like_red_hat_lib() -> bool:
    """Red Hat patches platlib in unix_prefix and unix_home, but not purelib.

    This is the only way I can see to tell a Red Hat-patched Python.
    """
    from distutils.command.install import INSTALL_SCHEMES

    # Both schemes must exist and both must show the lib64 patch.
    for scheme_name in ("unix_prefix", "unix_home"):
        scheme = INSTALL_SCHEMES.get(scheme_name)
        if scheme is None:
            return False
        if not _looks_like_red_hat_patched_platlib_purelib(scheme):
            return False
    return True
113
+
114
+
115
@functools.lru_cache(maxsize=None)
def _looks_like_debian_scheme() -> bool:
    """Debian adds two additional schemes."""
    from distutils.command.install import INSTALL_SCHEMES

    # Debian's patch registers both of these extra scheme names.
    return {"deb_system", "unix_local"}.issubset(INSTALL_SCHEMES)
121
+
122
+
123
@functools.lru_cache(maxsize=None)
def _looks_like_red_hat_scheme() -> bool:
    """Red Hat patches ``sys.prefix`` and ``sys.exec_prefix``.

    Red Hat's ``00251-change-user-install-location.patch`` changes the install
    command's ``prefix`` and ``exec_prefix`` to append ``"/local"``. This is
    (fortunately?) done quite unconditionally, so we create a default command
    object without any configuration to detect this.
    """
    from distutils.command.install import install
    from distutils.dist import Distribution

    install_cmd: Any = install(Distribution())
    install_cmd.finalize_options()
    expected_prefix = f"{os.path.normpath(sys.prefix)}/local"
    expected_exec_prefix = f"{os.path.normpath(sys.exec_prefix)}/local"
    return (
        install_cmd.prefix == expected_prefix
        and install_cmd.exec_prefix == expected_exec_prefix
    )
141
+
142
+
143
@functools.lru_cache(maxsize=None)
def _looks_like_slackware_scheme() -> bool:
    """Slackware patches sysconfig but fails to patch distutils and site.

    Slackware changes sysconfig's user scheme to use ``"lib64"`` for the lib
    path, but does not do the same to the site module.
    """
    if user_site is None:  # User-site not available.
        return False
    try:
        user_paths = sysconfig.get_paths(scheme="posix_user", expand=False)
    except KeyError:  # User-site not available.
        return False
    # Patched sysconfig says lib64, unpatched site module does not.
    return "/lib64/" in user_paths["purelib"] and "/lib64/" not in user_site
157
+
158
+
159
@functools.lru_cache(maxsize=None)
def _looks_like_msys2_mingw_scheme() -> bool:
    """MSYS2 patches distutils and sysconfig to use a UNIX-like scheme.

    However, MSYS2 incorrectly patches sysconfig ``nt`` scheme. The fix is
    likely going to be included in their 3.10 release, so we ignore the warning.
    See msys2/MINGW-packages#9319.

    MSYS2 MINGW's patch uses lowercase ``"lib"`` instead of the usual uppercase,
    and is missing the final ``"site-packages"``.
    """
    paths = sysconfig.get_paths("nt", expand=False)
    for key in ("platlib", "purelib"):
        path = paths[key]
        # Any of these means the path is NOT the MSYS2-patched shape.
        if "Lib" in path or "lib" not in path or path.endswith("site-packages"):
            return False
    return True
175
+
176
+
177
def _fix_abiflags(parts: Tuple[str]) -> Generator[str, None, None]:
    """Yield *parts*, trimming sys.abiflags off LDVERSION-suffixed entries.

    Only applies when LDVERSION actually ends with sys.abiflags; otherwise
    the parts are passed through unchanged.
    """
    ldversion = sysconfig.get_config_var("LDVERSION")
    abiflags = getattr(sys, "abiflags", None)

    if not ldversion or not abiflags or not ldversion.endswith(abiflags):
        # LDVERSION does not end with sys.abiflags. Just return the path unchanged.
        yield from parts
        return

    for part in parts:
        if part.endswith(ldversion):
            # Strip the trailing abiflags from this component.
            yield part[: -len(abiflags)]
        else:
            yield part
191
+
192
+
193
@functools.lru_cache(maxsize=None)
def _warn_mismatched(old: pathlib.Path, new: pathlib.Path, *, key: str) -> None:
    """Log one distutils/sysconfig path disagreement.

    The lru_cache ensures each (old, new, key) triple is reported only once
    per process.
    """
    issue_url = "https://github.com/pypa/pip/issues/10151"
    message = (
        "Value for %s does not match. Please report this to <%s>"
        "\ndistutils: %s"
        "\nsysconfig: %s"
    )
    logger.log(_MISMATCH_LEVEL, message, key, issue_url, old, new)
202
+
203
+
204
def _warn_if_mismatch(old: pathlib.Path, new: pathlib.Path, *, key: str) -> bool:
    """Log the mismatch and return True when *old* and *new* disagree."""
    if old != new:
        _warn_mismatched(old, new, key=key)
        return True
    return False
209
+
210
+
211
@functools.lru_cache(maxsize=None)
def _log_context(
    *,
    user: bool = False,
    home: Optional[str] = None,
    root: Optional[str] = None,
    prefix: Optional[str] = None,
) -> None:
    """Log the scheme arguments accompanying a mismatch report.

    Cached so each distinct argument combination is logged at most once.
    """
    parts = [
        "Additional context:",
        "user = %r",
        "home = %r",
        "root = %r",
        "prefix = %r",
    ]

    logger.log(_MISMATCH_LEVEL, "\n".join(parts), user, home, root, prefix)
228
+
229
+
230
def get_scheme(
    dist_name: str,
    user: bool = False,
    home: Optional[str] = None,
    root: Optional[str] = None,
    isolated: bool = False,
    prefix: Optional[str] = None,
) -> Scheme:
    """Resolve the install scheme for *dist_name*.

    On sysconfig-enabled runs this simply returns the sysconfig-derived
    scheme. Otherwise both backends are consulted: known-benign platform
    discrepancies between distutils and sysconfig are skipped, remaining
    mismatches are logged (or reported as a deprecation when caused by
    distutils config files), and the distutils scheme is returned for
    backward compatibility.
    """
    new = _sysconfig.get_scheme(
        dist_name,
        user=user,
        home=home,
        root=root,
        isolated=isolated,
        prefix=prefix,
    )
    if _USE_SYSCONFIG:
        return new

    old = _distutils.get_scheme(
        dist_name,
        user=user,
        home=home,
        root=root,
        isolated=isolated,
        prefix=prefix,
    )

    warning_contexts = []
    for k in SCHEME_KEYS:
        old_v = pathlib.Path(getattr(old, k))
        new_v = pathlib.Path(getattr(new, k))

        if old_v == new_v:
            continue

        # distutils incorrectly put PyPy packages under ``site-packages/python``
        # in the ``posix_home`` scheme, but PyPy devs said they expect the
        # directory name to be ``pypy`` instead. So we treat this as a bug fix
        # and not warn about it. See bpo-43307 and python/cpython#24628.
        skip_pypy_special_case = (
            sys.implementation.name == "pypy"
            and home is not None
            and k in ("platlib", "purelib")
            and old_v.parent == new_v.parent
            and old_v.name.startswith("python")
            and new_v.name.startswith("pypy")
        )
        if skip_pypy_special_case:
            continue

        # sysconfig's ``osx_framework_user`` does not include ``pythonX.Y`` in
        # the ``include`` value, but distutils's ``headers`` does. We'll let
        # CPython decide whether this is a bug or feature. See bpo-43948.
        skip_osx_framework_user_special_case = (
            user
            and is_osx_framework()
            and k == "headers"
            and old_v.parent.parent == new_v.parent
            and old_v.parent.name.startswith("python")
        )
        if skip_osx_framework_user_special_case:
            continue

        # On Red Hat and derived Linux distributions, distutils is patched to
        # use "lib64" instead of "lib" for platlib.
        if k == "platlib" and _looks_like_red_hat_lib():
            continue

        # On Python 3.9+, sysconfig's posix_user scheme sets platlib against
        # sys.platlibdir, but distutils's unix_user incorrectly continues
        # using the same $usersite for both platlib and purelib. This creates a
        # mismatch when sys.platlibdir is not "lib".
        skip_bpo_44860 = (
            user
            and k == "platlib"
            and not WINDOWS
            and sys.version_info >= (3, 9)
            and _PLATLIBDIR != "lib"
            and _looks_like_bpo_44860()
        )
        if skip_bpo_44860:
            continue

        # Slackware incorrectly patches posix_user to use lib64 instead of lib,
        # but not usersite to match the location.
        skip_slackware_user_scheme = (
            user
            and k in ("platlib", "purelib")
            and not WINDOWS
            and _looks_like_slackware_scheme()
        )
        if skip_slackware_user_scheme:
            continue

        # Both Debian and Red Hat patch Python to place the system site under
        # /usr/local instead of /usr. Debian also places lib in dist-packages
        # instead of site-packages, but the /usr/local check should cover it.
        skip_linux_system_special_case = (
            not (user or home or prefix or running_under_virtualenv())
            and old_v.parts[1:3] == ("usr", "local")
            and len(new_v.parts) > 1
            and new_v.parts[1] == "usr"
            and (len(new_v.parts) < 3 or new_v.parts[2] != "local")
            and (_looks_like_red_hat_scheme() or _looks_like_debian_scheme())
        )
        if skip_linux_system_special_case:
            continue

        # MSYS2 MINGW's sysconfig patch does not include the "site-packages"
        # part of the path. This is incorrect and will be fixed in MSYS.
        skip_msys2_mingw_bug = (
            WINDOWS and k in ("platlib", "purelib") and _looks_like_msys2_mingw_scheme()
        )
        if skip_msys2_mingw_bug:
            continue

        # CPython's POSIX install script invokes pip (via ensurepip) against the
        # interpreter located in the source tree, not the install site. This
        # triggers special logic in sysconfig that's not present in distutils.
        # https://github.com/python/cpython/blob/8c21941ddaf/Lib/sysconfig.py#L178-L194
        skip_cpython_build = (
            sysconfig.is_python_build(check_home=True)
            and not WINDOWS
            and k in ("headers", "include", "platinclude")
        )
        if skip_cpython_build:
            continue

        warning_contexts.append((old_v, new_v, f"scheme.{k}"))

    if not warning_contexts:
        return old

    # Check if this path mismatch is caused by distutils config files. Those
    # files will no longer work once we switch to sysconfig, so this raises a
    # deprecation message for them.
    default_old = _distutils.distutils_scheme(
        dist_name,
        user,
        home,
        root,
        isolated,
        prefix,
        ignore_config_files=True,
    )
    if any(default_old[k] != getattr(old, k) for k in SCHEME_KEYS):
        deprecated(
            reason=(
                "Configuring installation scheme with distutils config files "
                "is deprecated and will no longer work in the near future. If you "
                "are using a Homebrew or Linuxbrew Python, please see discussion "
                "at https://github.com/Homebrew/homebrew-core/issues/76621"
            ),
            replacement=None,
            gone_in=None,
        )
        return old

    # Post warnings about this mismatch so user can report them back.
    for old_v, new_v, key in warning_contexts:
        _warn_mismatched(old_v, new_v, key=key)
    _log_context(user=user, home=home, root=root, prefix=prefix)

    return old
395
+
396
+
397
def get_bin_prefix() -> str:
    """Return the script-install prefix, preferring sysconfig's answer.

    Falls back to distutils (logging any mismatch) when sysconfig is not
    the active backend.
    """
    new = _sysconfig.get_bin_prefix()
    if _USE_SYSCONFIG:
        return new

    old = _distutils.get_bin_prefix()
    if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="bin_prefix"):
        _log_context()
    return old
406
+
407
+
408
def get_bin_user() -> str:
    """Return the scripts directory of the user install scheme."""
    return _sysconfig.get_scheme("", user=True).scripts
410
+
411
+
412
def _looks_like_deb_system_dist_packages(value: str) -> bool:
    """Check if the value is Debian's APT-controlled dist-packages.

    Debian's ``distutils.sysconfig.get_python_lib()`` implementation returns the
    default package path controlled by APT, but does not patch ``sysconfig`` to
    do the same. This is similar to the bug worked around in ``get_scheme()``,
    but here the default is ``deb_system`` instead of ``unix_local``. Ultimately
    we can't do anything about this Debian bug, and this detection allows us to
    skip the warning when needed.
    """
    return (
        _looks_like_debian_scheme()
        and value == "/usr/lib/python3/dist-packages"
    )
427
+
428
+
429
def get_purelib() -> str:
    """Return the default pure-Python lib location."""
    new = _sysconfig.get_purelib()
    if _USE_SYSCONFIG:
        return new

    old = _distutils.get_purelib()
    # Debian's APT-managed dist-packages path is a known, unfixable mismatch.
    if _looks_like_deb_system_dist_packages(old):
        return old
    if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="purelib"):
        _log_context()
    return old
441
+
442
+
443
def get_platlib() -> str:
    """Return the default platform-shared lib location."""
    new = _sysconfig.get_platlib()
    if _USE_SYSCONFIG:
        return new

    # NOTE(review): imported locally here (unlike get_purelib, which relies
    # on the module-level conditional import) — presumably to guarantee the
    # distutils backend is loadable on this code path; confirm against
    # upstream pip before unifying.
    from . import _distutils

    old = _distutils.get_platlib()
    if _looks_like_deb_system_dist_packages(old):
        return old
    if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="platlib"):
        _log_context()
    return old
evalkit_llava/lib/python3.10/site-packages/pip/_internal/locations/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (10.9 kB). View file
 
evalkit_llava/lib/python3.10/site-packages/pip/_internal/locations/__pycache__/_distutils.cpython-310.pyc ADDED
Binary file (4.55 kB). View file
 
evalkit_llava/lib/python3.10/site-packages/pip/_internal/locations/__pycache__/_sysconfig.cpython-310.pyc ADDED
Binary file (6 kB). View file
 
evalkit_llava/lib/python3.10/site-packages/pip/_internal/locations/__pycache__/base.cpython-310.pyc ADDED
Binary file (2.38 kB). View file
 
evalkit_llava/lib/python3.10/site-packages/pip/_internal/locations/_distutils.py ADDED
@@ -0,0 +1,172 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Locations where we look for configs, install stuff, etc"""
2
+
3
+ # The following comment should be removed at some point in the future.
4
+ # mypy: strict-optional=False
5
+
6
# If pip's going to use distutils, it should not be using the copy that setuptools
# might have injected into the environment. This is done by removing the injected
# shim, if it's injected.
#
# See https://github.com/pypa/pip/issues/8761 for the original discussion and
# rationale for why this is done within pip.
try:
    # Absence of the hook module, or of remove_shim on it, is fine.
    __import__("_distutils_hack").remove_shim()
except (ImportError, AttributeError):
    pass
16
+
17
+ import logging
18
+ import os
19
+ import sys
20
+ from distutils.cmd import Command as DistutilsCommand
21
+ from distutils.command.install import SCHEME_KEYS
22
+ from distutils.command.install import install as distutils_install_command
23
+ from distutils.sysconfig import get_python_lib
24
+ from typing import Dict, List, Optional, Union
25
+
26
+ from pip._internal.models.scheme import Scheme
27
+ from pip._internal.utils.compat import WINDOWS
28
+ from pip._internal.utils.virtualenv import running_under_virtualenv
29
+
30
+ from .base import get_major_minor_version
31
+
32
+ logger = logging.getLogger(__name__)
33
+
34
+
35
def distutils_scheme(
    dist_name: str,
    user: bool = False,
    home: Optional[str] = None,
    root: Optional[str] = None,
    isolated: bool = False,
    prefix: Optional[str] = None,
    *,
    ignore_config_files: bool = False,
) -> Dict[str, str]:
    """
    Return a distutils install scheme
    """
    from distutils.dist import Distribution

    dist_args: Dict[str, Union[str, List[str]]] = {"name": dist_name}
    if isolated:
        dist_args["script_args"] = ["--no-user-cfg"]

    dist = Distribution(dist_args)
    if not ignore_config_files:
        try:
            dist.parse_config_files()
        except UnicodeDecodeError:
            paths = dist.find_config_files()
            logger.warning(
                "Ignore distutils configs in %s due to encoding errors.",
                ", ".join(os.path.basename(p) for p in paths),
            )
    obj: Optional[DistutilsCommand] = dist.get_command_obj("install", create=True)
    assert obj is not None
    install_cmd: distutils_install_command = obj
    # NOTE: setting user or home has the side-effect of creating the home dir
    # or user base for installations during finalize_options()
    # ideally, we'd prefer a scheme class that has no side-effects.
    assert not (user and prefix), f"user={user} prefix={prefix}"
    assert not (home and prefix), f"home={home} prefix={prefix}"
    install_cmd.user = user or install_cmd.user
    if user or home:
        install_cmd.prefix = ""
    install_cmd.prefix = prefix or install_cmd.prefix
    install_cmd.home = home or install_cmd.home
    install_cmd.root = root or install_cmd.root
    install_cmd.finalize_options()

    scheme: Dict[str, str] = {
        key: getattr(install_cmd, "install_" + key) for key in SCHEME_KEYS
    }

    # install_lib specified in setup.cfg should install *everything*
    # into there (i.e. it takes precedence over both purelib and
    # platlib). Note, install_cmd.install_lib is *always* set after
    # finalize_options(); we only want to override here if the user
    # has explicitly requested it hence going back to the config
    if "install_lib" in dist.get_option_dict("install"):
        scheme.update(
            {"purelib": install_cmd.install_lib, "platlib": install_cmd.install_lib}
        )

    if running_under_virtualenv():
        # Pip historically uses a custom headers location inside virtualenvs.
        if home:
            prefix = home
        elif user:
            prefix = install_cmd.install_userbase
        else:
            prefix = install_cmd.prefix
        scheme["headers"] = os.path.join(
            prefix,
            "include",
            "site",
            f"python{get_major_minor_version()}",
            dist_name,
        )

    if root is not None:
        # Re-base headers under the requested root, dropping any drive letter.
        path_no_drive = os.path.splitdrive(os.path.abspath(scheme["headers"]))[1]
        scheme["headers"] = os.path.join(root, path_no_drive[1:])

    return scheme
113
+
114
+
115
def get_scheme(
    dist_name: str,
    user: bool = False,
    home: Optional[str] = None,
    root: Optional[str] = None,
    isolated: bool = False,
    prefix: Optional[str] = None,
) -> Scheme:
    """
    Get the "scheme" corresponding to the input parameters. The distutils
    documentation provides the context for the available schemes:
    https://docs.python.org/3/install/index.html#alternate-installation

    :param dist_name: the name of the package to retrieve the scheme for, used
        in the headers scheme path
    :param user: indicates to use the "user" scheme
    :param home: indicates to use the "home" scheme and provides the base
        directory for the same
    :param root: root under which other directories are re-based
    :param isolated: equivalent to --no-user-cfg, i.e. do not consider
        ~/.pydistutils.cfg (posix) or ~/pydistutils.cfg (non-posix) for
        scheme paths
    :param prefix: indicates to use the "prefix" scheme and provides the
        base directory for the same
    """
    scheme = distutils_scheme(dist_name, user, home, root, isolated, prefix)
    # Promote the plain dict into pip's Scheme model.
    return Scheme(
        **{
            key: scheme[key]
            for key in ("platlib", "purelib", "headers", "scripts", "data")
        }
    )
148
+
149
+
150
def get_bin_prefix() -> str:
    """Return the directory scripts are installed into for this interpreter."""
    # XXX: In old virtualenv versions, sys.prefix can contain '..' components,
    # so we need to call normpath to eliminate them.
    prefix = os.path.normpath(sys.prefix)
    if WINDOWS:
        scripts_dir = os.path.join(prefix, "Scripts")
        # buildout uses 'bin' on Windows too?
        if not os.path.exists(scripts_dir):
            scripts_dir = os.path.join(prefix, "bin")
        return scripts_dir
    # Forcing to use /usr/local/bin for standard macOS framework installs
    # Also log to ~/Library/Logs/ for use with the Console.app log viewer
    if sys.platform[:6] == "darwin" and prefix[:16] == "/System/Library/":
        return "/usr/local/bin"
    return os.path.join(prefix, "bin")
165
+
166
+
167
def get_purelib() -> str:
    """Return distutils's pure-Python library directory."""
    return get_python_lib(plat_specific=False)
169
+
170
+
171
def get_platlib() -> str:
    """Return distutils's platform-specific library directory."""
    return get_python_lib(plat_specific=True)
evalkit_llava/lib/python3.10/site-packages/pip/_internal/locations/_sysconfig.py ADDED
@@ -0,0 +1,214 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ import os
3
+ import sys
4
+ import sysconfig
5
+ import typing
6
+
7
+ from pip._internal.exceptions import InvalidSchemeCombination, UserInstallationInvalid
8
+ from pip._internal.models.scheme import SCHEME_KEYS, Scheme
9
+ from pip._internal.utils.virtualenv import running_under_virtualenv
10
+
11
+ from .base import change_root, get_major_minor_version, is_osx_framework
12
+
13
+ logger = logging.getLogger(__name__)
14
+
15
+
16
# Notes on _infer_* functions.
# Unfortunately ``get_default_scheme()`` didn't exist before 3.10, so there's no
# way to ask things like "what is the '_prefix' scheme on this platform". These
# functions try to answer that with some heuristics while accounting for ad-hoc
# platforms not covered by CPython's default sysconfig implementation. If the
# ad-hoc implementation does not fully implement sysconfig, we'll fall back to
# a POSIX scheme.

# All scheme names this interpreter's sysconfig knows about.
_AVAILABLE_SCHEMES = set(sysconfig.get_scheme_names())

# ``sysconfig.get_preferred_scheme`` is only available on 3.10+; None otherwise.
_PREFERRED_SCHEME_API = getattr(sysconfig, "get_preferred_scheme", None)
27
+
28
+
29
def _should_use_osx_framework_prefix() -> bool:
    """Check for Apple's ``osx_framework_library`` scheme.

    Python distributed by Apple's Command Line Tools has this special scheme
    that's used when:

    * This is a framework build.
    * We are installing into the system prefix.

    This does not account for ``pip install --prefix`` (also means we're not
    installing to the system prefix), which should use ``posix_prefix``, but
    logic here means ``_infer_prefix()`` outputs ``osx_framework_library``. But
    since ``prefix`` is not available for ``sysconfig.get_default_scheme()``,
    which is the stdlib replacement for ``_infer_prefix()``, presumably Apple
    wouldn't be able to magically switch between ``osx_framework_library`` and
    ``posix_prefix``. ``_infer_prefix()`` returning ``osx_framework_library``
    means its behavior is consistent whether we use the stdlib implementation
    or our own, and we deal with this special case in ``get_scheme()`` instead.
    """
    if "osx_framework_library" not in _AVAILABLE_SCHEMES:
        return False
    if running_under_virtualenv():
        return False
    return is_osx_framework()
53
+
54
+
55
def _infer_prefix() -> str:
    """Try to find a prefix scheme for the current platform.

    This tries:

    * A special ``osx_framework_library`` for Python distributed by Apple's
      Command Line Tools, when not running in a virtual environment.
    * Implementation + OS, used by PyPy on Windows (``pypy_nt``).
    * Implementation without OS, used by PyPy on POSIX (``pypy``).
    * OS + "prefix", used by CPython on POSIX (``posix_prefix``).
    * Just the OS name, used by CPython on Windows (``nt``).

    If none of the above works, fall back to ``posix_prefix``.
    """
    if _PREFERRED_SCHEME_API:
        return _PREFERRED_SCHEME_API("prefix")
    if _should_use_osx_framework_prefix():
        return "osx_framework_library"
    candidates = (
        f"{sys.implementation.name}_{os.name}",
        sys.implementation.name,
        f"{os.name}_prefix",
        os.name,  # On Windows, the prefix scheme is just called "nt".
    )
    for name in candidates:
        if name in _AVAILABLE_SCHEMES:
            return name
    return "posix_prefix"
84
+
85
+
86
def _infer_user() -> str:
    """Try to find a user scheme for the current platform."""
    if _PREFERRED_SCHEME_API:
        return _PREFERRED_SCHEME_API("user")
    # Apple framework builds get a dedicated user scheme outside virtualenvs.
    if is_osx_framework() and not running_under_virtualenv():
        suffixed = "osx_framework_user"
    else:
        suffixed = f"{os.name}_user"
    if suffixed in _AVAILABLE_SCHEMES:
        return suffixed
    if "posix_user" not in _AVAILABLE_SCHEMES:  # User scheme unavailable.
        raise UserInstallationInvalid()
    return "posix_user"
99
+
100
+
101
def _infer_home() -> str:
    """Try to find a home for the current platform."""
    if _PREFERRED_SCHEME_API:
        return _PREFERRED_SCHEME_API("home")
    candidate = f"{os.name}_home"
    if candidate in _AVAILABLE_SCHEMES:
        return candidate
    return "posix_home"
109
+
110
+
111
# Update these keys if the user sets a custom home.
_HOME_KEYS = [
    "installed_base",
    "base",
    "installed_platbase",
    "platbase",
    "prefix",
    "exec_prefix",
]
# "userbase" is only a valid substitution variable when sysconfig defines it.
if sysconfig.get_config_var("userbase") is not None:
    _HOME_KEYS.append("userbase")
122
+
123
+
124
def get_scheme(
    dist_name: str,
    user: bool = False,
    home: typing.Optional[str] = None,
    root: typing.Optional[str] = None,
    isolated: bool = False,
    prefix: typing.Optional[str] = None,
) -> Scheme:
    """
    Get the "scheme" corresponding to the input parameters.

    :param dist_name: the name of the package to retrieve the scheme for, used
        in the headers scheme path
    :param user: indicates to use the "user" scheme
    :param home: indicates to use the "home" scheme
    :param root: root under which other directories are re-based
    :param isolated: ignored, but kept for distutils compatibility (where
        this controls whether the user-site pydistutils.cfg is honored)
    :param prefix: indicates to use the "prefix" scheme and provides the
        base directory for the same
    """
    if user and prefix:
        raise InvalidSchemeCombination("--user", "--prefix")
    if home and prefix:
        raise InvalidSchemeCombination("--home", "--prefix")

    if home is not None:
        scheme_name = _infer_home()
    elif user:
        scheme_name = _infer_user()
    else:
        scheme_name = _infer_prefix()

    # Special case: When installing into a custom prefix, use posix_prefix
    # instead of osx_framework_library. See _should_use_osx_framework_prefix()
    # docstring for details.
    if prefix is not None and scheme_name == "osx_framework_library":
        scheme_name = "posix_prefix"

    # Substitute the custom base into every home-related template variable.
    if home is not None:
        variables = dict.fromkeys(_HOME_KEYS, home)
    elif prefix is not None:
        variables = dict.fromkeys(_HOME_KEYS, prefix)
    else:
        variables = {}

    paths = sysconfig.get_paths(scheme=scheme_name, vars=variables)

    # Logic here is very arbitrary, we're doing it for compatibility, don't ask.
    # 1. Pip historically uses a special header path in virtual environments.
    # 2. If the distribution name is not known, distutils uses 'UNKNOWN'. We
    #    only do the same when not running in a virtual environment because
    #    pip's historical header path logic (see point 1) did not do this.
    if running_under_virtualenv():
        if user:
            base = variables.get("userbase", sys.prefix)
        else:
            base = variables.get("base", sys.prefix)
        python_xy = f"python{get_major_minor_version()}"
        paths["include"] = os.path.join(base, "include", "site", python_xy)
    elif not dist_name:
        dist_name = "UNKNOWN"

    scheme = Scheme(
        platlib=paths["platlib"],
        purelib=paths["purelib"],
        headers=os.path.join(paths["include"], dist_name),
        scripts=paths["scripts"],
        data=paths["data"],
    )
    if root is not None:
        # Re-base every scheme path under the requested root.
        scheme = Scheme(
            **{key: change_root(root, getattr(scheme, key)) for key in SCHEME_KEYS}
        )
    return scheme
200
+
201
+
202
def get_bin_prefix() -> str:
    """Return the scripts directory from sysconfig.

    Forced to /usr/local/bin for standard macOS framework installs.
    """
    if sys.platform[:6] == "darwin" and sys.prefix[:16] == "/System/Library/":
        return "/usr/local/bin"
    return sysconfig.get_paths()["scripts"]
207
+
208
+
209
def get_purelib() -> str:
    """Return sysconfig's pure-Python library directory."""
    return sysconfig.get_paths()["purelib"]
211
+
212
+
213
def get_platlib() -> str:
    """Return sysconfig's platform-specific library directory."""
    return sysconfig.get_paths()["platlib"]
evalkit_llava/lib/python3.10/site-packages/pip/_internal/locations/base.py ADDED
@@ -0,0 +1,81 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import functools
2
+ import os
3
+ import site
4
+ import sys
5
+ import sysconfig
6
+ import typing
7
+
8
+ from pip._internal.exceptions import InstallationError
9
+ from pip._internal.utils import appdirs
10
+ from pip._internal.utils.virtualenv import running_under_virtualenv
11
+
12
# Application Directories
# Per-user cache root for pip (via the vendored platformdirs wrapper).
USER_CACHE_DIR = appdirs.user_cache_dir("pip")

# FIXME doesn't account for venv linked to global site-packages
# Resolved once at import time from the current interpreter's scheme.
site_packages: str = sysconfig.get_path("purelib")
17
+
18
+
19
def get_major_minor_version() -> str:
    """
    Return the major-minor version of the current Python as a string, e.g.
    "3.7" or "3.10".
    """
    major, minor = sys.version_info[:2]
    return f"{major}.{minor}"
25
+
26
+
27
def change_root(new_root: str, pathname: str) -> str:
    """Return 'pathname' with 'new_root' prepended.

    If 'pathname' is relative, this is equivalent to
    os.path.join(new_root, pathname). Otherwise, the root marker of
    'pathname' is stripped before joining, which is tricky on DOS/Windows
    and Mac OS.

    This is borrowed from Python's standard library's distutils module.

    :raises InstallationError: on platforms other than posix and nt.
    """
    if os.name == "posix":
        if not os.path.isabs(pathname):
            return os.path.join(new_root, pathname)
        else:
            # Drop the leading "/" so the join lands inside new_root.
            return os.path.join(new_root, pathname[1:])

    elif os.name == "nt":
        (drive, path) = os.path.splitdrive(pathname)
        # Use startswith() instead of indexing path[0] so that a drive-only
        # or empty pathname (e.g. "C:") no longer raises IndexError.
        if path.startswith("\\"):
            path = path[1:]
        return os.path.join(new_root, path)

    else:
        raise InstallationError(
            f"Unknown platform: {os.name}\n"
            "Can not change root path prefix on unknown platform."
        )
53
+
54
+
55
def get_src_prefix() -> str:
    """Return the directory used for "editable" source checkouts."""
    if running_under_virtualenv():
        location = os.path.join(sys.prefix, "src")
    else:
        # FIXME: keep src in cwd for now (it is not a temporary folder)
        try:
            location = os.path.join(os.getcwd(), "src")
        except OSError:
            # In case the current working directory has been renamed or deleted
            sys.exit("The folder you are executing pip from can no longer be found.")

    # under macOS + virtualenv sys.prefix is not properly resolved
    # it is something like /path/to/python/bin/..
    return os.path.abspath(location)
69
+
70
+
71
# Determine the per-user site-packages directory. Prefer
# site.getusersitepackages() because it initialises the value;
# site.USER_SITE may be None when site initialisation was skipped.
try:
    # Use getusersitepackages if this is present, as it ensures that the
    # value is initialised properly.
    user_site: typing.Optional[str] = site.getusersitepackages()
except AttributeError:
    # Fallback for Python implementations without getusersitepackages().
    user_site = site.USER_SITE
77
+
78
+
79
@functools.lru_cache(maxsize=None)
def is_osx_framework() -> bool:
    """Return True when running a macOS framework build of Python."""
    framework_name = sysconfig.get_config_var("PYTHONFRAMEWORK")
    return bool(framework_name)
evalkit_llava/lib/python3.10/site-packages/pip/_internal/models/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (247 Bytes). View file
 
evalkit_llava/lib/python3.10/site-packages/pip/_internal/models/__pycache__/candidate.cpython-310.pyc ADDED
Binary file (1.22 kB). View file
 
evalkit_llava/lib/python3.10/site-packages/pip/_internal/models/__pycache__/format_control.cpython-310.pyc ADDED
Binary file (2.72 kB). View file
 
evalkit_llava/lib/python3.10/site-packages/pip/_internal/models/__pycache__/index.cpython-310.pyc ADDED
Binary file (1.22 kB). View file
 
evalkit_llava/lib/python3.10/site-packages/pip/_internal/models/__pycache__/installation_report.cpython-310.pyc ADDED
Binary file (1.74 kB). View file
 
evalkit_llava/lib/python3.10/site-packages/pip/_internal/models/__pycache__/target_python.cpython-310.pyc ADDED
Binary file (3.79 kB). View file
 
evalkit_llava/lib/python3.10/site-packages/pip/_internal/utils/__init__.py ADDED
File without changes
evalkit_llava/lib/python3.10/site-packages/pip/_internal/utils/_jaraco_text.py ADDED
@@ -0,0 +1,109 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Functions brought over from jaraco.text.
2
+
3
+ These functions are not supposed to be used within `pip._internal`. These are
4
+ helper functions brought over from `jaraco.text` to enable vendoring newer
5
+ copies of `pkg_resources` without having to vendor `jaraco.text` and its entire
6
+ dependency cone; something that our vendoring setup is not currently capable of
7
+ handling.
8
+
9
+ License reproduced from original source below:
10
+
11
+ Copyright Jason R. Coombs
12
+
13
+ Permission is hereby granted, free of charge, to any person obtaining a copy
14
+ of this software and associated documentation files (the "Software"), to
15
+ deal in the Software without restriction, including without limitation the
16
+ rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
17
+ sell copies of the Software, and to permit persons to whom the Software is
18
+ furnished to do so, subject to the following conditions:
19
+
20
+ The above copyright notice and this permission notice shall be included in
21
+ all copies or substantial portions of the Software.
22
+
23
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
24
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
25
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
26
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
27
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
28
+ FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
29
+ IN THE SOFTWARE.
30
+ """
31
+
32
+ import functools
33
+ import itertools
34
+
35
+
36
+ def _nonblank(str):
37
+ return str and not str.startswith("#")
38
+
39
+
40
+ @functools.singledispatch
41
+ def yield_lines(iterable):
42
+ r"""
43
+ Yield valid lines of a string or iterable.
44
+
45
+ >>> list(yield_lines(''))
46
+ []
47
+ >>> list(yield_lines(['foo', 'bar']))
48
+ ['foo', 'bar']
49
+ >>> list(yield_lines('foo\nbar'))
50
+ ['foo', 'bar']
51
+ >>> list(yield_lines('\nfoo\n#bar\nbaz #comment'))
52
+ ['foo', 'baz #comment']
53
+ >>> list(yield_lines(['foo\nbar', 'baz', 'bing\n\n\n']))
54
+ ['foo', 'bar', 'baz', 'bing']
55
+ """
56
+ return itertools.chain.from_iterable(map(yield_lines, iterable))
57
+
58
+
59
+ @yield_lines.register(str)
60
+ def _(text):
61
+ return filter(_nonblank, map(str.strip, text.splitlines()))
62
+
63
+
64
def drop_comment(line):
    """
    Drop comments.

    >>> drop_comment('foo # bar')
    'foo'

    A hash without a space may be in a URL.

    >>> drop_comment('http://example.com/foo#bar')
    'http://example.com/foo#bar'
    """
    before, _sep, _comment = line.partition(" #")
    return before
77
+
78
+
79
def join_continuation(lines):
    r"""
    Join lines continued by a trailing backslash.

    >>> list(join_continuation(['foo \\', 'bar', 'baz']))
    ['foobar', 'baz']
    >>> list(join_continuation(['foo \\', 'bar \\', 'baz']))
    ['foobarbaz']

    Not sure why, but...
    The character preceding the backslash is also elided.

    >>> list(join_continuation(['goo\\', 'dly']))
    ['godly']

    A terrible idea, but...
    If no line is available to continue, suppress the lines.

    >>> list(join_continuation(['foo', 'bar\\', 'baz\\']))
    ['foo']
    """
    iterator = iter(lines)
    for current in iterator:
        # Keep folding successor lines while a trailing backslash remains.
        while current.endswith("\\"):
            try:
                continuation = next(iterator)
            except StopIteration:
                # Dangling continuation at end of input: drop the partial line.
                return
            # The [:-2] slice also elides the character before the backslash,
            # preserving the original (quirky) behaviour documented above.
            current = current[:-2].strip() + continuation
        yield current
evalkit_llava/lib/python3.10/site-packages/pip/_internal/utils/_log.py ADDED
@@ -0,0 +1,38 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Customize logging
2
+
3
+ Defines custom logger class for the `logger.verbose(...)` method.
4
+
5
+ init_logging() must be called before any other modules that call logging.getLogger.
6
+ """
7
+
8
+ import logging
9
+ from typing import Any, cast
10
+
11
# custom log level for `--verbose` output
# between DEBUG and INFO
VERBOSE = 15


class VerboseLogger(logging.Logger):
    """Logger subclass that adds a ``verbose()`` convenience method.

    VERBOSE is between INFO and DEBUG.
    """

    def verbose(self, msg: str, *args: Any, **kwargs: Any) -> None:
        """Log *msg* at the custom VERBOSE level."""
        return self.log(VERBOSE, msg, *args, **kwargs)
24
+
25
+
26
def getLogger(name: str) -> VerboseLogger:
    """logging.getLogger, but ensures our VerboseLogger class is returned"""
    logger = logging.getLogger(name)
    return cast(VerboseLogger, logger)
29
+
30
+
31
def init_logging() -> None:
    """Register our VerboseLogger and VERBOSE log level.

    Should be called before any calls to getLogger(),
    i.e. in pip._internal.__init__
    """
    # The two registrations are independent; order does not matter.
    logging.addLevelName(VERBOSE, "VERBOSE")
    logging.setLoggerClass(VerboseLogger)
evalkit_llava/lib/python3.10/site-packages/pip/_internal/utils/appdirs.py ADDED
@@ -0,0 +1,52 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ This code wraps the vendored appdirs module to so the return values are
3
+ compatible for the current pip code base.
4
+
5
+ The intention is to rewrite current usages gradually, keeping the tests pass,
6
+ and eventually drop this after all usages are changed.
7
+ """
8
+
9
+ import os
10
+ import sys
11
+ from typing import List
12
+
13
+ from pip._vendor import platformdirs as _appdirs
14
+
15
+
16
def user_cache_dir(appname: str) -> str:
    """Return the per-user cache directory for *appname* (no author segment)."""
    return _appdirs.user_cache_dir(appname, appauthor=False)
18
+
19
+
20
def _macos_user_config_dir(appname: str, roaming: bool = True) -> str:
    """Return the macOS per-user config dir, preferring an existing
    Application Support directory, else a Linux-style ~/.config path."""
    # Use ~/Application Support/pip, if the directory exists.
    data_path = _appdirs.user_data_dir(appname, appauthor=False, roaming=roaming)
    if os.path.isdir(data_path):
        return data_path

    # Use a Linux-like ~/.config/pip, by default.
    fallback = "~/.config/"
    if appname:
        fallback = os.path.join(fallback, appname)
    return os.path.expanduser(fallback)
32
+
33
+
34
def user_config_dir(appname: str, roaming: bool = True) -> str:
    """Return the per-user configuration directory for *appname*."""
    if sys.platform != "darwin":
        return _appdirs.user_config_dir(appname, appauthor=False, roaming=roaming)
    # macOS gets special handling (Application Support vs ~/.config).
    return _macos_user_config_dir(appname, roaming)
39
+
40
+
41
# for the discussion regarding site_config_dir locations
# see <https://github.com/pypa/pip/issues/1733>
def site_config_dirs(appname: str) -> List[str]:
    """Return the system-wide configuration directories for *appname*."""
    if sys.platform == "darwin":
        data_dir = _appdirs.site_data_dir(appname, appauthor=False, multipath=True)
        return [data_dir]

    dirval = _appdirs.site_config_dir(appname, appauthor=False, multipath=True)
    if sys.platform == "win32":
        return [dirval]

    # Unix-y system. Look in /etc as well.
    return [*dirval.split(os.pathsep), "/etc"]
evalkit_llava/lib/python3.10/site-packages/pip/_internal/utils/compat.py ADDED
@@ -0,0 +1,79 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Stuff that differs in different Python versions and platform
2
+ distributions."""
3
+
4
+ import importlib.resources
5
+ import logging
6
+ import os
7
+ import sys
8
+ from typing import IO
9
+
10
+ __all__ = ["get_path_uid", "stdlib_pkgs", "WINDOWS"]
11
+
12
+
13
+ logger = logging.getLogger(__name__)
14
+
15
+
16
def has_tls() -> bool:
    """Return True when an SSL/TLS implementation is available."""
    try:
        import _ssl  # noqa: F401 # ignore unused
    except ImportError:
        # No compiled _ssl module: fall back to checking whether urllib3
        # is running on top of pyOpenSSL.
        from pip._vendor.urllib3.util import IS_PYOPENSSL

        return IS_PYOPENSSL
    return True
27
+
28
+
29
def get_path_uid(path: str) -> int:
    """
    Return path's uid.

    Does not follow symlinks:
    https://github.com/pypa/pip/pull/935#discussion_r5307003

    Placed this function in compat due to differences on AIX and
    Jython, that should eventually go away.

    :raises OSError: When path is a symlink or can't be read.
    """
    if hasattr(os, "O_NOFOLLOW"):
        fd = os.open(path, os.O_RDONLY | os.O_NOFOLLOW)
        try:
            file_uid = os.fstat(fd).st_uid
        finally:
            # Always release the descriptor, even if fstat() raises —
            # the original code leaked fd on that path.
            os.close(fd)
    else:  # AIX and Jython
        # WARNING: time of check vulnerability, but best we can do w/o NOFOLLOW
        if not os.path.islink(path):
            # older versions of Jython don't have `os.fstat`
            file_uid = os.stat(path).st_uid
        else:
            # raise OSError for parity with os.O_NOFOLLOW above
            raise OSError(f"{path} is a symlink; Will not return uid for symlinks")
    return file_uid
54
+
55
+
56
# The importlib.resources.open_text function was deprecated in 3.11 with suggested
# replacement we use below.
if sys.version_info < (3, 11):
    # Pre-3.11: alias the (then non-deprecated) stdlib helper directly.
    open_text_resource = importlib.resources.open_text
else:

    def open_text_resource(
        package: str, resource: str, encoding: str = "utf-8", errors: str = "strict"
    ) -> IO[str]:
        """Open *resource* inside *package* for text reading (3.11+ files() API)."""
        return (importlib.resources.files(package) / resource).open(
            "r", encoding=encoding, errors=errors
        )
68
+
69
+
70
# packages in the stdlib that may have installation metadata, but should not be
# considered 'installed'. this theoretically could be determined based on
# dist.location (py27:`sysconfig.get_paths()['stdlib']`,
# py26:sysconfig.get_config_vars('LIBDEST')), but fear platform variation may
# make this ineffective, so hard-coding
stdlib_pkgs = {"python", "wsgiref", "argparse"}


# windows detection, covers cpython and ironpython
WINDOWS = sys.platform.startswith("win") or (sys.platform == "cli" and os.name == "nt")
evalkit_llava/lib/python3.10/site-packages/pip/_internal/utils/datetime.py ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """For when pip wants to check the date or time.
2
+ """
3
+
4
+ import datetime
5
+
6
+
7
def today_is_later_than(year: int, month: int, day: int) -> bool:
    """Return True if today's date is strictly after the given date."""
    given_date = datetime.date(year, month, day)
    return datetime.date.today() > given_date
evalkit_llava/lib/python3.10/site-packages/pip/_internal/utils/deprecation.py ADDED
@@ -0,0 +1,124 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ A module that implements tooling to enable easy warnings about deprecations.
3
+ """
4
+
5
+ import logging
6
+ import warnings
7
+ from typing import Any, Optional, TextIO, Type, Union
8
+
9
+ from pip._vendor.packaging.version import parse
10
+
11
+ from pip import __version__ as current_version # NOTE: tests patch this name.
12
+
13
# Prefix prepended to every deprecation message assembled by deprecated().
DEPRECATION_MSG_PREFIX = "DEPRECATION: "
14
+
15
+
16
class PipDeprecationWarning(Warning):
    """Warning category for all deprecation warnings emitted by pip."""

    pass
18
+
19
+
20
# Holds the previous warnings.showwarning hook so non-pip warnings can be
# forwarded to it; populated by install_warning_logger().
_original_showwarning: Any = None
21
+
22
+
23
# Warnings <-> Logging Integration
def _showwarning(
    message: Union[Warning, str],
    category: Type[Warning],
    filename: str,
    lineno: int,
    file: Optional[TextIO] = None,
    line: Optional[str] = None,
) -> None:
    """Route pip deprecation warnings to logging; delegate everything else."""
    if file is not None:
        # An explicit stream was requested: let the original hook handle it.
        if _original_showwarning is not None:
            _original_showwarning(message, category, filename, lineno, file, line)
        return
    if issubclass(category, PipDeprecationWarning):
        # We use a specially named logger which will handle all of the
        # deprecation messages for pip.
        deprecation_logger = logging.getLogger("pip._internal.deprecations")
        deprecation_logger.warning(message)
        return
    _original_showwarning(message, category, filename, lineno, file, line)
42
+
43
+
44
def install_warning_logger() -> None:
    """Install _showwarning as the global warnings hook (idempotent)."""
    # Enable our Deprecation Warnings
    warnings.simplefilter("default", PipDeprecationWarning, append=True)

    global _original_showwarning
    if _original_showwarning is not None:
        # Already installed; keep the saved original hook.
        return
    _original_showwarning = warnings.showwarning
    warnings.showwarning = _showwarning
53
+
54
+
55
def deprecated(
    *,
    reason: str,
    replacement: Optional[str],
    gone_in: Optional[str],
    feature_flag: Optional[str] = None,
    issue: Optional[int] = None,
) -> None:
    """Helper to deprecate existing functionality.

    reason:
        Textual reason shown to the user about why this functionality has
        been deprecated. Should be a complete sentence.
    replacement:
        Textual suggestion shown to the user about what alternative
        functionality they can use.
    gone_in:
        The version of pip does this functionality should get removed in.
        Raises an error if pip's current version is greater than or equal to
        this.
    feature_flag:
        Command-line flag of the form --use-feature={feature_flag} for testing
        upcoming functionality.
    issue:
        Issue number on the tracker that would serve as a useful place for
        users to find related discussion and provide feedback.
    """
    # Determine whether or not the feature is already gone in this version.
    is_gone = gone_in is not None and parse(current_version) >= parse(gone_in)

    # Pick the templates that depend on whether the removal already happened.
    if is_gone:
        gone_in_template: Optional[str] = "Since pip {}, this is no longer supported."
        feature_flag_template: Optional[str] = None
    else:
        gone_in_template = "pip {} will enforce this behaviour change."
        feature_flag_template = (
            "You can use the flag --use-feature={} to test the upcoming behaviour."
        )

    message_parts = [
        (reason, f"{DEPRECATION_MSG_PREFIX}{{}}"),
        (gone_in, gone_in_template),
        (replacement, "A possible replacement is {}."),
        (feature_flag, feature_flag_template),
        (issue, "Discussion can be found at https://github.com/pypa/pip/issues/{}"),
    ]

    fragments = []
    for value, template in message_parts:
        if value is not None and template is not None:
            fragments.append(template.format(value))
    message = " ".join(fragments)

    # Raise as an error if this behaviour is deprecated.
    if is_gone:
        raise PipDeprecationWarning(message)

    warnings.warn(message, category=PipDeprecationWarning, stacklevel=2)
evalkit_llava/lib/python3.10/site-packages/pip/_internal/utils/hashes.py ADDED
@@ -0,0 +1,147 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import hashlib
2
+ from typing import TYPE_CHECKING, BinaryIO, Dict, Iterable, List, NoReturn, Optional
3
+
4
+ from pip._internal.exceptions import HashMismatch, HashMissing, InstallationError
5
+ from pip._internal.utils.misc import read_chunks
6
+
7
+ if TYPE_CHECKING:
8
+ from hashlib import _Hash
9
+
10
+
11
# The recommended hash algo of the moment. Change this whenever the state of
# the art changes; it won't hurt backward compatibility.
FAVORITE_HASH = "sha256"


# Names of hashlib algorithms allowed by the --hash option and ``pip hash``
# Currently, those are the ones at least as collision-resistant as sha256.
STRONG_HASHES = ["sha256", "sha384", "sha512"]
19
+
20
+
21
class Hashes:
    """A wrapper that builds multiple hashes at once and checks them against
    known-good values

    """

    def __init__(self, hashes: Optional[Dict[str, List[str]]] = None) -> None:
        """
        :param hashes: A dict of algorithm names pointing to lists of allowed
            hex digests
        """
        allowed: Dict[str, List[str]] = {}
        if hashes is not None:
            for algorithm, digests in hashes.items():
                # Make sure values are always sorted (to ease equality checks)
                allowed[algorithm] = [d.lower() for d in sorted(digests)]
        self._allowed = allowed

    def __and__(self, other: "Hashes") -> "Hashes":
        if not isinstance(other, Hashes):
            return NotImplemented

        # If either of the Hashes object is entirely empty (i.e. no hash
        # specified at all), all hashes from the other object are allowed.
        if not other:
            return self
        if not self:
            return other

        # Otherwise only hashes that present in both objects are allowed.
        intersection: Dict[str, List[str]] = {}
        for algorithm, digests in other._allowed.items():
            if algorithm in self._allowed:
                mine = self._allowed[algorithm]
                intersection[algorithm] = [d for d in digests if d in mine]
        return Hashes(intersection)

    @property
    def digest_count(self) -> int:
        """Total number of allowed digests across all algorithms."""
        return sum(map(len, self._allowed.values()))

    def is_hash_allowed(self, hash_name: str, hex_digest: str) -> bool:
        """Return whether the given hex digest is allowed."""
        return hex_digest in self._allowed.get(hash_name, [])

    def check_against_chunks(self, chunks: Iterable[bytes]) -> None:
        """Check good hashes against ones built from iterable of chunks of
        data.

        Raise HashMismatch if none match.
        """
        gots = {}
        for hash_name in self._allowed:
            try:
                gots[hash_name] = hashlib.new(hash_name)
            except (ValueError, TypeError):
                raise InstallationError(f"Unknown hash name: {hash_name}")

        for chunk in chunks:
            for hasher in gots.values():
                hasher.update(chunk)

        matched = any(
            got.hexdigest() in self._allowed[name] for name, got in gots.items()
        )
        if not matched:
            self._raise(gots)

    def _raise(self, gots: Dict[str, "_Hash"]) -> "NoReturn":
        raise HashMismatch(self._allowed, gots)

    def check_against_file(self, file: BinaryIO) -> None:
        """Check good hashes against a file-like object

        Raise HashMismatch if none match.
        """
        return self.check_against_chunks(read_chunks(file))

    def check_against_path(self, path: str) -> None:
        """Check good hashes against the file at *path*."""
        with open(path, "rb") as file:
            return self.check_against_file(file)

    def has_one_of(self, hashes: Dict[str, str]) -> bool:
        """Return whether any of the given hashes are allowed."""
        return any(
            self.is_hash_allowed(name, digest) for name, digest in hashes.items()
        )

    def __bool__(self) -> bool:
        """Return whether I know any known-good hashes."""
        return bool(self._allowed)

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, Hashes):
            return NotImplemented
        return self._allowed == other._allowed

    def __hash__(self) -> int:
        entries = sorted(
            ":".join((algorithm, digest))
            for algorithm, digest_list in self._allowed.items()
            for digest in digest_list
        )
        return hash(",".join(entries))
130
+
131
+
132
class MissingHashes(Hashes):
    """A workalike for Hashes used when we're missing a hash for a requirement

    It computes the actual hash of the requirement and raises a HashMissing
    exception showing it to the user.

    """

    def __init__(self) -> None:
        """Don't offer the ``hashes`` kwarg."""
        # Pass our favorite hash in to generate a "gotten hash". With the
        # empty list, it will never match, so an error will always raise.
        super().__init__({FAVORITE_HASH: []})

    def _raise(self, gots: Dict[str, "_Hash"]) -> "NoReturn":
        actual_digest = gots[FAVORITE_HASH].hexdigest()
        raise HashMissing(actual_digest)
evalkit_llava/lib/python3.10/site-packages/pip/_internal/utils/packaging.py ADDED
@@ -0,0 +1,58 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import functools
2
+ import logging
3
+ import re
4
+ from typing import NewType, Optional, Tuple, cast
5
+
6
+ from pip._vendor.packaging import specifiers, version
7
+ from pip._vendor.packaging.requirements import Requirement
8
+
9
+ NormalizedExtra = NewType("NormalizedExtra", str)
10
+
11
+ logger = logging.getLogger(__name__)
12
+
13
+
14
@functools.lru_cache(maxsize=32)
def check_requires_python(
    requires_python: Optional[str], version_info: Tuple[int, ...]
) -> bool:
    """
    Check if the given Python version matches a "Requires-Python" specifier.

    :param version_info: A 3-tuple of ints representing a Python
        major-minor-micro version to check (e.g. `sys.version_info[:3]`).

    :return: `True` if the given Python version satisfies the requirement.
        Otherwise, return `False`.

    :raises InvalidSpecifier: If `requires_python` has an invalid format.
    """
    if requires_python is None:
        # The package provides no information
        return True
    specifier_set = specifiers.SpecifierSet(requires_python)
    candidate = version.parse(".".join(str(part) for part in version_info))
    return candidate in specifier_set
36
+
37
+
38
@functools.lru_cache(maxsize=2048)
def get_requirement(req_string: str) -> Requirement:
    """Construct a packaging.Requirement object with caching"""
    # Parsing requirement strings is expensive, and is also expected to happen
    # with a low diversity of different arguments (at least relative the number
    # constructed). This method adds a cache to requirement object creation to
    # minimize repeated parsing of the same string to construct equivalent
    # Requirement objects.
    # NOTE(review): lru_cache returns the *same* Requirement instance for equal
    # strings, so callers must not mutate the returned object.
    return Requirement(req_string)
47
+
48
+
49
def safe_extra(extra: str) -> NormalizedExtra:
    """Convert an arbitrary string to a standard 'extra' name

    Any runs of non-alphanumeric characters are replaced with a single '_',
    and the result is always lowercased.

    This function is duplicated from ``pkg_resources``. Note that this is not
    the same to either ``canonicalize_name`` or ``_egg_link_name``.
    """
    sanitized = re.sub("[^A-Za-z0-9.-]+", "_", extra)
    return cast(NormalizedExtra, sanitized.lower())
evalkit_llava/lib/python3.10/site-packages/pip/_internal/utils/retry.py ADDED
@@ -0,0 +1,42 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import functools
2
+ from time import perf_counter, sleep
3
+ from typing import Callable, TypeVar
4
+
5
+ from pip._vendor.typing_extensions import ParamSpec
6
+
7
+ T = TypeVar("T")
8
+ P = ParamSpec("P")
9
+
10
+
11
def retry(
    wait: float, stop_after_delay: float
) -> Callable[[Callable[P, T]], Callable[P, T]]:
    """Decorator to automatically retry a function on error.

    If the function raises, the function is recalled with the same arguments
    until it returns or the time limit is reached. When the time limit is
    surpassed, the last exception raised is reraised.

    :param wait: The time to wait after an error before retrying, in seconds.
    :param stop_after_delay: The time limit after which retries will cease,
        in seconds.
    """

    def decorator(func: Callable[P, T]) -> Callable[P, T]:
        @functools.wraps(func)
        def wrapped(*args: P.args, **kwargs: P.kwargs) -> T:
            # The performance counter is monotonic on all platforms we care
            # about and has much better resolution than time.monotonic().
            deadline = perf_counter() + stop_after_delay
            while True:
                try:
                    return func(*args, **kwargs)
                except Exception:
                    # Past the deadline: reraise the in-flight exception.
                    if perf_counter() > deadline:
                        raise
                    sleep(wait)

        return wrapped

    return decorator
evalkit_llava/lib/python3.10/site-packages/pip/_internal/utils/setuptools_build.py ADDED
@@ -0,0 +1,146 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import sys
2
+ import textwrap
3
+ from typing import List, Optional, Sequence
4
+
5
+ # Shim to wrap setup.py invocation with setuptools
6
+ # Note that __file__ is handled via two {!r} *and* %r, to ensure that paths on
7
+ # Windows are correctly handled (it should be "C:\\Users" not "C:\Users").
8
_SETUPTOOLS_SHIM = textwrap.dedent(
    """
    exec(compile('''
    # This is <pip-setuptools-caller> -- a caller that pip uses to run setup.py
    #
    # - It imports setuptools before invoking setup.py, to enable projects that directly
    #   import from `distutils.core` to work with newer packaging standards.
    # - It provides a clear error message when setuptools is not installed.
    # - It sets `sys.argv[0]` to the underlying `setup.py`, when invoking `setup.py` so
    #   setuptools doesn't think the script is `-c`. This avoids the following warning:
    #     manifest_maker: standard file '-c' not found".
    # - It generates a shim setup.py, for handling setup.cfg-only projects.
    import os, sys, tokenize

    try:
        import setuptools
    except ImportError as error:
        print(
            "ERROR: Can not execute `setup.py` since setuptools is not available in "
            "the build environment.",
            file=sys.stderr,
        )
        sys.exit(1)

    __file__ = %r
    sys.argv[0] = __file__

    if os.path.exists(__file__):
        filename = __file__
        with tokenize.open(__file__) as f:
            setup_py_code = f.read()
    else:
        filename = "<auto-generated setuptools caller>"
        setup_py_code = "from setuptools import setup; setup()"

    exec(compile(setup_py_code, filename, "exec"))
    ''' % ({!r},), "<pip-setuptools-caller>", "exec"))
    """
).rstrip()


def make_setuptools_shim_args(
    setup_py_path: str,
    global_options: Optional[Sequence[str]] = None,
    no_user_config: bool = False,
    unbuffered_output: bool = False,
) -> List[str]:
    """
    Build the interpreter command line that runs setup.py through the shim.

    :param setup_py_path: The path to setup.py to be wrapped.
    :param global_options: Additional global options.
    :param no_user_config: If True, disables personal user configuration.
    :param unbuffered_output: If True, adds the unbuffered switch to the
        argument list.
    """
    command: List[str] = [sys.executable]
    if unbuffered_output:
        command.append("-u")
    # The shim is interpolated with the repr of the path so Windows
    # backslashes survive the round-trip through -c.
    command.extend(["-c", _SETUPTOOLS_SHIM.format(setup_py_path)])
    command.extend(global_options or [])
    if no_user_config:
        command.append("--no-user-cfg")
    return command
73
+
74
+
75
def make_setuptools_bdist_wheel_args(
    setup_py_path: str,
    global_options: Sequence[str],
    build_options: Sequence[str],
    destination_dir: str,
) -> List[str]:
    """Build the command line for ``setup.py bdist_wheel``.

    :param setup_py_path: Path of the setup.py to invoke.
    :param global_options: Options placed before the ``bdist_wheel`` command.
    :param build_options: Options appended after the command.
    :param destination_dir: Directory that receives the built wheel (``-d``).
    """
    # NOTE: Eventually, we'd want to also -S to the flags here, when we're
    # isolating. Currently, it breaks Python in virtualenvs, because it
    # relies on site.py to find parts of the standard library outside the
    # virtualenv.
    command = make_setuptools_shim_args(
        setup_py_path, global_options=global_options, unbuffered_output=True
    )
    command.extend(["bdist_wheel", "-d", destination_dir])
    command.extend(build_options)
    return command
91
+
92
+
93
def make_setuptools_clean_args(
    setup_py_path: str,
    global_options: Sequence[str],
) -> List[str]:
    """Build the command line for ``setup.py clean --all``.

    :param setup_py_path: Path of the setup.py to invoke.
    :param global_options: Options placed before the ``clean`` command.
    """
    command = make_setuptools_shim_args(
        setup_py_path, global_options=global_options, unbuffered_output=True
    )
    command.extend(["clean", "--all"])
    return command
102
+
103
+
104
def make_setuptools_develop_args(
    setup_py_path: str,
    *,
    global_options: Sequence[str],
    no_user_config: bool,
    prefix: Optional[str],
    home: Optional[str],
    use_user_site: bool,
) -> List[str]:
    """Build the command line for ``setup.py develop --no-deps``.

    :param setup_py_path: Path of the setup.py to invoke.
    :param global_options: Options placed before the ``develop`` command.
    :param no_user_config: If True, passes ``--no-user-cfg`` to the shim.
    :param prefix: Optional installation prefix (``--prefix``).
    :param home: Optional install directory (``--install-dir``).
    :param use_user_site: If True, installs into the user site-packages.
    """
    # A user-site install and an explicit --prefix are mutually exclusive.
    assert not (use_user_site and prefix)

    command = make_setuptools_shim_args(
        setup_py_path,
        global_options=global_options,
        no_user_config=no_user_config,
    )
    command.extend(["develop", "--no-deps"])

    if prefix:
        command.extend(["--prefix", prefix])
    if home is not None:
        command.extend(["--install-dir", home])

    if use_user_site:
        # An empty --prefix= is required for --user to work in virtualenvs.
        command.extend(["--user", "--prefix="])

    return command
132
+
133
+
134
def make_setuptools_egg_info_args(
    setup_py_path: str,
    egg_info_dir: Optional[str],
    no_user_config: bool,
) -> List[str]:
    """Build the command line for ``setup.py egg_info``.

    :param setup_py_path: Path of the setup.py to invoke.
    :param egg_info_dir: Optional base directory for the egg-info
        (``--egg-base``); omitted when falsy.
    :param no_user_config: If True, passes ``--no-user-cfg`` to the shim.
    """
    command = make_setuptools_shim_args(setup_py_path, no_user_config=no_user_config)
    command.append("egg_info")

    if egg_info_dir:
        command.extend(["--egg-base", egg_info_dir])

    return command
evalkit_llava/lib/python3.10/turtledemo/__pycache__/chaos.cpython-310.pyc ADDED
Binary file (1.97 kB). View file
 
evalkit_llava/lib/python3.10/turtledemo/__pycache__/colormixer.cpython-310.pyc ADDED
Binary file (2.1 kB). View file
 
evalkit_llava/lib/python3.10/turtledemo/__pycache__/forest.cpython-310.pyc ADDED
Binary file (3.54 kB). View file
 
evalkit_llava/lib/python3.10/turtledemo/__pycache__/fractalcurves.cpython-310.pyc ADDED
Binary file (3.15 kB). View file
 
evalkit_llava/lib/python3.10/turtledemo/__pycache__/lindenmayer.cpython-310.pyc ADDED
Binary file (2.79 kB). View file
 
evalkit_llava/lib/python3.10/turtledemo/__pycache__/minimal_hanoi.cpython-310.pyc ADDED
Binary file (3.09 kB). View file
 
evalkit_llava/lib/python3.10/turtledemo/__pycache__/nim.cpython-310.pyc ADDED
Binary file (7.66 kB). View file
 
evalkit_llava/lib/python3.10/turtledemo/__pycache__/paint.cpython-310.pyc ADDED
Binary file (1.62 kB). View file
 
evalkit_llava/lib/python3.10/turtledemo/__pycache__/penrose.cpython-310.pyc ADDED
Binary file (4.87 kB). View file
 
evalkit_llava/lib/python3.10/turtledemo/__pycache__/planet_and_moon.cpython-310.pyc ADDED
Binary file (3.5 kB). View file
 
evalkit_llava/lib/python3.10/turtledemo/__pycache__/rosette.cpython-310.pyc ADDED
Binary file (1.73 kB). View file
 
evalkit_llava/lib/python3.10/turtledemo/__pycache__/sorting_animate.cpython-310.pyc ADDED
Binary file (6.33 kB). View file
 
evalkit_llava/lib/python3.10/turtledemo/__pycache__/tree.cpython-310.pyc ADDED
Binary file (2 kB). View file
 
evalkit_llava/lib/python3.10/turtledemo/__pycache__/two_canvases.cpython-310.pyc ADDED
Binary file (1.53 kB). View file