mirror of
https://github.com/aykhans/AzSuicideDataVisualization.git
synced 2025-07-02 22:30:48 +00:00
first commit
This commit is contained in:
70
.venv/Lib/site-packages/jedi/inference/compiled/__init__.py
Normal file
70
.venv/Lib/site-packages/jedi/inference/compiled/__init__.py
Normal file
@ -0,0 +1,70 @@
|
||||
# This file also re-exports symbols for wider use. We configure mypy and flake8
|
||||
# to be aware that this file does this.
|
||||
|
||||
from jedi.inference.compiled.value import CompiledValue, CompiledName, \
|
||||
CompiledValueFilter, CompiledValueName, create_from_access_path
|
||||
from jedi.inference.base_value import LazyValueWrapper
|
||||
|
||||
|
||||
def builtin_from_name(inference_state, string):
|
||||
typing_builtins_module = inference_state.builtins_module
|
||||
if string in ('None', 'True', 'False'):
|
||||
builtins, = typing_builtins_module.non_stub_value_set
|
||||
filter_ = next(builtins.get_filters())
|
||||
else:
|
||||
filter_ = next(typing_builtins_module.get_filters())
|
||||
name, = filter_.get(string)
|
||||
value, = name.infer()
|
||||
return value
|
||||
|
||||
|
||||
class ExactValue(LazyValueWrapper):
|
||||
"""
|
||||
This class represents exact values, that makes operations like additions
|
||||
and exact boolean values possible, while still being a "normal" stub.
|
||||
"""
|
||||
def __init__(self, compiled_value):
|
||||
self.inference_state = compiled_value.inference_state
|
||||
self._compiled_value = compiled_value
|
||||
|
||||
def __getattribute__(self, name):
|
||||
if name in ('get_safe_value', 'execute_operation', 'access_handle',
|
||||
'negate', 'py__bool__', 'is_compiled'):
|
||||
return getattr(self._compiled_value, name)
|
||||
return super().__getattribute__(name)
|
||||
|
||||
def _get_wrapped_value(self):
|
||||
instance, = builtin_from_name(
|
||||
self.inference_state, self._compiled_value.name.string_name).execute_with_values()
|
||||
return instance
|
||||
|
||||
def __repr__(self):
|
||||
return '<%s: %s>' % (self.__class__.__name__, self._compiled_value)
|
||||
|
||||
|
||||
def create_simple_object(inference_state, obj):
|
||||
"""
|
||||
Only allows creations of objects that are easily picklable across Python
|
||||
versions.
|
||||
"""
|
||||
assert type(obj) in (int, float, str, bytes, slice, complex, bool), repr(obj)
|
||||
compiled_value = create_from_access_path(
|
||||
inference_state,
|
||||
inference_state.compiled_subprocess.create_simple_object(obj)
|
||||
)
|
||||
return ExactValue(compiled_value)
|
||||
|
||||
|
||||
def get_string_value_set(inference_state):
|
||||
return builtin_from_name(inference_state, 'str').execute_with_values()
|
||||
|
||||
|
||||
def load_module(inference_state, dotted_name, **kwargs):
|
||||
# Temporary, some tensorflow builtins cannot be loaded, so it's tried again
|
||||
# and again and it's really slow.
|
||||
if dotted_name.startswith('tensorflow.'):
|
||||
return None
|
||||
access_path = inference_state.compiled_subprocess.load_module(dotted_name=dotted_name, **kwargs)
|
||||
if access_path is None:
|
||||
return None
|
||||
return create_from_access_path(inference_state, access_path)
|
558
.venv/Lib/site-packages/jedi/inference/compiled/access.py
Normal file
558
.venv/Lib/site-packages/jedi/inference/compiled/access.py
Normal file
@ -0,0 +1,558 @@
|
||||
import inspect
|
||||
import types
|
||||
import traceback
|
||||
import sys
|
||||
import operator as op
|
||||
from collections import namedtuple
|
||||
import warnings
|
||||
import re
|
||||
import builtins
|
||||
import typing
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
from jedi.inference.compiled.getattr_static import getattr_static
|
||||
|
||||
ALLOWED_GETITEM_TYPES = (str, list, tuple, bytes, bytearray, dict)
|
||||
|
||||
MethodDescriptorType = type(str.replace)
|
||||
# These are not considered classes and access is granted even though they have
|
||||
# a __class__ attribute.
|
||||
NOT_CLASS_TYPES = (
|
||||
types.BuiltinFunctionType,
|
||||
types.CodeType,
|
||||
types.FrameType,
|
||||
types.FunctionType,
|
||||
types.GeneratorType,
|
||||
types.GetSetDescriptorType,
|
||||
types.LambdaType,
|
||||
types.MemberDescriptorType,
|
||||
types.MethodType,
|
||||
types.ModuleType,
|
||||
types.TracebackType,
|
||||
MethodDescriptorType,
|
||||
types.MappingProxyType,
|
||||
types.SimpleNamespace,
|
||||
types.DynamicClassAttribute,
|
||||
)
|
||||
|
||||
# Those types don't exist in typing.
|
||||
MethodDescriptorType = type(str.replace)
|
||||
WrapperDescriptorType = type(set.__iter__)
|
||||
# `object.__subclasshook__` is an already executed descriptor.
|
||||
object_class_dict = type.__dict__["__dict__"].__get__(object)
|
||||
ClassMethodDescriptorType = type(object_class_dict['__subclasshook__'])
|
||||
|
||||
_sentinel = object()
|
||||
|
||||
# Maps Python syntax to the operator module.
|
||||
COMPARISON_OPERATORS = {
|
||||
'==': op.eq,
|
||||
'!=': op.ne,
|
||||
'is': op.is_,
|
||||
'is not': op.is_not,
|
||||
'<': op.lt,
|
||||
'<=': op.le,
|
||||
'>': op.gt,
|
||||
'>=': op.ge,
|
||||
}
|
||||
|
||||
_OPERATORS = {
|
||||
'+': op.add,
|
||||
'-': op.sub,
|
||||
}
|
||||
_OPERATORS.update(COMPARISON_OPERATORS)
|
||||
|
||||
ALLOWED_DESCRIPTOR_ACCESS = (
|
||||
types.FunctionType,
|
||||
types.GetSetDescriptorType,
|
||||
types.MemberDescriptorType,
|
||||
MethodDescriptorType,
|
||||
WrapperDescriptorType,
|
||||
ClassMethodDescriptorType,
|
||||
staticmethod,
|
||||
classmethod,
|
||||
)
|
||||
|
||||
|
||||
def safe_getattr(obj, name, default=_sentinel):
|
||||
try:
|
||||
attr, is_get_descriptor = getattr_static(obj, name)
|
||||
except AttributeError:
|
||||
if default is _sentinel:
|
||||
raise
|
||||
return default
|
||||
else:
|
||||
if isinstance(attr, ALLOWED_DESCRIPTOR_ACCESS):
|
||||
# In case of descriptors that have get methods we cannot return
|
||||
# it's value, because that would mean code execution.
|
||||
# Since it's an isinstance call, code execution is still possible,
|
||||
# but this is not really a security feature, but much more of a
|
||||
# safety feature. Code execution is basically always possible when
|
||||
# a module is imported. This is here so people don't shoot
|
||||
# themselves in the foot.
|
||||
return getattr(obj, name)
|
||||
return attr
|
||||
|
||||
|
||||
SignatureParam = namedtuple(
|
||||
'SignatureParam',
|
||||
'name has_default default default_string has_annotation annotation annotation_string kind_name'
|
||||
)
|
||||
|
||||
|
||||
def shorten_repr(func):
|
||||
def wrapper(self):
|
||||
r = func(self)
|
||||
if len(r) > 50:
|
||||
r = r[:50] + '..'
|
||||
return r
|
||||
return wrapper
|
||||
|
||||
|
||||
def create_access(inference_state, obj):
|
||||
return inference_state.compiled_subprocess.get_or_create_access_handle(obj)
|
||||
|
||||
|
||||
def load_module(inference_state, dotted_name, sys_path):
|
||||
temp, sys.path = sys.path, sys_path
|
||||
try:
|
||||
__import__(dotted_name)
|
||||
except ImportError:
|
||||
# If a module is "corrupt" or not really a Python module or whatever.
|
||||
warnings.warn(
|
||||
"Module %s not importable in path %s." % (dotted_name, sys_path),
|
||||
UserWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
return None
|
||||
except Exception:
|
||||
# Since __import__ pretty much makes code execution possible, just
|
||||
# catch any error here and print it.
|
||||
warnings.warn(
|
||||
"Cannot import:\n%s" % traceback.format_exc(), UserWarning, stacklevel=2
|
||||
)
|
||||
return None
|
||||
finally:
|
||||
sys.path = temp
|
||||
|
||||
# Just access the cache after import, because of #59 as well as the very
|
||||
# complicated import structure of Python.
|
||||
module = sys.modules[dotted_name]
|
||||
return create_access_path(inference_state, module)
|
||||
|
||||
|
||||
class AccessPath:
|
||||
def __init__(self, accesses):
|
||||
self.accesses = accesses
|
||||
|
||||
|
||||
def create_access_path(inference_state, obj):
|
||||
access = create_access(inference_state, obj)
|
||||
return AccessPath(access.get_access_path_tuples())
|
||||
|
||||
|
||||
def get_api_type(obj):
|
||||
if inspect.isclass(obj):
|
||||
return 'class'
|
||||
elif inspect.ismodule(obj):
|
||||
return 'module'
|
||||
elif inspect.isbuiltin(obj) or inspect.ismethod(obj) \
|
||||
or inspect.ismethoddescriptor(obj) or inspect.isfunction(obj):
|
||||
return 'function'
|
||||
# Everything else...
|
||||
return 'instance'
|
||||
|
||||
|
||||
class DirectObjectAccess:
|
||||
def __init__(self, inference_state, obj):
|
||||
self._inference_state = inference_state
|
||||
self._obj = obj
|
||||
|
||||
def __repr__(self):
|
||||
return '%s(%s)' % (self.__class__.__name__, self.get_repr())
|
||||
|
||||
def _create_access(self, obj):
|
||||
return create_access(self._inference_state, obj)
|
||||
|
||||
def _create_access_path(self, obj):
|
||||
return create_access_path(self._inference_state, obj)
|
||||
|
||||
def py__bool__(self):
|
||||
return bool(self._obj)
|
||||
|
||||
def py__file__(self) -> Optional[Path]:
|
||||
try:
|
||||
return Path(self._obj.__file__)
|
||||
except AttributeError:
|
||||
return None
|
||||
|
||||
def py__doc__(self):
|
||||
return inspect.getdoc(self._obj) or ''
|
||||
|
||||
def py__name__(self):
|
||||
if not _is_class_instance(self._obj) or \
|
||||
inspect.ismethoddescriptor(self._obj): # slots
|
||||
cls = self._obj
|
||||
else:
|
||||
try:
|
||||
cls = self._obj.__class__
|
||||
except AttributeError:
|
||||
# happens with numpy.core.umath._UFUNC_API (you get it
|
||||
# automatically by doing `import numpy`.
|
||||
return None
|
||||
|
||||
try:
|
||||
return cls.__name__
|
||||
except AttributeError:
|
||||
return None
|
||||
|
||||
def py__mro__accesses(self):
|
||||
return tuple(self._create_access_path(cls) for cls in self._obj.__mro__[1:])
|
||||
|
||||
def py__getitem__all_values(self):
|
||||
if isinstance(self._obj, dict):
|
||||
return [self._create_access_path(v) for v in self._obj.values()]
|
||||
if isinstance(self._obj, (list, tuple)):
|
||||
return [self._create_access_path(v) for v in self._obj]
|
||||
|
||||
if self.is_instance():
|
||||
cls = DirectObjectAccess(self._inference_state, self._obj.__class__)
|
||||
return cls.py__getitem__all_values()
|
||||
|
||||
try:
|
||||
getitem = self._obj.__getitem__
|
||||
except AttributeError:
|
||||
pass
|
||||
else:
|
||||
annotation = DirectObjectAccess(self._inference_state, getitem).get_return_annotation()
|
||||
if annotation is not None:
|
||||
return [annotation]
|
||||
return None
|
||||
|
||||
def py__simple_getitem__(self, index):
|
||||
if type(self._obj) not in ALLOWED_GETITEM_TYPES:
|
||||
# Get rid of side effects, we won't call custom `__getitem__`s.
|
||||
return None
|
||||
|
||||
return self._create_access_path(self._obj[index])
|
||||
|
||||
def py__iter__list(self):
|
||||
try:
|
||||
iter_method = self._obj.__iter__
|
||||
except AttributeError:
|
||||
return None
|
||||
else:
|
||||
p = DirectObjectAccess(self._inference_state, iter_method).get_return_annotation()
|
||||
if p is not None:
|
||||
return [p]
|
||||
|
||||
if type(self._obj) not in ALLOWED_GETITEM_TYPES:
|
||||
# Get rid of side effects, we won't call custom `__getitem__`s.
|
||||
return []
|
||||
|
||||
lst = []
|
||||
for i, part in enumerate(self._obj):
|
||||
if i > 20:
|
||||
# Should not go crazy with large iterators
|
||||
break
|
||||
lst.append(self._create_access_path(part))
|
||||
return lst
|
||||
|
||||
def py__class__(self):
|
||||
return self._create_access_path(self._obj.__class__)
|
||||
|
||||
def py__bases__(self):
|
||||
return [self._create_access_path(base) for base in self._obj.__bases__]
|
||||
|
||||
def py__path__(self):
|
||||
paths = getattr(self._obj, '__path__', None)
|
||||
# Avoid some weird hacks that would just fail, because they cannot be
|
||||
# used by pickle.
|
||||
if not isinstance(paths, list) \
|
||||
or not all(isinstance(p, str) for p in paths):
|
||||
return None
|
||||
return paths
|
||||
|
||||
@shorten_repr
|
||||
def get_repr(self):
|
||||
if inspect.ismodule(self._obj):
|
||||
return repr(self._obj)
|
||||
# Try to avoid execution of the property.
|
||||
if safe_getattr(self._obj, '__module__', default='') == 'builtins':
|
||||
return repr(self._obj)
|
||||
|
||||
type_ = type(self._obj)
|
||||
if type_ == type:
|
||||
return type.__repr__(self._obj)
|
||||
|
||||
if safe_getattr(type_, '__module__', default='') == 'builtins':
|
||||
# Allow direct execution of repr for builtins.
|
||||
return repr(self._obj)
|
||||
return object.__repr__(self._obj)
|
||||
|
||||
def is_class(self):
|
||||
return inspect.isclass(self._obj)
|
||||
|
||||
def is_function(self):
|
||||
return inspect.isfunction(self._obj) or inspect.ismethod(self._obj)
|
||||
|
||||
def is_module(self):
|
||||
return inspect.ismodule(self._obj)
|
||||
|
||||
def is_instance(self):
|
||||
return _is_class_instance(self._obj)
|
||||
|
||||
def ismethoddescriptor(self):
|
||||
return inspect.ismethoddescriptor(self._obj)
|
||||
|
||||
def get_qualified_names(self):
|
||||
def try_to_get_name(obj):
|
||||
return getattr(obj, '__qualname__', getattr(obj, '__name__', None))
|
||||
|
||||
if self.is_module():
|
||||
return ()
|
||||
name = try_to_get_name(self._obj)
|
||||
if name is None:
|
||||
name = try_to_get_name(type(self._obj))
|
||||
if name is None:
|
||||
return ()
|
||||
return tuple(name.split('.'))
|
||||
|
||||
def dir(self):
|
||||
return dir(self._obj)
|
||||
|
||||
def has_iter(self):
|
||||
try:
|
||||
iter(self._obj)
|
||||
return True
|
||||
except TypeError:
|
||||
return False
|
||||
|
||||
def is_allowed_getattr(self, name, safe=True):
|
||||
# TODO this API is ugly.
|
||||
if not safe:
|
||||
# Unsafe is mostly used to check for __getattr__/__getattribute__.
|
||||
# getattr_static works for properties, but the underscore methods
|
||||
# are just ignored (because it's safer and avoids more code
|
||||
# execution). See also GH #1378.
|
||||
|
||||
# Avoid warnings, see comment in the next function.
|
||||
with warnings.catch_warnings(record=True):
|
||||
warnings.simplefilter("always")
|
||||
try:
|
||||
return hasattr(self._obj, name), False
|
||||
except Exception:
|
||||
# Obviously has an attribute (propably a property) that
|
||||
# gets executed, so just avoid all exceptions here.
|
||||
return False, False
|
||||
try:
|
||||
attr, is_get_descriptor = getattr_static(self._obj, name)
|
||||
except AttributeError:
|
||||
return False, False
|
||||
else:
|
||||
if is_get_descriptor and type(attr) not in ALLOWED_DESCRIPTOR_ACCESS:
|
||||
# In case of descriptors that have get methods we cannot return
|
||||
# it's value, because that would mean code execution.
|
||||
return True, True
|
||||
return True, False
|
||||
|
||||
def getattr_paths(self, name, default=_sentinel):
|
||||
try:
|
||||
# Make sure no warnings are printed here, this is autocompletion,
|
||||
# warnings should not be shown. See also GH #1383.
|
||||
with warnings.catch_warnings(record=True):
|
||||
warnings.simplefilter("always")
|
||||
return_obj = getattr(self._obj, name)
|
||||
except Exception as e:
|
||||
if default is _sentinel:
|
||||
if isinstance(e, AttributeError):
|
||||
# Happens e.g. in properties of
|
||||
# PyQt4.QtGui.QStyleOptionComboBox.currentText
|
||||
# -> just set it to None
|
||||
raise
|
||||
# Just in case anything happens, return an AttributeError. It
|
||||
# should not crash.
|
||||
raise AttributeError
|
||||
return_obj = default
|
||||
access = self._create_access(return_obj)
|
||||
if inspect.ismodule(return_obj):
|
||||
return [access]
|
||||
|
||||
try:
|
||||
module = return_obj.__module__
|
||||
except AttributeError:
|
||||
pass
|
||||
else:
|
||||
if module is not None and isinstance(module, str):
|
||||
try:
|
||||
__import__(module)
|
||||
# For some modules like _sqlite3, the __module__ for classes is
|
||||
# different, in this case it's sqlite3. So we have to try to
|
||||
# load that "original" module, because it's not loaded yet. If
|
||||
# we don't do that, we don't really have a "parent" module and
|
||||
# we would fall back to builtins.
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
module = inspect.getmodule(return_obj)
|
||||
if module is None:
|
||||
module = inspect.getmodule(type(return_obj))
|
||||
if module is None:
|
||||
module = builtins
|
||||
return [self._create_access(module), access]
|
||||
|
||||
def get_safe_value(self):
|
||||
if type(self._obj) in (bool, bytes, float, int, str, slice) or self._obj is None:
|
||||
return self._obj
|
||||
raise ValueError("Object is type %s and not simple" % type(self._obj))
|
||||
|
||||
def get_api_type(self):
|
||||
return get_api_type(self._obj)
|
||||
|
||||
def get_array_type(self):
|
||||
if isinstance(self._obj, dict):
|
||||
return 'dict'
|
||||
return None
|
||||
|
||||
def get_key_paths(self):
|
||||
def iter_partial_keys():
|
||||
# We could use list(keys()), but that might take a lot more memory.
|
||||
for (i, k) in enumerate(self._obj.keys()):
|
||||
# Limit key listing at some point. This is artificial, but this
|
||||
# way we don't get stalled because of slow completions
|
||||
if i > 50:
|
||||
break
|
||||
yield k
|
||||
|
||||
return [self._create_access_path(k) for k in iter_partial_keys()]
|
||||
|
||||
def get_access_path_tuples(self):
|
||||
accesses = [create_access(self._inference_state, o) for o in self._get_objects_path()]
|
||||
return [(access.py__name__(), access) for access in accesses]
|
||||
|
||||
def _get_objects_path(self):
|
||||
def get():
|
||||
obj = self._obj
|
||||
yield obj
|
||||
try:
|
||||
obj = obj.__objclass__
|
||||
except AttributeError:
|
||||
pass
|
||||
else:
|
||||
yield obj
|
||||
|
||||
try:
|
||||
# Returns a dotted string path.
|
||||
imp_plz = obj.__module__
|
||||
except AttributeError:
|
||||
# Unfortunately in some cases like `int` there's no __module__
|
||||
if not inspect.ismodule(obj):
|
||||
yield builtins
|
||||
else:
|
||||
if imp_plz is None:
|
||||
# Happens for example in `(_ for _ in []).send.__module__`.
|
||||
yield builtins
|
||||
else:
|
||||
try:
|
||||
yield sys.modules[imp_plz]
|
||||
except KeyError:
|
||||
# __module__ can be something arbitrary that doesn't exist.
|
||||
yield builtins
|
||||
|
||||
return list(reversed(list(get())))
|
||||
|
||||
def execute_operation(self, other_access_handle, operator):
|
||||
other_access = other_access_handle.access
|
||||
op = _OPERATORS[operator]
|
||||
return self._create_access_path(op(self._obj, other_access._obj))
|
||||
|
||||
def get_annotation_name_and_args(self):
|
||||
"""
|
||||
Returns Tuple[Optional[str], Tuple[AccessPath, ...]]
|
||||
"""
|
||||
name = None
|
||||
args = ()
|
||||
if safe_getattr(self._obj, '__module__', default='') == 'typing':
|
||||
m = re.match(r'typing.(\w+)\[', repr(self._obj))
|
||||
if m is not None:
|
||||
name = m.group(1)
|
||||
|
||||
import typing
|
||||
if sys.version_info >= (3, 8):
|
||||
args = typing.get_args(self._obj)
|
||||
else:
|
||||
args = safe_getattr(self._obj, '__args__', default=None)
|
||||
return name, tuple(self._create_access_path(arg) for arg in args)
|
||||
|
||||
def needs_type_completions(self):
|
||||
return inspect.isclass(self._obj) and self._obj != type
|
||||
|
||||
def _annotation_to_str(self, annotation):
|
||||
return inspect.formatannotation(annotation)
|
||||
|
||||
def get_signature_params(self):
|
||||
return [
|
||||
SignatureParam(
|
||||
name=p.name,
|
||||
has_default=p.default is not p.empty,
|
||||
default=self._create_access_path(p.default),
|
||||
default_string=repr(p.default),
|
||||
has_annotation=p.annotation is not p.empty,
|
||||
annotation=self._create_access_path(p.annotation),
|
||||
annotation_string=self._annotation_to_str(p.annotation),
|
||||
kind_name=str(p.kind)
|
||||
) for p in self._get_signature().parameters.values()
|
||||
]
|
||||
|
||||
def _get_signature(self):
|
||||
obj = self._obj
|
||||
try:
|
||||
return inspect.signature(obj)
|
||||
except (RuntimeError, TypeError):
|
||||
# Reading the code of the function in Python 3.6 implies there are
|
||||
# at least these errors that might occur if something is wrong with
|
||||
# the signature. In that case we just want a simple escape for now.
|
||||
raise ValueError
|
||||
|
||||
def get_return_annotation(self):
|
||||
try:
|
||||
o = self._obj.__annotations__.get('return')
|
||||
except AttributeError:
|
||||
return None
|
||||
|
||||
if o is None:
|
||||
return None
|
||||
|
||||
try:
|
||||
o = typing.get_type_hints(self._obj).get('return')
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return self._create_access_path(o)
|
||||
|
||||
def negate(self):
|
||||
return self._create_access_path(-self._obj)
|
||||
|
||||
def get_dir_infos(self):
|
||||
"""
|
||||
Used to return a couple of infos that are needed when accessing the sub
|
||||
objects of an objects
|
||||
"""
|
||||
tuples = dict(
|
||||
(name, self.is_allowed_getattr(name))
|
||||
for name in self.dir()
|
||||
)
|
||||
return self.needs_type_completions(), tuples
|
||||
|
||||
|
||||
def _is_class_instance(obj):
|
||||
"""Like inspect.* methods."""
|
||||
try:
|
||||
cls = obj.__class__
|
||||
except AttributeError:
|
||||
return False
|
||||
else:
|
||||
# The isinstance check for cls is just there so issubclass doesn't
|
||||
# raise an exception.
|
||||
return cls != type and isinstance(cls, type) and not issubclass(cls, NOT_CLASS_TYPES)
|
@ -0,0 +1,121 @@
|
||||
"""
|
||||
A static version of getattr.
|
||||
This is a backport of the Python 3 code with a little bit of additional
|
||||
information returned to enable Jedi to make decisions.
|
||||
"""
|
||||
|
||||
import types
|
||||
|
||||
from jedi import debug
|
||||
|
||||
_sentinel = object()
|
||||
|
||||
|
||||
def _check_instance(obj, attr):
|
||||
instance_dict = {}
|
||||
try:
|
||||
instance_dict = object.__getattribute__(obj, "__dict__")
|
||||
except AttributeError:
|
||||
pass
|
||||
return dict.get(instance_dict, attr, _sentinel)
|
||||
|
||||
|
||||
def _check_class(klass, attr):
|
||||
for entry in _static_getmro(klass):
|
||||
if _shadowed_dict(type(entry)) is _sentinel:
|
||||
try:
|
||||
return entry.__dict__[attr]
|
||||
except KeyError:
|
||||
pass
|
||||
return _sentinel
|
||||
|
||||
|
||||
def _is_type(obj):
|
||||
try:
|
||||
_static_getmro(obj)
|
||||
except TypeError:
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def _shadowed_dict(klass):
|
||||
dict_attr = type.__dict__["__dict__"]
|
||||
for entry in _static_getmro(klass):
|
||||
try:
|
||||
class_dict = dict_attr.__get__(entry)["__dict__"]
|
||||
except KeyError:
|
||||
pass
|
||||
else:
|
||||
if not (type(class_dict) is types.GetSetDescriptorType
|
||||
and class_dict.__name__ == "__dict__"
|
||||
and class_dict.__objclass__ is entry):
|
||||
return class_dict
|
||||
return _sentinel
|
||||
|
||||
|
||||
def _static_getmro(klass):
|
||||
mro = type.__dict__['__mro__'].__get__(klass)
|
||||
if not isinstance(mro, (tuple, list)):
|
||||
# There are unfortunately no tests for this, I was not able to
|
||||
# reproduce this in pure Python. However should still solve the issue
|
||||
# raised in GH #1517.
|
||||
debug.warning('mro of %s returned %s, should be a tuple' % (klass, mro))
|
||||
return ()
|
||||
return mro
|
||||
|
||||
|
||||
def _safe_hasattr(obj, name):
|
||||
return _check_class(type(obj), name) is not _sentinel
|
||||
|
||||
|
||||
def _safe_is_data_descriptor(obj):
|
||||
return _safe_hasattr(obj, '__set__') or _safe_hasattr(obj, '__delete__')
|
||||
|
||||
|
||||
def getattr_static(obj, attr, default=_sentinel):
|
||||
"""Retrieve attributes without triggering dynamic lookup via the
|
||||
descriptor protocol, __getattr__ or __getattribute__.
|
||||
|
||||
Note: this function may not be able to retrieve all attributes
|
||||
that getattr can fetch (like dynamically created attributes)
|
||||
and may find attributes that getattr can't (like descriptors
|
||||
that raise AttributeError). It can also return descriptor objects
|
||||
instead of instance members in some cases. See the
|
||||
documentation for details.
|
||||
|
||||
Returns a tuple `(attr, is_get_descriptor)`. is_get_descripter means that
|
||||
the attribute is a descriptor that has a `__get__` attribute.
|
||||
"""
|
||||
instance_result = _sentinel
|
||||
if not _is_type(obj):
|
||||
klass = type(obj)
|
||||
dict_attr = _shadowed_dict(klass)
|
||||
if (dict_attr is _sentinel or type(dict_attr) is types.MemberDescriptorType):
|
||||
instance_result = _check_instance(obj, attr)
|
||||
else:
|
||||
klass = obj
|
||||
|
||||
klass_result = _check_class(klass, attr)
|
||||
|
||||
if instance_result is not _sentinel and klass_result is not _sentinel:
|
||||
if _safe_hasattr(klass_result, '__get__') \
|
||||
and _safe_is_data_descriptor(klass_result):
|
||||
# A get/set descriptor has priority over everything.
|
||||
return klass_result, True
|
||||
|
||||
if instance_result is not _sentinel:
|
||||
return instance_result, False
|
||||
if klass_result is not _sentinel:
|
||||
return klass_result, _safe_hasattr(klass_result, '__get__')
|
||||
|
||||
if obj is klass:
|
||||
# for types we check the metaclass too
|
||||
for entry in _static_getmro(type(klass)):
|
||||
if _shadowed_dict(type(entry)) is _sentinel:
|
||||
try:
|
||||
return entry.__dict__[attr], False
|
||||
except KeyError:
|
||||
pass
|
||||
if default is not _sentinel:
|
||||
return default, False
|
||||
raise AttributeError(attr)
|
309
.venv/Lib/site-packages/jedi/inference/compiled/mixed.py
Normal file
309
.venv/Lib/site-packages/jedi/inference/compiled/mixed.py
Normal file
@ -0,0 +1,309 @@
|
||||
"""
|
||||
Used only for REPL Completion.
|
||||
"""
|
||||
|
||||
import inspect
|
||||
from pathlib import Path
|
||||
|
||||
from jedi.parser_utils import get_cached_code_lines
|
||||
|
||||
from jedi import settings
|
||||
from jedi.cache import memoize_method
|
||||
from jedi.inference import compiled
|
||||
from jedi.file_io import FileIO
|
||||
from jedi.inference.names import NameWrapper
|
||||
from jedi.inference.base_value import ValueSet, ValueWrapper, NO_VALUES
|
||||
from jedi.inference.value import ModuleValue
|
||||
from jedi.inference.cache import inference_state_function_cache, \
|
||||
inference_state_method_cache
|
||||
from jedi.inference.compiled.access import ALLOWED_GETITEM_TYPES, get_api_type
|
||||
from jedi.inference.gradual.conversion import to_stub
|
||||
from jedi.inference.context import CompiledContext, CompiledModuleContext, \
|
||||
TreeContextMixin
|
||||
|
||||
_sentinel = object()
|
||||
|
||||
|
||||
class MixedObject(ValueWrapper):
|
||||
"""
|
||||
A ``MixedObject`` is used in two ways:
|
||||
|
||||
1. It uses the default logic of ``parser.python.tree`` objects,
|
||||
2. except for getattr calls and signatures. The names dicts are generated
|
||||
in a fashion like ``CompiledValue``.
|
||||
|
||||
This combined logic makes it possible to provide more powerful REPL
|
||||
completion. It allows side effects that are not noticable with the default
|
||||
parser structure to still be completeable.
|
||||
|
||||
The biggest difference from CompiledValue to MixedObject is that we are
|
||||
generally dealing with Python code and not with C code. This will generate
|
||||
fewer special cases, because we in Python you don't have the same freedoms
|
||||
to modify the runtime.
|
||||
"""
|
||||
def __init__(self, compiled_value, tree_value):
|
||||
super().__init__(tree_value)
|
||||
self.compiled_value = compiled_value
|
||||
self.access_handle = compiled_value.access_handle
|
||||
|
||||
def get_filters(self, *args, **kwargs):
|
||||
yield MixedObjectFilter(
|
||||
self.inference_state, self.compiled_value, self._wrapped_value)
|
||||
|
||||
def get_signatures(self):
|
||||
# Prefer `inspect.signature` over somehow analyzing Python code. It
|
||||
# should be very precise, especially for stuff like `partial`.
|
||||
return self.compiled_value.get_signatures()
|
||||
|
||||
@inference_state_method_cache(default=NO_VALUES)
|
||||
def py__call__(self, arguments):
|
||||
# Fallback to the wrapped value if to stub returns no values.
|
||||
values = to_stub(self._wrapped_value)
|
||||
if not values:
|
||||
values = self._wrapped_value
|
||||
return values.py__call__(arguments)
|
||||
|
||||
def get_safe_value(self, default=_sentinel):
|
||||
if default is _sentinel:
|
||||
return self.compiled_value.get_safe_value()
|
||||
else:
|
||||
return self.compiled_value.get_safe_value(default)
|
||||
|
||||
@property
|
||||
def array_type(self):
|
||||
return self.compiled_value.array_type
|
||||
|
||||
def get_key_values(self):
|
||||
return self.compiled_value.get_key_values()
|
||||
|
||||
def py__simple_getitem__(self, index):
|
||||
python_object = self.compiled_value.access_handle.access._obj
|
||||
if type(python_object) in ALLOWED_GETITEM_TYPES:
|
||||
return self.compiled_value.py__simple_getitem__(index)
|
||||
return self._wrapped_value.py__simple_getitem__(index)
|
||||
|
||||
def negate(self):
|
||||
return self.compiled_value.negate()
|
||||
|
||||
def _as_context(self):
|
||||
if self.parent_context is None:
|
||||
return MixedModuleContext(self)
|
||||
return MixedContext(self)
|
||||
|
||||
def __repr__(self):
|
||||
return '<%s: %s; %s>' % (
|
||||
type(self).__name__,
|
||||
self.access_handle.get_repr(),
|
||||
self._wrapped_value,
|
||||
)
|
||||
|
||||
|
||||
class MixedContext(CompiledContext, TreeContextMixin):
|
||||
@property
|
||||
def compiled_value(self):
|
||||
return self._value.compiled_value
|
||||
|
||||
|
||||
class MixedModuleContext(CompiledModuleContext, MixedContext):
|
||||
pass
|
||||
|
||||
|
||||
class MixedName(NameWrapper):
|
||||
"""
|
||||
The ``CompiledName._compiled_value`` is our MixedObject.
|
||||
"""
|
||||
def __init__(self, wrapped_name, parent_tree_value):
|
||||
super().__init__(wrapped_name)
|
||||
self._parent_tree_value = parent_tree_value
|
||||
|
||||
@property
|
||||
def start_pos(self):
|
||||
values = list(self.infer())
|
||||
if not values:
|
||||
# This means a start_pos that doesn't exist (compiled objects).
|
||||
return 0, 0
|
||||
return values[0].name.start_pos
|
||||
|
||||
@memoize_method
|
||||
def infer(self):
|
||||
compiled_value = self._wrapped_name.infer_compiled_value()
|
||||
tree_value = self._parent_tree_value
|
||||
if tree_value.is_instance() or tree_value.is_class():
|
||||
tree_values = tree_value.py__getattribute__(self.string_name)
|
||||
if compiled_value.is_function():
|
||||
return ValueSet({MixedObject(compiled_value, v) for v in tree_values})
|
||||
|
||||
module_context = tree_value.get_root_context()
|
||||
return _create(self._inference_state, compiled_value, module_context)
|
||||
|
||||
|
||||
class MixedObjectFilter(compiled.CompiledValueFilter):
|
||||
def __init__(self, inference_state, compiled_value, tree_value):
|
||||
super().__init__(inference_state, compiled_value)
|
||||
self._tree_value = tree_value
|
||||
|
||||
def _create_name(self, name):
|
||||
return MixedName(
|
||||
super()._create_name(name),
|
||||
self._tree_value,
|
||||
)
|
||||
|
||||
|
||||
@inference_state_function_cache()
|
||||
def _load_module(inference_state, path):
|
||||
return inference_state.parse(
|
||||
path=path,
|
||||
cache=True,
|
||||
diff_cache=settings.fast_parser,
|
||||
cache_path=settings.cache_directory
|
||||
).get_root_node()
|
||||
|
||||
|
||||
def _get_object_to_check(python_object):
|
||||
"""Check if inspect.getfile has a chance to find the source."""
|
||||
try:
|
||||
python_object = inspect.unwrap(python_object)
|
||||
except ValueError:
|
||||
# Can return a ValueError when it wraps around
|
||||
pass
|
||||
|
||||
if (inspect.ismodule(python_object)
|
||||
or inspect.isclass(python_object)
|
||||
or inspect.ismethod(python_object)
|
||||
or inspect.isfunction(python_object)
|
||||
or inspect.istraceback(python_object)
|
||||
or inspect.isframe(python_object)
|
||||
or inspect.iscode(python_object)):
|
||||
return python_object
|
||||
|
||||
try:
|
||||
return python_object.__class__
|
||||
except AttributeError:
|
||||
raise TypeError # Prevents computation of `repr` within inspect.
|
||||
|
||||
|
||||
def _find_syntax_node_name(inference_state, python_object):
|
||||
original_object = python_object
|
||||
try:
|
||||
python_object = _get_object_to_check(python_object)
|
||||
path = inspect.getsourcefile(python_object)
|
||||
except (OSError, TypeError):
|
||||
# The type might not be known (e.g. class_with_dict.__weakref__)
|
||||
return None
|
||||
path = None if path is None else Path(path)
|
||||
try:
|
||||
if path is None or not path.exists():
|
||||
# The path might not exist or be e.g. <stdin>.
|
||||
return None
|
||||
except OSError:
|
||||
# Might raise an OSError on Windows:
|
||||
#
|
||||
# [WinError 123] The filename, directory name, or volume label
|
||||
# syntax is incorrect: '<string>'
|
||||
return None
|
||||
|
||||
file_io = FileIO(path)
|
||||
module_node = _load_module(inference_state, path)
|
||||
|
||||
if inspect.ismodule(python_object):
|
||||
# We don't need to check names for modules, because there's not really
|
||||
# a way to write a module in a module in Python (and also __name__ can
|
||||
# be something like ``email.utils``).
|
||||
code_lines = get_cached_code_lines(inference_state.grammar, path)
|
||||
return module_node, module_node, file_io, code_lines
|
||||
|
||||
try:
|
||||
name_str = python_object.__name__
|
||||
except AttributeError:
|
||||
# Stuff like python_function.__code__.
|
||||
return None
|
||||
|
||||
if name_str == '<lambda>':
|
||||
return None # It's too hard to find lambdas.
|
||||
|
||||
# Doesn't always work (e.g. os.stat_result)
|
||||
names = module_node.get_used_names().get(name_str, [])
|
||||
# Only functions and classes are relevant. If a name e.g. points to an
|
||||
# import, it's probably a builtin (like collections.deque) and needs to be
|
||||
# ignored.
|
||||
names = [
|
||||
n for n in names
|
||||
if n.parent.type in ('funcdef', 'classdef') and n.parent.name == n
|
||||
]
|
||||
if not names:
|
||||
return None
|
||||
|
||||
try:
|
||||
code = python_object.__code__
|
||||
# By using the line number of a code object we make the lookup in a
|
||||
# file pretty easy. There's still a possibility of people defining
|
||||
# stuff like ``a = 3; foo(a); a = 4`` on the same line, but if people
|
||||
# do so we just don't care.
|
||||
line_nr = code.co_firstlineno
|
||||
except AttributeError:
|
||||
pass
|
||||
else:
|
||||
line_names = [name for name in names if name.start_pos[0] == line_nr]
|
||||
# There's a chance that the object is not available anymore, because
|
||||
# the code has changed in the background.
|
||||
if line_names:
|
||||
names = line_names
|
||||
|
||||
code_lines = get_cached_code_lines(inference_state.grammar, path)
|
||||
# It's really hard to actually get the right definition, here as a last
|
||||
# resort we just return the last one. This chance might lead to odd
|
||||
# completions at some points but will lead to mostly correct type
|
||||
# inference, because people tend to define a public name in a module only
|
||||
# once.
|
||||
tree_node = names[-1].parent
|
||||
if tree_node.type == 'funcdef' and get_api_type(original_object) == 'instance':
|
||||
# If an instance is given and we're landing on a function (e.g.
|
||||
# partial in 3.5), something is completely wrong and we should not
|
||||
# return that.
|
||||
return None
|
||||
return module_node, tree_node, file_io, code_lines
|
||||
|
||||
|
||||
@inference_state_function_cache()
|
||||
def _create(inference_state, compiled_value, module_context):
|
||||
# TODO accessing this is bad, but it probably doesn't matter that much,
|
||||
# because we're working with interpreteters only here.
|
||||
python_object = compiled_value.access_handle.access._obj
|
||||
result = _find_syntax_node_name(inference_state, python_object)
|
||||
if result is None:
|
||||
# TODO Care about generics from stuff like `[1]` and don't return like this.
|
||||
if type(python_object) in (dict, list, tuple):
|
||||
return ValueSet({compiled_value})
|
||||
|
||||
tree_values = to_stub(compiled_value)
|
||||
if not tree_values:
|
||||
return ValueSet({compiled_value})
|
||||
else:
|
||||
module_node, tree_node, file_io, code_lines = result
|
||||
|
||||
if module_context is None or module_context.tree_node != module_node:
|
||||
root_compiled_value = compiled_value.get_root_context().get_value()
|
||||
# TODO this __name__ might be wrong.
|
||||
name = root_compiled_value.py__name__()
|
||||
string_names = tuple(name.split('.'))
|
||||
module_value = ModuleValue(
|
||||
inference_state, module_node,
|
||||
file_io=file_io,
|
||||
string_names=string_names,
|
||||
code_lines=code_lines,
|
||||
is_package=root_compiled_value.is_package(),
|
||||
)
|
||||
if name is not None:
|
||||
inference_state.module_cache.add(string_names, ValueSet([module_value]))
|
||||
module_context = module_value.as_context()
|
||||
|
||||
tree_values = ValueSet({module_context.create_value(tree_node)})
|
||||
if tree_node.type == 'classdef':
|
||||
if not compiled_value.is_class():
|
||||
# Is an instance, not a class.
|
||||
tree_values = tree_values.execute_with_values()
|
||||
|
||||
return ValueSet(
|
||||
MixedObject(compiled_value, tree_value=tree_value)
|
||||
for tree_value in tree_values
|
||||
)
|
@ -0,0 +1,391 @@
|
||||
"""
|
||||
Makes it possible to do the compiled analysis in a subprocess. This has two
|
||||
goals:
|
||||
|
||||
1. Making it safer - Segfaults and RuntimeErrors as well as stdout/stderr can
|
||||
be ignored and dealt with.
|
||||
2. Make it possible to handle different Python versions as well as virtualenvs.
|
||||
"""
|
||||
|
||||
import collections
|
||||
import os
|
||||
import sys
|
||||
import queue
|
||||
import subprocess
|
||||
import traceback
|
||||
import weakref
|
||||
from functools import partial
|
||||
from threading import Thread
|
||||
|
||||
from jedi._compatibility import pickle_dump, pickle_load
|
||||
from jedi import debug
|
||||
from jedi.cache import memoize_method
|
||||
from jedi.inference.compiled.subprocess import functions
|
||||
from jedi.inference.compiled.access import DirectObjectAccess, AccessPath, \
|
||||
SignatureParam
|
||||
from jedi.api.exceptions import InternalError
|
||||
|
||||
|
||||
_MAIN_PATH = os.path.join(os.path.dirname(__file__), '__main__.py')
|
||||
PICKLE_PROTOCOL = 4
|
||||
|
||||
|
||||
def _GeneralizedPopen(*args, **kwargs):
|
||||
if os.name == 'nt':
|
||||
try:
|
||||
# Was introduced in Python 3.7.
|
||||
CREATE_NO_WINDOW = subprocess.CREATE_NO_WINDOW
|
||||
except AttributeError:
|
||||
CREATE_NO_WINDOW = 0x08000000
|
||||
kwargs['creationflags'] = CREATE_NO_WINDOW
|
||||
# The child process doesn't need file descriptors except 0, 1, 2.
|
||||
# This is unix only.
|
||||
kwargs['close_fds'] = 'posix' in sys.builtin_module_names
|
||||
|
||||
return subprocess.Popen(*args, **kwargs)
|
||||
|
||||
|
||||
def _enqueue_output(out, queue_):
|
||||
for line in iter(out.readline, b''):
|
||||
queue_.put(line)
|
||||
|
||||
|
||||
def _add_stderr_to_debug(stderr_queue):
|
||||
while True:
|
||||
# Try to do some error reporting from the subprocess and print its
|
||||
# stderr contents.
|
||||
try:
|
||||
line = stderr_queue.get_nowait()
|
||||
line = line.decode('utf-8', 'replace')
|
||||
debug.warning('stderr output: %s' % line.rstrip('\n'))
|
||||
except queue.Empty:
|
||||
break
|
||||
|
||||
|
||||
def _get_function(name):
|
||||
return getattr(functions, name)
|
||||
|
||||
|
||||
def _cleanup_process(process, thread):
|
||||
try:
|
||||
process.kill()
|
||||
process.wait()
|
||||
except OSError:
|
||||
# Raised if the process is already killed.
|
||||
pass
|
||||
thread.join()
|
||||
for stream in [process.stdin, process.stdout, process.stderr]:
|
||||
try:
|
||||
stream.close()
|
||||
except OSError:
|
||||
# Raised if the stream is broken.
|
||||
pass
|
||||
|
||||
|
||||
class _InferenceStateProcess:
|
||||
def __init__(self, inference_state):
|
||||
self._inference_state_weakref = weakref.ref(inference_state)
|
||||
self._inference_state_id = id(inference_state)
|
||||
self._handles = {}
|
||||
|
||||
def get_or_create_access_handle(self, obj):
|
||||
id_ = id(obj)
|
||||
try:
|
||||
return self.get_access_handle(id_)
|
||||
except KeyError:
|
||||
access = DirectObjectAccess(self._inference_state_weakref(), obj)
|
||||
handle = AccessHandle(self, access, id_)
|
||||
self.set_access_handle(handle)
|
||||
return handle
|
||||
|
||||
def get_access_handle(self, id_):
|
||||
return self._handles[id_]
|
||||
|
||||
def set_access_handle(self, handle):
|
||||
self._handles[handle.id] = handle
|
||||
|
||||
|
||||
class InferenceStateSameProcess(_InferenceStateProcess):
|
||||
"""
|
||||
Basically just an easy access to functions.py. It has the same API
|
||||
as InferenceStateSubprocess and does the same thing without using a subprocess.
|
||||
This is necessary for the Interpreter process.
|
||||
"""
|
||||
def __getattr__(self, name):
|
||||
return partial(_get_function(name), self._inference_state_weakref())
|
||||
|
||||
|
||||
class InferenceStateSubprocess(_InferenceStateProcess):
|
||||
def __init__(self, inference_state, compiled_subprocess):
|
||||
super().__init__(inference_state)
|
||||
self._used = False
|
||||
self._compiled_subprocess = compiled_subprocess
|
||||
|
||||
def __getattr__(self, name):
|
||||
func = _get_function(name)
|
||||
|
||||
def wrapper(*args, **kwargs):
|
||||
self._used = True
|
||||
|
||||
result = self._compiled_subprocess.run(
|
||||
self._inference_state_weakref(),
|
||||
func,
|
||||
args=args,
|
||||
kwargs=kwargs,
|
||||
)
|
||||
# IMO it should be possible to create a hook in pickle.load to
|
||||
# mess with the loaded objects. However it's extremely complicated
|
||||
# to work around this so just do it with this call. ~ dave
|
||||
return self._convert_access_handles(result)
|
||||
|
||||
return wrapper
|
||||
|
||||
def _convert_access_handles(self, obj):
|
||||
if isinstance(obj, SignatureParam):
|
||||
return SignatureParam(*self._convert_access_handles(tuple(obj)))
|
||||
elif isinstance(obj, tuple):
|
||||
return tuple(self._convert_access_handles(o) for o in obj)
|
||||
elif isinstance(obj, list):
|
||||
return [self._convert_access_handles(o) for o in obj]
|
||||
elif isinstance(obj, AccessHandle):
|
||||
try:
|
||||
# Rewrite the access handle to one we're already having.
|
||||
obj = self.get_access_handle(obj.id)
|
||||
except KeyError:
|
||||
obj.add_subprocess(self)
|
||||
self.set_access_handle(obj)
|
||||
elif isinstance(obj, AccessPath):
|
||||
return AccessPath(self._convert_access_handles(obj.accesses))
|
||||
return obj
|
||||
|
||||
def __del__(self):
|
||||
if self._used and not self._compiled_subprocess.is_crashed:
|
||||
self._compiled_subprocess.delete_inference_state(self._inference_state_id)
|
||||
|
||||
|
||||
class CompiledSubprocess:
|
||||
is_crashed = False
|
||||
|
||||
def __init__(self, executable, env_vars=None):
|
||||
self._executable = executable
|
||||
self._env_vars = env_vars
|
||||
self._inference_state_deletion_queue = collections.deque()
|
||||
self._cleanup_callable = lambda: None
|
||||
|
||||
def __repr__(self):
|
||||
pid = os.getpid()
|
||||
return '<%s _executable=%r, is_crashed=%r, pid=%r>' % (
|
||||
self.__class__.__name__,
|
||||
self._executable,
|
||||
self.is_crashed,
|
||||
pid,
|
||||
)
|
||||
|
||||
@memoize_method
|
||||
def _get_process(self):
|
||||
debug.dbg('Start environment subprocess %s', self._executable)
|
||||
parso_path = sys.modules['parso'].__file__
|
||||
args = (
|
||||
self._executable,
|
||||
_MAIN_PATH,
|
||||
os.path.dirname(os.path.dirname(parso_path)),
|
||||
'.'.join(str(x) for x in sys.version_info[:3]),
|
||||
)
|
||||
process = _GeneralizedPopen(
|
||||
args,
|
||||
stdin=subprocess.PIPE,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
env=self._env_vars
|
||||
)
|
||||
self._stderr_queue = queue.Queue()
|
||||
self._stderr_thread = t = Thread(
|
||||
target=_enqueue_output,
|
||||
args=(process.stderr, self._stderr_queue)
|
||||
)
|
||||
t.daemon = True
|
||||
t.start()
|
||||
# Ensure the subprocess is properly cleaned up when the object
|
||||
# is garbage collected.
|
||||
self._cleanup_callable = weakref.finalize(self,
|
||||
_cleanup_process,
|
||||
process,
|
||||
t)
|
||||
return process
|
||||
|
||||
def run(self, inference_state, function, args=(), kwargs={}):
|
||||
# Delete old inference_states.
|
||||
while True:
|
||||
try:
|
||||
inference_state_id = self._inference_state_deletion_queue.pop()
|
||||
except IndexError:
|
||||
break
|
||||
else:
|
||||
self._send(inference_state_id, None)
|
||||
|
||||
assert callable(function)
|
||||
return self._send(id(inference_state), function, args, kwargs)
|
||||
|
||||
def get_sys_path(self):
|
||||
return self._send(None, functions.get_sys_path, (), {})
|
||||
|
||||
def _kill(self):
|
||||
self.is_crashed = True
|
||||
self._cleanup_callable()
|
||||
|
||||
def _send(self, inference_state_id, function, args=(), kwargs={}):
|
||||
if self.is_crashed:
|
||||
raise InternalError("The subprocess %s has crashed." % self._executable)
|
||||
|
||||
data = inference_state_id, function, args, kwargs
|
||||
try:
|
||||
pickle_dump(data, self._get_process().stdin, PICKLE_PROTOCOL)
|
||||
except BrokenPipeError:
|
||||
self._kill()
|
||||
raise InternalError("The subprocess %s was killed. Maybe out of memory?"
|
||||
% self._executable)
|
||||
|
||||
try:
|
||||
is_exception, traceback, result = pickle_load(self._get_process().stdout)
|
||||
except EOFError as eof_error:
|
||||
try:
|
||||
stderr = self._get_process().stderr.read().decode('utf-8', 'replace')
|
||||
except Exception as exc:
|
||||
stderr = '<empty/not available (%r)>' % exc
|
||||
self._kill()
|
||||
_add_stderr_to_debug(self._stderr_queue)
|
||||
raise InternalError(
|
||||
"The subprocess %s has crashed (%r, stderr=%s)." % (
|
||||
self._executable,
|
||||
eof_error,
|
||||
stderr,
|
||||
))
|
||||
|
||||
_add_stderr_to_debug(self._stderr_queue)
|
||||
|
||||
if is_exception:
|
||||
# Replace the attribute error message with a the traceback. It's
|
||||
# way more informative.
|
||||
result.args = (traceback,)
|
||||
raise result
|
||||
return result
|
||||
|
||||
def delete_inference_state(self, inference_state_id):
|
||||
"""
|
||||
Currently we are not deleting inference_state instantly. They only get
|
||||
deleted once the subprocess is used again. It would probably a better
|
||||
solution to move all of this into a thread. However, the memory usage
|
||||
of a single inference_state shouldn't be that high.
|
||||
"""
|
||||
# With an argument - the inference_state gets deleted.
|
||||
self._inference_state_deletion_queue.append(inference_state_id)
|
||||
|
||||
|
||||
class Listener:
|
||||
def __init__(self):
|
||||
self._inference_states = {}
|
||||
# TODO refactor so we don't need to process anymore just handle
|
||||
# controlling.
|
||||
self._process = _InferenceStateProcess(Listener)
|
||||
|
||||
def _get_inference_state(self, function, inference_state_id):
|
||||
from jedi.inference import InferenceState
|
||||
|
||||
try:
|
||||
inference_state = self._inference_states[inference_state_id]
|
||||
except KeyError:
|
||||
from jedi import InterpreterEnvironment
|
||||
inference_state = InferenceState(
|
||||
# The project is not actually needed. Nothing should need to
|
||||
# access it.
|
||||
project=None,
|
||||
environment=InterpreterEnvironment()
|
||||
)
|
||||
self._inference_states[inference_state_id] = inference_state
|
||||
return inference_state
|
||||
|
||||
def _run(self, inference_state_id, function, args, kwargs):
|
||||
if inference_state_id is None:
|
||||
return function(*args, **kwargs)
|
||||
elif function is None:
|
||||
del self._inference_states[inference_state_id]
|
||||
else:
|
||||
inference_state = self._get_inference_state(function, inference_state_id)
|
||||
|
||||
# Exchange all handles
|
||||
args = list(args)
|
||||
for i, arg in enumerate(args):
|
||||
if isinstance(arg, AccessHandle):
|
||||
args[i] = inference_state.compiled_subprocess.get_access_handle(arg.id)
|
||||
for key, value in kwargs.items():
|
||||
if isinstance(value, AccessHandle):
|
||||
kwargs[key] = inference_state.compiled_subprocess.get_access_handle(value.id)
|
||||
|
||||
return function(inference_state, *args, **kwargs)
|
||||
|
||||
def listen(self):
|
||||
stdout = sys.stdout
|
||||
# Mute stdout. Nobody should actually be able to write to it,
|
||||
# because stdout is used for IPC.
|
||||
sys.stdout = open(os.devnull, 'w')
|
||||
stdin = sys.stdin
|
||||
stdout = stdout.buffer
|
||||
stdin = stdin.buffer
|
||||
|
||||
while True:
|
||||
try:
|
||||
payload = pickle_load(stdin)
|
||||
except EOFError:
|
||||
# It looks like the parent process closed.
|
||||
# Don't make a big fuss here and just exit.
|
||||
exit(0)
|
||||
try:
|
||||
result = False, None, self._run(*payload)
|
||||
except Exception as e:
|
||||
result = True, traceback.format_exc(), e
|
||||
|
||||
pickle_dump(result, stdout, PICKLE_PROTOCOL)
|
||||
|
||||
|
||||
class AccessHandle:
|
||||
def __init__(self, subprocess, access, id_):
|
||||
self.access = access
|
||||
self._subprocess = subprocess
|
||||
self.id = id_
|
||||
|
||||
def add_subprocess(self, subprocess):
|
||||
self._subprocess = subprocess
|
||||
|
||||
def __repr__(self):
|
||||
try:
|
||||
detail = self.access
|
||||
except AttributeError:
|
||||
detail = '#' + str(self.id)
|
||||
return '<%s of %s>' % (self.__class__.__name__, detail)
|
||||
|
||||
def __getstate__(self):
|
||||
return self.id
|
||||
|
||||
def __setstate__(self, state):
|
||||
self.id = state
|
||||
|
||||
def __getattr__(self, name):
|
||||
if name in ('id', 'access') or name.startswith('_'):
|
||||
raise AttributeError("Something went wrong with unpickling")
|
||||
|
||||
# print('getattr', name, file=sys.stderr)
|
||||
return partial(self._workaround, name)
|
||||
|
||||
def _workaround(self, name, *args, **kwargs):
|
||||
"""
|
||||
TODO Currently we're passing slice objects around. This should not
|
||||
happen. They are also the only unhashable objects that we're passing
|
||||
around.
|
||||
"""
|
||||
if args and isinstance(args[0], slice):
|
||||
return self._subprocess.get_compiled_method_return(self.id, name, *args, **kwargs)
|
||||
return self._cached_results(name, *args, **kwargs)
|
||||
|
||||
@memoize_method
|
||||
def _cached_results(self, name, *args, **kwargs):
|
||||
return self._subprocess.get_compiled_method_return(self.id, name, *args, **kwargs)
|
@ -0,0 +1,40 @@
|
||||
import os
|
||||
import sys
|
||||
from importlib.abc import MetaPathFinder
|
||||
from importlib.machinery import PathFinder
|
||||
|
||||
# Remove the first entry, because it's simply a directory entry that equals
|
||||
# this directory.
|
||||
del sys.path[0]
|
||||
|
||||
|
||||
def _get_paths():
|
||||
# Get the path to jedi.
|
||||
_d = os.path.dirname
|
||||
_jedi_path = _d(_d(_d(_d(_d(__file__)))))
|
||||
_parso_path = sys.argv[1]
|
||||
# The paths are the directory that jedi and parso lie in.
|
||||
return {'jedi': _jedi_path, 'parso': _parso_path}
|
||||
|
||||
|
||||
class _ExactImporter(MetaPathFinder):
|
||||
def __init__(self, path_dct):
|
||||
self._path_dct = path_dct
|
||||
|
||||
def find_module(self, fullname, path=None):
|
||||
if path is None and fullname in self._path_dct:
|
||||
p = self._path_dct[fullname]
|
||||
loader = PathFinder.find_module(fullname, path=[p])
|
||||
return loader
|
||||
return None
|
||||
|
||||
|
||||
# Try to import jedi/parso.
|
||||
sys.meta_path.insert(0, _ExactImporter(_get_paths()))
|
||||
from jedi.inference.compiled import subprocess # noqa: E402
|
||||
sys.meta_path.pop(0)
|
||||
|
||||
# Retrieve the pickle protocol.
|
||||
host_sys_version = [int(x) for x in sys.argv[2].split('.')]
|
||||
# And finally start the client.
|
||||
subprocess.Listener().listen()
|
@ -0,0 +1,255 @@
|
||||
import sys
|
||||
import os
|
||||
import inspect
|
||||
import importlib
|
||||
import warnings
|
||||
from pathlib import Path
|
||||
from zipfile import ZipFile
|
||||
from zipimport import zipimporter, ZipImportError
|
||||
from importlib.machinery import all_suffixes
|
||||
|
||||
from jedi.inference.compiled import access
|
||||
from jedi import debug
|
||||
from jedi import parser_utils
|
||||
from jedi.file_io import KnownContentFileIO, ZipFileIO
|
||||
|
||||
|
||||
def get_sys_path():
|
||||
return sys.path
|
||||
|
||||
|
||||
def load_module(inference_state, **kwargs):
|
||||
return access.load_module(inference_state, **kwargs)
|
||||
|
||||
|
||||
def get_compiled_method_return(inference_state, id, attribute, *args, **kwargs):
|
||||
handle = inference_state.compiled_subprocess.get_access_handle(id)
|
||||
return getattr(handle.access, attribute)(*args, **kwargs)
|
||||
|
||||
|
||||
def create_simple_object(inference_state, obj):
|
||||
return access.create_access_path(inference_state, obj)
|
||||
|
||||
|
||||
def get_module_info(inference_state, sys_path=None, full_name=None, **kwargs):
|
||||
"""
|
||||
Returns Tuple[Union[NamespaceInfo, FileIO, None], Optional[bool]]
|
||||
"""
|
||||
if sys_path is not None:
|
||||
sys.path, temp = sys_path, sys.path
|
||||
try:
|
||||
return _find_module(full_name=full_name, **kwargs)
|
||||
except ImportError:
|
||||
return None, None
|
||||
finally:
|
||||
if sys_path is not None:
|
||||
sys.path = temp
|
||||
|
||||
|
||||
def get_builtin_module_names(inference_state):
|
||||
return sys.builtin_module_names
|
||||
|
||||
|
||||
def _test_raise_error(inference_state, exception_type):
|
||||
"""
|
||||
Raise an error to simulate certain problems for unit tests.
|
||||
"""
|
||||
raise exception_type
|
||||
|
||||
|
||||
def _test_print(inference_state, stderr=None, stdout=None):
|
||||
"""
|
||||
Force some prints in the subprocesses. This exists for unit tests.
|
||||
"""
|
||||
if stderr is not None:
|
||||
print(stderr, file=sys.stderr)
|
||||
sys.stderr.flush()
|
||||
if stdout is not None:
|
||||
print(stdout)
|
||||
sys.stdout.flush()
|
||||
|
||||
|
||||
def _get_init_path(directory_path):
|
||||
"""
|
||||
The __init__ file can be searched in a directory. If found return it, else
|
||||
None.
|
||||
"""
|
||||
for suffix in all_suffixes():
|
||||
path = os.path.join(directory_path, '__init__' + suffix)
|
||||
if os.path.exists(path):
|
||||
return path
|
||||
return None
|
||||
|
||||
|
||||
def safe_literal_eval(inference_state, value):
|
||||
return parser_utils.safe_literal_eval(value)
|
||||
|
||||
|
||||
def iter_module_names(*args, **kwargs):
|
||||
return list(_iter_module_names(*args, **kwargs))
|
||||
|
||||
|
||||
def _iter_module_names(inference_state, paths):
|
||||
# Python modules/packages
|
||||
for path in paths:
|
||||
try:
|
||||
dir_entries = ((entry.name, entry.is_dir()) for entry in os.scandir(path))
|
||||
except OSError:
|
||||
try:
|
||||
zip_import_info = zipimporter(path)
|
||||
# Unfortunately, there is no public way to access zipimporter's
|
||||
# private _files member. We therefore have to use a
|
||||
# custom function to iterate over the files.
|
||||
dir_entries = _zip_list_subdirectory(
|
||||
zip_import_info.archive, zip_import_info.prefix)
|
||||
except ZipImportError:
|
||||
# The file might not exist or reading it might lead to an error.
|
||||
debug.warning("Not possible to list directory: %s", path)
|
||||
continue
|
||||
for name, is_dir in dir_entries:
|
||||
# First Namespaces then modules/stubs
|
||||
if is_dir:
|
||||
# pycache is obviously not an interesting namespace. Also the
|
||||
# name must be a valid identifier.
|
||||
if name != '__pycache__' and name.isidentifier():
|
||||
yield name
|
||||
else:
|
||||
if name.endswith('.pyi'): # Stub files
|
||||
modname = name[:-4]
|
||||
else:
|
||||
modname = inspect.getmodulename(name)
|
||||
|
||||
if modname and '.' not in modname:
|
||||
if modname != '__init__':
|
||||
yield modname
|
||||
|
||||
|
||||
def _find_module(string, path=None, full_name=None, is_global_search=True):
    """
    Provides information about a module.

    This function isolates the differences in importing libraries introduced
    with Python 3.3 on; it gets a module name and optionally a path. It returns
    a tuple containing an open file for the module (if not builtin), the
    filename or the name of the module if it is a builtin one, and a boolean
    indicating whether the module is contained in a package.
    """
    spec = None
    loader = None

    for finder in sys.meta_path:
        if is_global_search and finder != importlib.machinery.PathFinder:
            p = None
        else:
            p = path
        try:
            find_spec = finder.find_spec
        except AttributeError:
            # These are old-school classes that still have a different API, just
            # ignore those.
            continue

        spec = find_spec(string, p)
        if spec is not None:
            loader = spec.loader
            if loader is None and not spec.has_location:
                # This is a namespace package.
                full_name = string if not path else full_name
                implicit_ns_info = ImplicitNSInfo(full_name, spec.submodule_search_locations._path)
                return implicit_ns_info, True
            break

    return _find_module_py33(string, path, loader)


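A hedged sketch of the common path through _find_module: for an ordinary source package the namespace branch is skipped and the result comes from the loader machinery below. 'json' is an assumed example module:

# Illustrative only; the concrete FileIO class depends on the loader found.
file_io, is_package = _find_module('json')
print(type(file_io).__name__, is_package)   # expected: KnownContentFileIO True
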
def _find_module_py33(string, path=None, loader=None, full_name=None, is_global_search=True):
    loader = loader or importlib.machinery.PathFinder.find_module(string, path)

    if loader is None and path is None:  # Fallback to find builtins
        try:
            with warnings.catch_warnings(record=True):
                # Mute "DeprecationWarning: Use importlib.util.find_spec()
                # instead." While we should replace that in the future, it's
                # probably good to wait until we deprecate Python 3.3, since
                # it was added in Python 3.4 and find_loader hasn't been
                # removed in 3.6.
                loader = importlib.find_loader(string)
        except ValueError as e:
            # See #491. Importlib might raise a ValueError, to avoid this, we
            # just raise an ImportError to fix the issue.
            raise ImportError("Originally " + repr(e))

    if loader is None:
        raise ImportError("Couldn't find a loader for {}".format(string))

    return _from_loader(loader, string)


def _from_loader(loader, string):
    try:
        is_package_method = loader.is_package
    except AttributeError:
        is_package = False
    else:
        is_package = is_package_method(string)
    try:
        get_filename = loader.get_filename
    except AttributeError:
        return None, is_package
    else:
        module_path = get_filename(string)

    # To avoid unicode and read bytes, "overwrite" loader.get_source if
    # possible.
    try:
        f = type(loader).get_source
    except AttributeError:
        raise ImportError("get_source was not defined on loader")

    if f is not importlib.machinery.SourceFileLoader.get_source:
        # Unfortunately we are reading unicode here, not bytes.
        # It seems hard to get bytes, because the zip importer
        # logic just unpacks the zip file and returns a file descriptor
        # that we cannot as easily access. Therefore we just read it as
        # a string in the cases where get_source was overwritten.
        code = loader.get_source(string)
    else:
        code = _get_source(loader, string)

    if code is None:
        return None, is_package
    if isinstance(loader, zipimporter):
        return ZipFileIO(module_path, code, Path(loader.archive)), is_package

    return KnownContentFileIO(module_path, code), is_package


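A small sketch of the early-return branch in _from_loader: a builtin module's loader exposes no get_filename, so the function reports only the package flag. Assumes the stdlib BuiltinImporter and the builtin module 'sys':

import importlib.machinery

# Illustrative only: no get_filename on the loader means no source is read.
builtin_loader = importlib.machinery.BuiltinImporter
print(_from_loader(builtin_loader, 'sys'))   # expected: (None, False)
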
def _get_source(loader, fullname):
    """
    This method is here as a replacement for SourceLoader.get_source. That
    method returns unicode, but we prefer bytes.
    """
    path = loader.get_filename(fullname)
    try:
        return loader.get_data(path)
    except OSError:
        raise ImportError('source not available through get_data()',
                          name=fullname)


def _zip_list_subdirectory(zip_path, zip_subdir_path):
    zip_file = ZipFile(zip_path)
    zip_subdir_path = Path(zip_subdir_path)
    zip_content_file_paths = zip_file.namelist()
    for raw_file_name in zip_content_file_paths:
        file_path = Path(raw_file_name)
        if file_path.parent == zip_subdir_path:
            file_path = file_path.relative_to(zip_subdir_path)
            yield file_path.name, raw_file_name.endswith("/")


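A self-contained, hedged sketch of the zip listing helper above; the archive and its entries are assumptions created on the fly for this example:

import os
import tempfile
from zipfile import ZipFile

# Illustrative only: build a tiny archive and list the entries directly
# under 'pkg/', the way the generator above would for a zipimported path.
archive = os.path.join(tempfile.mkdtemp(), 'demo.zip')
with ZipFile(archive, 'w') as z:
    z.writestr('pkg/__init__.py', '')
    z.writestr('pkg/mod.py', 'X = 1')
    z.writestr('pkg/sub/extra.py', '')
print(list(_zip_list_subdirectory(archive, 'pkg')))
# expected: [('__init__.py', False), ('mod.py', False)]
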
class ImplicitNSInfo:
    """Stores information returned from an implicit namespace spec"""
    def __init__(self, name, paths):
        self.name = name
        self.paths = paths
612
.venv/Lib/site-packages/jedi/inference/compiled/value.py
Normal file
@ -0,0 +1,612 @@
"""
Imitate the parser representation.
"""
import re
from functools import partial
from inspect import Parameter
from pathlib import Path
from typing import Optional

from jedi import debug
from jedi.inference.utils import to_list
from jedi.cache import memoize_method
from jedi.inference.filters import AbstractFilter
from jedi.inference.names import AbstractNameDefinition, ValueNameMixin, \
    ParamNameInterface
from jedi.inference.base_value import Value, ValueSet, NO_VALUES
from jedi.inference.lazy_value import LazyKnownValue
from jedi.inference.compiled.access import _sentinel
from jedi.inference.cache import inference_state_function_cache
from jedi.inference.helpers import reraise_getitem_errors
from jedi.inference.signature import BuiltinSignature
from jedi.inference.context import CompiledContext, CompiledModuleContext


class CheckAttribute:
    """Raises :exc:`AttributeError` if the attribute X is not available."""
    def __init__(self, check_name=None):
        # Remove the py in front of e.g. py__call__.
        self.check_name = check_name

    def __call__(self, func):
        self.func = func
        if self.check_name is None:
            self.check_name = func.__name__[2:]
        return self

    def __get__(self, instance, owner):
        if instance is None:
            return self

        # This might raise an AttributeError. That's wanted.
        instance.access_handle.getattr_paths(self.check_name)
        return partial(self.func, instance)


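A hedged sketch of the descriptor above: the access_handle lookup happens at attribute-access time, so a missing attribute surfaces as AttributeError before the wrapped method runs. The two stub classes are assumptions of this example, not jedi classes:

class _FakeAccessHandle:
    # Stub access handle; only '__call__' is treated as present.
    def getattr_paths(self, name):
        if name != '__call__':
            raise AttributeError(name)
        return []


class _FakeValue:
    access_handle = _FakeAccessHandle()

    @CheckAttribute()
    def py__call__(self, arguments):
        return 'called'

    @CheckAttribute()
    def py__iter__(self):
        return 'iterated'


v = _FakeValue()
print(v.py__call__(None))        # 'called' -- '__call__' exists on the handle
try:
    v.py__iter__()
except AttributeError:
    print('py__iter__ not available')
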
class CompiledValue(Value):
    def __init__(self, inference_state, access_handle, parent_context=None):
        super().__init__(inference_state, parent_context)
        self.access_handle = access_handle

    def py__call__(self, arguments):
        return_annotation = self.access_handle.get_return_annotation()
        if return_annotation is not None:
            # TODO the return annotation may also be a string.
            return create_from_access_path(
                self.inference_state,
                return_annotation
            ).execute_annotation()

        try:
            self.access_handle.getattr_paths('__call__')
        except AttributeError:
            return super().py__call__(arguments)
        else:
            if self.access_handle.is_class():
                from jedi.inference.value import CompiledInstance
                return ValueSet([
                    CompiledInstance(self.inference_state, self.parent_context, self, arguments)
                ])
            else:
                return ValueSet(self._execute_function(arguments))

    @CheckAttribute()
    def py__class__(self):
        return create_from_access_path(self.inference_state, self.access_handle.py__class__())

    @CheckAttribute()
    def py__mro__(self):
        return (self,) + tuple(
            create_from_access_path(self.inference_state, access)
            for access in self.access_handle.py__mro__accesses()
        )

    @CheckAttribute()
    def py__bases__(self):
        return tuple(
            create_from_access_path(self.inference_state, access)
            for access in self.access_handle.py__bases__()
        )

    def get_qualified_names(self):
        return self.access_handle.get_qualified_names()

    def py__bool__(self):
        return self.access_handle.py__bool__()

    def is_class(self):
        return self.access_handle.is_class()

    def is_function(self):
        return self.access_handle.is_function()

    def is_module(self):
        return self.access_handle.is_module()

    def is_compiled(self):
        return True

    def is_stub(self):
        return False

    def is_instance(self):
        return self.access_handle.is_instance()

    def py__doc__(self):
        return self.access_handle.py__doc__()

    @to_list
    def get_param_names(self):
        try:
            signature_params = self.access_handle.get_signature_params()
        except ValueError:  # Has no signature
            params_str, ret = self._parse_function_doc()
            if not params_str:
                tokens = []
            else:
                tokens = params_str.split(',')
            if self.access_handle.ismethoddescriptor():
                tokens.insert(0, 'self')
            for p in tokens:
                name, _, default = p.strip().partition('=')
                yield UnresolvableParamName(self, name, default)
        else:
            for signature_param in signature_params:
                yield SignatureParamName(self, signature_param)

    def get_signatures(self):
        _, return_string = self._parse_function_doc()
        return [BuiltinSignature(self, return_string)]

    def __repr__(self):
        return '<%s: %s>' % (self.__class__.__name__, self.access_handle.get_repr())

    @memoize_method
    def _parse_function_doc(self):
        doc = self.py__doc__()
        if doc is None:
            return '', ''

        return _parse_function_doc(doc)

    @property
    def api_type(self):
        return self.access_handle.get_api_type()

    def get_filters(self, is_instance=False, origin_scope=None):
        yield self._ensure_one_filter(is_instance)

    @memoize_method
    def _ensure_one_filter(self, is_instance):
        return CompiledValueFilter(self.inference_state, self, is_instance)

    def py__simple_getitem__(self, index):
        with reraise_getitem_errors(IndexError, KeyError, TypeError):
            try:
                access = self.access_handle.py__simple_getitem__(index)
            except AttributeError:
                return super().py__simple_getitem__(index)
        if access is None:
            return super().py__simple_getitem__(index)

        return ValueSet([create_from_access_path(self.inference_state, access)])

    def py__getitem__(self, index_value_set, contextualized_node):
        all_access_paths = self.access_handle.py__getitem__all_values()
        if all_access_paths is None:
            # This means basically that no __getitem__ has been defined on this
            # object.
            return super().py__getitem__(index_value_set, contextualized_node)
        return ValueSet(
            create_from_access_path(self.inference_state, access)
            for access in all_access_paths
        )

    def py__iter__(self, contextualized_node=None):
        if not self.access_handle.has_iter():
            yield from super().py__iter__(contextualized_node)

        access_path_list = self.access_handle.py__iter__list()
        if access_path_list is None:
            # There is no __iter__ method on this object.
            return

        for access in access_path_list:
            yield LazyKnownValue(create_from_access_path(self.inference_state, access))

    def py__name__(self):
        return self.access_handle.py__name__()

    @property
    def name(self):
        name = self.py__name__()
        if name is None:
            name = self.access_handle.get_repr()
        return CompiledValueName(self, name)

    def _execute_function(self, params):
        from jedi.inference import docstrings
        from jedi.inference.compiled import builtin_from_name
        if self.api_type != 'function':
            return

        for name in self._parse_function_doc()[1].split():
            try:
                # TODO wtf is this? this is exactly the same as the thing
                # below. It uses getattr as well.
                self.inference_state.builtins_module.access_handle.getattr_paths(name)
            except AttributeError:
                continue
            else:
                bltn_obj = builtin_from_name(self.inference_state, name)
                yield from self.inference_state.execute(bltn_obj, params)
        yield from docstrings.infer_return_types(self)

    def get_safe_value(self, default=_sentinel):
        try:
            return self.access_handle.get_safe_value()
        except ValueError:
            if default == _sentinel:
                raise
            return default

    def execute_operation(self, other, operator):
        try:
            return ValueSet([create_from_access_path(
                self.inference_state,
                self.access_handle.execute_operation(other.access_handle, operator)
            )])
        except TypeError:
            return NO_VALUES

    def execute_annotation(self):
        if self.access_handle.get_repr() == 'None':
            # None as an annotation doesn't need to be executed.
            return ValueSet([self])

        name, args = self.access_handle.get_annotation_name_and_args()
        arguments = [
            ValueSet([create_from_access_path(self.inference_state, path)])
            for path in args
        ]
        if name == 'Union':
            return ValueSet.from_sets(arg.execute_annotation() for arg in arguments)
        elif name:
            # While with_generics only exists on very specific objects, we
            # should probably be fine, because we control all the typing
            # objects.
            return ValueSet([
                v.with_generics(arguments)
                for v in self.inference_state.typing_module.py__getattribute__(name)
            ]).execute_annotation()
        return super().execute_annotation()

    def negate(self):
        return create_from_access_path(self.inference_state, self.access_handle.negate())

    def get_metaclasses(self):
        return NO_VALUES

    def _as_context(self):
        return CompiledContext(self)

    @property
    def array_type(self):
        return self.access_handle.get_array_type()

    def get_key_values(self):
        return [
            create_from_access_path(self.inference_state, k)
            for k in self.access_handle.get_key_paths()
        ]

    def get_type_hint(self, add_class_info=True):
        if self.access_handle.get_repr() in ('None', "<class 'NoneType'>"):
            return 'None'
        return None


class CompiledModule(CompiledValue):
    file_io = None  # For modules

    def _as_context(self):
        return CompiledModuleContext(self)

    def py__path__(self):
        return self.access_handle.py__path__()

    def is_package(self):
        return self.py__path__() is not None

    @property
    def string_names(self):
        # For modules
        name = self.py__name__()
        if name is None:
            return ()
        return tuple(name.split('.'))

    def py__file__(self) -> Optional[Path]:
        return self.access_handle.py__file__()  # type: ignore[no-any-return]


class CompiledName(AbstractNameDefinition):
    def __init__(self, inference_state, parent_value, name):
        self._inference_state = inference_state
        self.parent_context = parent_value.as_context()
        self._parent_value = parent_value
        self.string_name = name

    def py__doc__(self):
        return self.infer_compiled_value().py__doc__()

    def _get_qualified_names(self):
        parent_qualified_names = self.parent_context.get_qualified_names()
        if parent_qualified_names is None:
            return None
        return parent_qualified_names + (self.string_name,)

    def get_defining_qualified_value(self):
        context = self.parent_context
        if context.is_module() or context.is_class():
            return self.parent_context.get_value()  # Might be None

        return None

    def __repr__(self):
        try:
            name = self.parent_context.name  # __name__ is not defined all the time
        except AttributeError:
            name = None
        return '<%s: (%s).%s>' % (self.__class__.__name__, name, self.string_name)

    @property
    def api_type(self):
        return self.infer_compiled_value().api_type

    def infer(self):
        return ValueSet([self.infer_compiled_value()])

    @memoize_method
    def infer_compiled_value(self):
        return create_from_name(self._inference_state, self._parent_value, self.string_name)


class SignatureParamName(ParamNameInterface, AbstractNameDefinition):
    def __init__(self, compiled_value, signature_param):
        self.parent_context = compiled_value.parent_context
        self._signature_param = signature_param

    @property
    def string_name(self):
        return self._signature_param.name

    def to_string(self):
        s = self._kind_string() + self.string_name
        if self._signature_param.has_annotation:
            s += ': ' + self._signature_param.annotation_string
        if self._signature_param.has_default:
            s += '=' + self._signature_param.default_string
        return s

    def get_kind(self):
        return getattr(Parameter, self._signature_param.kind_name)

    def infer(self):
        p = self._signature_param
        inference_state = self.parent_context.inference_state
        values = NO_VALUES
        if p.has_default:
            values = ValueSet([create_from_access_path(inference_state, p.default)])
        if p.has_annotation:
            annotation = create_from_access_path(inference_state, p.annotation)
            values |= annotation.execute_with_values()
        return values


class UnresolvableParamName(ParamNameInterface, AbstractNameDefinition):
    def __init__(self, compiled_value, name, default):
        self.parent_context = compiled_value.parent_context
        self.string_name = name
        self._default = default

    def get_kind(self):
        return Parameter.POSITIONAL_ONLY

    def to_string(self):
        string = self.string_name
        if self._default:
            string += '=' + self._default
        return string

    def infer(self):
        return NO_VALUES


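A tiny, hedged sketch of the tokenization that produces UnresolvableParamName objects when only a docstring-derived parameter string is available (see CompiledValue.get_param_names above); the parameter string is an assumed example:

# Illustrative only: split a doc-derived parameter string into name/default
# pairs the way get_param_names does before wrapping them.
params_str = 'start, stop=None, step=1'
for token in params_str.split(','):
    name, _, default = token.strip().partition('=')
    print(name, repr(default))
# start ''
# stop 'None'
# step '1'
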
class CompiledValueName(ValueNameMixin, AbstractNameDefinition):
    def __init__(self, value, name):
        self.string_name = name
        self._value = value
        self.parent_context = value.parent_context


class EmptyCompiledName(AbstractNameDefinition):
    """
    Accessing some names will raise an exception. To avoid not having any
    completions, just give Jedi the option to return this object. It infers to
    nothing.
    """
    def __init__(self, inference_state, name):
        self.parent_context = inference_state.builtins_module
        self.string_name = name

    def infer(self):
        return NO_VALUES


class CompiledValueFilter(AbstractFilter):
    def __init__(self, inference_state, compiled_value, is_instance=False):
        self._inference_state = inference_state
        self.compiled_value = compiled_value
        self.is_instance = is_instance

    def get(self, name):
        access_handle = self.compiled_value.access_handle
        return self._get(
            name,
            lambda name, safe: access_handle.is_allowed_getattr(name, safe=safe),
            lambda name: name in access_handle.dir(),
            check_has_attribute=True
        )

    def _get(self, name, allowed_getattr_callback, in_dir_callback, check_has_attribute=False):
        """
        To remove quite a few access calls we introduced the callback here.
        """
        if self._inference_state.allow_descriptor_getattr:
            pass

        has_attribute, is_descriptor = allowed_getattr_callback(
            name,
            safe=not self._inference_state.allow_descriptor_getattr
        )
        if check_has_attribute and not has_attribute:
            return []

        if (is_descriptor or not has_attribute) \
                and not self._inference_state.allow_descriptor_getattr:
            return [self._get_cached_name(name, is_empty=True)]

        if self.is_instance and not in_dir_callback(name):
            return []
        return [self._get_cached_name(name)]

    @memoize_method
    def _get_cached_name(self, name, is_empty=False):
        if is_empty:
            return EmptyCompiledName(self._inference_state, name)
        else:
            return self._create_name(name)

    def values(self):
        from jedi.inference.compiled import builtin_from_name
        names = []
        needs_type_completions, dir_infos = self.compiled_value.access_handle.get_dir_infos()
        # We could use `safe=False` here as well, especially as a parameter to
        # get_dir_infos. But this would lead to a lot of property executions
        # that are probably not wanted. The drawback for this is that we
        # have a different name for `get` and `values`. For `get` we always
        # execute.
        for name in dir_infos:
            names += self._get(
                name,
                lambda name, safe: dir_infos[name],
                lambda name: name in dir_infos,
            )

        # ``dir`` doesn't include the type names.
        if not self.is_instance and needs_type_completions:
            for filter in builtin_from_name(self._inference_state, 'type').get_filters():
                names += filter.values()
        return names

    def _create_name(self, name):
        return CompiledName(
            self._inference_state,
            self.compiled_value,
            name
        )

    def __repr__(self):
        return "<%s: %s>" % (self.__class__.__name__, self.compiled_value)


docstr_defaults = {
    'floating point number': 'float',
    'character': 'str',
    'integer': 'int',
    'dictionary': 'dict',
    'string': 'str',
}


def _parse_function_doc(doc):
    """
    Takes a function's docstring and returns the params and return value as a
    tuple. This is nothing more than a docstring parser.

    TODO docstrings like utime(path, (atime, mtime)) and a(b [, b]) -> None
    TODO docstrings like 'tuple of integers'
    """
    # parse round parentheses: def func(a, (b,c))
    try:
        count = 0
        start = doc.index('(')
        for i, s in enumerate(doc[start:]):
            if s == '(':
                count += 1
            elif s == ')':
                count -= 1
                if count == 0:
                    end = start + i
                    break
        param_str = doc[start + 1:end]
    except (ValueError, UnboundLocalError):
        # ValueError for doc.index
        # UnboundLocalError for undefined end in last line
        debug.dbg('no brackets found - no param')
        end = 0
        param_str = ''
    else:
        # remove square brackets, that show an optional param ( = None)
        def change_options(m):
            args = m.group(1).split(',')
            for i, a in enumerate(args):
                if a and '=' not in a:
                    args[i] += '=None'
            return ','.join(args)

        while True:
            param_str, changes = re.subn(r' ?\[([^\[\]]+)\]',
                                         change_options, param_str)
            if changes == 0:
                break
    param_str = param_str.replace('-', '_')  # see: isinstance.__doc__

    # parse return value
    r = re.search('-[>-]* ', doc[end:end + 7])
    if r is None:
        ret = ''
    else:
        index = end + r.end()
        # get result type, which can contain newlines
        pattern = re.compile(r'(,\n|[^\n-])+')
        ret_str = pattern.match(doc, index).group(0).strip()
        # New object -> object()
        ret_str = re.sub(r'[nN]ew (.*)', r'\1()', ret_str)

        ret = docstr_defaults.get(ret_str, ret_str)

    return param_str, ret


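A worked example of the docstring parser above, using docstrings shaped like those of C builtins; the exact strings are assumptions for illustration:

# Illustrative only: bracketed optional params gain '=None', '->' introduces
# the return type, and docstr_defaults maps prose like 'integer' to 'int'.
print(_parse_function_doc('get(key[, default]) -> value'))
# expected: ('key, default=None', 'value')
print(_parse_function_doc('bit_length(self) -> integer'))
# expected: ('self', 'int')
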
def create_from_name(inference_state, compiled_value, name):
    access_paths = compiled_value.access_handle.getattr_paths(name, default=None)

    value = None
    for access_path in access_paths:
        value = create_cached_compiled_value(
            inference_state,
            access_path,
            parent_context=None if value is None else value.as_context(),
        )
    return value


def _normalize_create_args(func):
    """The cache doesn't care about keyword vs. normal args."""
    def wrapper(inference_state, obj, parent_context=None):
        return func(inference_state, obj, parent_context)
    return wrapper


def create_from_access_path(inference_state, access_path):
    value = None
    for name, access in access_path.accesses:
        value = create_cached_compiled_value(
            inference_state,
            access,
            parent_context=None if value is None else value.as_context()
        )
    return value


@_normalize_create_args
@inference_state_function_cache()
def create_cached_compiled_value(inference_state, access_handle, parent_context):
    assert not isinstance(parent_context, CompiledValue)
    if parent_context is None:
        cls = CompiledModule
    else:
        cls = CompiledValue
    return cls(inference_state, access_handle, parent_context)