Mirror of https://github.com/aykhans/AzSuicideDataVisualization.git (synced 2025-07-02 22:30:48 +00:00)
first commit
.venv/Lib/site-packages/jedi/inference/value/__init__.py (new file, 9 lines)
@@ -0,0 +1,9 @@
# Re-export symbols for wider use. We configure mypy and flake8 to be aware that
# this file does this.

from jedi.inference.value.module import ModuleValue
from jedi.inference.value.klass import ClassValue
from jedi.inference.value.function import FunctionValue, \
    MethodValue
from jedi.inference.value.instance import AnonymousInstance, BoundMethod, \
    CompiledInstance, AbstractInstanceValue, TreeInstance
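
The re-export comment above means consumers can import these names from the package rather than from the individual submodules. A minimal sketch (not part of the committed file), assuming jedi is installed:

import jedi  # noqa: F401  (ensures the package is importable)

# Both import paths resolve to the same class object thanks to the re-export.
from jedi.inference.value import FunctionValue
from jedi.inference.value.function import FunctionValue as DirectFunctionValue

assert FunctionValue is DirectFunctionValue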
.venv/Lib/site-packages/jedi/inference/value/decorator.py (new file, 34 lines)
@@ -0,0 +1,34 @@
'''
Decorators are not really values; however, we need some wrappers to improve
docstrings and other things around decorators.
'''

from jedi.inference.base_value import ValueWrapper, ValueSet


class Decoratee(ValueWrapper):
    def __init__(self, wrapped_value, original_value):
        super().__init__(wrapped_value)
        self._original_value = original_value

    def py__doc__(self):
        return self._original_value.py__doc__()

    def py__get__(self, instance, class_value):
        return ValueSet(
            Decoratee(v, self._original_value)
            for v in self._wrapped_value.py__get__(instance, class_value)
        )

    def get_signatures(self):
        signatures = self._wrapped_value.get_signatures()
        if signatures:
            return signatures
        # Fall back to signatures of the original function/class if the
        # decorator has no signature or it is not inferrable.
        #
        # __get__ means that it's a descriptor. In that case we don't return
        # signatures, because they are usually properties.
        if not self._wrapped_value.py__getattribute__('__get__'):
            return self._original_value.get_signatures()
        return []
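
For context on why `Decoratee` forwards `py__doc__` and signatures to the original value: at runtime the same metadata-loss problem is usually solved with `functools.wraps`. A minimal, self-contained sketch of the idea (plain Python, not jedi internals):

import functools

def logged(func):
    @functools.wraps(func)  # copies __doc__, __name__, etc. from func
    def wrapper(*args, **kwargs):
        print('calling', func.__name__)
        return func(*args, **kwargs)
    return wrapper

@logged
def add(a, b):
    """Add two numbers."""
    return a + b

print(add.__doc__)  # "Add two numbers." -- recovered from the original function
print(add(1, 2))    # prints "calling add", then 3

Without `functools.wraps` the wrapper would hide the original docstring, which is exactly the situation `Decoratee` papers over during static inference.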
.venv/Lib/site-packages/jedi/inference/value/dynamic_arrays.py (new file, 200 lines)
@@ -0,0 +1,200 @@
"""
A module to deal with stuff like `list.append` and `set.add`.

Array modifications
*******************

If the content of an array (``set``/``list``) is requested somewhere, the
current module will be checked for appearances of ``arr.append``,
``arr.insert``, etc. If the ``arr`` name points to an actual array, the
content will be added.

This can be really CPU intensive, as you can imagine, because |jedi| has to
follow **every** ``append`` and check whether it's the right array. However
this works pretty well, because in *slow* cases, the recursion detector and
other settings will stop this process.

It is important to note that:

1. Array modifications work only in the current module.
2. Jedi only checks array additions; ``list.pop``, etc. are ignored.
"""
from jedi import debug
from jedi import settings
from jedi.inference import recursion
from jedi.inference.base_value import ValueSet, NO_VALUES, HelperValueMixin, \
    ValueWrapper
from jedi.inference.lazy_value import LazyKnownValues
from jedi.inference.helpers import infer_call_of_leaf
from jedi.inference.cache import inference_state_method_cache

_sentinel = object()


def check_array_additions(context, sequence):
    """ Just a mapper function for the internal _internal_check_array_additions """
    if sequence.array_type not in ('list', 'set'):
        # TODO also check for dict updates
        return NO_VALUES

    return _internal_check_array_additions(context, sequence)


@inference_state_method_cache(default=NO_VALUES)
@debug.increase_indent
def _internal_check_array_additions(context, sequence):
    """
    Checks if an `Array` has "add" (append, insert, extend) statements:

    >>> a = [""]
    >>> a.append(1)
    """
    from jedi.inference import arguments

    debug.dbg('Dynamic array search for %s' % sequence, color='MAGENTA')
    module_context = context.get_root_context()
    if not settings.dynamic_array_additions or module_context.is_compiled():
        debug.dbg('Dynamic array search aborted.', color='MAGENTA')
        return NO_VALUES

    def find_additions(context, arglist, add_name):
        params = list(arguments.TreeArguments(context.inference_state, context, arglist).unpack())
        result = set()
        if add_name in ['insert']:
            params = params[1:]
        if add_name in ['append', 'add', 'insert']:
            for key, lazy_value in params:
                result.add(lazy_value)
        elif add_name in ['extend', 'update']:
            for key, lazy_value in params:
                result |= set(lazy_value.infer().iterate())
        return result

    temp_param_add, settings.dynamic_params_for_other_modules = \
        settings.dynamic_params_for_other_modules, False

    is_list = sequence.name.string_name == 'list'
    search_names = (['append', 'extend', 'insert'] if is_list else ['add', 'update'])

    added_types = set()
    for add_name in search_names:
        try:
            possible_names = module_context.tree_node.get_used_names()[add_name]
        except KeyError:
            continue
        else:
            for name in possible_names:
                value_node = context.tree_node
                if not (value_node.start_pos < name.start_pos < value_node.end_pos):
                    continue
                trailer = name.parent
                power = trailer.parent
                trailer_pos = power.children.index(trailer)
                try:
                    execution_trailer = power.children[trailer_pos + 1]
                except IndexError:
                    continue
                else:
                    if execution_trailer.type != 'trailer' \
                            or execution_trailer.children[0] != '(' \
                            or execution_trailer.children[1] == ')':
                        continue

                random_context = context.create_context(name)

                with recursion.execution_allowed(context.inference_state, power) as allowed:
                    if allowed:
                        found = infer_call_of_leaf(
                            random_context,
                            name,
                            cut_own_trailer=True
                        )
                        if sequence in found:
                            # The arrays match. Now add the results.
                            added_types |= find_additions(
                                random_context,
                                execution_trailer.children[1],
                                add_name
                            )

    # reset settings
    settings.dynamic_params_for_other_modules = temp_param_add
    debug.dbg('Dynamic array result %s', added_types, color='MAGENTA')
    return added_types


def get_dynamic_array_instance(instance, arguments):
    """Used for set() and list() instances."""
    ai = _DynamicArrayAdditions(instance, arguments)
    from jedi.inference import arguments
    return arguments.ValuesArguments([ValueSet([ai])])


class _DynamicArrayAdditions(HelperValueMixin):
    """
    Used for the usage of set() and list().
    This is definitely a hack, but a good one :-)
    It makes it possible to use set/list conversions.

    This is not a proper context, because it doesn't have to be. It's not used
    in the wild, it's just used within typeshed as an argument to `__init__`
    for set/list and never used in any other place.
    """
    def __init__(self, instance, arguments):
        self._instance = instance
        self._arguments = arguments

    def py__class__(self):
        tuple_, = self._instance.inference_state.builtins_module.py__getattribute__('tuple')
        return tuple_

    def py__iter__(self, contextualized_node=None):
        arguments = self._arguments
        try:
            _, lazy_value = next(arguments.unpack())
        except StopIteration:
            pass
        else:
            yield from lazy_value.infer().iterate()

        from jedi.inference.arguments import TreeArguments
        if isinstance(arguments, TreeArguments):
            additions = _internal_check_array_additions(arguments.context, self._instance)
            yield from additions

    def iterate(self, contextualized_node=None, is_async=False):
        return self.py__iter__(contextualized_node)


class _Modification(ValueWrapper):
    def __init__(self, wrapped_value, assigned_values, contextualized_key):
        super().__init__(wrapped_value)
        self._assigned_values = assigned_values
        self._contextualized_key = contextualized_key

    def py__getitem__(self, *args, **kwargs):
        return self._wrapped_value.py__getitem__(*args, **kwargs) | self._assigned_values

    def py__simple_getitem__(self, index):
        actual = [
            v.get_safe_value(_sentinel)
            for v in self._contextualized_key.infer()
        ]
        if index in actual:
            return self._assigned_values
        return self._wrapped_value.py__simple_getitem__(index)


class DictModification(_Modification):
    def py__iter__(self, contextualized_node=None):
        yield from self._wrapped_value.py__iter__(contextualized_node)
        yield self._contextualized_key

    def get_key_values(self):
        return self._wrapped_value.get_key_values() | self._contextualized_key.infer()


class ListModification(_Modification):
    def py__iter__(self, contextualized_node=None):
        yield from self._wrapped_value.py__iter__(contextualized_node)
        yield LazyKnownValues(self._assigned_values)
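
What this module buys in practice: completions on an element of a list reflect values added via ``append``. A minimal sketch through jedi's public API (not part of the committed file; assumes jedi 0.16+, where ``Script.complete(line, column)`` is available):

import jedi

code = (
    "arr = []\n"
    "arr.append(1.0)\n"
    "arr[0]."
)
script = jedi.Script(code)
# The dynamic-array search follows `arr.append(1.0)`, so `arr[0]` is
# inferred as float and float attributes are offered.
completions = [c.name for c in script.complete(3, len("arr[0]."))]
print('is_integer' in completions)  # expected: True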
.venv/Lib/site-packages/jedi/inference/value/function.py (new file, 461 lines)
@@ -0,0 +1,461 @@
from parso.python import tree

from jedi import debug
from jedi.inference.cache import inference_state_method_cache, CachedMetaClass
from jedi.inference import compiled
from jedi.inference import recursion
from jedi.inference import docstrings
from jedi.inference import flow_analysis
from jedi.inference.signature import TreeSignature
from jedi.inference.filters import ParserTreeFilter, FunctionExecutionFilter, \
    AnonymousFunctionExecutionFilter
from jedi.inference.names import ValueName, AbstractNameDefinition, \
    AnonymousParamName, ParamName, NameWrapper
from jedi.inference.base_value import ContextualizedNode, NO_VALUES, \
    ValueSet, TreeValue, ValueWrapper
from jedi.inference.lazy_value import LazyKnownValues, LazyKnownValue, \
    LazyTreeValue
from jedi.inference.context import ValueContext, TreeContextMixin
from jedi.inference.value import iterable
from jedi import parser_utils
from jedi.inference.parser_cache import get_yield_exprs
from jedi.inference.helpers import values_from_qualified_names
from jedi.inference.gradual.generics import TupleGenericManager


class LambdaName(AbstractNameDefinition):
    string_name = '<lambda>'
    api_type = 'function'

    def __init__(self, lambda_value):
        self._lambda_value = lambda_value
        self.parent_context = lambda_value.parent_context

    @property
    def start_pos(self):
        return self._lambda_value.tree_node.start_pos

    def infer(self):
        return ValueSet([self._lambda_value])


class FunctionAndClassBase(TreeValue):
    def get_qualified_names(self):
        if self.parent_context.is_class():
            n = self.parent_context.get_qualified_names()
            if n is None:
                # This means that the parent class lives within a function.
                return None
            return n + (self.py__name__(),)
        elif self.parent_context.is_module():
            return (self.py__name__(),)
        else:
            return None


class FunctionMixin:
    api_type = 'function'

    def get_filters(self, origin_scope=None):
        cls = self.py__class__()
        for instance in cls.execute_with_values():
            yield from instance.get_filters(origin_scope=origin_scope)

    def py__get__(self, instance, class_value):
        from jedi.inference.value.instance import BoundMethod
        if instance is None:
            # Calling Foo.bar results in the original bar function.
            return ValueSet([self])
        return ValueSet([BoundMethod(instance, class_value.as_context(), self)])

    def get_param_names(self):
        return [AnonymousParamName(self, param.name)
                for param in self.tree_node.get_params()]

    @property
    def name(self):
        if self.tree_node.type == 'lambdef':
            return LambdaName(self)
        return ValueName(self, self.tree_node.name)

    def is_function(self):
        return True

    def py__name__(self):
        return self.name.string_name

    def get_type_hint(self, add_class_info=True):
        return_annotation = self.tree_node.annotation
        if return_annotation is None:
            def param_name_to_str(n):
                s = n.string_name
                annotation = n.infer().get_type_hint()
                if annotation is not None:
                    s += ': ' + annotation
                if n.default_node is not None:
                    s += '=' + n.default_node.get_code(include_prefix=False)
                return s

            function_execution = self.as_context()
            result = function_execution.infer()
            return_hint = result.get_type_hint()
            body = self.py__name__() + '(%s)' % ', '.join([
                param_name_to_str(n)
                for n in function_execution.get_param_names()
            ])
            if return_hint is None:
                return body
        else:
            return_hint = return_annotation.get_code(include_prefix=False)
            body = self.py__name__() + self.tree_node.children[2].get_code(include_prefix=False)

        return body + ' -> ' + return_hint

    def py__call__(self, arguments):
        function_execution = self.as_context(arguments)
        return function_execution.infer()

    def _as_context(self, arguments=None):
        if arguments is None:
            return AnonymousFunctionExecution(self)
        return FunctionExecutionContext(self, arguments)

    def get_signatures(self):
        return [TreeSignature(f) for f in self.get_signature_functions()]


class FunctionValue(FunctionMixin, FunctionAndClassBase, metaclass=CachedMetaClass):
    @classmethod
    def from_context(cls, context, tree_node):
        def create(tree_node):
            if context.is_class():
                return MethodValue(
                    context.inference_state,
                    context,
                    parent_context=parent_context,
                    tree_node=tree_node
                )
            else:
                return cls(
                    context.inference_state,
                    parent_context=parent_context,
                    tree_node=tree_node
                )

        overloaded_funcs = list(_find_overload_functions(context, tree_node))

        parent_context = context
        while parent_context.is_class() or parent_context.is_instance():
            parent_context = parent_context.parent_context

        function = create(tree_node)

        if overloaded_funcs:
            return OverloadedFunctionValue(
                function,
                # Get them into the correct order: lower line first.
                list(reversed([create(f) for f in overloaded_funcs]))
            )
        return function

    def py__class__(self):
        c, = values_from_qualified_names(self.inference_state, 'types', 'FunctionType')
        return c

    def get_default_param_context(self):
        return self.parent_context

    def get_signature_functions(self):
        return [self]


class FunctionNameInClass(NameWrapper):
    def __init__(self, class_context, name):
        super().__init__(name)
        self._class_context = class_context

    def get_defining_qualified_value(self):
        return self._class_context.get_value()  # Might be None.


class MethodValue(FunctionValue):
    def __init__(self, inference_state, class_context, *args, **kwargs):
        super().__init__(inference_state, *args, **kwargs)
        self.class_context = class_context

    def get_default_param_context(self):
        return self.class_context

    def get_qualified_names(self):
        # Need to implement this, because the parent value of a method
        # value is not the class value but the module.
        names = self.class_context.get_qualified_names()
        if names is None:
            return None
        return names + (self.py__name__(),)

    @property
    def name(self):
        return FunctionNameInClass(self.class_context, super().name)


class BaseFunctionExecutionContext(ValueContext, TreeContextMixin):
    def infer_annotations(self):
        raise NotImplementedError

    @inference_state_method_cache(default=NO_VALUES)
    @recursion.execution_recursion_decorator()
    def get_return_values(self, check_yields=False):
        funcdef = self.tree_node
        if funcdef.type == 'lambdef':
            return self.infer_node(funcdef.children[-1])

        if check_yields:
            value_set = NO_VALUES
            returns = get_yield_exprs(self.inference_state, funcdef)
        else:
            value_set = self.infer_annotations()
            if value_set:
                # If there are annotations, prefer them over anything else.
                # This will make it faster.
                return value_set
            value_set |= docstrings.infer_return_types(self._value)
            returns = funcdef.iter_return_stmts()

        for r in returns:
            if check_yields:
                value_set |= ValueSet.from_sets(
                    lazy_value.infer()
                    for lazy_value in self._get_yield_lazy_value(r)
                )
            else:
                check = flow_analysis.reachability_check(self, funcdef, r)
                if check is flow_analysis.UNREACHABLE:
                    debug.dbg('Return unreachable: %s', r)
                else:
                    try:
                        children = r.children
                    except AttributeError:
                        ctx = compiled.builtin_from_name(self.inference_state, 'None')
                        value_set |= ValueSet([ctx])
                    else:
                        value_set |= self.infer_node(children[1])
                if check is flow_analysis.REACHABLE:
                    debug.dbg('Return reachable: %s', r)
                    break
        return value_set

    def _get_yield_lazy_value(self, yield_expr):
        if yield_expr.type == 'keyword':
            # `yield` just yields None.
            ctx = compiled.builtin_from_name(self.inference_state, 'None')
            yield LazyKnownValue(ctx)
            return

        node = yield_expr.children[1]
        if node.type == 'yield_arg':  # It must be a yield from.
            cn = ContextualizedNode(self, node.children[1])
            yield from cn.infer().iterate(cn)
        else:
            yield LazyTreeValue(self, node)

    @recursion.execution_recursion_decorator(default=iter([]))
    def get_yield_lazy_values(self, is_async=False):
        # TODO: if is_async, wrap yield statements in Awaitable/async_generator_asend
        for_parents = [(y, tree.search_ancestor(y, 'for_stmt', 'funcdef',
                                                'while_stmt', 'if_stmt'))
                       for y in get_yield_exprs(self.inference_state, self.tree_node)]

        # Calculate if the yields are placed within the same for loop.
        yields_order = []
        last_for_stmt = None
        for yield_, for_stmt in for_parents:
            # For really simple for loops we can predict the order. Otherwise
            # we just ignore it.
            parent = for_stmt.parent
            if parent.type == 'suite':
                parent = parent.parent
            if for_stmt.type == 'for_stmt' and parent == self.tree_node \
                    and parser_utils.for_stmt_defines_one_name(for_stmt):  # Simplicity for now.
                if for_stmt == last_for_stmt:
                    yields_order[-1][1].append(yield_)
                else:
                    yields_order.append((for_stmt, [yield_]))
            elif for_stmt == self.tree_node:
                yields_order.append((None, [yield_]))
            else:
                types = self.get_return_values(check_yields=True)
                if types:
                    yield LazyKnownValues(types, min=0, max=float('inf'))
                return
            last_for_stmt = for_stmt

        for for_stmt, yields in yields_order:
            if for_stmt is None:
                # No for_stmt, just normal yields.
                for yield_ in yields:
                    yield from self._get_yield_lazy_value(yield_)
            else:
                input_node = for_stmt.get_testlist()
                cn = ContextualizedNode(self, input_node)
                ordered = cn.infer().iterate(cn)
                ordered = list(ordered)
                for lazy_value in ordered:
                    dct = {str(for_stmt.children[1].value): lazy_value.infer()}
                    with self.predefine_names(for_stmt, dct):
                        for yield_in_same_for_stmt in yields:
                            yield from self._get_yield_lazy_value(yield_in_same_for_stmt)

    def merge_yield_values(self, is_async=False):
        return ValueSet.from_sets(
            lazy_value.infer()
            for lazy_value in self.get_yield_lazy_values()
        )

    def is_generator(self):
        return bool(get_yield_exprs(self.inference_state, self.tree_node))

    def infer(self):
        """
        Created to be used by inheritance.
        """
        inference_state = self.inference_state
        is_coroutine = self.tree_node.parent.type in ('async_stmt', 'async_funcdef')
        from jedi.inference.gradual.base import GenericClass

        if is_coroutine:
            if self.is_generator():
                async_generator_classes = inference_state.typing_module \
                    .py__getattribute__('AsyncGenerator')

                yield_values = self.merge_yield_values(is_async=True)
                # The contravariant doesn't seem to be defined.
                generics = (yield_values.py__class__(), NO_VALUES)
                return ValueSet(
                    GenericClass(c, TupleGenericManager(generics))
                    for c in async_generator_classes
                ).execute_annotation()
            else:
                async_classes = inference_state.typing_module.py__getattribute__('Coroutine')
                return_values = self.get_return_values()
                # Only the first generic is relevant.
                generics = (return_values.py__class__(), NO_VALUES, NO_VALUES)
                return ValueSet(
                    GenericClass(c, TupleGenericManager(generics)) for c in async_classes
                ).execute_annotation()
        else:
            # If there are annotations, prefer them over anything else.
            if self.is_generator() and not self.infer_annotations():
                return ValueSet([iterable.Generator(inference_state, self)])
            else:
                return self.get_return_values()


class FunctionExecutionContext(BaseFunctionExecutionContext):
    def __init__(self, function_value, arguments):
        super().__init__(function_value)
        self._arguments = arguments

    def get_filters(self, until_position=None, origin_scope=None):
        yield FunctionExecutionFilter(
            self, self._value,
            until_position=until_position,
            origin_scope=origin_scope,
            arguments=self._arguments
        )

    def infer_annotations(self):
        from jedi.inference.gradual.annotation import infer_return_types
        return infer_return_types(self._value, self._arguments)

    def get_param_names(self):
        return [
            ParamName(self._value, param.name, self._arguments)
            for param in self._value.tree_node.get_params()
        ]


class AnonymousFunctionExecution(BaseFunctionExecutionContext):
    def infer_annotations(self):
        # I don't think inferring anonymous executions is a big thing.
        # Anonymous contexts are mostly there for the user to work in. ~ dave
        return NO_VALUES

    def get_filters(self, until_position=None, origin_scope=None):
        yield AnonymousFunctionExecutionFilter(
            self, self._value,
            until_position=until_position,
            origin_scope=origin_scope,
        )

    def get_param_names(self):
        return self._value.get_param_names()


class OverloadedFunctionValue(FunctionMixin, ValueWrapper):
    def __init__(self, function, overloaded_functions):
        super().__init__(function)
        self._overloaded_functions = overloaded_functions

    def py__call__(self, arguments):
        debug.dbg("Execute overloaded function %s", self._wrapped_value, color='BLUE')
        function_executions = []
        for signature in self.get_signatures():
            function_execution = signature.value.as_context(arguments)
            function_executions.append(function_execution)
            if signature.matches_signature(arguments):
                return function_execution.infer()

        if self.inference_state.is_analysis:
            # In this case we want precision.
            return NO_VALUES
        return ValueSet.from_sets(fe.infer() for fe in function_executions)

    def get_signature_functions(self):
        return self._overloaded_functions

    def get_type_hint(self, add_class_info=True):
        return 'Union[%s]' % ', '.join(f.get_type_hint() for f in self._overloaded_functions)


def _find_overload_functions(context, tree_node):
    def _is_overload_decorated(funcdef):
        if funcdef.parent.type == 'decorated':
            decorators = funcdef.parent.children[0]
            if decorators.type == 'decorator':
                decorators = [decorators]
            else:
                decorators = decorators.children
            for decorator in decorators:
                dotted_name = decorator.children[1]
                if dotted_name.type == 'name' and dotted_name.value == 'overload':
                    # TODO check with values if it's the right overload
                    return True
        return False

    if tree_node.type == 'lambdef':
        return

    if _is_overload_decorated(tree_node):
        yield tree_node

    while True:
        filter = ParserTreeFilter(
            context,
            until_position=tree_node.start_pos
        )
        names = filter.get(tree_node.name.value)
        assert isinstance(names, list)
        if not names:
            break

        found = False
        for name in names:
            funcdef = name.tree_name.parent
            if funcdef.type == 'funcdef' and _is_overload_decorated(funcdef):
                tree_node = funcdef
                found = True
                yield funcdef

        if not found:
            break
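
To see `OverloadedFunctionValue` at work from the outside, a sketch via the public API (not part of the committed file; exact results depend on the jedi version, and `Script.infer(line, column)` is assumed available):

import jedi

code = '''\
from typing import overload

@overload
def f(x: int) -> str: ...
@overload
def f(x: str) -> int: ...
def f(x):
    return x

result = f(1)
'''
script = jedi.Script(code)
# `matches_signature` should pick the `int` overload for f(1), so
# `result` is expected to infer to `str`.
print([d.name for d in script.infer(10, 0)])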
.venv/Lib/site-packages/jedi/inference/value/instance.py (new file, 610 lines)
@@ -0,0 +1,610 @@
from abc import abstractproperty

from parso.tree import search_ancestor

from jedi import debug
from jedi import settings
from jedi.inference import compiled
from jedi.inference.compiled.value import CompiledValueFilter
from jedi.inference.helpers import values_from_qualified_names, is_big_annoying_library
from jedi.inference.filters import AbstractFilter, AnonymousFunctionExecutionFilter
from jedi.inference.names import ValueName, TreeNameDefinition, ParamName, \
    NameWrapper
from jedi.inference.base_value import Value, NO_VALUES, ValueSet, \
    iterator_to_value_set, ValueWrapper
from jedi.inference.lazy_value import LazyKnownValue, LazyKnownValues
from jedi.inference.cache import inference_state_method_cache
from jedi.inference.arguments import ValuesArguments, TreeArgumentsWrapper
from jedi.inference.value.function import \
    FunctionValue, FunctionMixin, OverloadedFunctionValue, \
    BaseFunctionExecutionContext, FunctionExecutionContext, FunctionNameInClass
from jedi.inference.value.klass import ClassFilter
from jedi.inference.value.dynamic_arrays import get_dynamic_array_instance
from jedi.parser_utils import function_is_staticmethod, function_is_classmethod


class InstanceExecutedParamName(ParamName):
    def __init__(self, instance, function_value, tree_name):
        super().__init__(
            function_value, tree_name, arguments=None)
        self._instance = instance

    def infer(self):
        return ValueSet([self._instance])

    def matches_signature(self):
        return True


class AnonymousMethodExecutionFilter(AnonymousFunctionExecutionFilter):
    def __init__(self, instance, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._instance = instance

    def _convert_param(self, param, name):
        if param.position_index == 0:
            if function_is_classmethod(self._function_value.tree_node):
                return InstanceExecutedParamName(
                    self._instance.py__class__(),
                    self._function_value,
                    name
                )
            elif not function_is_staticmethod(self._function_value.tree_node):
                return InstanceExecutedParamName(
                    self._instance,
                    self._function_value,
                    name
                )
        return super()._convert_param(param, name)


class AnonymousMethodExecutionContext(BaseFunctionExecutionContext):
    def __init__(self, instance, value):
        super().__init__(value)
        self.instance = instance

    def get_filters(self, until_position=None, origin_scope=None):
        yield AnonymousMethodExecutionFilter(
            self.instance, self, self._value,
            until_position=until_position,
            origin_scope=origin_scope,
        )

    def get_param_names(self):
        param_names = list(self._value.get_param_names())
        # set the self name
        param_names[0] = InstanceExecutedParamName(
            self.instance,
            self._value,
            param_names[0].tree_name
        )
        return param_names


class MethodExecutionContext(FunctionExecutionContext):
    def __init__(self, instance, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.instance = instance


class AbstractInstanceValue(Value):
    api_type = 'instance'

    def __init__(self, inference_state, parent_context, class_value):
        super().__init__(inference_state, parent_context)
        # Generated instances are classes that are just generated by self
        # (no arguments) used.
        self.class_value = class_value

    def is_instance(self):
        return True

    def get_qualified_names(self):
        return self.class_value.get_qualified_names()

    def get_annotated_class_object(self):
        return self.class_value  # This is the default.

    def py__class__(self):
        return self.class_value

    def py__bool__(self):
        # Signal that we don't know about the bool type.
        return None

    @abstractproperty
    def name(self):
        raise NotImplementedError

    def get_signatures(self):
        call_funcs = self.py__getattribute__('__call__').py__get__(self, self.class_value)
        return [s.bind(self) for s in call_funcs.get_signatures()]

    def get_function_slot_names(self, name):
        # Python classes don't look at the dictionary of the instance when
        # looking up `__call__`. This is something that has to do with Python's
        # internal slot system (note: not __slots__, but C slots).
        for filter in self.get_filters(include_self_names=False):
            names = filter.get(name)
            if names:
                return names
        return []

    def execute_function_slots(self, names, *inferred_args):
        return ValueSet.from_sets(
            name.infer().execute_with_values(*inferred_args)
            for name in names
        )

    def get_type_hint(self, add_class_info=True):
        return self.py__name__()

    def py__getitem__(self, index_value_set, contextualized_node):
        names = self.get_function_slot_names('__getitem__')
        if not names:
            return super().py__getitem__(
                index_value_set,
                contextualized_node,
            )

        args = ValuesArguments([index_value_set])
        return ValueSet.from_sets(name.infer().execute(args) for name in names)

    def py__iter__(self, contextualized_node=None):
        iter_slot_names = self.get_function_slot_names('__iter__')
        if not iter_slot_names:
            return super().py__iter__(contextualized_node)

        def iterate():
            for generator in self.execute_function_slots(iter_slot_names):
                yield from generator.py__next__(contextualized_node)
        return iterate()

    def __repr__(self):
        return "<%s of %s>" % (self.__class__.__name__, self.class_value)


class CompiledInstance(AbstractInstanceValue):
    # This is not really a compiled class, it's just an instance from a
    # compiled class.
    def __init__(self, inference_state, parent_context, class_value, arguments):
        super().__init__(inference_state, parent_context, class_value)
        self._arguments = arguments

    def get_filters(self, origin_scope=None, include_self_names=True):
        class_value = self.get_annotated_class_object()
        class_filters = class_value.get_filters(
            origin_scope=origin_scope,
            is_instance=True,
        )
        for f in class_filters:
            yield CompiledInstanceClassFilter(self, f)

    @property
    def name(self):
        return compiled.CompiledValueName(self, self.class_value.name.string_name)

    def is_stub(self):
        return False


class _BaseTreeInstance(AbstractInstanceValue):
    @property
    def array_type(self):
        name = self.class_value.py__name__()
        if name in ['list', 'set', 'dict'] \
                and self.parent_context.get_root_context().is_builtins_module():
            return name
        return None

    @property
    def name(self):
        return ValueName(self, self.class_value.name.tree_name)

    def get_filters(self, origin_scope=None, include_self_names=True):
        class_value = self.get_annotated_class_object()
        if include_self_names:
            for cls in class_value.py__mro__():
                if not cls.is_compiled():
                    # In this case we're excluding compiled objects that are
                    # not fake objects. It doesn't make sense for normal
                    # compiled objects to search for self variables.
                    yield SelfAttributeFilter(self, class_value, cls.as_context(), origin_scope)

        class_filters = class_value.get_filters(
            origin_scope=origin_scope,
            is_instance=True,
        )
        for f in class_filters:
            if isinstance(f, ClassFilter):
                yield InstanceClassFilter(self, f)
            elif isinstance(f, CompiledValueFilter):
                yield CompiledInstanceClassFilter(self, f)
            else:
                # Probably from the metaclass.
                yield f

    @inference_state_method_cache()
    def create_instance_context(self, class_context, node):
        new = node
        while True:
            func_node = new
            new = search_ancestor(new, 'funcdef', 'classdef')
            if class_context.tree_node is new:
                func = FunctionValue.from_context(class_context, func_node)
                bound_method = BoundMethod(self, class_context, func)
                if func_node.name.value == '__init__':
                    context = bound_method.as_context(self._arguments)
                else:
                    context = bound_method.as_context()
                break
        return context.create_context(node)

    def py__getattribute__alternatives(self, string_name):
        '''
        Since nothing was inferred, now check the __getattr__ and
        __getattribute__ methods. Stubs don't need to be checked, because
        they don't contain any logic.
        '''
        if self.is_stub():
            return NO_VALUES

        name = compiled.create_simple_object(self.inference_state, string_name)

        # This is a little bit special. `__getattribute__` is in Python
        # executed before `__getattr__`. But: I know no use case, where
        # this could be practical and where Jedi would return wrong types.
        # If you ever find something, let me know!
        # We are inverting this, because a hand-crafted `__getattribute__`
        # could still call another hand-crafted `__getattr__`, but not the
        # other way around.
        if is_big_annoying_library(self.parent_context):
            return NO_VALUES
        names = (self.get_function_slot_names('__getattr__')
                 or self.get_function_slot_names('__getattribute__'))
        return self.execute_function_slots(names, name)

    def py__next__(self, contextualized_node=None):
        name = u'__next__'
        next_slot_names = self.get_function_slot_names(name)
        if next_slot_names:
            yield LazyKnownValues(
                self.execute_function_slots(next_slot_names)
            )
        else:
            debug.warning('Instance has no __next__ function in %s.', self)

    def py__call__(self, arguments):
        names = self.get_function_slot_names('__call__')
        if not names:
            # Means the Instance is not callable.
            return super().py__call__(arguments)

        return ValueSet.from_sets(name.infer().execute(arguments) for name in names)

    def py__get__(self, instance, class_value):
        """
        obj may be None.
        """
        # Arguments in __get__ descriptors are obj, class.
        # `method` is the new parent of the array, don't know if that's good.
        for cls in self.class_value.py__mro__():
            result = cls.py__get__on_class(self, instance, class_value)
            if result is not NotImplemented:
                return result

        names = self.get_function_slot_names('__get__')
        if names:
            if instance is None:
                instance = compiled.builtin_from_name(self.inference_state, 'None')
            return self.execute_function_slots(names, instance, class_value)
        else:
            return ValueSet([self])


class TreeInstance(_BaseTreeInstance):
    def __init__(self, inference_state, parent_context, class_value, arguments):
        # I don't think that dynamic append lookups should happen here. That
        # sounds more like something that should go to py__iter__.
        if class_value.py__name__() in ['list', 'set'] \
                and parent_context.get_root_context().is_builtins_module():
            # compare the module path with the builtin name.
            if settings.dynamic_array_additions:
                arguments = get_dynamic_array_instance(self, arguments)

        super().__init__(inference_state, parent_context, class_value)
        self._arguments = arguments
        self.tree_node = class_value.tree_node

    # This can recurse, if the initialization of the class includes a reference
    # to itself.
    @inference_state_method_cache(default=None)
    def _get_annotated_class_object(self):
        from jedi.inference.gradual.annotation import py__annotations__, \
            infer_type_vars_for_execution

        args = InstanceArguments(self, self._arguments)
        for signature in self.class_value.py__getattribute__('__init__').get_signatures():
            # Just take the first result, it should always be one, because we
            # control the typeshed code.
            funcdef = signature.value.tree_node
            if funcdef is None or funcdef.type != 'funcdef' \
                    or not signature.matches_signature(args):
                # First check if the signature even matches, if not we don't
                # need to infer anything.
                continue
            bound_method = BoundMethod(self, self.class_value.as_context(), signature.value)
            all_annotations = py__annotations__(funcdef)
            type_var_dict = infer_type_vars_for_execution(bound_method, args, all_annotations)
            if type_var_dict:
                defined, = self.class_value.define_generics(
                    infer_type_vars_for_execution(signature.value, args, all_annotations),
                )
                debug.dbg('Inferred instance value as %s', defined, color='BLUE')
                return defined
        return None

    def get_annotated_class_object(self):
        return self._get_annotated_class_object() or self.class_value

    def get_key_values(self):
        values = NO_VALUES
        if self.array_type == 'dict':
            for i, (key, instance) in enumerate(self._arguments.unpack()):
                if key is None and i == 0:
                    values |= ValueSet.from_sets(
                        v.get_key_values()
                        for v in instance.infer()
                        if v.array_type == 'dict'
                    )
                if key:
                    values |= ValueSet([compiled.create_simple_object(
                        self.inference_state,
                        key,
                    )])

        return values

    def py__simple_getitem__(self, index):
        if self.array_type == 'dict':
            # Logic for dict({'foo': bar}) and dict(foo=bar)
            # reversed, because:
            # >>> dict({'a': 1}, a=3)
            # {'a': 3}
            # TODO tuple initializations
            # >>> dict([('a', 4)])
            # {'a': 4}
            for key, lazy_context in reversed(list(self._arguments.unpack())):
                if key is None:
                    values = ValueSet.from_sets(
                        dct_value.py__simple_getitem__(index)
                        for dct_value in lazy_context.infer()
                        if dct_value.array_type == 'dict'
                    )
                    if values:
                        return values
                else:
                    if key == index:
                        return lazy_context.infer()
        return super().py__simple_getitem__(index)

    def __repr__(self):
        return "<%s of %s(%s)>" % (self.__class__.__name__, self.class_value,
                                   self._arguments)


class AnonymousInstance(_BaseTreeInstance):
    _arguments = None


class CompiledInstanceName(NameWrapper):
    @iterator_to_value_set
    def infer(self):
        for result_value in self._wrapped_name.infer():
            if result_value.api_type == 'function':
                yield CompiledBoundMethod(result_value)
            else:
                yield result_value


class CompiledInstanceClassFilter(AbstractFilter):
    def __init__(self, instance, f):
        self._instance = instance
        self._class_filter = f

    def get(self, name):
        return self._convert(self._class_filter.get(name))

    def values(self):
        return self._convert(self._class_filter.values())

    def _convert(self, names):
        return [CompiledInstanceName(n) for n in names]


class BoundMethod(FunctionMixin, ValueWrapper):
    def __init__(self, instance, class_context, function):
        super().__init__(function)
        self.instance = instance
        self._class_context = class_context

    def is_bound_method(self):
        return True

    @property
    def name(self):
        return FunctionNameInClass(
            self._class_context,
            super().name
        )

    def py__class__(self):
        c, = values_from_qualified_names(self.inference_state, 'types', 'MethodType')
        return c

    def _get_arguments(self, arguments):
        assert arguments is not None
        return InstanceArguments(self.instance, arguments)

    def _as_context(self, arguments=None):
        if arguments is None:
            return AnonymousMethodExecutionContext(self.instance, self)

        arguments = self._get_arguments(arguments)
        return MethodExecutionContext(self.instance, self, arguments)

    def py__call__(self, arguments):
        if isinstance(self._wrapped_value, OverloadedFunctionValue):
            return self._wrapped_value.py__call__(self._get_arguments(arguments))

        function_execution = self.as_context(arguments)
        return function_execution.infer()

    def get_signature_functions(self):
        return [
            BoundMethod(self.instance, self._class_context, f)
            for f in self._wrapped_value.get_signature_functions()
        ]

    def get_signatures(self):
        return [sig.bind(self) for sig in super().get_signatures()]

    def __repr__(self):
        return '<%s: %s>' % (self.__class__.__name__, self._wrapped_value)


class CompiledBoundMethod(ValueWrapper):
    def is_bound_method(self):
        return True

    def get_signatures(self):
        return [sig.bind(self) for sig in self._wrapped_value.get_signatures()]


class SelfName(TreeNameDefinition):
    """
    This name calculates the parent_context lazily.
    """
    def __init__(self, instance, class_context, tree_name):
        self._instance = instance
        self.class_context = class_context
        self.tree_name = tree_name

    @property
    def parent_context(self):
        return self._instance.create_instance_context(self.class_context, self.tree_name)

    def get_defining_qualified_value(self):
        return self._instance

    def infer(self):
        stmt = search_ancestor(self.tree_name, 'expr_stmt')
        if stmt is not None:
            if stmt.children[1].type == "annassign":
                from jedi.inference.gradual.annotation import infer_annotation
                values = infer_annotation(
                    self.parent_context, stmt.children[1].children[1]
                ).execute_annotation()
                if values:
                    return values
        return super().infer()


class LazyInstanceClassName(NameWrapper):
    def __init__(self, instance, class_member_name):
        super().__init__(class_member_name)
        self._instance = instance

    @iterator_to_value_set
    def infer(self):
        for result_value in self._wrapped_name.infer():
            yield from result_value.py__get__(self._instance, self._instance.py__class__())

    def get_signatures(self):
        return self.infer().get_signatures()

    def get_defining_qualified_value(self):
        return self._instance


class InstanceClassFilter(AbstractFilter):
    """
    This filter is special in that it uses the class filter and wraps the
    resulting names in LazyInstanceClassName. The idea is that the class name
    filtering can be very flexible and always be reflected in instances.
    """
    def __init__(self, instance, class_filter):
        self._instance = instance
        self._class_filter = class_filter

    def get(self, name):
        return self._convert(self._class_filter.get(name))

    def values(self):
        return self._convert(self._class_filter.values())

    def _convert(self, names):
        return [
            LazyInstanceClassName(self._instance, n)
            for n in names
        ]

    def __repr__(self):
        return '<%s for %s>' % (self.__class__.__name__, self._class_filter)


class SelfAttributeFilter(ClassFilter):
    """
    This class basically filters all the use cases where `self.*` was assigned.
    """
    def __init__(self, instance, instance_class, node_context, origin_scope):
        super().__init__(
            class_value=instance_class,
            node_context=node_context,
            origin_scope=origin_scope,
            is_instance=True,
        )
        self._instance = instance

    def _filter(self, names):
        start, end = self._parser_scope.start_pos, self._parser_scope.end_pos
        names = [n for n in names if start < n.start_pos < end]
        return self._filter_self_names(names)

    def _filter_self_names(self, names):
        for name in names:
            trailer = name.parent
            if trailer.type == 'trailer' \
                    and len(trailer.parent.children) == 2 \
                    and trailer.children[0] == '.':
                if name.is_definition() and self._access_possible(name):
                    # TODO filter non-self assignments instead of this bad
                    # filter.
                    if self._is_in_right_scope(trailer.parent.children[0], name):
                        yield name

    def _is_in_right_scope(self, self_name, name):
        self_context = self._node_context.create_context(self_name)
        names = self_context.goto(self_name, position=self_name.start_pos)
        return any(
            n.api_type == 'param'
            and n.tree_name.get_definition().position_index == 0
            and n.parent_context.tree_node is self._parser_scope
            for n in names
        )

    def _convert_names(self, names):
        return [SelfName(self._instance, self._node_context, name) for name in names]

    def _check_flows(self, names):
        return names


class InstanceArguments(TreeArgumentsWrapper):
    def __init__(self, instance, arguments):
        super().__init__(arguments)
        self.instance = instance

    def unpack(self, func=None):
        yield None, LazyKnownValue(self.instance)
        yield from self._wrapped_arguments.unpack(func)
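
A sketch of what `SelfAttributeFilter` enables, again through the public API (not part of the committed file; assumes jedi is installed):

import jedi

code = '''\
class Foo:
    def __init__(self):
        self.value = 42

    def double(self):
        return self.value * 2

Foo().
'''
script = jedi.Script(code)
# `value` is found by scanning `self.value = ...` assignments in the class
# body (the SelfAttributeFilter above), not by a class-level definition.
print([c.name for c in script.complete(8, len('Foo().'))])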
.venv/Lib/site-packages/jedi/inference/value/iterable.py (new file, 647 lines; listing truncated below)
@ -0,0 +1,647 @@
|
||||
"""
|
||||
Contains all classes and functions to deal with lists, dicts, generators and
|
||||
iterators in general.
|
||||
"""
|
||||
from jedi.inference import compiled
|
||||
from jedi.inference import analysis
|
||||
from jedi.inference.lazy_value import LazyKnownValue, LazyKnownValues, \
|
||||
LazyTreeValue
|
||||
from jedi.inference.helpers import get_int_or_none, is_string, \
|
||||
reraise_getitem_errors, SimpleGetItemNotFound
|
||||
from jedi.inference.utils import safe_property, to_list
|
||||
from jedi.inference.cache import inference_state_method_cache
|
||||
from jedi.inference.filters import LazyAttributeOverwrite, publish_method
|
||||
from jedi.inference.base_value import ValueSet, Value, NO_VALUES, \
|
||||
ContextualizedNode, iterate_values, sentinel, \
|
||||
LazyValueWrapper
|
||||
from jedi.parser_utils import get_sync_comp_fors
|
||||
from jedi.inference.context import CompForContext
|
||||
from jedi.inference.value.dynamic_arrays import check_array_additions
|
||||
|
||||
|
||||
class IterableMixin:
|
||||
def py__next__(self, contextualized_node=None):
|
||||
return self.py__iter__(contextualized_node)
|
||||
|
||||
def py__stop_iteration_returns(self):
|
||||
return ValueSet([compiled.builtin_from_name(self.inference_state, 'None')])
|
||||
|
||||
# At the moment, safe values are simple values like "foo", 1 and not
|
||||
# lists/dicts. Therefore as a small speed optimization we can just do the
|
||||
# default instead of resolving the lazy wrapped values, that are just
|
||||
# doing this in the end as well.
|
||||
# This mostly speeds up patterns like `sys.version_info >= (3, 0)` in
|
||||
# typeshed.
|
||||
get_safe_value = Value.get_safe_value
|
||||
|
||||
|
||||
class GeneratorBase(LazyAttributeOverwrite, IterableMixin):
|
||||
array_type = None
|
||||
|
||||
def _get_wrapped_value(self):
|
||||
instance, = self._get_cls().execute_annotation()
|
||||
return instance
|
||||
|
||||
def _get_cls(self):
|
||||
generator, = self.inference_state.typing_module.py__getattribute__('Generator')
|
||||
return generator
|
||||
|
||||
def py__bool__(self):
|
||||
return True
|
||||
|
||||
@publish_method('__iter__')
|
||||
def _iter(self, arguments):
|
||||
return ValueSet([self])
|
||||
|
||||
@publish_method('send')
|
||||
@publish_method('__next__')
|
||||
def _next(self, arguments):
|
||||
return ValueSet.from_sets(lazy_value.infer() for lazy_value in self.py__iter__())
|
||||
|
||||
def py__stop_iteration_returns(self):
|
||||
return ValueSet([compiled.builtin_from_name(self.inference_state, 'None')])
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
return compiled.CompiledValueName(self, 'Generator')
|
||||
|
||||
def get_annotated_class_object(self):
|
||||
from jedi.inference.gradual.generics import TupleGenericManager
|
||||
gen_values = self.merge_types_of_iterate().py__class__()
|
||||
gm = TupleGenericManager((gen_values, NO_VALUES, NO_VALUES))
|
||||
return self._get_cls().with_generics(gm)
|
||||
|
||||
|
||||
class Generator(GeneratorBase):
|
||||
"""Handling of `yield` functions."""
|
||||
def __init__(self, inference_state, func_execution_context):
|
||||
super().__init__(inference_state)
|
||||
self._func_execution_context = func_execution_context
|
||||
|
||||
def py__iter__(self, contextualized_node=None):
|
||||
iterators = self._func_execution_context.infer_annotations()
|
||||
if iterators:
|
||||
return iterators.iterate(contextualized_node)
|
||||
return self._func_execution_context.get_yield_lazy_values()
|
||||
|
||||
def py__stop_iteration_returns(self):
|
||||
return self._func_execution_context.get_return_values()
|
||||
|
||||
def __repr__(self):
|
||||
return "<%s of %s>" % (type(self).__name__, self._func_execution_context)
|
||||
|
||||
|
||||
def comprehension_from_atom(inference_state, value, atom):
|
||||
bracket = atom.children[0]
|
||||
test_list_comp = atom.children[1]
|
||||
|
||||
if bracket == '{':
|
||||
if atom.children[1].children[1] == ':':
|
||||
sync_comp_for = test_list_comp.children[3]
|
||||
if sync_comp_for.type == 'comp_for':
|
||||
sync_comp_for = sync_comp_for.children[1]
|
||||
|
||||
return DictComprehension(
|
||||
inference_state,
|
||||
value,
|
||||
sync_comp_for_node=sync_comp_for,
|
||||
key_node=test_list_comp.children[0],
|
||||
value_node=test_list_comp.children[2],
|
||||
)
|
||||
else:
|
||||
cls = SetComprehension
|
||||
elif bracket == '(':
|
||||
cls = GeneratorComprehension
|
||||
elif bracket == '[':
|
||||
cls = ListComprehension
|
||||
|
||||
sync_comp_for = test_list_comp.children[1]
|
||||
if sync_comp_for.type == 'comp_for':
|
||||
sync_comp_for = sync_comp_for.children[1]
|
||||
|
||||
return cls(
|
||||
inference_state,
|
||||
defining_context=value,
|
||||
sync_comp_for_node=sync_comp_for,
|
||||
entry_node=test_list_comp.children[0],
|
||||
)
|
||||
|
||||
|
||||
class ComprehensionMixin:
|
||||
@inference_state_method_cache()
|
||||
def _get_comp_for_context(self, parent_context, comp_for):
|
||||
return CompForContext(parent_context, comp_for)
|
||||
|
||||
def _nested(self, comp_fors, parent_context=None):
|
||||
comp_for = comp_fors[0]
|
||||
|
||||
is_async = comp_for.parent.type == 'comp_for'
|
||||
|
||||
input_node = comp_for.children[3]
|
||||
parent_context = parent_context or self._defining_context
|
||||
input_types = parent_context.infer_node(input_node)
|
||||
|
||||
cn = ContextualizedNode(parent_context, input_node)
|
||||
iterated = input_types.iterate(cn, is_async=is_async)
|
||||
exprlist = comp_for.children[1]
|
||||
for i, lazy_value in enumerate(iterated):
|
||||
types = lazy_value.infer()
|
||||
dct = unpack_tuple_to_dict(parent_context, types, exprlist)
|
||||
context = self._get_comp_for_context(
|
||||
parent_context,
|
||||
comp_for,
|
||||
)
|
||||
with context.predefine_names(comp_for, dct):
|
||||
try:
|
||||
yield from self._nested(comp_fors[1:], context)
|
||||
except IndexError:
|
||||
iterated = context.infer_node(self._entry_node)
|
||||
if self.array_type == 'dict':
|
||||
yield iterated, context.infer_node(self._value_node)
|
||||
else:
|
||||
yield iterated
|
||||
|
||||
@inference_state_method_cache(default=[])
|
||||
@to_list
|
||||
def _iterate(self):
|
||||
comp_fors = tuple(get_sync_comp_fors(self._sync_comp_for_node))
|
||||
yield from self._nested(comp_fors)
|
||||
|
||||
def py__iter__(self, contextualized_node=None):
|
||||
for set_ in self._iterate():
|
||||
yield LazyKnownValues(set_)
|
||||
|
||||
def __repr__(self):
|
||||
return "<%s of %s>" % (type(self).__name__, self._sync_comp_for_node)
|
||||
|
||||
|
||||
class _DictMixin:
|
||||
def _get_generics(self):
|
||||
return tuple(c_set.py__class__() for c_set in self.get_mapping_item_values())
|
||||
|
||||
|
||||
class Sequence(LazyAttributeOverwrite, IterableMixin):
|
||||
api_type = 'instance'
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
return compiled.CompiledValueName(self, self.array_type)
|
||||
|
||||
def _get_generics(self):
|
||||
return (self.merge_types_of_iterate().py__class__(),)
|
||||
|
||||
@inference_state_method_cache(default=())
|
||||
def _cached_generics(self):
|
||||
return self._get_generics()
|
||||
|
||||
def _get_wrapped_value(self):
|
||||
from jedi.inference.gradual.base import GenericClass
|
||||
from jedi.inference.gradual.generics import TupleGenericManager
|
||||
klass = compiled.builtin_from_name(self.inference_state, self.array_type)
|
||||
c, = GenericClass(
|
||||
klass,
|
||||
TupleGenericManager(self._cached_generics())
|
||||
).execute_annotation()
|
||||
return c

    def py__bool__(self):
        return None  # We don't know the length, because of appends.

    @safe_property
    def parent(self):
        return self.inference_state.builtins_module

    def py__getitem__(self, index_value_set, contextualized_node):
        if self.array_type == 'dict':
            return self._dict_values()
        return iterate_values(ValueSet([self]))


class _BaseComprehension(ComprehensionMixin):
    def __init__(self, inference_state, defining_context, sync_comp_for_node, entry_node):
        assert sync_comp_for_node.type == 'sync_comp_for'
        super().__init__(inference_state)
        self._defining_context = defining_context
        self._sync_comp_for_node = sync_comp_for_node
        self._entry_node = entry_node


class ListComprehension(_BaseComprehension, Sequence):
    array_type = 'list'

    def py__simple_getitem__(self, index):
        if isinstance(index, slice):
            return ValueSet([self])

        all_types = list(self.py__iter__())
        with reraise_getitem_errors(IndexError, TypeError):
            lazy_value = all_types[index]
        return lazy_value.infer()


class SetComprehension(_BaseComprehension, Sequence):
    array_type = 'set'


class GeneratorComprehension(_BaseComprehension, GeneratorBase):
    pass


class _DictKeyMixin:
    # TODO merge with _DictMixin?
    def get_mapping_item_values(self):
        return self._dict_keys(), self._dict_values()

    def get_key_values(self):
        # TODO merge with _dict_keys?
        return self._dict_keys()


class DictComprehension(ComprehensionMixin, Sequence, _DictKeyMixin):
    array_type = 'dict'

    def __init__(self, inference_state, defining_context, sync_comp_for_node, key_node, value_node):
        assert sync_comp_for_node.type == 'sync_comp_for'
        super().__init__(inference_state)
        self._defining_context = defining_context
        self._sync_comp_for_node = sync_comp_for_node
        self._entry_node = key_node
        self._value_node = value_node

    def py__iter__(self, contextualized_node=None):
        for keys, values in self._iterate():
            yield LazyKnownValues(keys)

    def py__simple_getitem__(self, index):
        for keys, values in self._iterate():
            for k in keys:
                # Be careful in the future if refactoring, index could be a
                # slice object.
                if k.get_safe_value(default=object()) == index:
                    return values
        raise SimpleGetItemNotFound()

    def _dict_keys(self):
        return ValueSet.from_sets(keys for keys, values in self._iterate())

    def _dict_values(self):
        return ValueSet.from_sets(values for keys, values in self._iterate())

    @publish_method('values')
    def _imitate_values(self, arguments):
        lazy_value = LazyKnownValues(self._dict_values())
        return ValueSet([FakeList(self.inference_state, [lazy_value])])

    @publish_method('items')
    def _imitate_items(self, arguments):
        lazy_values = [
            LazyKnownValue(
                FakeTuple(
                    self.inference_state,
                    [LazyKnownValues(key),
                     LazyKnownValues(value)]
                )
            )
            for key, value in self._iterate()
        ]

        return ValueSet([FakeList(self.inference_state, lazy_values)])

    def exact_key_items(self):
        # NOTE: A smarter thing can probably be done here to achieve better
        # completions, but at least like this jedi doesn't crash.
        return []


class SequenceLiteralValue(Sequence):
    _TUPLE_LIKE = 'testlist_star_expr', 'testlist', 'subscriptlist'
    mapping = {'(': 'tuple',
               '[': 'list',
               '{': 'set'}

    def __init__(self, inference_state, defining_context, atom):
        super().__init__(inference_state)
        self.atom = atom
        self._defining_context = defining_context

        if self.atom.type in self._TUPLE_LIKE:
            self.array_type = 'tuple'
        else:
            self.array_type = SequenceLiteralValue.mapping[atom.children[0]]
            """The builtin name of the array (list, set, tuple or dict)."""

    def _get_generics(self):
        if self.array_type == 'tuple':
            return tuple(x.infer().py__class__() for x in self.py__iter__())
        return super()._get_generics()

    def py__simple_getitem__(self, index):
        """Here the index is an int/str. Raises IndexError/KeyError."""
        if isinstance(index, slice):
            return ValueSet([self])
        else:
            with reraise_getitem_errors(TypeError, KeyError, IndexError):
                node = self.get_tree_entries()[index]
            if node == ':' or node.type == 'subscript':
                return NO_VALUES
            return self._defining_context.infer_node(node)

    def py__iter__(self, contextualized_node=None):
        """
        While values returns the possible values for any array field, this
        function returns the value for a certain index.
        """
        for node in self.get_tree_entries():
            if node == ':' or node.type == 'subscript':
                # TODO this should probably use at least part of the code
                # of infer_subscript_list.
                yield LazyKnownValue(Slice(self._defining_context, None, None, None))
            else:
                yield LazyTreeValue(self._defining_context, node)
        yield from check_array_additions(self._defining_context, self)

    def py__len__(self):
        # This function is not really used often. It's more of a try.
        return len(self.get_tree_entries())

    def get_tree_entries(self):
        c = self.atom.children

        if self.atom.type in self._TUPLE_LIKE:
            return c[::2]

        array_node = c[1]
        if array_node in (']', '}', ')'):
            return []  # Direct closing bracket, doesn't contain items.

        if array_node.type == 'testlist_comp':
            # filter out (for now) pep 448 single-star unpacking
            return [value for value in array_node.children[::2]
                    if value.type != "star_expr"]
        elif array_node.type == 'dictorsetmaker':
            kv = []
            iterator = iter(array_node.children)
            for key in iterator:
                if key == "**":
                    # dict with pep 448 double-star unpacking
                    # for now ignoring the values imported by **
                    next(iterator)
                    next(iterator, None)  # Possible comma.
                else:
                    op = next(iterator, None)
                    if op is None or op == ',':
                        if key.type == "star_expr":
                            # pep 448 single-star unpacking
                            # for now ignoring values imported by *
                            pass
                        else:
                            kv.append(key)  # A set.
                    else:
                        assert op == ':'  # A dict.
                        kv.append((key, next(iterator)))
                        next(iterator, None)  # Possible comma.
            return kv
        else:
            if array_node.type == "star_expr":
                # pep 448 single-star unpacking
                # for now ignoring values imported by *
                return []
            else:
                return [array_node]
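
    # Example shapes handled above: for ``{1: 'a', **other}`` the
    # dictorsetmaker children are roughly ``[1, ':', 'a', ',', '**', other]``,
    # so the ``**`` entry is skipped while ``(1, 'a')`` lands in kv; for
    # ``{1, 2}`` there is no ':' operator and the keys themselves form a set.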

    def __repr__(self):
        return "<%s of %s>" % (self.__class__.__name__, self.atom)


class DictLiteralValue(_DictMixin, SequenceLiteralValue, _DictKeyMixin):
    array_type = 'dict'

    def __init__(self, inference_state, defining_context, atom):
        # Intentionally don't call the super class. This is definitely a sign
        # that the architecture is bad and we should refactor.
        Sequence.__init__(self, inference_state)
        self._defining_context = defining_context
        self.atom = atom

    def py__simple_getitem__(self, index):
        """Here the index is an int/str. Raises IndexError/KeyError."""
        compiled_value_index = compiled.create_simple_object(self.inference_state, index)
        for key, value in self.get_tree_entries():
            for k in self._defining_context.infer_node(key):
                for key_v in k.execute_operation(compiled_value_index, '=='):
                    if key_v.get_safe_value():
                        return self._defining_context.infer_node(value)
        raise SimpleGetItemNotFound('No key found in dictionary %s.' % self)
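
    # e.g. inferring ``{'a': 1}['a']`` compares each inferred key against the
    # compiled index object via ``==`` and returns the value node's types on
    # the first match.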

    def py__iter__(self, contextualized_node=None):
        """
        While values returns the possible values for any array field, this
        function returns the value for a certain index.
        """
        # Get keys.
        types = NO_VALUES
        for k, _ in self.get_tree_entries():
            types |= self._defining_context.infer_node(k)
        # We don't know which dict index comes first, therefore always
        # yield all the types.
        for _ in types:
            yield LazyKnownValues(types)

    @publish_method('values')
    def _imitate_values(self, arguments):
        lazy_value = LazyKnownValues(self._dict_values())
        return ValueSet([FakeList(self.inference_state, [lazy_value])])

    @publish_method('items')
    def _imitate_items(self, arguments):
        lazy_values = [
            LazyKnownValue(FakeTuple(
                self.inference_state,
                (LazyTreeValue(self._defining_context, key_node),
                 LazyTreeValue(self._defining_context, value_node))
            )) for key_node, value_node in self.get_tree_entries()
        ]

        return ValueSet([FakeList(self.inference_state, lazy_values)])

    def exact_key_items(self):
        """
        Returns a generator of tuples like dict.items(), where the key is
        resolved (as a string) and the values are still lazy values.
        """
        for key_node, value in self.get_tree_entries():
            for key in self._defining_context.infer_node(key_node):
                if is_string(key):
                    yield key.get_safe_value(), LazyTreeValue(self._defining_context, value)

    def _dict_values(self):
        return ValueSet.from_sets(
            self._defining_context.infer_node(v)
            for k, v in self.get_tree_entries()
        )

    def _dict_keys(self):
        return ValueSet.from_sets(
            self._defining_context.infer_node(k)
            for k, v in self.get_tree_entries()
        )


class _FakeSequence(Sequence):
    def __init__(self, inference_state, lazy_value_list):
        """
        type should be one of "tuple", "list"
        """
        super().__init__(inference_state)
        self._lazy_value_list = lazy_value_list

    def py__simple_getitem__(self, index):
        if isinstance(index, slice):
            return ValueSet([self])

        with reraise_getitem_errors(IndexError, TypeError):
            lazy_value = self._lazy_value_list[index]
        return lazy_value.infer()

    def py__iter__(self, contextualized_node=None):
        return self._lazy_value_list

    def py__bool__(self):
        return bool(len(self._lazy_value_list))

    def __repr__(self):
        return "<%s of %s>" % (type(self).__name__, self._lazy_value_list)


class FakeTuple(_FakeSequence):
    array_type = 'tuple'


class FakeList(_FakeSequence):
    array_type = 'list'


class FakeDict(_DictMixin, Sequence, _DictKeyMixin):
    array_type = 'dict'

    def __init__(self, inference_state, dct):
        super().__init__(inference_state)
        self._dct = dct

    def py__iter__(self, contextualized_node=None):
        for key in self._dct:
            yield LazyKnownValue(compiled.create_simple_object(self.inference_state, key))

    def py__simple_getitem__(self, index):
        with reraise_getitem_errors(KeyError, TypeError):
            lazy_value = self._dct[index]
        return lazy_value.infer()

    @publish_method('values')
    def _values(self, arguments):
        return ValueSet([FakeTuple(
            self.inference_state,
            [LazyKnownValues(self._dict_values())]
        )])

    def _dict_values(self):
        return ValueSet.from_sets(lazy_value.infer() for lazy_value in self._dct.values())

    def _dict_keys(self):
        return ValueSet.from_sets(lazy_value.infer() for lazy_value in self.py__iter__())

    def exact_key_items(self):
        return self._dct.items()

    def __repr__(self):
        return '<%s: %s>' % (self.__class__.__name__, self._dct)


class MergedArray(Sequence):
    def __init__(self, inference_state, arrays):
        super().__init__(inference_state)
        self.array_type = arrays[-1].array_type
        self._arrays = arrays

    def py__iter__(self, contextualized_node=None):
        for array in self._arrays:
            yield from array.py__iter__()

    def py__simple_getitem__(self, index):
        return ValueSet.from_sets(lazy_value.infer() for lazy_value in self.py__iter__())


def unpack_tuple_to_dict(context, types, exprlist):
    """
    Unpacking tuple assignments in for statements and expr_stmts.
    """
    if exprlist.type == 'name':
        return {exprlist.value: types}
    elif exprlist.type == 'atom' and exprlist.children[0] in ('(', '['):
        return unpack_tuple_to_dict(context, types, exprlist.children[1])
    elif exprlist.type in ('testlist', 'testlist_comp', 'exprlist',
                           'testlist_star_expr'):
        dct = {}
        parts = iter(exprlist.children[::2])
        n = 0
        for lazy_value in types.iterate(ContextualizedNode(context, exprlist)):
            n += 1
            try:
                part = next(parts)
            except StopIteration:
                analysis.add(context, 'value-error-too-many-values', part,
                             message="ValueError: too many values to unpack (expected %s)" % n)
            else:
                dct.update(unpack_tuple_to_dict(context, lazy_value.infer(), part))
        has_parts = next(parts, None)
        if types and has_parts is not None:
            analysis.add(context, 'value-error-too-few-values', has_parts,
                         message="ValueError: need more than %s values to unpack" % n)
        return dct
    elif exprlist.type == 'power' or exprlist.type == 'atom_expr':
        # Something like ``arr[x], var = ...``.
        # This is something that is not yet supported, would also be difficult
        # to write into a dict.
        return {}
    elif exprlist.type == 'star_expr':  # `a, *b, c = x` type unpackings
        # Currently we're not supporting them.
        return {}
    raise NotImplementedError
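
# For example, ``a, (b, c) = x`` recurses through the parenthesized atom and
# flattens to ``{'a': ..., 'b': ..., 'c': ...}``, counting iterated values so
# too-many/too-few unpacking errors can be reported along the way.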


class Slice(LazyValueWrapper):
    def __init__(self, python_context, start, stop, step):
        self.inference_state = python_context.inference_state
        self._context = python_context
        # All of them are either a Precedence or None.
        self._start = start
        self._stop = stop
        self._step = step

    def _get_wrapped_value(self):
        value = compiled.builtin_from_name(self._context.inference_state, 'slice')
        slice_value, = value.execute_with_values()
        return slice_value

    def get_safe_value(self, default=sentinel):
        """
        Imitate CompiledValue.obj behavior and return a ``builtin.slice()``
        object.
        """
        def get(element):
            if element is None:
                return None

            result = self._context.infer_node(element)
            if len(result) != 1:
                # For simplicity, we want slices to be clearly defined with
                # just one type. Otherwise we will return an empty slice
                # object.
                raise IndexError

            value, = result
            return get_int_or_none(value)

        try:
            return slice(get(self._start), get(self._stop), get(self._step))
        except IndexError:
            return slice(None, None, None)
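
    # e.g. ``foo[1:10:2]`` resolves to slice(1, 10, 2); if any bound infers to
    # more than one value, the fallback slice(None, None, None) is used.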
403
.venv/Lib/site-packages/jedi/inference/value/klass.py
Normal file
@ -0,0 +1,403 @@
"""
As described in the :mod:`parso.python.tree` module,
there's a need for an AST-like module to represent the states of parsed
modules.

But now there are also structures in Python that need a little bit more than
that. An ``Instance`` for example is only a ``Class`` before it is
instantiated. This class represents these cases.

So, why is there also a ``Class`` class here? Well, there are decorators and
they change classes in Python 3.

Representation modules also define "magic methods". Those methods look like
``py__foo__`` and are typically mappable to the Python equivalents ``__call__``
and others. Here's a list:

====================================== ========================================
**Method**                             **Description**
-------------------------------------- ----------------------------------------
py__call__(arguments: Array)           On callable objects, returns types.
py__bool__()                           Returns True/False/None; None means that
                                       there's no certainty.
py__bases__()                          Returns a list of base classes.
py__iter__()                           Returns a generator of a set of types.
py__class__()                          Returns the class of an instance.
py__simple_getitem__(index: int/str)   Returns a set of types of the index.
                                       Can raise an IndexError/KeyError.
py__getitem__(indexes: ValueSet)       Returns a set of types of the index.
py__file__()                           Only on modules. Returns None if it
                                       does not exist.
py__package__() -> List[str]           Only on modules. For the import system.
py__path__()                           Only on modules. For the import system.
py__get__(call_object)                 Only on instances. Simulates
                                       descriptors.
py__doc__()                            Returns the docstring for a value.
====================================== ========================================

"""
from jedi import debug
from jedi.parser_utils import get_cached_parent_scope, expr_is_dotted, \
    function_is_property
from jedi.inference.cache import inference_state_method_cache, CachedMetaClass, \
    inference_state_method_generator_cache
from jedi.inference import compiled
from jedi.inference.lazy_value import LazyKnownValues, LazyTreeValue
from jedi.inference.filters import ParserTreeFilter
from jedi.inference.names import TreeNameDefinition, ValueName
from jedi.inference.arguments import unpack_arglist, ValuesArguments
from jedi.inference.base_value import ValueSet, iterator_to_value_set, \
    NO_VALUES
from jedi.inference.context import ClassContext
from jedi.inference.value.function import FunctionAndClassBase
from jedi.inference.gradual.generics import LazyGenericManager, TupleGenericManager
from jedi.plugins import plugin_manager


class ClassName(TreeNameDefinition):
    def __init__(self, class_value, tree_name, name_context, apply_decorators):
        super().__init__(name_context, tree_name)
        self._apply_decorators = apply_decorators
        self._class_value = class_value

    @iterator_to_value_set
    def infer(self):
        # We're using a different value to infer, so we cannot call super().
        from jedi.inference.syntax_tree import tree_name_to_values
        inferred = tree_name_to_values(
            self.parent_context.inference_state, self.parent_context, self.tree_name)

        for result_value in inferred:
            if self._apply_decorators:
                yield from result_value.py__get__(instance=None, class_value=self._class_value)
            else:
                yield result_value

    @property
    def api_type(self):
        type_ = super().api_type
        if type_ == 'function':
            definition = self.tree_name.get_definition()
            if function_is_property(definition):
                # This essentially checks if there is an @property before
                # the function. @property could be something different, but
                # any programmer that redefines property as something that
                # is not really a property anymore, should be shot. (i.e.
                # this is a heuristic).
                return 'property'
        return type_


class ClassFilter(ParserTreeFilter):
    def __init__(self, class_value, node_context=None, until_position=None,
                 origin_scope=None, is_instance=False):
        super().__init__(
            class_value.as_context(), node_context,
            until_position=until_position,
            origin_scope=origin_scope,
        )
        self._class_value = class_value
        self._is_instance = is_instance

    def _convert_names(self, names):
        return [
            ClassName(
                class_value=self._class_value,
                tree_name=name,
                name_context=self._node_context,
                apply_decorators=not self._is_instance,
            ) for name in names
        ]

    def _equals_origin_scope(self):
        node = self._origin_scope
        while node is not None:
            if node == self._parser_scope or node == self.parent_context:
                return True
            node = get_cached_parent_scope(self._parso_cache_node, node)
        return False

    def _access_possible(self, name):
        # Filter for ClassVar variables
        # TODO this is not properly done, yet. It just checks for the string
        # ClassVar in the annotation, which can be quite imprecise. If we
        # wanted to do this correctly, we would have to infer the ClassVar.
        if not self._is_instance:
            expr_stmt = name.get_definition()
            if expr_stmt is not None and expr_stmt.type == 'expr_stmt':
                annassign = expr_stmt.children[1]
                if annassign.type == 'annassign':
                    # If there is an =, the variable is obviously also
                    # defined on the class.
                    if 'ClassVar' not in annassign.children[1].get_code() \
                            and '=' not in annassign.children:
                        return False

        # Filter for name mangling of private variables like __foo
        return not name.value.startswith('__') or name.value.endswith('__') \
            or self._equals_origin_scope()
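
    # e.g. ``__foo`` is only offered when the access originates from the
    # class' own scope, while dunder names like ``__init__`` always pass the
    # check above.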

    def _filter(self, names):
        names = super()._filter(names)
        return [name for name in names if self._access_possible(name)]


class ClassMixin:
    def is_class(self):
        return True

    def is_class_mixin(self):
        return True

    def py__call__(self, arguments):
        from jedi.inference.value import TreeInstance

        from jedi.inference.gradual.typing import TypedDict
        if self.is_typeddict():
            return ValueSet([TypedDict(self)])
        return ValueSet([TreeInstance(self.inference_state, self.parent_context, self, arguments)])

    def py__class__(self):
        return compiled.builtin_from_name(self.inference_state, 'type')

    @property
    def name(self):
        return ValueName(self, self.tree_node.name)

    def py__name__(self):
        return self.name.string_name

    @inference_state_method_generator_cache()
    def py__mro__(self):
        mro = [self]
        yield self
        # TODO Do a proper mro resolution. Currently we are just listing
        # classes. However, it's a complicated algorithm.
        for lazy_cls in self.py__bases__():
            # TODO there are multiple different mro paths possible if this
            # yields multiple possibilities. Could be changed to be more
            # correct.
            for cls in lazy_cls.infer():
                # TODO detect for TypeError: duplicate base class str,
                # e.g. `class X(str, str): pass`
                try:
                    mro_method = cls.py__mro__
                except AttributeError:
                    # TODO add a TypeError like:
                    """
                    >>> class Y(lambda: test): pass
                    Traceback (most recent call last):
                      File "<stdin>", line 1, in <module>
                    TypeError: function() argument 1 must be code, not str
                    >>> class Y(1): pass
                    Traceback (most recent call last):
                      File "<stdin>", line 1, in <module>
                    TypeError: int() takes at most 2 arguments (3 given)
                    """
                    debug.warning('Super class of %s is not a class: %s', self, cls)
                else:
                    for cls_new in mro_method():
                        if cls_new not in mro:
                            mro.append(cls_new)
                            yield cls_new
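
    # For ``class C(A, B)`` this yields C first, then A and its bases, then B
    # and its bases, skipping duplicates; a real C3 linearization is left as
    # a TODO above.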

    def get_filters(self, origin_scope=None, is_instance=False,
                    include_metaclasses=True, include_type_when_class=True):
        if include_metaclasses:
            metaclasses = self.get_metaclasses()
            if metaclasses:
                yield from self.get_metaclass_filters(metaclasses, is_instance)

        for cls in self.py__mro__():
            if cls.is_compiled():
                yield from cls.get_filters(is_instance=is_instance)
            else:
                yield ClassFilter(
                    self, node_context=cls.as_context(),
                    origin_scope=origin_scope,
                    is_instance=is_instance
                )
        if not is_instance and include_type_when_class:
            from jedi.inference.compiled import builtin_from_name
            type_ = builtin_from_name(self.inference_state, 'type')
            assert isinstance(type_, ClassValue)
            if type_ != self:
                # We are not using execute_with_values here, because the
                # plugin function for type would get executed instead of an
                # instance creation.
                args = ValuesArguments([])
                for instance in type_.py__call__(args):
                    instance_filters = instance.get_filters()
                    # Filter out self filters
                    next(instance_filters, None)
                    next(instance_filters, None)
                    x = next(instance_filters, None)
                    assert x is not None
                    yield x

    def get_signatures(self):
        # Since calling staticmethod without a function is illegal, the Jedi
        # plugin doesn't return anything. Therefore call directly and get what
        # we want: An instance of staticmethod.
        metaclasses = self.get_metaclasses()
        if metaclasses:
            sigs = self.get_metaclass_signatures(metaclasses)
            if sigs:
                return sigs
        args = ValuesArguments([])
        init_funcs = self.py__call__(args).py__getattribute__('__init__')
        return [sig.bind(self) for sig in init_funcs.get_signatures()]

    def _as_context(self):
        return ClassContext(self)

    def get_type_hint(self, add_class_info=True):
        if add_class_info:
            return 'Type[%s]' % self.py__name__()
        return self.py__name__()

    @inference_state_method_cache(default=False)
    def is_typeddict(self):
        # TODO Do a proper mro resolution. Currently we are just listing
        # classes. However, it's a complicated algorithm.
        from jedi.inference.gradual.typing import TypedDictClass
        for lazy_cls in self.py__bases__():
            if not isinstance(lazy_cls, LazyTreeValue):
                return False
            tree_node = lazy_cls.data
            # Only resolve simple classes; stuff like Iterable[str] is more
            # intensive to resolve, and if generics are involved, we know it's
            # not a TypedDict.
            if not expr_is_dotted(tree_node):
                return False

            for cls in lazy_cls.infer():
                if isinstance(cls, TypedDictClass):
                    return True
                try:
                    method = cls.is_typeddict
                except AttributeError:
                    # We're only dealing with simple classes, so just returning
                    # here should be fine. This only happens with e.g. compiled
                    # classes.
                    return False
                else:
                    if method():
                        return True
        return False

    def py__getitem__(self, index_value_set, contextualized_node):
        from jedi.inference.gradual.base import GenericClass
        if not index_value_set:
            debug.warning('Class indexes inferred to nothing. Returning class instead')
            return ValueSet([self])
        return ValueSet(
            GenericClass(
                self,
                LazyGenericManager(
                    context_of_index=contextualized_node.context,
                    index_value=index_value,
                )
            )
            for index_value in index_value_set
        )

    def with_generics(self, generics_tuple):
        from jedi.inference.gradual.base import GenericClass
        return GenericClass(
            self,
            TupleGenericManager(generics_tuple)
        )

    def define_generics(self, type_var_dict):
        from jedi.inference.gradual.base import GenericClass

        def remap_type_vars():
            """
            The TypeVars in the resulting classes sometimes have different
            names and we need to check for that, e.g. a signature can be:

                def iter(iterable: Iterable[_T]) -> Iterator[_T]: ...

            However, the iterator is defined as Iterator[_T_co], which means
            it has a different type var name.
            """
            for type_var in self.list_type_vars():
                yield type_var_dict.get(type_var.py__name__(), NO_VALUES)

        if type_var_dict:
            return ValueSet([GenericClass(
                self,
                TupleGenericManager(tuple(remap_type_vars()))
            )])
        return ValueSet({self})


class ClassValue(ClassMixin, FunctionAndClassBase, metaclass=CachedMetaClass):
    api_type = 'class'

    @inference_state_method_cache()
    def list_type_vars(self):
        found = []
        arglist = self.tree_node.get_super_arglist()
        if arglist is None:
            return []

        for stars, node in unpack_arglist(arglist):
            if stars:
                continue  # These are not relevant for this search.

            from jedi.inference.gradual.annotation import find_unknown_type_vars
            for type_var in find_unknown_type_vars(self.parent_context, node):
                if type_var not in found:
                    # The order matters and it's therefore a list.
                    found.append(type_var)
        return found

    def _get_bases_arguments(self):
        arglist = self.tree_node.get_super_arglist()
        if arglist:
            from jedi.inference import arguments
            return arguments.TreeArguments(self.inference_state, self.parent_context, arglist)
        return None

    @inference_state_method_cache(default=())
    def py__bases__(self):
        args = self._get_bases_arguments()
        if args is not None:
            lst = [value for key, value in args.unpack() if key is None]
            if lst:
                return lst

        if self.py__name__() == 'object' \
                and self.parent_context.is_builtins_module():
            return []
        return [LazyKnownValues(
            self.inference_state.builtins_module.py__getattribute__('object')
        )]
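
    # e.g. for ``class Foo(Bar, metaclass=Meta)`` only Bar is yielded here;
    # keyword arguments such as the metaclass are filtered out and handled
    # separately by get_metaclasses() below.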

    @plugin_manager.decorate()
    def get_metaclass_filters(self, metaclasses, is_instance):
        debug.warning('Unprocessed metaclass %s', metaclasses)
        return []

    @inference_state_method_cache(default=NO_VALUES)
    def get_metaclasses(self):
        args = self._get_bases_arguments()
        if args is not None:
            m = [value for key, value in args.unpack() if key == 'metaclass']
            metaclasses = ValueSet.from_sets(lazy_value.infer() for lazy_value in m)
            metaclasses = ValueSet(m for m in metaclasses if m.is_class())
            if metaclasses:
                return metaclasses

        for lazy_base in self.py__bases__():
            for value in lazy_base.infer():
                if value.is_class():
                    values = value.get_metaclasses()
                    if values:
                        return values
        return NO_VALUES

    @plugin_manager.decorate()
    def get_metaclass_signatures(self, metaclasses):
        return []
230
.venv/Lib/site-packages/jedi/inference/value/module.py
Normal file
@ -0,0 +1,230 @@
import os
from pathlib import Path
from typing import Optional

from jedi.inference.cache import inference_state_method_cache
from jedi.inference.names import AbstractNameDefinition, ModuleName
from jedi.inference.filters import GlobalNameFilter, ParserTreeFilter, DictFilter, MergedFilter
from jedi.inference import compiled
from jedi.inference.base_value import TreeValue
from jedi.inference.names import SubModuleName
from jedi.inference.helpers import values_from_qualified_names
from jedi.inference.compiled import create_simple_object
from jedi.inference.base_value import ValueSet
from jedi.inference.context import ModuleContext


class _ModuleAttributeName(AbstractNameDefinition):
    """
    For module attributes like __file__, __str__ and so on.
    """
    api_type = 'instance'

    def __init__(self, parent_module, string_name, string_value=None):
        self.parent_context = parent_module
        self.string_name = string_name
        self._string_value = string_value

    def infer(self):
        if self._string_value is not None:
            s = self._string_value
            return ValueSet([
                create_simple_object(self.parent_context.inference_state, s)
            ])
        return compiled.get_string_value_set(self.parent_context.inference_state)


class SubModuleDictMixin:
    @inference_state_method_cache()
    def sub_modules_dict(self):
        """
        Lists modules in the directory of this module (if this module is a
        package).
        """
        names = {}
        if self.is_package():
            mods = self.inference_state.compiled_subprocess.iter_module_names(
                self.py__path__()
            )
            for name in mods:
                # It's obviously a relative import to the current module.
                names[name] = SubModuleName(self.as_context(), name)

        # In the case of an import like `from x.` we don't need to
        # add all the variables, this is only about submodules.
        return names


class ModuleMixin(SubModuleDictMixin):
    _module_name_class = ModuleName

    def get_filters(self, origin_scope=None):
        yield MergedFilter(
            ParserTreeFilter(
                parent_context=self.as_context(),
                origin_scope=origin_scope
            ),
            GlobalNameFilter(self.as_context()),
        )
        yield DictFilter(self.sub_modules_dict())
        yield DictFilter(self._module_attributes_dict())
        yield from self.iter_star_filters()

    def py__class__(self):
        c, = values_from_qualified_names(self.inference_state, 'types', 'ModuleType')
        return c

    def is_module(self):
        return True

    def is_stub(self):
        return False

    @property  # type: ignore[misc]
    @inference_state_method_cache()
    def name(self):
        return self._module_name_class(self, self.string_names[-1])

    @inference_state_method_cache()
    def _module_attributes_dict(self):
        names = ['__package__', '__doc__', '__name__']
        # All the additional module attributes are strings.
        dct = dict((n, _ModuleAttributeName(self, n)) for n in names)
        path = self.py__file__()
        if path is not None:
            dct['__file__'] = _ModuleAttributeName(self, '__file__', str(path))
        return dct
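
    # Completing ``__file__`` on a module therefore resolves to the concrete
    # path string when one exists, while ``__doc__`` and friends fall back to
    # a plain str instance.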

    def iter_star_filters(self):
        for star_module in self.star_imports():
            f = next(star_module.get_filters(), None)
            assert f is not None
            yield f

    # I'm not sure if the star import cache is really that effective anymore
    # with all the other really fast import caches. Recheck. Also we would need
    # to push the star imports into InferenceState.module_cache, if we reenable this.
    @inference_state_method_cache([])
    def star_imports(self):
        from jedi.inference.imports import Importer

        modules = []
        module_context = self.as_context()
        for i in self.tree_node.iter_imports():
            if i.is_star_import():
                new = Importer(
                    self.inference_state,
                    import_path=i.get_paths()[-1],
                    module_context=module_context,
                    level=i.level
                ).follow()

                for module in new:
                    if isinstance(module, ModuleValue):
                        modules += module.star_imports()
                modules += new
        return modules

    def get_qualified_names(self):
        """
        A module doesn't have a qualified name, but it's important to note
        that it's reachable and not `None`. With this information we can add
        qualified names on top for all value children.
        """
        return ()


class ModuleValue(ModuleMixin, TreeValue):
    api_type = 'module'

    def __init__(self, inference_state, module_node, code_lines, file_io=None,
                 string_names=None, is_package=False):
        super().__init__(
            inference_state,
            parent_context=None,
            tree_node=module_node
        )
        self.file_io = file_io
        if file_io is None:
            self._path: Optional[Path] = None
        else:
            self._path = file_io.path
        self.string_names = string_names  # Optional[Tuple[str, ...]]
        self.code_lines = code_lines
        self._is_package = is_package

    def is_stub(self):
        if self._path is not None and self._path.suffix == '.pyi':
            # Currently this is how we identify stubs when e.g. goto is used
            # in them. This could be changed if stubs were identified sooner
            # and used as StubModuleValue.
            return True
        return super().is_stub()

    def py__name__(self):
        if self.string_names is None:
            return None
        return '.'.join(self.string_names)

    def py__file__(self) -> Optional[Path]:
        """
        In contrast to Python's ``__file__``, this can be None.
        """
        if self._path is None:
            return None

        return self._path.absolute()

    def is_package(self):
        return self._is_package

    def py__package__(self):
        if self.string_names is None:
            return []

        if self._is_package:
            return self.string_names
        return self.string_names[:-1]

    def py__path__(self):
        """
        In case of a package, this returns Python's __path__ attribute, which
        is a list of paths (strings).
        Returns None if the module is not a package.
        """
        if not self._is_package:
            return None

        # A namespace package is typically auto generated and ~10 lines long.
        first_few_lines = ''.join(self.code_lines[:50])
        # These are strings that need to be used for namespace packages;
        # the first one is ``pkgutil``, the second ``pkg_resources``.
        options = ('declare_namespace(__name__)', 'extend_path(__path__')
        if options[0] in first_few_lines or options[1] in first_few_lines:
            # It is a namespace, now try to find the rest of the
            # modules on sys_path or whatever the search_path is.
            paths = set()
            for s in self.inference_state.get_sys_path():
                other = os.path.join(s, self.name.string_name)
                if os.path.isdir(other):
                    paths.add(other)
            if paths:
                return list(paths)
            # Nested namespace packages will not be supported. Nobody ever
            # asked for it and in Python 3 they are there without using all
            # the crap above.

        # Default to the directory of this file.
        file = self.py__file__()
        assert file is not None  # Shouldn't be a package in the first place.
        return [os.path.dirname(file)]
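
    # e.g. a pkgutil-style ``__init__.py`` containing
    # ``__path__ = extend_path(__path__, __name__)`` makes every matching
    # directory on sys.path part of the package's search path.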

    def _as_context(self):
        return ModuleContext(self)

    def __repr__(self):
        return "<%s: %s@%s-%s is_stub=%s>" % (
            self.__class__.__name__, self.py__name__(),
            self.tree_node.start_pos[0], self.tree_node.end_pos[0],
            self.is_stub()
        )
74
.venv/Lib/site-packages/jedi/inference/value/namespace.py
Normal file
@ -0,0 +1,74 @@
from pathlib import Path
from typing import Optional

from jedi.inference.cache import inference_state_method_cache
from jedi.inference.filters import DictFilter
from jedi.inference.names import ValueNameMixin, AbstractNameDefinition
from jedi.inference.base_value import Value
from jedi.inference.value.module import SubModuleDictMixin
from jedi.inference.context import NamespaceContext


class ImplicitNSName(ValueNameMixin, AbstractNameDefinition):
    """
    Accessing names for implicit namespace packages should infer to nothing.
    This object will prevent Jedi from raising exceptions.
    """
    def __init__(self, implicit_ns_value, string_name):
        self._value = implicit_ns_value
        self.string_name = string_name


class ImplicitNamespaceValue(Value, SubModuleDictMixin):
    """
    Provides support for implicit namespace packages.
    """
    api_type = 'namespace'
    parent_context = None

    def __init__(self, inference_state, string_names, paths):
        super().__init__(inference_state, parent_context=None)
        self.inference_state = inference_state
        self.string_names = string_names
        self._paths = paths

    def get_filters(self, origin_scope=None):
        yield DictFilter(self.sub_modules_dict())

    def get_qualified_names(self):
        return ()

    @property  # type: ignore[misc]
    @inference_state_method_cache()
    def name(self):
        string_name = self.py__package__()[-1]
        return ImplicitNSName(self, string_name)

    def py__file__(self) -> Optional[Path]:
        return None

    def py__package__(self):
        """Return the full name of the package."""
        return self.string_names

    def py__path__(self):
        return self._paths

    def py__name__(self):
        return '.'.join(self.string_names)

    def is_namespace(self):
        return True

    def is_stub(self):
        return False

    def is_package(self):
        return True

    def as_context(self):
        return NamespaceContext(self)

    def __repr__(self):
        return '<%s: %s>' % (self.__class__.__name__, self.py__name__())