mirror of https://github.com/aykhans/AzSuicideDataVisualization.git
synced 2025-07-02 22:30:48 +00:00

first commit

@@ -0,0 +1,4 @@
"""
It is unfortunately not well documented how stubs and annotations work in Jedi.
If somebody needs an introduction, please let me know.
"""

.venv/Lib/site-packages/jedi/inference/gradual/annotation.py (new file, 470 lines)
@@ -0,0 +1,470 @@
"""
PEP 0484 ( https://www.python.org/dev/peps/pep-0484/ ) describes type hints
through function annotations. There is a strong suggestion in this document
that only the type of type hinting defined in PEP 0484 should be allowed
as annotations in future Python versions.
"""

import re
from inspect import Parameter

from parso import ParserSyntaxError, parse

from jedi.inference.cache import inference_state_method_cache
from jedi.inference.base_value import ValueSet, NO_VALUES
from jedi.inference.gradual.base import DefineGenericBaseClass, GenericClass
from jedi.inference.gradual.generics import TupleGenericManager
from jedi.inference.gradual.type_var import TypeVar
from jedi.inference.helpers import is_string
from jedi.inference.compiled import builtin_from_name
from jedi.inference.param import get_executed_param_names
from jedi import debug
from jedi import parser_utils


def infer_annotation(context, annotation):
    """
    Infers an annotation node. This means that it infers the `int` part
    here:

        foo: int = 3

    Also checks for forward references (strings).
    """
    value_set = context.infer_node(annotation)
    if len(value_set) != 1:
        debug.warning("Inferred typing index %s should lead to 1 object, "
                      "not %s" % (annotation, value_set))
        return value_set

    inferred_value = list(value_set)[0]
    if is_string(inferred_value):
        result = _get_forward_reference_node(context, inferred_value.get_safe_value())
        if result is not None:
            return context.infer_node(result)
    return value_set
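
# Illustrative sketch (not from the upstream file): the two annotation shapes
# handled above.
#
#     foo: int = 3      # plain annotation: the `int` node is inferred directly
#     bar: "int" = 3    # forward reference: the string is parsed first via
#                       # _get_forward_reference_node(), then inferred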


def _infer_annotation_string(context, string, index=None):
    node = _get_forward_reference_node(context, string)
    if node is None:
        return NO_VALUES

    value_set = context.infer_node(node)
    if index is not None:
        value_set = value_set.filter(
            lambda value: (
                value.array_type == 'tuple'
                and len(list(value.py__iter__())) >= index
            )
        ).py__simple_getitem__(index)
    return value_set


def _get_forward_reference_node(context, string):
    try:
        new_node = context.inference_state.grammar.parse(
            string,
            start_symbol='eval_input',
            error_recovery=False
        )
    except ParserSyntaxError:
        debug.warning('Annotation not parsed: %s' % string)
        return None
    else:
        module = context.tree_node.get_root_node()
        parser_utils.move(new_node, module.end_pos[0])
        new_node.parent = context.tree_node
        return new_node


def _split_comment_param_declaration(decl_text):
    """
    Split decl_text on commas, but group generic expressions
    together.

    For example, given "foo, Bar[baz, biz]" we return
    ['foo', 'Bar[baz, biz]'].
    """
    try:
        node = parse(decl_text, error_recovery=False).children[0]
    except ParserSyntaxError:
        debug.warning('Comment annotation is not valid Python: %s' % decl_text)
        return []

    if node.type in ['name', 'atom_expr', 'power']:
        return [node.get_code().strip()]

    params = []
    try:
        children = node.children
    except AttributeError:
        return []
    else:
        for child in children:
            if child.type in ['name', 'atom_expr', 'power']:
                params.append(child.get_code().strip())

    return params
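
# Hypothetical usage sketch (not upstream code), mirroring the docstring
# example above; brackets keep their inner commas because the split walks
# parso nodes rather than splitting the raw text on ',':
#
#     >>> _split_comment_param_declaration("foo, Bar[baz, biz]")
#     ['foo', 'Bar[baz, biz]']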


@inference_state_method_cache()
def infer_param(function_value, param, ignore_stars=False):
    values = _infer_param(function_value, param)
    if ignore_stars or not values:
        return values
    inference_state = function_value.inference_state
    if param.star_count == 1:
        tuple_ = builtin_from_name(inference_state, 'tuple')
        return ValueSet([GenericClass(
            tuple_,
            TupleGenericManager((values,)),
        )])
    elif param.star_count == 2:
        dct = builtin_from_name(inference_state, 'dict')
        generics = (
            ValueSet([builtin_from_name(inference_state, 'str')]),
            values
        )
        return ValueSet([GenericClass(
            dct,
            TupleGenericManager(generics),
        )])
    return values


def _infer_param(function_value, param):
    """
    Infers the type of a function parameter, using type annotations.
    """
    annotation = param.annotation
    if annotation is None:
        # If no Python 3-style annotation, look for a comment annotation.
        # Identify parameters to the function in the same sequence as they
        # would appear in a type comment.
        all_params = [child for child in param.parent.children
                      if child.type == 'param']

        node = param.parent.parent
        comment = parser_utils.get_following_comment_same_line(node)
        if comment is None:
            return NO_VALUES

        match = re.match(r"^#\s*type:\s*\(([^#]*)\)\s*->", comment)
        if not match:
            return NO_VALUES
        params_comments = _split_comment_param_declaration(match.group(1))

        # Find the specific param being investigated.
        index = all_params.index(param)
        # If the number of parameters doesn't match the length of the type
        # comment, ignore the first parameter (assume it's self).
        if len(params_comments) != len(all_params):
            debug.warning(
                "Comments length != Params length %s %s",
                params_comments, all_params
            )
        if function_value.is_bound_method():
            if index == 0:
                # Assume it's self, which is already handled
                return NO_VALUES
            index -= 1
        if index >= len(params_comments):
            return NO_VALUES

        param_comment = params_comments[index]
        return _infer_annotation_string(
            function_value.get_default_param_context(),
            param_comment
        )
    # Annotations are like default params and resolve in the same way.
    context = function_value.get_default_param_context()
    return infer_annotation(context, annotation)
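
# Sketch of the comment-annotation form matched above (assuming standard
# PEP 484 type comments; not upstream code). For
#
#     def add(a, b):
#         # type: (int, int) -> int
#         return a + b
#
# the regex captures "int, int", _split_comment_param_declaration() splits it
# into per-parameter strings, and the entry at the parameter's position is
# then inferred via _infer_annotation_string().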


def py__annotations__(funcdef):
    dct = {}
    for function_param in funcdef.get_params():
        param_annotation = function_param.annotation
        if param_annotation is not None:
            dct[function_param.name.value] = param_annotation

    return_annotation = funcdef.annotation
    if return_annotation:
        dct['return'] = return_annotation
    return dct
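
# Example of the mapping returned (illustrative, not upstream code):
#
#     def f(x: int, y) -> str: ...
#
# yields {'x': <annotation node for int>, 'return': <annotation node for str>};
# the unannotated parameter `y` simply has no entry.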


def resolve_forward_references(context, all_annotations):
    def resolve(node):
        if node is None or node.type != 'string':
            return node

        node = _get_forward_reference_node(
            context,
            context.inference_state.compiled_subprocess.safe_literal_eval(
                node.value,
            ),
        )

        if node is None:
            # There was a string, but it's not a valid annotation.
            return None

        # The forward reference tree has an additional root node ('eval_input')
        # that we don't want. Extract the node we do want, which is equivalent
        # to the nodes returned by `py__annotations__` for a non-quoted node.
        node = node.children[0]

        return node

    return {name: resolve(node) for name, node in all_annotations.items()}


@inference_state_method_cache()
def infer_return_types(function, arguments):
    """
    Infers the type of a function's return value,
    according to type annotations.
    """
    context = function.get_default_param_context()
    all_annotations = resolve_forward_references(
        context,
        py__annotations__(function.tree_node),
    )
    annotation = all_annotations.get("return", None)
    if annotation is None:
        # If there is no Python 3-style annotation, look for an annotation
        # comment.
        node = function.tree_node
        comment = parser_utils.get_following_comment_same_line(node)
        if comment is None:
            return NO_VALUES

        match = re.match(r"^#\s*type:\s*\([^#]*\)\s*->\s*([^#]*)", comment)
        if not match:
            return NO_VALUES

        return _infer_annotation_string(
            context,
            match.group(1).strip()
        ).execute_annotation()

    unknown_type_vars = find_unknown_type_vars(context, annotation)
    annotation_values = infer_annotation(context, annotation)
    if not unknown_type_vars:
        return annotation_values.execute_annotation()

    type_var_dict = infer_type_vars_for_execution(function, arguments, all_annotations)

    return ValueSet.from_sets(
        ann.define_generics(type_var_dict)
        if isinstance(ann, (DefineGenericBaseClass, TypeVar)) else ValueSet({ann})
        for ann in annotation_values
    ).execute_annotation()


def infer_type_vars_for_execution(function, arguments, annotation_dict):
    """
    Some functions use type vars that are not defined by the class, but rather
    only defined in the function. See for example `iter`. In those cases we
    want to:

    1. Search for undefined type vars.
    2. Infer type vars with the execution state we have.
    3. Return the union of all type vars that have been found.
    """
    context = function.get_default_param_context()

    annotation_variable_results = {}
    executed_param_names = get_executed_param_names(function, arguments)
    for executed_param_name in executed_param_names:
        try:
            annotation_node = annotation_dict[executed_param_name.string_name]
        except KeyError:
            continue

        annotation_variables = find_unknown_type_vars(context, annotation_node)
        if annotation_variables:
            # Infer unknown type vars.
            annotation_value_set = context.infer_node(annotation_node)
            kind = executed_param_name.get_kind()
            actual_value_set = executed_param_name.infer()
            if kind is Parameter.VAR_POSITIONAL:
                actual_value_set = actual_value_set.merge_types_of_iterate()
            elif kind is Parameter.VAR_KEYWORD:
                # TODO _dict_values is not public.
                actual_value_set = actual_value_set.try_merge('_dict_values')
            merge_type_var_dicts(
                annotation_variable_results,
                annotation_value_set.infer_type_vars(actual_value_set),
            )
    return annotation_variable_results


def infer_return_for_callable(arguments, param_values, result_values):
    all_type_vars = {}
    for pv in param_values:
        if pv.array_type == 'list':
            type_var_dict = _infer_type_vars_for_callable(arguments, pv.py__iter__())
            all_type_vars.update(type_var_dict)

    return ValueSet.from_sets(
        v.define_generics(all_type_vars)
        if isinstance(v, (DefineGenericBaseClass, TypeVar))
        else ValueSet({v})
        for v in result_values
    ).execute_annotation()


def _infer_type_vars_for_callable(arguments, lazy_params):
    """
    Infers type vars for the Callable class:

        def x() -> Callable[[Callable[..., _T]], _T]: ...
    """
    annotation_variable_results = {}
    for (_, lazy_value), lazy_callable_param in zip(arguments.unpack(), lazy_params):
        callable_param_values = lazy_callable_param.infer()
        # Infer unknown type vars.
        actual_value_set = lazy_value.infer()
        merge_type_var_dicts(
            annotation_variable_results,
            callable_param_values.infer_type_vars(actual_value_set),
        )
    return annotation_variable_results


def merge_type_var_dicts(base_dict, new_dict):
    for type_var_name, values in new_dict.items():
        if values:
            try:
                base_dict[type_var_name] |= values
            except KeyError:
                base_dict[type_var_name] = values
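
# Minimal sketch of the merge semantics (hypothetical value sets, not
# upstream code):
#
#     base = {'T': ValueSet([int_cls])}
#     merge_type_var_dicts(base, {'T': ValueSet([str_cls]), 'U': NO_VALUES})
#
# afterwards base['T'] contains both int_cls and str_cls, while 'U' is not
# added at all, because empty value sets are skipped.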


def merge_pairwise_generics(annotation_value, annotated_argument_class):
    """
    Match up the generic parameters from the given argument class to the
    target annotation.

    This walks the generic parameters immediately within the annotation and
    the argument's type, in order to determine the concrete values of the
    annotation's parameters for the current case.

    For example, given the following code:

        def values(mapping: Mapping[K, V]) -> List[V]: ...

        for val in values({1: 'a'}):
            val

    Then this function should be given representations of `Mapping[K, V]`
    and `Mapping[int, str]`, so that it can determine that `K` is `int` and
    `V` is `str`.

    Note that it is the responsibility of the caller to traverse the MRO of
    the argument type as needed in order to find the type matching the
    annotation (in this case finding `Mapping[int, str]` as a parent of
    `Dict[int, str]`).

    Parameters
    ----------

    `annotation_value`: represents the annotation to infer the concrete
        parameter types of.

    `annotated_argument_class`: represents the annotated class of the
        argument being passed to the object annotated by `annotation_value`.
    """

    type_var_dict = {}

    if not isinstance(annotated_argument_class, DefineGenericBaseClass):
        return type_var_dict

    annotation_generics = annotation_value.get_generics()
    actual_generics = annotated_argument_class.get_generics()

    for annotation_generics_set, actual_generic_set in zip(annotation_generics, actual_generics):
        merge_type_var_dicts(
            type_var_dict,
            annotation_generics_set.infer_type_vars(actual_generic_set.execute_annotation()),
        )

    return type_var_dict


def find_type_from_comment_hint_for(context, node, name):
    return _find_type_from_comment_hint(context, node, node.children[1], name)


def find_type_from_comment_hint_with(context, node, name):
    assert len(node.children[1].children) == 3, \
        "Can only be here when children[1] is 'foo() as f'"
    varlist = node.children[1].children[2]
    return _find_type_from_comment_hint(context, node, varlist, name)


def find_type_from_comment_hint_assign(context, node, name):
    return _find_type_from_comment_hint(context, node, node.children[0], name)


def _find_type_from_comment_hint(context, node, varlist, name):
    index = None
    if varlist.type in ("testlist_star_expr", "exprlist", "testlist"):
        # something like "a, b = 1, 2"
        index = 0
        for child in varlist.children:
            if child == name:
                break
            if child.type == "operator":
                continue
            index += 1
        else:
            return []

    comment = parser_utils.get_following_comment_same_line(node)
    if comment is None:
        return []
    match = re.match(r"^#\s*type:\s*([^#]*)", comment)
    if match is None:
        return []
    return _infer_annotation_string(
        context, match.group(1).strip(), index
    ).execute_annotation()
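
# Illustrative statement forms handled by the helpers above (assuming PEP 484
# assignment, `with`, and `for` type comments; not upstream code):
#
#     a, b = foo()        # type: int, str   (index 1 selects `str` for `b`)
#     with open(p) as f:  # type: IO[str]
#     for x in xs:        # type: int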


def find_unknown_type_vars(context, node):
    def check_node(node):
        if node.type in ('atom_expr', 'power'):
            trailer = node.children[-1]
            if trailer.type == 'trailer' and trailer.children[0] == '[':
                for subscript_node in _unpack_subscriptlist(trailer.children[1]):
                    check_node(subscript_node)
        else:
            found[:] = _filter_type_vars(context.infer_node(node), found)

    found = []  # We're not using a set, because the order matters.
    check_node(node)
    return found


def _filter_type_vars(value_set, found=()):
    new_found = list(found)
    for type_var in value_set:
        if isinstance(type_var, TypeVar) and type_var not in found:
            new_found.append(type_var)
    return new_found


def _unpack_subscriptlist(subscriptlist):
    if subscriptlist.type == 'subscriptlist':
        for subscript in subscriptlist.children[::2]:
            if subscript.type != 'subscript':
                yield subscript
    else:
        if subscriptlist.type != 'subscript':
            yield subscriptlist

.venv/Lib/site-packages/jedi/inference/gradual/base.py (new file, 434 lines)
@@ -0,0 +1,434 @@
from jedi.inference.cache import inference_state_method_cache
from jedi.inference.base_value import ValueSet, NO_VALUES, Value, \
    iterator_to_value_set, LazyValueWrapper, ValueWrapper
from jedi.inference.compiled import builtin_from_name
from jedi.inference.value.klass import ClassFilter
from jedi.inference.value.klass import ClassMixin
from jedi.inference.utils import to_list
from jedi.inference.names import AbstractNameDefinition, ValueName
from jedi.inference.context import ClassContext
from jedi.inference.gradual.generics import TupleGenericManager


class _BoundTypeVarName(AbstractNameDefinition):
    """
    This type var was bound to a certain type, e.g. int.
    """
    def __init__(self, type_var, value_set):
        self._type_var = type_var
        self.parent_context = type_var.parent_context
        self._value_set = value_set

    def infer(self):
        def iter_():
            for value in self._value_set:
                # Replace any with the constraints if they are there.
                from jedi.inference.gradual.typing import AnyClass
                if isinstance(value, AnyClass):
                    yield from self._type_var.constraints
                else:
                    yield value
        return ValueSet(iter_())

    def py__name__(self):
        return self._type_var.py__name__()

    def __repr__(self):
        return '<%s %s -> %s>' % (self.__class__.__name__, self.py__name__(), self._value_set)


class _TypeVarFilter:
    """
    A filter for all given variables in a class.

        A = TypeVar('A')
        B = TypeVar('B')
        class Foo(Mapping[A, B]):
            ...

    In this example we would have two type vars given: A and B
    """
    def __init__(self, generics, type_vars):
        self._generics = generics
        self._type_vars = type_vars

    def get(self, name):
        for i, type_var in enumerate(self._type_vars):
            if type_var.py__name__() == name:
                try:
                    return [_BoundTypeVarName(type_var, self._generics[i])]
                except IndexError:
                    return [type_var.name]
        return []

    def values(self):
        # The values are not relevant. If it's not searched exactly, the type
        # vars are just global and should be looked up as that.
        return []


class _AnnotatedClassContext(ClassContext):
    def get_filters(self, *args, **kwargs):
        filters = super().get_filters(
            *args, **kwargs
        )
        yield from filters

        # The type vars can only be looked up if it's a global search and
        # not a direct lookup on the class.
        yield self._value.get_type_var_filter()


class DefineGenericBaseClass(LazyValueWrapper):
    def __init__(self, generics_manager):
        self._generics_manager = generics_manager

    def _create_instance_with_generics(self, generics_manager):
        raise NotImplementedError

    @inference_state_method_cache()
    def get_generics(self):
        return self._generics_manager.to_tuple()

    def define_generics(self, type_var_dict):
        from jedi.inference.gradual.type_var import TypeVar
        changed = False
        new_generics = []
        for generic_set in self.get_generics():
            values = NO_VALUES
            for generic in generic_set:
                if isinstance(generic, (DefineGenericBaseClass, TypeVar)):
                    result = generic.define_generics(type_var_dict)
                    values |= result
                    if result != ValueSet({generic}):
                        changed = True
                else:
                    values |= ValueSet([generic])
            new_generics.append(values)

        if not changed:
            # There might not be any type vars that change. In that case just
            # return itself, because it does not make sense to potentially lose
            # cached results.
            return ValueSet([self])

        return ValueSet([self._create_instance_with_generics(
            TupleGenericManager(tuple(new_generics))
        )])

    def is_same_class(self, other):
        if not isinstance(other, DefineGenericBaseClass):
            return False

        if self.tree_node != other.tree_node:
            # TODO not sure if this is nice.
            return False
        given_params1 = self.get_generics()
        given_params2 = other.get_generics()

        if len(given_params1) != len(given_params2):
            # If the amount of type vars doesn't match, the class doesn't
            # match.
            return False

        # Now compare generics
        return all(
            any(
                # TODO why is this ordering the correct one?
                cls2.is_same_class(cls1)
                # TODO I'm still not sure gather_annotation_classes is a good
                # idea. They are essentially here to avoid comparing Tuple <=>
                # tuple and instead compare tuple <=> tuple, but at the moment
                # the whole `is_same_class` and `is_sub_class` matching is just
                # not in the best shape.
                for cls1 in class_set1.gather_annotation_classes()
                for cls2 in class_set2.gather_annotation_classes()
            ) for class_set1, class_set2 in zip(given_params1, given_params2)
        )

    def get_signatures(self):
        return []

    def __repr__(self):
        return '<%s: %s%s>' % (
            self.__class__.__name__,
            self._wrapped_value,
            list(self.get_generics()),
        )


class GenericClass(DefineGenericBaseClass, ClassMixin):
    """
    A class that is defined with generics, might be something simple like:

        class Foo(Generic[T]): ...
        my_foo_int_cls = Foo[int]
    """
    def __init__(self, class_value, generics_manager):
        super().__init__(generics_manager)
        self._class_value = class_value

    def _get_wrapped_value(self):
        return self._class_value

    def get_type_hint(self, add_class_info=True):
        n = self.py__name__()
        # Not sure if this is the best way to do this, but all of these types
        # are a bit special in that they have type aliases and other ways to
        # become lower case. It's probably better to make them upper case,
        # because that's what you can use in annotations.
        n = dict(list="List", dict="Dict", set="Set", tuple="Tuple").get(n, n)
        s = n + self._generics_manager.get_type_hint()
        if add_class_info:
            return 'Type[%s]' % s
        return s

    def get_type_var_filter(self):
        return _TypeVarFilter(self.get_generics(), self.list_type_vars())

    def py__call__(self, arguments):
        instance, = super().py__call__(arguments)
        return ValueSet([_GenericInstanceWrapper(instance)])

    def _as_context(self):
        return _AnnotatedClassContext(self)

    @to_list
    def py__bases__(self):
        for base in self._wrapped_value.py__bases__():
            yield _LazyGenericBaseClass(self, base, self._generics_manager)

    def _create_instance_with_generics(self, generics_manager):
        return GenericClass(self._class_value, generics_manager)

    def is_sub_class_of(self, class_value):
        if super().is_sub_class_of(class_value):
            return True
        return self._class_value.is_sub_class_of(class_value)

    def with_generics(self, generics_tuple):
        return self._class_value.with_generics(generics_tuple)

    def infer_type_vars(self, value_set):
        # Circular
        from jedi.inference.gradual.annotation import merge_pairwise_generics, merge_type_var_dicts

        annotation_name = self.py__name__()
        type_var_dict = {}
        if annotation_name == 'Iterable':
            annotation_generics = self.get_generics()
            if annotation_generics:
                return annotation_generics[0].infer_type_vars(
                    value_set.merge_types_of_iterate(),
                )
        else:
            # Note: we need to handle the MRO _in order_, so we need to extract
            # the elements from the set first, then handle them, even if we put
            # them back in a set afterwards.
            for py_class in value_set:
                if py_class.is_instance() and not py_class.is_compiled():
                    py_class = py_class.get_annotated_class_object()
                else:
                    continue

                if py_class.api_type != 'class':
                    # Functions & modules don't have an MRO and we're not
                    # expecting a Callable (those are handled separately within
                    # TypingClassValueWithIndex).
                    continue

                for parent_class in py_class.py__mro__():
                    class_name = parent_class.py__name__()
                    if annotation_name == class_name:
                        merge_type_var_dicts(
                            type_var_dict,
                            merge_pairwise_generics(self, parent_class),
                        )
                        break

        return type_var_dict
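
# Illustrative example of the special-cased `Iterable` branch above (not
# upstream code): matching an `Iterable[T]` annotation against a `List[int]`
# argument runs merge_types_of_iterate(), which yields `int`, so the returned
# dict maps 'T' to a ValueSet containing int. Every other generic annotation
# instead walks the argument's MRO until a parent class whose name matches
# the annotation is found (e.g. `Mapping[int, str]` for a `Mapping[K, V]`
# annotation) and merges the generics pairwise.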


class _LazyGenericBaseClass:
    def __init__(self, class_value, lazy_base_class, generics_manager):
        self._class_value = class_value
        self._lazy_base_class = lazy_base_class
        self._generics_manager = generics_manager

    @iterator_to_value_set
    def infer(self):
        for base in self._lazy_base_class.infer():
            if isinstance(base, GenericClass):
                # Here we have to recalculate the given types.
                yield GenericClass.create_cached(
                    base.inference_state,
                    base._wrapped_value,
                    TupleGenericManager(tuple(self._remap_type_vars(base))),
                )
            else:
                if base.is_class_mixin():
                    # This case basically allows classes like `class Foo(List)`
                    # to be used like `Foo[int]`. The generics are not
                    # necessary and can be used later.
                    yield GenericClass.create_cached(
                        base.inference_state,
                        base,
                        self._generics_manager,
                    )
                else:
                    yield base

    def _remap_type_vars(self, base):
        from jedi.inference.gradual.type_var import TypeVar
        filter = self._class_value.get_type_var_filter()
        for type_var_set in base.get_generics():
            new = NO_VALUES
            for type_var in type_var_set:
                if isinstance(type_var, TypeVar):
                    names = filter.get(type_var.py__name__())
                    new |= ValueSet.from_sets(
                        name.infer() for name in names
                    )
                else:
                    # Mostly will be type vars, except if in some cases
                    # a concrete type will already be there. In that
                    # case just add it to the value set.
                    new |= ValueSet([type_var])
            yield new

    def __repr__(self):
        return '<%s: %s>' % (self.__class__.__name__, self._lazy_base_class)


class _GenericInstanceWrapper(ValueWrapper):
    def py__stop_iteration_returns(self):
        for cls in self._wrapped_value.class_value.py__mro__():
            if cls.py__name__() == 'Generator':
                generics = cls.get_generics()
                try:
                    return generics[2].execute_annotation()
                except IndexError:
                    pass
            elif cls.py__name__() == 'Iterator':
                return ValueSet([builtin_from_name(self.inference_state, 'None')])
        return self._wrapped_value.py__stop_iteration_returns()

    def get_type_hint(self, add_class_info=True):
        return self._wrapped_value.class_value.get_type_hint(add_class_info=False)


class _PseudoTreeNameClass(Value):
    """
    In typeshed, some classes are defined like this:

        Tuple: _SpecialForm = ...

    Now this is not a real class, therefore we have to do some workarounds like
    this class. Essentially this class makes it possible to goto that `Tuple`
    name, without affecting anything else negatively.
    """
    api_type = 'class'

    def __init__(self, parent_context, tree_name):
        super().__init__(
            parent_context.inference_state,
            parent_context
        )
        self._tree_name = tree_name

    @property
    def tree_node(self):
        return self._tree_name

    def get_filters(self, *args, **kwargs):
        # TODO this is obviously wrong. Is it though?
        class EmptyFilter(ClassFilter):
            def __init__(self):
                pass

            def get(self, name, **kwargs):
                return []

            def values(self, **kwargs):
                return []

        yield EmptyFilter()

    def py__class__(self):
        # This might not be 100% correct, but it is good enough. The details of
        # the typing library are not really an issue for Jedi.
        return builtin_from_name(self.inference_state, 'type')

    @property
    def name(self):
        return ValueName(self, self._tree_name)

    def get_qualified_names(self):
        return (self._tree_name.value,)

    def __repr__(self):
        return '%s(%s)' % (self.__class__.__name__, self._tree_name.value)


class BaseTypingValue(LazyValueWrapper):
    def __init__(self, parent_context, tree_name):
        self.inference_state = parent_context.inference_state
        self.parent_context = parent_context
        self._tree_name = tree_name

    @property
    def name(self):
        return ValueName(self, self._tree_name)

    def _get_wrapped_value(self):
        return _PseudoTreeNameClass(self.parent_context, self._tree_name)

    def get_signatures(self):
        return self._wrapped_value.get_signatures()

    def __repr__(self):
        return '%s(%s)' % (self.__class__.__name__, self._tree_name.value)


class BaseTypingClassWithGenerics(DefineGenericBaseClass):
    def __init__(self, parent_context, tree_name, generics_manager):
        super().__init__(generics_manager)
        self.inference_state = parent_context.inference_state
        self.parent_context = parent_context
        self._tree_name = tree_name

    def _get_wrapped_value(self):
        return _PseudoTreeNameClass(self.parent_context, self._tree_name)

    def __repr__(self):
        return '%s(%s%s)' % (self.__class__.__name__, self._tree_name.value,
                             self._generics_manager)


class BaseTypingInstance(LazyValueWrapper):
    def __init__(self, parent_context, class_value, tree_name, generics_manager):
        self.inference_state = class_value.inference_state
        self.parent_context = parent_context
        self._class_value = class_value
        self._tree_name = tree_name
        self._generics_manager = generics_manager

    def py__class__(self):
        return self._class_value

    def get_annotated_class_object(self):
        return self._class_value

    def get_qualified_names(self):
        return (self.py__name__(),)

    @property
    def name(self):
        return ValueName(self, self._tree_name)

    def _get_wrapped_value(self):
        object_, = builtin_from_name(self.inference_state, 'object').execute_annotation()
        return object_

    def __repr__(self):
        return '<%s: %s>' % (self.__class__.__name__, self._generics_manager)

.venv/Lib/site-packages/jedi/inference/gradual/conversion.py (new file, 209 lines)
@@ -0,0 +1,209 @@
from jedi import debug
from jedi.inference.base_value import ValueSet, \
    NO_VALUES
from jedi.inference.utils import to_list
from jedi.inference.gradual.stub_value import StubModuleValue
from jedi.inference.gradual.typeshed import try_to_load_stub_cached
from jedi.inference.value.decorator import Decoratee


def _stub_to_python_value_set(stub_value, ignore_compiled=False):
    stub_module_context = stub_value.get_root_context()
    if not stub_module_context.is_stub():
        return ValueSet([stub_value])

    decorates = None
    if isinstance(stub_value, Decoratee):
        decorates = stub_value._original_value

    was_instance = stub_value.is_instance()
    if was_instance:
        arguments = getattr(stub_value, '_arguments', None)
        stub_value = stub_value.py__class__()

    qualified_names = stub_value.get_qualified_names()
    if qualified_names is None:
        return NO_VALUES

    was_bound_method = stub_value.is_bound_method()
    if was_bound_method:
        # Infer the object first. We can infer the method later.
        method_name = qualified_names[-1]
        qualified_names = qualified_names[:-1]
        was_instance = True
        arguments = None

    values = _infer_from_stub(stub_module_context, qualified_names, ignore_compiled)
    if was_instance:
        values = ValueSet.from_sets(
            c.execute_with_values() if arguments is None else c.execute(arguments)
            for c in values
            if c.is_class()
        )
    if was_bound_method:
        # Now that the instance has been properly created, we can simply get
        # the method.
        values = values.py__getattribute__(method_name)
    if decorates is not None:
        values = ValueSet(Decoratee(v, decorates) for v in values)
    return values


def _infer_from_stub(stub_module_context, qualified_names, ignore_compiled):
    from jedi.inference.compiled.mixed import MixedObject
    stub_module = stub_module_context.get_value()
    assert isinstance(stub_module, (StubModuleValue, MixedObject)), stub_module_context
    non_stubs = stub_module.non_stub_value_set
    if ignore_compiled:
        non_stubs = non_stubs.filter(lambda c: not c.is_compiled())
    for name in qualified_names:
        non_stubs = non_stubs.py__getattribute__(name)
    return non_stubs


@to_list
def _try_stub_to_python_names(names, prefer_stub_to_compiled=False):
    for name in names:
        module_context = name.get_root_context()
        if not module_context.is_stub():
            yield name
            continue

        if name.api_type == 'module':
            values = convert_values(name.infer(), ignore_compiled=prefer_stub_to_compiled)
            if values:
                for v in values:
                    yield v.name
                continue
        else:
            v = name.get_defining_qualified_value()
            if v is not None:
                converted = _stub_to_python_value_set(v, ignore_compiled=prefer_stub_to_compiled)
                if converted:
                    converted_names = converted.goto(name.get_public_name())
                    if converted_names:
                        for n in converted_names:
                            if n.get_root_context().is_stub():
                                # If it's a stub again, it means we're going in
                                # a circle. Probably some imports make it a
                                # stub again.
                                yield name
                            else:
                                yield n
                        continue
        yield name


def _load_stub_module(module):
    if module.is_stub():
        return module
    return try_to_load_stub_cached(
        module.inference_state,
        import_names=module.string_names,
        python_value_set=ValueSet([module]),
        parent_module_value=None,
        sys_path=module.inference_state.get_sys_path(),
    )


@to_list
def _python_to_stub_names(names, fallback_to_python=False):
    for name in names:
        module_context = name.get_root_context()
        if module_context.is_stub():
            yield name
            continue

        if name.api_type == 'module':
            found_name = False
            for n in name.goto():
                if n.api_type == 'module':
                    values = convert_values(n.infer(), only_stubs=True)
                    for v in values:
                        yield v.name
                        found_name = True
                else:
                    for x in _python_to_stub_names([n], fallback_to_python=fallback_to_python):
                        yield x
                        found_name = True
            if found_name:
                continue
        else:
            v = name.get_defining_qualified_value()
            if v is not None:
                converted = to_stub(v)
                if converted:
                    converted_names = converted.goto(name.get_public_name())
                    if converted_names:
                        yield from converted_names
                        continue
        if fallback_to_python:
            # This is the part where if we haven't found anything, just return
            # the stub name.
            yield name


def convert_names(names, only_stubs=False, prefer_stubs=False, prefer_stub_to_compiled=True):
    if only_stubs and prefer_stubs:
        raise ValueError("You cannot use both of only_stubs and prefer_stubs.")

    with debug.increase_indent_cm('convert names'):
        if only_stubs or prefer_stubs:
            return _python_to_stub_names(names, fallback_to_python=prefer_stubs)
        else:
            return _try_stub_to_python_names(
                names, prefer_stub_to_compiled=prefer_stub_to_compiled)


def convert_values(values, only_stubs=False, prefer_stubs=False, ignore_compiled=True):
    assert not (only_stubs and prefer_stubs)
    with debug.increase_indent_cm('convert values'):
        if only_stubs or prefer_stubs:
            return ValueSet.from_sets(
                to_stub(value)
                or (ValueSet({value}) if prefer_stubs else NO_VALUES)
                for value in values
            )
        else:
            return ValueSet.from_sets(
                _stub_to_python_value_set(stub_value, ignore_compiled=ignore_compiled)
                or ValueSet({stub_value})
                for stub_value in values
            )
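
# Usage sketch (hypothetical call sites, not upstream code): stub-preferring
# features would call convert_values(values, prefer_stubs=True) to land on
# .pyi definitions while keeping the runtime values as a fallback, whereas the
# default direction maps stub values back to the matching runtime module via
# _stub_to_python_value_set().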


def to_stub(value):
    if value.is_stub():
        return ValueSet([value])

    was_instance = value.is_instance()
    if was_instance:
        value = value.py__class__()

    qualified_names = value.get_qualified_names()
    stub_module = _load_stub_module(value.get_root_context().get_value())
    if stub_module is None or qualified_names is None:
        return NO_VALUES

    was_bound_method = value.is_bound_method()
    if was_bound_method:
        # Infer the object first. We can infer the method later.
        method_name = qualified_names[-1]
        qualified_names = qualified_names[:-1]
        was_instance = True

    stub_values = ValueSet([stub_module])
    for name in qualified_names:
        stub_values = stub_values.py__getattribute__(name)

    if was_instance:
        stub_values = ValueSet.from_sets(
            c.execute_with_values()
            for c in stub_values
            if c.is_class()
        )
    if was_bound_method:
        # Now that the instance has been properly created, we can simply get
        # the method.
        stub_values = stub_values.py__getattribute__(method_name)
    return stub_values

.venv/Lib/site-packages/jedi/inference/gradual/generics.py (new file, 101 lines)
@@ -0,0 +1,101 @@
"""
This module is about generics, like the `int` in `List[int]`. It's not about
the Generic class.
"""

from jedi import debug
from jedi.cache import memoize_method
from jedi.inference.utils import to_tuple
from jedi.inference.base_value import ValueSet, NO_VALUES
from jedi.inference.value.iterable import SequenceLiteralValue
from jedi.inference.helpers import is_string


def _resolve_forward_references(context, value_set):
    for value in value_set:
        if is_string(value):
            from jedi.inference.gradual.annotation import _get_forward_reference_node
            node = _get_forward_reference_node(context, value.get_safe_value())
            if node is not None:
                for c in context.infer_node(node):
                    yield c
        else:
            yield value


class _AbstractGenericManager:
    def get_index_and_execute(self, index):
        try:
            return self[index].execute_annotation()
        except IndexError:
            debug.warning('No param #%s found for annotation %s', index, self)
            return NO_VALUES

    def get_type_hint(self):
        return '[%s]' % ', '.join(t.get_type_hint(add_class_info=False) for t in self.to_tuple())


class LazyGenericManager(_AbstractGenericManager):
    def __init__(self, context_of_index, index_value):
        self._context_of_index = context_of_index
        self._index_value = index_value

    @memoize_method
    def __getitem__(self, index):
        return self._tuple()[index]()

    def __len__(self):
        return len(self._tuple())

    @memoize_method
    @to_tuple
    def _tuple(self):
        def lambda_scoping_in_for_loop_sucks(lazy_value):
            return lambda: ValueSet(_resolve_forward_references(
                self._context_of_index,
                lazy_value.infer()
            ))

        if isinstance(self._index_value, SequenceLiteralValue):
            for lazy_value in self._index_value.py__iter__(contextualized_node=None):
                yield lambda_scoping_in_for_loop_sucks(lazy_value)
        else:
            yield lambda: ValueSet(_resolve_forward_references(
                self._context_of_index,
                ValueSet([self._index_value])
            ))

    @to_tuple
    def to_tuple(self):
        for callable_ in self._tuple():
            yield callable_()

    def is_homogenous_tuple(self):
        if isinstance(self._index_value, SequenceLiteralValue):
            entries = self._index_value.get_tree_entries()
            if len(entries) == 2 and entries[1] == '...':
                return True
        return False

    def __repr__(self):
        return '<LazyG>[%s]' % (', '.join(repr(x) for x in self.to_tuple()))
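
# Illustrative example (assuming PEP 484 tuple syntax; not upstream code):
# for the annotation `Tuple[int, ...]` the index has the two tree entries
# `int` and `...`, so is_homogenous_tuple() above returns True, while
# `Tuple[int, str]` has no ellipsis entry and returns False.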


class TupleGenericManager(_AbstractGenericManager):
    def __init__(self, tup):
        self._tuple = tup

    def __getitem__(self, index):
        return self._tuple[index]

    def __len__(self):
        return len(self._tuple)

    def to_tuple(self):
        return self._tuple

    def is_homogenous_tuple(self):
        return False

    def __repr__(self):
        return '<TupG>[%s]' % (', '.join(repr(x) for x in self.to_tuple()))

.venv/Lib/site-packages/jedi/inference/gradual/stub_value.py (new file, 100 lines)
@@ -0,0 +1,100 @@
from jedi.inference.base_value import ValueWrapper
from jedi.inference.value.module import ModuleValue
from jedi.inference.filters import ParserTreeFilter
from jedi.inference.names import StubName, StubModuleName
from jedi.inference.gradual.typing import TypingModuleFilterWrapper
from jedi.inference.context import ModuleContext


class StubModuleValue(ModuleValue):
    _module_name_class = StubModuleName

    def __init__(self, non_stub_value_set, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.non_stub_value_set = non_stub_value_set

    def is_stub(self):
        return True

    def sub_modules_dict(self):
        """
        We have to overwrite this, because it's possible to have stubs that
        don't have code for all the child modules. At the time of writing
        this, there are for example no stubs for `json.tool`.
        """
        names = {}
        for value in self.non_stub_value_set:
            try:
                method = value.sub_modules_dict
            except AttributeError:
                pass
            else:
                names.update(method())
        names.update(super().sub_modules_dict())
        return names

    def _get_stub_filters(self, origin_scope):
        return [StubFilter(
            parent_context=self.as_context(),
            origin_scope=origin_scope
        )] + list(self.iter_star_filters())

    def get_filters(self, origin_scope=None):
        filters = super().get_filters(origin_scope)
        next(filters, None)  # Ignore the first filter and replace it with our own
        stub_filters = self._get_stub_filters(origin_scope=origin_scope)
        yield from stub_filters
        yield from filters

    def _as_context(self):
        return StubModuleContext(self)


class StubModuleContext(ModuleContext):
    def get_filters(self, until_position=None, origin_scope=None):
        # Make sure to ignore the position, because positions are not relevant
        # for stubs.
        return super().get_filters(origin_scope=origin_scope)


class TypingModuleWrapper(StubModuleValue):
    def get_filters(self, *args, **kwargs):
        filters = super().get_filters(*args, **kwargs)
        f = next(filters, None)
        assert f is not None
        yield TypingModuleFilterWrapper(f)
        yield from filters

    def _as_context(self):
        return TypingModuleContext(self)


class TypingModuleContext(ModuleContext):
    def get_filters(self, *args, **kwargs):
        filters = super().get_filters(*args, **kwargs)
        yield TypingModuleFilterWrapper(next(filters, None))
        yield from filters


class StubFilter(ParserTreeFilter):
    name_class = StubName

    def _is_name_reachable(self, name):
        if not super()._is_name_reachable(name):
            return False

        # Imports in stub files are only public if they have an "as"
        # export.
        definition = name.get_definition()
        if definition.type in ('import_from', 'import_name'):
            if name.parent.type not in ('import_as_name', 'dotted_as_name'):
                return False
        n = name.value
        # TODO rewrite direct return
        if n.startswith('_') and not (n.startswith('__') and n.endswith('__')):
            return False
        return True
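
# Sketch of the visibility rules enforced above (assuming typeshed's stub
# conventions; not upstream code). In a stub file:
#
#     import foo            # private: plain imports are not re-exported
#     import foo as foo     # public: the "as" form marks a re-export
#     from x import y as y  # public for the same reason
#     _helper = ...         # private: single leading underscore
#     __version__ = ...     # public: dunder names stay visible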


class VersionInfo(ValueWrapper):
    pass

.venv/Lib/site-packages/jedi/inference/gradual/type_var.py (new file, 127 lines)
@@ -0,0 +1,127 @@
from jedi import debug
from jedi.inference.base_value import ValueSet, NO_VALUES, ValueWrapper
from jedi.inference.gradual.base import BaseTypingValue


class TypeVarClass(ValueWrapper):
    def py__call__(self, arguments):
        unpacked = arguments.unpack()

        key, lazy_value = next(unpacked, (None, None))
        var_name = self._find_string_name(lazy_value)
        # The name must be given, otherwise it's useless.
        if var_name is None or key is not None:
            debug.warning('Found a variable without a name %s', arguments)
            return NO_VALUES

        return ValueSet([TypeVar.create_cached(
            self.inference_state,
            self.parent_context,
            tree_name=self.tree_node.name,
            var_name=var_name,
            unpacked_args=unpacked,
        )])

    def _find_string_name(self, lazy_value):
        if lazy_value is None:
            return None

        value_set = lazy_value.infer()
        if not value_set:
            return None
        if len(value_set) > 1:
            debug.warning('Found multiple values for a type variable: %s', value_set)

        name_value = next(iter(value_set))
        try:
            method = name_value.get_safe_value
        except AttributeError:
            return None
        else:
            safe_value = method(default=None)
            if isinstance(safe_value, str):
                return safe_value
            return None
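
# Illustrative call shapes handled above (not upstream code):
#
#     T = TypeVar('T')                   # name taken from the first positional arg
#     N = TypeVar('N', int, float)       # remaining positionals become constraints
#     B = TypeVar('B', bound=Exception)  # keyword args are stored by TypeVar.__init__
#
# A call whose first argument is not a plain string literal is rejected with
# a warning, because the type var would be unusable without a name.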


class TypeVar(BaseTypingValue):
    def __init__(self, parent_context, tree_name, var_name, unpacked_args):
        super().__init__(parent_context, tree_name)
        self._var_name = var_name

        self._constraints_lazy_values = []
        self._bound_lazy_value = None
        self._covariant_lazy_value = None
        self._contravariant_lazy_value = None
        for key, lazy_value in unpacked_args:
            if key is None:
                self._constraints_lazy_values.append(lazy_value)
            else:
                if key == 'bound':
                    self._bound_lazy_value = lazy_value
                elif key == 'covariant':
                    self._covariant_lazy_value = lazy_value
                elif key == 'contravariant':
                    self._contravariant_lazy_value = lazy_value
                else:
                    debug.warning('Invalid TypeVar param name %s', key)

    def py__name__(self):
        return self._var_name

    def get_filters(self, *args, **kwargs):
        return iter([])

    def _get_classes(self):
        if self._bound_lazy_value is not None:
            return self._bound_lazy_value.infer()
        if self._constraints_lazy_values:
            return self.constraints
        debug.warning('Tried to infer the TypeVar %s without a given type', self._var_name)
        return NO_VALUES

    def is_same_class(self, other):
        # Everything can match an undefined type var.
        return True

    @property
    def constraints(self):
        return ValueSet.from_sets(
            lazy.infer() for lazy in self._constraints_lazy_values
        )

    def define_generics(self, type_var_dict):
        try:
            found = type_var_dict[self.py__name__()]
        except KeyError:
            pass
        else:
            if found:
                return found
        return ValueSet({self})

    def execute_annotation(self):
        return self._get_classes().execute_annotation()

    def infer_type_vars(self, value_set):
        def iterate():
            for v in value_set:
                cls = v.py__class__()
                if v.is_function() or v.is_class():
                    cls = TypeWrapper(cls, v)
                yield cls

        annotation_name = self.py__name__()
        return {annotation_name: ValueSet(iterate())}

    def __repr__(self):
        return '<%s: %s>' % (self.__class__.__name__, self.py__name__())


class TypeWrapper(ValueWrapper):
    def __init__(self, wrapped_value, original_value):
        super().__init__(wrapped_value)
        self._original_value = original_value

    def execute_annotation(self):
        return ValueSet({self._original_value})

.venv/Lib/site-packages/jedi/inference/gradual/typeshed.py (new file, 310 lines)
@@ -0,0 +1,310 @@
import os
import re
from functools import wraps
from collections import namedtuple
from typing import Dict, Mapping, Tuple
from pathlib import Path

from jedi import settings
from jedi.file_io import FileIO
from jedi.parser_utils import get_cached_code_lines
from jedi.inference.base_value import ValueSet, NO_VALUES
from jedi.inference.gradual.stub_value import TypingModuleWrapper, StubModuleValue
from jedi.inference.value import ModuleValue

_jedi_path = Path(__file__).parent.parent.parent
TYPESHED_PATH = _jedi_path.joinpath('third_party', 'typeshed')
DJANGO_INIT_PATH = _jedi_path.joinpath('third_party', 'django-stubs',
                                       'django-stubs', '__init__.pyi')

_IMPORT_MAP = dict(
    _collections='collections',
    _socket='socket',
)

PathInfo = namedtuple('PathInfo', 'path is_third_party')


def _merge_create_stub_map(path_infos):
    map_ = {}
    for directory_path_info in path_infos:
        map_.update(_create_stub_map(directory_path_info))
    return map_


def _create_stub_map(directory_path_info):
    """
    Create a mapping of an importable name in Python to a stub file.
    """
    def generate():
        try:
            listed = os.listdir(directory_path_info.path)
        except (FileNotFoundError, NotADirectoryError):
            return

        for entry in listed:
            path = os.path.join(directory_path_info.path, entry)
            if os.path.isdir(path):
                init = os.path.join(path, '__init__.pyi')
                if os.path.isfile(init):
                    yield entry, PathInfo(init, directory_path_info.is_third_party)
            elif entry.endswith('.pyi') and os.path.isfile(path):
                name = entry[:-4]
                if name != '__init__':
                    yield name, PathInfo(path, directory_path_info.is_third_party)

    # Create a dictionary from the tuple generator.
    return dict(generate())
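
# Example layout (hypothetical paths) and the resulting map entries:
#
#     stdlib/3/re.pyi            ->  {'re': PathInfo('.../re.pyi', False)}
#     stdlib/3/json/__init__.pyi ->  {'json': PathInfo('.../json/__init__.pyi', False)}
#
# A bare '__init__.pyi' is only reachable through its package directory, so
# the name '__init__' itself is never emitted.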
|
||||
|
||||
|
||||
def _get_typeshed_directories(version_info):
|
||||
check_version_list = ['2and3', '3']
|
||||
for base in ['stdlib', 'third_party']:
|
||||
base_path = TYPESHED_PATH.joinpath(base)
|
||||
base_list = os.listdir(base_path)
|
||||
for base_list_entry in base_list:
|
||||
match = re.match(r'(\d+)\.(\d+)$', base_list_entry)
|
||||
if match is not None:
|
||||
if match.group(1) == '3' and int(match.group(2)) <= version_info.minor:
|
||||
check_version_list.append(base_list_entry)
|
||||
|
||||
for check_version in check_version_list:
|
||||
is_third_party = base != 'stdlib'
|
||||
yield PathInfo(str(base_path.joinpath(check_version)), is_third_party)
|
||||
|
||||
|
||||
_version_cache: Dict[Tuple[int, int], Mapping[str, PathInfo]] = {}
|
||||
|
||||
|
||||
def _cache_stub_file_map(version_info):
|
||||
"""
|
||||
Returns a map of an importable name in Python to a stub file.
|
||||
"""
|
||||
# TODO this caches the stub files indefinitely, maybe use a time cache
|
||||
# for that?
|
||||
version = version_info[:2]
|
||||
try:
|
||||
return _version_cache[version]
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
_version_cache[version] = file_set = \
|
||||
_merge_create_stub_map(_get_typeshed_directories(version_info))
|
||||
return file_set
|
||||
|
||||
|
||||
def import_module_decorator(func):
|
||||
@wraps(func)
|
||||
def wrapper(inference_state, import_names, parent_module_value, sys_path, prefer_stubs):
|
||||
python_value_set = inference_state.module_cache.get(import_names)
|
||||
if python_value_set is None:
|
||||
if parent_module_value is not None and parent_module_value.is_stub():
|
||||
parent_module_values = parent_module_value.non_stub_value_set
|
||||
else:
|
||||
parent_module_values = [parent_module_value]
|
||||
if import_names == ('os', 'path'):
|
||||
# This is a huge exception, we follow a nested import
|
||||
# ``os.path``, because it's a very important one in Python
|
||||
# that is being achieved by messing with ``sys.modules`` in
|
||||
# ``os``.
|
||||
python_value_set = ValueSet.from_sets(
|
||||
func(inference_state, (n,), None, sys_path,)
|
||||
for n in ['posixpath', 'ntpath', 'macpath', 'os2emxpath']
|
||||
)
|
||||
else:
|
||||
python_value_set = ValueSet.from_sets(
|
||||
func(inference_state, import_names, p, sys_path,)
|
||||
for p in parent_module_values
|
||||
)
|
||||
inference_state.module_cache.add(import_names, python_value_set)
|
||||
|
||||
if not prefer_stubs or import_names[0] in settings.auto_import_modules:
|
||||
return python_value_set
|
||||
|
||||
stub = try_to_load_stub_cached(inference_state, import_names, python_value_set,
|
||||
parent_module_value, sys_path)
|
||||
if stub is not None:
|
||||
return ValueSet([stub])
|
||||
return python_value_set
|
||||
|
||||
return wrapper
|
||||
|
||||
|
||||
def try_to_load_stub_cached(inference_state, import_names, *args, **kwargs):
|
||||
if import_names is None:
|
||||
return None
|
||||
|
||||
try:
|
||||
return inference_state.stub_module_cache[import_names]
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
# TODO is this needed? where are the exceptions coming from that make this
|
||||
# necessary? Just remove this line.
|
||||
inference_state.stub_module_cache[import_names] = None
|
||||
inference_state.stub_module_cache[import_names] = result = \
|
||||
_try_to_load_stub(inference_state, import_names, *args, **kwargs)
|
||||
return result
|
||||
|
||||
|
||||


def _try_to_load_stub(inference_state, import_names, python_value_set,
                      parent_module_value, sys_path):
    """
    Try to load a stub for a set of import_names.

    This is modelled to work like "PEP 561 -- Distributing and Packaging Type
    Information", see https://www.python.org/dev/peps/pep-0561.
    """
    if parent_module_value is None and len(import_names) > 1:
        try:
            parent_module_value = try_to_load_stub_cached(
                inference_state, import_names[:-1], NO_VALUES,
                parent_module_value=None, sys_path=sys_path)
        except KeyError:
            pass

    # 1. Try to load foo-stubs folders on path for import name foo.
    if len(import_names) == 1:
        # foo-stubs
        for p in sys_path:
            init = os.path.join(p, *import_names) + '-stubs' + os.path.sep + '__init__.pyi'
            m = _try_to_load_stub_from_file(
                inference_state,
                python_value_set,
                file_io=FileIO(init),
                import_names=import_names,
            )
            if m is not None:
                return m
        if import_names[0] == 'django' and python_value_set:
            return _try_to_load_stub_from_file(
                inference_state,
                python_value_set,
                file_io=FileIO(str(DJANGO_INIT_PATH)),
                import_names=import_names,
            )

    # 2. Try to load pyi files next to py files.
    for c in python_value_set:
        try:
            method = c.py__file__
        except AttributeError:
            pass
        else:
            file_path = method()
            file_paths = []
            if c.is_namespace():
                file_paths = [os.path.join(p, '__init__.pyi') for p in c.py__path__()]
            elif file_path is not None and file_path.suffix == '.py':
                file_paths = [str(file_path) + 'i']

            for file_path in file_paths:
                m = _try_to_load_stub_from_file(
                    inference_state,
                    python_value_set,
                    # The file path should end with .pyi
                    file_io=FileIO(file_path),
                    import_names=import_names,
                )
                if m is not None:
                    return m

    # 3. Try to load typeshed
    m = _load_from_typeshed(inference_state, python_value_set, parent_module_value, import_names)
    if m is not None:
        return m

    # 4. Try to load pyi file somewhere if python_value_set was not defined.
    if not python_value_set:
        if parent_module_value is not None:
            check_path = parent_module_value.py__path__() or []
            # In case a parent module exists, only the last import name has to
            # be resolved relative to the parent's paths.
            names_for_path = (import_names[-1],)
        else:
            check_path = sys_path
            names_for_path = import_names

        for p in check_path:
            m = _try_to_load_stub_from_file(
                inference_state,
                python_value_set,
                file_io=FileIO(os.path.join(p, *names_for_path) + '.pyi'),
                import_names=import_names,
            )
            if m is not None:
                return m

    # If no stub is found, that's fine, the calling function has to deal with
    # it.
    return None
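

# Editor's summary of the lookup order implemented above, with a made-up
# example for ``import requests`` and a sys_path containing site-packages:
#
#     1. site-packages/requests-stubs/__init__.pyi   (PEP 561 stub package)
#     2. site-packages/requests/__init__.pyi         (stub next to the code)
#     3. <typeshed>/third_party/...                  (bundled typeshed)
#     4. site-packages/requests.pyi                  (bare .pyi; only tried
#                                                     when no Python module
#                                                     was found at all)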


def _load_from_typeshed(inference_state, python_value_set, parent_module_value, import_names):
    import_name = import_names[-1]
    map_ = None
    if len(import_names) == 1:
        map_ = _cache_stub_file_map(inference_state.grammar.version_info)
        import_name = _IMPORT_MAP.get(import_name, import_name)
    elif isinstance(parent_module_value, ModuleValue):
        if not parent_module_value.is_package():
            # Only if it's a package (= a folder) something can be
            # imported.
            return None
        paths = parent_module_value.py__path__()
        # Once the initial package has been loaded, the sub packages will
        # always be loaded, regardless if they are there or not. This makes
        # sense, IMO, because stubs take preference, even if the original
        # library doesn't provide a module (it could be dynamic). ~dave
        map_ = _merge_create_stub_map([PathInfo(p, is_third_party=False) for p in paths])

    if map_ is not None:
        path_info = map_.get(import_name)
        if path_info is not None and (not path_info.is_third_party or python_value_set):
            return _try_to_load_stub_from_file(
                inference_state,
                python_value_set,
                file_io=FileIO(path_info.path),
                import_names=import_names,
            )


def _try_to_load_stub_from_file(inference_state, python_value_set, file_io, import_names):
    try:
        stub_module_node = parse_stub_module(inference_state, file_io)
    except OSError:
        # The file that you're looking for doesn't exist (anymore).
        return None
    else:
        return create_stub_module(
            inference_state, inference_state.latest_grammar, python_value_set,
            stub_module_node, file_io, import_names
        )


def parse_stub_module(inference_state, file_io):
    return inference_state.parse(
        file_io=file_io,
        cache=True,
        diff_cache=settings.fast_parser,
        cache_path=settings.cache_directory,
        use_latest_grammar=True
    )


def create_stub_module(inference_state, grammar, python_value_set,
                       stub_module_node, file_io, import_names):
    if import_names == ('typing',):
        module_cls = TypingModuleWrapper
    else:
        module_cls = StubModuleValue
    file_name = os.path.basename(file_io.path)
    stub_module_value = module_cls(
        python_value_set, inference_state, stub_module_node,
        file_io=file_io,
        string_names=import_names,
        # The code was loaded with latest_grammar, so use
        # that.
        code_lines=get_cached_code_lines(grammar, file_io.path),
        is_package=file_name == '__init__.pyi',
    )
    return stub_module_value
485
.venv/Lib/site-packages/jedi/inference/gradual/typing.py
Normal file
485
.venv/Lib/site-packages/jedi/inference/gradual/typing.py
Normal file
@ -0,0 +1,485 @@
"""
We need to somehow work with the typing objects. Since the typing objects are
pretty bare, we need to add all the Jedi customizations to make them work as
values.

This file deals with all the typing.py cases.
"""
import itertools

from jedi import debug
from jedi.inference.compiled import builtin_from_name, create_simple_object
from jedi.inference.base_value import ValueSet, NO_VALUES, Value, \
    LazyValueWrapper, ValueWrapper
from jedi.inference.lazy_value import LazyKnownValues
from jedi.inference.arguments import repack_with_argument_clinic
from jedi.inference.filters import FilterWrapper
from jedi.inference.names import NameWrapper, ValueName
from jedi.inference.value.klass import ClassMixin
from jedi.inference.gradual.base import BaseTypingValue, \
    BaseTypingClassWithGenerics, BaseTypingInstance
from jedi.inference.gradual.type_var import TypeVarClass
from jedi.inference.gradual.generics import LazyGenericManager, TupleGenericManager

_PROXY_CLASS_TYPES = 'Tuple Generic Protocol Callable Type'.split()
_TYPE_ALIAS_TYPES = {
    'List': 'builtins.list',
    'Dict': 'builtins.dict',
    'Set': 'builtins.set',
    'FrozenSet': 'builtins.frozenset',
    'ChainMap': 'collections.ChainMap',
    'Counter': 'collections.Counter',
    'DefaultDict': 'collections.defaultdict',
    'Deque': 'collections.deque',
}
_PROXY_TYPES = 'Optional Union ClassVar'.split()
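

# Editor's sketch (not part of jedi): _TYPE_ALIAS_TYPES redirects a typing
# name to the runtime class it aliases, so e.g.:
#
#     >>> _TYPE_ALIAS_TYPES['List']
#     'builtins.list'
#
# TypeAlias below splits such a string into a module name and a class name.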


class TypingModuleName(NameWrapper):
    def infer(self):
        return ValueSet(self._remap())

    def _remap(self):
        name = self.string_name
        inference_state = self.parent_context.inference_state
        try:
            actual = _TYPE_ALIAS_TYPES[name]
        except KeyError:
            pass
        else:
            yield TypeAlias.create_cached(
                inference_state, self.parent_context, self.tree_name, actual)
            return

        if name in _PROXY_CLASS_TYPES:
            yield ProxyTypingClassValue.create_cached(
                inference_state, self.parent_context, self.tree_name)
        elif name in _PROXY_TYPES:
            yield ProxyTypingValue.create_cached(
                inference_state, self.parent_context, self.tree_name)
        elif name == 'runtime':
            # We don't want anything here, not sure what this function is
            # supposed to do, since it just appears in the stubs and shouldn't
            # have any effects there (because it's never executed).
            return
        elif name == 'TypeVar':
            cls, = self._wrapped_name.infer()
            yield TypeVarClass.create_cached(inference_state, cls)
        elif name == 'Any':
            yield AnyClass.create_cached(
                inference_state, self.parent_context, self.tree_name)
        elif name == 'TYPE_CHECKING':
            # This is needed for e.g. imports that are only available for type
            # checking or are in cycles. The user can then check this variable.
            yield builtin_from_name(inference_state, 'True')
        elif name == 'overload':
            yield OverloadFunction.create_cached(
                inference_state, self.parent_context, self.tree_name)
        elif name == 'NewType':
            v, = self._wrapped_name.infer()
            yield NewTypeFunction.create_cached(inference_state, v)
        elif name == 'cast':
            cast_fn, = self._wrapped_name.infer()
            yield CastFunction.create_cached(inference_state, cast_fn)
        elif name == 'TypedDict':
            # TODO doesn't even exist in typeshed/typing.py, yet. But will be
            # added soon.
            yield TypedDictClass.create_cached(
                inference_state, self.parent_context, self.tree_name)
        else:
            # Not necessary, as long as we are not doing type checking:
            # no_type_check & no_type_check_decorator
            # Everything else shouldn't be relevant...
            yield from self._wrapped_name.infer()


class TypingModuleFilterWrapper(FilterWrapper):
    name_wrapper_class = TypingModuleName


class ProxyWithGenerics(BaseTypingClassWithGenerics):
    def execute_annotation(self):
        string_name = self._tree_name.value

        if string_name == 'Union':
            # This is kind of a special case, because we have Unions (in Jedi
            # ValueSets).
            return self.gather_annotation_classes().execute_annotation()
        elif string_name == 'Optional':
            # Optional is basically just saying it's either None or the actual
            # type.
            return self.gather_annotation_classes().execute_annotation() \
                | ValueSet([builtin_from_name(self.inference_state, 'None')])
        elif string_name == 'Type':
            # The type is actually already given in the index_value
            return self._generics_manager[0]
        elif string_name == 'ClassVar':
            # For now don't do anything here, ClassVars are always used.
            return self._generics_manager[0].execute_annotation()

        mapped = {
            'Tuple': Tuple,
            'Generic': Generic,
            'Protocol': Protocol,
            'Callable': Callable,
        }
        cls = mapped[string_name]
        return ValueSet([cls(
            self.parent_context,
            self,
            self._tree_name,
            generics_manager=self._generics_manager,
        )])

    def gather_annotation_classes(self):
        return ValueSet.from_sets(self._generics_manager.to_tuple())

    def _create_instance_with_generics(self, generics_manager):
        return ProxyWithGenerics(
            self.parent_context,
            self._tree_name,
            generics_manager
        )

    def infer_type_vars(self, value_set):
        annotation_generics = self.get_generics()

        if not annotation_generics:
            return {}

        annotation_name = self.py__name__()
        if annotation_name == 'Optional':
            # Optional[T] is equivalent to Union[T, None]. In Jedi unions
            # are represented by members within a ValueSet, so we extract
            # the T from the Optional[T] by removing the None value.
            none = builtin_from_name(self.inference_state, 'None')
            return annotation_generics[0].infer_type_vars(
                value_set.filter(lambda x: x != none),
            )

        return {}
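

# Editor's sketch (not part of jedi): given an annotation ``Optional[T]`` and
# an incoming value set {int instance, None}, infer_type_vars above drops the
# None and matches T against what remains, roughly:
#
#     def first(x: Optional[T]) -> T: ...
#     first(42)   # None is filtered out, so T is inferred as int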


class ProxyTypingValue(BaseTypingValue):
    index_class = ProxyWithGenerics

    def with_generics(self, generics_tuple):
        return self.index_class.create_cached(
            self.inference_state,
            self.parent_context,
            self._tree_name,
            generics_manager=TupleGenericManager(generics_tuple)
        )

    def py__getitem__(self, index_value_set, contextualized_node):
        return ValueSet(
            self.index_class.create_cached(
                self.inference_state,
                self.parent_context,
                self._tree_name,
                generics_manager=LazyGenericManager(
                    context_of_index=contextualized_node.context,
                    index_value=index_value,
                )
            ) for index_value in index_value_set
        )


class _TypingClassMixin(ClassMixin):
    def py__bases__(self):
        return [LazyKnownValues(
            self.inference_state.builtins_module.py__getattribute__('object')
        )]

    def get_metaclasses(self):
        return []

    @property
    def name(self):
        return ValueName(self, self._tree_name)


class TypingClassWithGenerics(ProxyWithGenerics, _TypingClassMixin):
    def infer_type_vars(self, value_set):
        type_var_dict = {}
        annotation_generics = self.get_generics()

        if not annotation_generics:
            return type_var_dict

        annotation_name = self.py__name__()
        if annotation_name == 'Type':
            return annotation_generics[0].infer_type_vars(
                # This is basically a trick to avoid extra code: We execute the
                # incoming classes to be able to use the normal code for type
                # var inference.
                value_set.execute_annotation(),
            )

        elif annotation_name == 'Callable':
            if len(annotation_generics) == 2:
                return annotation_generics[1].infer_type_vars(
                    value_set.execute_annotation(),
                )

        elif annotation_name == 'Tuple':
            tuple_annotation, = self.execute_annotation()
            return tuple_annotation.infer_type_vars(value_set)

        return type_var_dict

    def _create_instance_with_generics(self, generics_manager):
        return TypingClassWithGenerics(
            self.parent_context,
            self._tree_name,
            generics_manager
        )


class ProxyTypingClassValue(ProxyTypingValue, _TypingClassMixin):
    index_class = TypingClassWithGenerics


class TypeAlias(LazyValueWrapper):
    def __init__(self, parent_context, origin_tree_name, actual):
        self.inference_state = parent_context.inference_state
        self.parent_context = parent_context
        self._origin_tree_name = origin_tree_name
        self._actual = actual  # e.g. builtins.list

    @property
    def name(self):
        return ValueName(self, self._origin_tree_name)

    def py__name__(self):
        return self.name.string_name

    def __repr__(self):
        return '<%s: %s>' % (self.__class__.__name__, self._actual)

    def _get_wrapped_value(self):
        module_name, class_name = self._actual.split('.')

        # TODO use inference_state.import_module?
        from jedi.inference.imports import Importer
        module, = Importer(
            self.inference_state, [module_name], self.inference_state.builtins_module
        ).follow()
        classes = module.py__getattribute__(class_name)
        # There should only be one, because it's code that we control.
        assert len(classes) == 1, classes
        cls = next(iter(classes))
        return cls

    def gather_annotation_classes(self):
        return ValueSet([self._get_wrapped_value()])

    def get_signatures(self):
        return []


class Callable(BaseTypingInstance):
    def py__call__(self, arguments):
        """
            def x() -> Callable[[Callable[..., _T]], _T]: ...
        """
        # The 0th index are the arguments.
        try:
            param_values = self._generics_manager[0]
            result_values = self._generics_manager[1]
        except IndexError:
            debug.warning('Callable[...] defined without two arguments')
            return NO_VALUES
        else:
            from jedi.inference.gradual.annotation import infer_return_for_callable
            return infer_return_for_callable(arguments, param_values, result_values)
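

# Editor's sketch (not part of jedi): ``Callable[[...], R]`` stores the
# parameter list at generic index 0 and the return annotation at index 1,
# which is why py__call__ above reads self._generics_manager[1]:
#
#     def deco() -> Callable[[Callable[..., _T]], _T]: ...
#     deco()(some_function)   # the call result is inferred via index 1 (_T)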


class Tuple(BaseTypingInstance):
    def _is_homogenous(self):
        # To specify a variable-length tuple of homogeneous type, Tuple[T, ...]
        # is used.
        return self._generics_manager.is_homogenous_tuple()

    def py__simple_getitem__(self, index):
        if self._is_homogenous():
            return self._generics_manager.get_index_and_execute(0)
        else:
            if isinstance(index, int):
                return self._generics_manager.get_index_and_execute(index)

            debug.dbg('The getitem type on Tuple was %s' % index)
            return NO_VALUES

    def py__iter__(self, contextualized_node=None):
        if self._is_homogenous():
            yield LazyKnownValues(self._generics_manager.get_index_and_execute(0))
        else:
            for v in self._generics_manager.to_tuple():
                yield LazyKnownValues(v.execute_annotation())

    def py__getitem__(self, index_value_set, contextualized_node):
        if self._is_homogenous():
            return self._generics_manager.get_index_and_execute(0)

        return ValueSet.from_sets(
            self._generics_manager.to_tuple()
        ).execute_annotation()

    def _get_wrapped_value(self):
        tuple_, = self.inference_state.builtins_module \
            .py__getattribute__('tuple').execute_annotation()
        return tuple_

    @property
    def name(self):
        return self._wrapped_value.name

    def infer_type_vars(self, value_set):
        # Circular
        from jedi.inference.gradual.annotation import merge_pairwise_generics, merge_type_var_dicts

        value_set = value_set.filter(
            lambda x: x.py__name__().lower() == 'tuple',
        )

        if self._is_homogenous():
            # The parameter annotation is of the form `Tuple[T, ...]`,
            # so we treat the incoming tuple like an iterable sequence
            # rather than a positional container of elements.
            return self._class_value.get_generics()[0].infer_type_vars(
                value_set.merge_types_of_iterate(),
            )

        else:
            # The parameter annotation has only explicit type parameters
            # (e.g: `Tuple[T]`, `Tuple[T, U]`, `Tuple[T, U, V]`, etc.) so we
            # treat the incoming values as needing to match the annotation
            # exactly, just as we would for non-tuple annotations.

            type_var_dict = {}
            for element in value_set:
                try:
                    method = element.get_annotated_class_object
                except AttributeError:
                    # This might still happen, because the tuple name matching
                    # above is not 100% correct, so just catch the remaining
                    # cases here.
                    continue

                py_class = method()
                merge_type_var_dicts(
                    type_var_dict,
                    merge_pairwise_generics(self._class_value, py_class),
                )

            return type_var_dict
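

# Editor's sketch (not part of jedi): the two annotation shapes handled by
# Tuple.infer_type_vars above:
#
#     def f(xs: Tuple[T, ...]) -> T: ...   # homogeneous: merge iterated types
#     def g(p: Tuple[T, U]) -> U: ...      # fixed arity: match positionally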


class Generic(BaseTypingInstance):
    pass


class Protocol(BaseTypingInstance):
    pass


class AnyClass(BaseTypingValue):
    def execute_annotation(self):
        debug.warning('Used Any - returned no results')
        return NO_VALUES


class OverloadFunction(BaseTypingValue):
    @repack_with_argument_clinic('func, /')
    def py__call__(self, func_value_set):
        # Just pass arguments through.
        return func_value_set


class NewTypeFunction(ValueWrapper):
    def py__call__(self, arguments):
        ordered_args = arguments.unpack()
        next(ordered_args, (None, None))
        _, second_arg = next(ordered_args, (None, None))
        if second_arg is None:
            return NO_VALUES
        return ValueSet(
            NewType(
                self.inference_state,
                contextualized_node.context,
                contextualized_node.node,
                second_arg.infer(),
            ) for contextualized_node in arguments.get_calling_nodes())


class NewType(Value):
    def __init__(self, inference_state, parent_context, tree_node, type_value_set):
        super().__init__(inference_state, parent_context)
        self._type_value_set = type_value_set
        self.tree_node = tree_node

    def py__class__(self):
        c, = self._type_value_set.py__class__()
        return c

    def py__call__(self, arguments):
        return self._type_value_set.execute_annotation()

    @property
    def name(self):
        from jedi.inference.compiled.value import CompiledValueName
        return CompiledValueName(self, 'NewType')

    def __repr__(self) -> str:
        return '<NewType: %s>%s' % (self.tree_node, self._type_value_set)
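

# Editor's sketch (not part of jedi): NewTypeFunction/NewType above make code
# like the following infer to the underlying type when called:
#
#     UserId = NewType('UserId', int)
#     UserId(42)   # inferred as an int instance (the second argument's type)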


class CastFunction(ValueWrapper):
    @repack_with_argument_clinic('type, object, /')
    def py__call__(self, type_value_set, object_value_set):
        return type_value_set.execute_annotation()
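

# Editor's sketch (not part of jedi): cast() trusts its first argument and
# ignores the second for inference:
#
#     from typing import List, cast
#     x = cast(List[int], something)   # x is inferred as a list of int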


class TypedDictClass(BaseTypingValue):
    """
    This class has no responsibilities and is just here to make sure that typed
    dicts can be identified.
    """


class TypedDict(LazyValueWrapper):
    """Represents the instance version of ``TypedDictClass``."""
    def __init__(self, definition_class):
        self.inference_state = definition_class.inference_state
        self.parent_context = definition_class.parent_context
        self.tree_node = definition_class.tree_node
        self._definition_class = definition_class

    @property
    def name(self):
        return ValueName(self, self.tree_node.name)

    def py__simple_getitem__(self, index):
        if isinstance(index, str):
            return ValueSet.from_sets(
                name.infer()
                for filter in self._definition_class.get_filters(is_instance=True)
                for name in filter.get(index)
            )
        return NO_VALUES

    def get_key_values(self):
        filtered_values = itertools.chain.from_iterable((
            f.values()
            for f in self._definition_class.get_filters(is_instance=True)
        ))
        return ValueSet({
            create_simple_object(self.inference_state, v.string_name)
            for v in filtered_values
        })

    def _get_wrapped_value(self):
        d, = self.inference_state.builtins_module.py__getattribute__('dict')
        result, = d.execute_with_values()
        return result
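

# Editor's sketch (not part of jedi): for a stub-defined typed dict such as
#
#     class Movie(TypedDict):
#         name: str
#         year: int
#
# py__simple_getitem__ above infers ``movie['name']`` as str, and
# get_key_values() yields the string objects {'name', 'year'}.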
35
.venv/Lib/site-packages/jedi/inference/gradual/utils.py
Normal file
35
.venv/Lib/site-packages/jedi/inference/gradual/utils.py
Normal file
@ -0,0 +1,35 @@
from pathlib import Path

from jedi.inference.gradual.typeshed import TYPESHED_PATH, create_stub_module


def load_proper_stub_module(inference_state, grammar, file_io, import_names, module_node):
    """
    This function is given a random .pyi file and should return the proper
    module.
    """
    path = file_io.path
    path = Path(path)
    assert path.suffix == '.pyi'
    try:
        relative_path = path.relative_to(TYPESHED_PATH)
    except ValueError:
        pass
    else:
        # /[...]/stdlib/3/os/__init__.pyi -> stdlib/3/os/__init__
        rest = relative_path.with_suffix('')
        # Remove the stdlib/3 or third_party/3.6 part
        import_names = rest.parts[2:]
        if rest.name == '__init__':
            import_names = import_names[:-1]

    if import_names is not None:
        actual_value_set = inference_state.import_module(import_names, prefer_stubs=False)

        stub = create_stub_module(
            inference_state, grammar, actual_value_set,
            module_node, file_io, import_names
        )
        inference_state.stub_module_cache[import_names] = stub
        return stub
    return None
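

# Editor's sketch (not part of jedi): the typeshed path mangling above, step
# by step for a stdlib stub:
#
#     /.../typeshed/stdlib/3/os/path.pyi
#         relative_to(TYPESHED_PATH)  -> stdlib/3/os/path.pyi
#         with_suffix('')             -> stdlib/3/os/path
#         parts[2:]                   -> ('os', 'path')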