As described in the :mod:`parso.python.tree` module,
there's a need for an AST-like module to represent the states of parsed
modules.

But now there are also structures in Python that need a little bit more than
that. An ``Instance``, for example, is only a ``Class`` before it is
instantiated. This class represents these cases.

So, why is there also a ``Class`` class here? Well, there are decorators, and
they change classes in Python 3.

Representation modules also define "magic methods". Those methods look like
``py__foo__`` and are typically mappable to the Python equivalents ``__call__``
and others. Here's a list:
====================================== ========================================
**Method**                             **Description**
-------------------------------------- ----------------------------------------
py__call__(arguments: Array)           On callable objects, returns types.
py__bool__()                           Returns True/False/None; None means that
                                       there's no certainty.
py__bases__()                          Returns a list of base classes.
py__iter__()                           Returns a generator of a set of types.
py__class__()                          Returns the class of an instance.
py__simple_getitem__(index: int/str)   Returns a set of types of the index.
                                       Can raise an IndexError/KeyError.
py__getitem__(indexes: ValueSet)       Returns a set of types of the index.
py__file__()                           Only on modules. Returns None if it
                                       does not exist.
py__package__() -> List[str]           Only on modules. For the import system.
py__path__()                           Only on modules. For the import system.
py__get__(call_object)                 Only on instances. Simulates
                                       descriptors.
py__doc__()                            Returns the docstring for a value.
====================================== ========================================
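
For example (an illustrative sketch only; ``some_value`` and ``arguments`` are
placeholder names, not identifiers from this module), a caller combines these
methods roughly like this::

    for value in some_value.py__call__(arguments):  # the types a call returns
        if value.py__bool__() is None:               # None means "no certainty"
            ...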

from jedi import debug
from jedi.parser_utils import get_cached_parent_scope, expr_is_dotted, \
    function_is_property
from jedi.inference.cache import inference_state_method_cache, CachedMetaClass, \
    inference_state_method_generator_cache
from jedi.inference import compiled
from jedi.inference.lazy_value import LazyKnownValues, LazyTreeValue
from jedi.inference.filters import ParserTreeFilter
from jedi.inference.names import TreeNameDefinition, ValueName
from jedi.inference.arguments import unpack_arglist, ValuesArguments
from jedi.inference.base_value import ValueSet, iterator_to_value_set, \
    NO_VALUES
from jedi.inference.context import ClassContext
from jedi.inference.value.function import FunctionAndClassBase
from jedi.inference.gradual.generics import LazyGenericManager, TupleGenericManager
from jedi.plugins import plugin_manager


class ClassName(TreeNameDefinition):
    def __init__(self, class_value, tree_name, name_context, apply_decorators):
        super().__init__(name_context, tree_name)
        self._apply_decorators = apply_decorators
        self._class_value = class_value

    @iterator_to_value_set
    def infer(self):
        # We're using a different value to infer, so we cannot call super().
        from jedi.inference.syntax_tree import tree_name_to_values
        inferred = tree_name_to_values(
            self.parent_context.inference_state, self.parent_context, self.tree_name)

        for result_value in inferred:
            if self._apply_decorators:
                yield from result_value.py__get__(instance=None, class_value=self._class_value)
            else:
                yield result_value

    @property
    def api_type(self):
        type_ = super().api_type
        if type_ == 'function':
            definition = self.tree_name.get_definition()
            if definition is None:
                return type_
            if function_is_property(definition):
                # This essentially checks if there is an @property before
                # the function. @property could be something different, but
                # any programmer that redefines property as something that
                # is not really a property anymore, should be shot. (i.e.
                # this is a heuristic).
                return 'property'
        return type_
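
# Illustrative example (not part of the original module): the heuristic in
# ClassName.api_type means that for source like
#
#     class Foo:
#         @property
#         def bar(self): ...
#
# the name ``bar`` resolved through ClassName is reported with the api_type
# 'property' rather than 'function'.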


class ClassFilter(ParserTreeFilter):
    def __init__(self, class_value, node_context=None, until_position=None,
                 origin_scope=None, is_instance=False):
        super().__init__(
            class_value.as_context(), node_context,
            until_position=until_position,
            origin_scope=origin_scope,
        )
        self._class_value = class_value
        self._is_instance = is_instance

    def _convert_names(self, names):
        return [
            ClassName(
                class_value=self._class_value,
                tree_name=name,
                name_context=self._node_context,
                apply_decorators=not self._is_instance,
            ) for name in names
        ]

    def _equals_origin_scope(self):
        node = self._origin_scope
        while node is not None:
            if node == self._parser_scope or node == self.parent_context:
                return True
            node = get_cached_parent_scope(self._parso_cache_node, node)
        return False

    def _access_possible(self, name):
        # Filter for name mangling of private variables like __foo
        return not name.value.startswith('__') or name.value.endswith('__') \
            or self._equals_origin_scope()

    def _filter(self, names):
        names = super()._filter(names)
        return [name for name in names if self._access_possible(name)]
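
# Illustrative note (not part of the original module): with the filter above,
# a name like ``__secret`` defined on a class is only offered when the lookup
# originates from that class's own scope, while dunder names such as
# ``__init__`` always pass, because _access_possible lets through anything
# that also ends in ``__``.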


class ClassMixin:
    def is_class(self):
        return True

    def is_class_mixin(self):
        return True

    def py__call__(self, arguments):
        from jedi.inference.value import TreeInstance

        from jedi.inference.gradual.typing import TypedDict
        if self.is_typeddict():
            return ValueSet([TypedDict(self)])
        return ValueSet([TreeInstance(self.inference_state, self.parent_context, self, arguments)])

    def py__class__(self):
        return compiled.builtin_from_name(self.inference_state, 'type')

    @property
    def name(self):
        return ValueName(self, self.tree_node.name)

    def py__name__(self):
        return self.name.string_name

    @inference_state_method_generator_cache()
    def py__mro__(self):
        mro = [self]

        yield self
        # TODO Do a proper mro resolution. Currently we are just listing
        # classes. However, it's a complicated algorithm.
        for lazy_cls in self.py__bases__():
            # TODO there's multiple different mro paths possible if this yields
            # multiple possibilities. Could be changed to be more correct.
            for cls in lazy_cls.infer():
                # TODO detect for TypeError: duplicate base class str,
                # e.g. `class X(str, str): pass`
                try:
                    mro_method = cls.py__mro__
                except AttributeError:
                    # TODO add a TypeError like:
                    """
                    >>> class Y(lambda: test): pass
                    Traceback (most recent call last):
                      File "<stdin>", line 1, in <module>
                    TypeError: function() argument 1 must be code, not str
                    >>> class Y(1): pass
                    Traceback (most recent call last):
                      File "<stdin>", line 1, in <module>
                    TypeError: int() takes at most 2 arguments (3 given)
                    """
                    debug.warning('Super class of %s is not a class: %s', self, cls)
                else:
                    for cls_new in mro_method():
                        if cls_new not in mro:
                            mro.append(cls_new)
                            yield cls_new
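
    # Illustrative note (not part of the original module): for
    #
    #     class A: pass
    #     class B: pass
    #     class C(A, B): pass
    #
    # the listing above yields C, A, object, B, whereas Python's real C3
    # linearization is C, A, B, object. That gap is what the TODO about a
    # proper mro resolution refers to.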

    def get_filters(self, origin_scope=None, is_instance=False,
                    include_metaclasses=True, include_type_when_class=True):
        if include_metaclasses:
            metaclasses = self.get_metaclasses()
            if metaclasses:
                yield from self.get_metaclass_filters(metaclasses, is_instance)

        for cls in self.py__mro__():
            if cls.is_compiled():
                yield from cls.get_filters(is_instance=is_instance)
            else:
                yield ClassFilter(
                    self, node_context=cls.as_context(),
                    origin_scope=origin_scope,
                    is_instance=is_instance
                )
        if not is_instance and include_type_when_class:
            from jedi.inference.compiled import builtin_from_name
            type_ = builtin_from_name(self.inference_state, 'type')
            assert isinstance(type_, ClassValue)
            if type_ != self:
                # We are not using execute_with_values here, because the
                # plugin function for type would get executed instead of an
                # instance creation.
                args = ValuesArguments([])
                for instance in type_.py__call__(args):
                    instance_filters = instance.get_filters()
                    # Filter out self filters
                    next(instance_filters, None)
                    next(instance_filters, None)
                    x = next(instance_filters, None)
                    assert x is not None
                    yield x

    def get_signatures(self):
        # Since calling staticmethod without a function is illegal, the Jedi
        # plugin doesn't return anything. Therefore call directly and get what
        # we want: an instance of staticmethod.
        metaclasses = self.get_metaclasses()
        if metaclasses:
            sigs = self.get_metaclass_signatures(metaclasses)
            if sigs:
                return sigs
        args = ValuesArguments([])
        init_funcs = self.py__call__(args).py__getattribute__('__init__')
        return [sig.bind(self) for sig in init_funcs.get_signatures()]

    def _as_context(self):
        return ClassContext(self)

    def get_type_hint(self, add_class_info=True):
        if add_class_info:
            return 'Type[%s]' % self.py__name__()
        return self.py__name__()

    @inference_state_method_cache(default=False)
    def is_typeddict(self):
        # TODO Do a proper mro resolution. Currently we are just listing
        # classes. However, it's a complicated algorithm.
        from jedi.inference.gradual.typing import TypedDictClass
        for lazy_cls in self.py__bases__():
            if not isinstance(lazy_cls, LazyTreeValue):
                return False
            tree_node = lazy_cls.data
            # Only resolve simple classes; stuff like Iterable[str] is more
            # intensive to resolve, and if generics are involved, we know it's
            # not a TypedDict.
            if not expr_is_dotted(tree_node):
                return False

            for cls in lazy_cls.infer():
                if isinstance(cls, TypedDictClass):
                    return True
                try:
                    method = cls.is_typeddict
                except AttributeError:
                    # We're only dealing with simple classes, so just returning
                    # here should be fine. This only happens with e.g. compiled
                    # classes.
                    return False
                else:
                    if method():
                        return True
        return False

    def py__getitem__(self, index_value_set, contextualized_node):
        from jedi.inference.gradual.base import GenericClass
        if not index_value_set:
            debug.warning('Class indexes inferred to nothing. Returning class instead')
            return ValueSet([self])
        return ValueSet(
            GenericClass(
                self,
                LazyGenericManager(
                    context_of_index=contextualized_node.context,
                    index_value=index_value,
                )
            )
            for index_value in index_value_set
        )

    def with_generics(self, generics_tuple):
        from jedi.inference.gradual.base import GenericClass
        return GenericClass(
            self,
            TupleGenericManager(generics_tuple)
        )

    def define_generics(self, type_var_dict):
        from jedi.inference.gradual.base import GenericClass

        def remap_type_vars():
            """
            The TypeVars in the resulting classes sometimes have different
            names and we need to check for that, e.g. a signature can be:

            def iter(iterable: Iterable[_T]) -> Iterator[_T]: ...

            However, the iterator is defined as Iterator[_T_co], which means
            it has a different type var name.
            """
            for type_var in self.list_type_vars():
                yield type_var_dict.get(type_var.py__name__(), NO_VALUES)

        if type_var_dict:
            return ValueSet([GenericClass(
                self,
                TupleGenericManager(tuple(remap_type_vars()))
            )])
        return ValueSet({self})


class ClassValue(ClassMixin, FunctionAndClassBase, metaclass=CachedMetaClass):
    api_type = 'class'

    @inference_state_method_cache()
    def list_type_vars(self):
        found = []
        arglist = self.tree_node.get_super_arglist()
        if arglist is None:
            return []

        for stars, node in unpack_arglist(arglist):
            if stars:
                continue  # These are not relevant for this search.

            from jedi.inference.gradual.annotation import find_unknown_type_vars
            for type_var in find_unknown_type_vars(self.parent_context, node):
                if type_var not in found:
                    # The order matters and it's therefore a list.
                    found.append(type_var)
        return found

    def _get_bases_arguments(self):
        arglist = self.tree_node.get_super_arglist()
        if arglist:
            from jedi.inference import arguments
            return arguments.TreeArguments(self.inference_state, self.parent_context, arglist)
        return None

    @inference_state_method_cache(default=())
    def py__bases__(self):
        args = self._get_bases_arguments()
        if args is not None:
            lst = [value for key, value in args.unpack() if key is None]
            if lst:
                return lst

        if self.py__name__() == 'object' \
                and self.parent_context.is_builtins_module():
            return []
        return [LazyKnownValues(
            self.inference_state.builtins_module.py__getattribute__('object')
        )]
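
    # Illustrative note (not part of the original module): for a plain
    # ``class Foo: pass`` there is no arglist, so py__bases__ falls through to
    # the final return and reports the builtin ``object`` as the implicit
    # base, mirroring how every Python 3 class inherits from object. Only the
    # builtin ``object`` itself ends up with no bases at all.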

    @plugin_manager.decorate()
    def get_metaclass_filters(self, metaclasses, is_instance):
        debug.warning('Unprocessed metaclass %s', metaclasses)
        return []

    @inference_state_method_cache(default=NO_VALUES)
    def get_metaclasses(self):
        args = self._get_bases_arguments()
        if args is not None:
            m = [value for key, value in args.unpack() if key == 'metaclass']
            metaclasses = ValueSet.from_sets(lazy_value.infer() for lazy_value in m)
            metaclasses = ValueSet(m for m in metaclasses if m.is_class())
            if metaclasses:
                return metaclasses

        for lazy_base in self.py__bases__():
            for value in lazy_base.infer():
                if value.is_class():
                    values = value.get_metaclasses()
                    if values:
                        return values
        return NO_VALUES

    @plugin_manager.decorate()
    def get_metaclass_signatures(self, metaclasses):
        return []