2 Contains all classes and functions to deal with lists, dicts, generators and
5 from jedi
.inference
import compiled
6 from jedi
.inference
import analysis
7 from jedi
.inference
.lazy_value
import LazyKnownValue
, LazyKnownValues
, \
9 from jedi
.inference
.helpers
import get_int_or_none
, is_string
, \
10 reraise_getitem_errors
, SimpleGetItemNotFound
11 from jedi
.inference
.utils
import safe_property
, to_list
12 from jedi
.inference
.cache
import inference_state_method_cache
13 from jedi
.inference
.filters
import LazyAttributeOverwrite
, publish_method
14 from jedi
.inference
.base_value
import ValueSet
, Value
, NO_VALUES
, \
15 ContextualizedNode
, iterate_values
, sentinel
, \
17 from jedi
.parser_utils
import get_sync_comp_fors
18 from jedi
.inference
.context
import CompForContext
19 from jedi
.inference
.value
.dynamic_arrays
import check_array_additions
def py__next__(self, contextualized_node=None):
    # next() is modeled as plain iteration; callers merge the resulting
    # lazy values themselves.
    return self.py__iter__(contextualized_node)
def py__stop_iteration_returns(self):
    # By default a StopIteration carries no value, i.e. None.
    return ValueSet([compiled.builtin_from_name(self.inference_state, 'None')])
# At the moment, safe values are simple values like "foo", 1 and not
# lists/dicts. Therefore as a small speed optimization we can just do the
# default instead of resolving the lazy wrapped values, that are just
# doing this in the end as well.
# This mostly speeds up patterns like `sys.version_info >= (3, 0)` in
# conditions.
get_safe_value = Value.get_safe_value
38 class GeneratorBase(LazyAttributeOverwrite
, IterableMixin
):
41 def _get_wrapped_value(self
):
42 instance
, = self
._get
_cls
().execute_annotation()
46 generator
, = self
.inference_state
.typing_module
.py__getattribute__('Generator')
@publish_method('__iter__')
def _iter(self, arguments):
    # Calling __iter__ on a generator returns the generator itself.
    return ValueSet([self])
@publish_method('send')
@publish_method('__next__')
def _next(self, arguments):
    """Model ``__next__``/``send``: merge everything the generator yields."""
    # Note: the argument passed to send() is ignored here.
    inferred_sets = (lazy.infer() for lazy in self.py__iter__())
    return ValueSet.from_sets(inferred_sets)
def py__stop_iteration_returns(self):
    # A plain generator's StopIteration value defaults to None.
    return ValueSet([compiled.builtin_from_name(self.inference_state, 'None')])
66 return compiled
.CompiledValueName(self
, 'Generator')
def get_annotated_class_object(self):
    """Attach generics (merged yield classes, NO_VALUES, NO_VALUES) to the class."""
    from jedi.inference.gradual.generics import TupleGenericManager

    yield_classes = self.merge_types_of_iterate().py__class__()
    # Three generic slots; send/return types are unknown here.
    generics = TupleGenericManager((yield_classes, NO_VALUES, NO_VALUES))
    return self._get_cls().with_generics(generics)
75 class Generator(GeneratorBase
):
76 """Handling of `yield` functions."""
def __init__(self, inference_state, func_execution_context):
    # func_execution_context: the execution context of the function that
    # contains the `yield` statements producing this generator.
    super().__init__(inference_state)
    self._func_execution_context = func_execution_context
81 def py__iter__(self
, contextualized_node
=None):
82 iterators
= self
._func
_execution
_context
.infer_annotations()
84 return iterators
.iterate(contextualized_node
)
85 return self
._func
_execution
_context
.get_yield_lazy_values()
def py__stop_iteration_returns(self):
    # The value of a generator's StopIteration is the function's return value.
    return self._func_execution_context.get_return_values()
91 return "<%s of %s>" % (type(self
).__name
__, self
._func
_execution
_context
)
94 def comprehension_from_atom(inference_state
, value
, atom
):
95 bracket
= atom
.children
[0]
96 test_list_comp
= atom
.children
[1]
99 if atom
.children
[1].children
[1] == ':':
100 sync_comp_for
= test_list_comp
.children
[3]
101 if sync_comp_for
.type == 'comp_for':
102 sync_comp_for
= sync_comp_for
.children
[1]
104 return DictComprehension(
107 sync_comp_for_node
=sync_comp_for
,
108 key_node
=test_list_comp
.children
[0],
109 value_node
=test_list_comp
.children
[2],
112 cls
= SetComprehension
114 cls
= GeneratorComprehension
116 cls
= ListComprehension
118 sync_comp_for
= test_list_comp
.children
[1]
119 if sync_comp_for
.type == 'comp_for':
120 sync_comp_for
= sync_comp_for
.children
[1]
124 defining_context
=value
,
125 sync_comp_for_node
=sync_comp_for
,
126 entry_node
=test_list_comp
.children
[0],
130 class ComprehensionMixin
:
@inference_state_method_cache()
def _get_comp_for_context(self, parent_context, comp_for):
    # Cached so repeated (lazy) iterations reuse one context per comp_for.
    return CompForContext(parent_context, comp_for)
135 def _nested(self
, comp_fors
, parent_context
=None):
136 comp_for
= comp_fors
[0]
138 is_async
= comp_for
.parent
.type == 'comp_for'
140 input_node
= comp_for
.children
[3]
141 parent_context
= parent_context
or self
._defining
_context
142 input_types
= parent_context
.infer_node(input_node
)
144 cn
= ContextualizedNode(parent_context
, input_node
)
145 iterated
= input_types
.iterate(cn
, is_async
=is_async
)
146 exprlist
= comp_for
.children
[1]
147 for i
, lazy_value
in enumerate(iterated
):
148 types
= lazy_value
.infer()
149 dct
= unpack_tuple_to_dict(parent_context
, types
, exprlist
)
150 context
= self
._get
_comp
_for
_context
(
154 with context
.predefine_names(comp_for
, dct
):
156 yield from self
._nested
(comp_fors
[1:], context
)
158 iterated
= context
.infer_node(self
._entry
_node
)
159 if self
.array_type
== 'dict':
160 yield iterated
, context
.infer_node(self
._value
_node
)
164 @inference_state_method_cache(default
=[])
167 comp_fors
= tuple(get_sync_comp_fors(self
._sync
_comp
_for
_node
))
168 yield from self
._nested
(comp_fors
)
def py__iter__(self, contextualized_node=None):
    """Lazily yield the inferred value set of each comprehension entry."""
    for value_set in self._iterate():
        yield LazyKnownValues(value_set)
175 return "<%s of %s>" % (type(self
).__name
__, self
._sync
_comp
_for
_node
)
179 def _get_generics(self
):
180 return tuple(c_set
.py__class__() for c_set
in self
.get_mapping_item_values())
183 class Sequence(LazyAttributeOverwrite
, IterableMixin
):
184 api_type
= 'instance'
188 return compiled
.CompiledValueName(self
, self
.array_type
)
def _get_generics(self):
    # A plain sequence has a single generic parameter: the merged class of
    # everything it can iterate.
    return (self.merge_types_of_iterate().py__class__(),)
@inference_state_method_cache(default=())
def _cached_generics(self):
    # Cached per inference state. NOTE(review): default=() presumably
    # guards recursive evaluation -- confirm in the cache helper.
    return self._get_generics()
197 def _get_wrapped_value(self
):
198 from jedi
.inference
.gradual
.base
import GenericClass
199 from jedi
.inference
.gradual
.generics
import TupleGenericManager
200 klass
= compiled
.builtin_from_name(self
.inference_state
, self
.array_type
)
203 TupleGenericManager(self
._cached
_generics
())
204 ).execute_annotation()
def py__bool__(self):
    # Truthiness is unknowable for an inferred sequence (it may be mutated
    # elsewhere), so report "unknown" rather than True/False.
    return None  # We don't know the length, because of appends.
212 return self
.inference_state
.builtins_module
def py__getitem__(self, index_value_set, contextualized_node):
    """Infer a subscript whose concrete index is unknown."""
    if self.array_type != 'dict':
        return iterate_values(ValueSet([self]))
    # For dicts any key might be hit, so every value is a possible result.
    return self._dict_values()
class _BaseComprehension(ComprehensionMixin):
    # Shared constructor for list/set/generator comprehensions;
    # DictComprehension defines its own because it also tracks a value node.
    def __init__(self, inference_state, defining_context, sync_comp_for_node, entry_node):
        # entry_node is the expression that produces each element.
        assert sync_comp_for_node.type == 'sync_comp_for'
        super().__init__(inference_state)
        self._defining_context = defining_context
        self._sync_comp_for_node = sync_comp_for_node
        self._entry_node = entry_node
229 class ListComprehension(_BaseComprehension
, Sequence
):
def py__simple_getitem__(self, index):
    """Infer ``lst[i]`` for a concrete int; slicing yields the list itself."""
    if isinstance(index, slice):
        return ValueSet([self])

    entries = list(self.py__iter__())
    with reraise_getitem_errors(IndexError, TypeError):
        selected = entries[index]
    return selected.infer()
242 class SetComprehension(_BaseComprehension
, Sequence
):
246 class GeneratorComprehension(_BaseComprehension
, GeneratorBase
):
# TODO merge with _DictMixin?
def get_mapping_item_values(self):
    # Returns the (keys, values) pair of value sets for this mapping.
    return self._dict_keys(), self._dict_values()
def get_key_values(self):
    # TODO merge with _dict_keys?
    return self._dict_keys()
260 class DictComprehension(ComprehensionMixin
, Sequence
, _DictKeyMixin
):
def __init__(self, inference_state, defining_context, sync_comp_for_node, key_node, value_node):
    # The key node doubles as the "entry" node, so the shared comprehension
    # machinery iterates over the keys; the value node is inferred separately.
    assert sync_comp_for_node.type == 'sync_comp_for'
    super().__init__(inference_state)
    self._defining_context = defining_context
    self._sync_comp_for_node = sync_comp_for_node
    self._entry_node = key_node
    self._value_node = value_node
def py__iter__(self, contextualized_node=None):
    """Iterating a dict comprehension yields its keys only."""
    for key_set, _value_set in self._iterate():
        yield LazyKnownValues(key_set)
275 def py__simple_getitem__(self
, index
):
276 for keys
, values
in self
._iterate
():
278 # Be careful in the future if refactoring, index could be a
280 if k
.get_safe_value(default
=object()) == index
:
282 raise SimpleGetItemNotFound()
def _dict_keys(self):
    """Union of every key value-set this comprehension produces."""
    return ValueSet.from_sets(key_set for key_set, _ in self._iterate())
def _dict_values(self):
    """Union of every value value-set this comprehension produces."""
    return ValueSet.from_sets(value_set for _, value_set in self._iterate())
@publish_method('values')
def _imitate_values(self, arguments):
    """Fake ``dict.values()``: a list value containing all inferred values."""
    all_values = LazyKnownValues(self._dict_values())
    return ValueSet([FakeList(self.inference_state, [all_values])])
295 @publish_method('items')
296 def _imitate_items(self
, arguments
):
300 self
.inference_state
,
301 [LazyKnownValues(key
),
302 LazyKnownValues(value
)]
305 for key
, value
in self
._iterate
()
308 return ValueSet([FakeList(self
.inference_state
, lazy_values
)])
310 def exact_key_items(self
):
311 # NOTE: A smarter thing can probably done here to achieve better
312 # completions, but at least like this jedi doesn't crash
316 class SequenceLiteralValue(Sequence
):
317 _TUPLE_LIKE
= 'testlist_star_expr', 'testlist', 'subscriptlist'
318 mapping
= {'(': 'tuple',
322 def __init__(self
, inference_state
, defining_context
, atom
):
323 super().__init
__(inference_state
)
325 self
._defining
_context
= defining_context
327 if self
.atom
.type in self
._TUPLE
_LIKE
:
328 self
.array_type
= 'tuple'
330 self
.array_type
= SequenceLiteralValue
.mapping
[atom
.children
[0]]
331 """The builtin name of the array (list, set, tuple or dict)."""
def _get_generics(self):
    """Tuples get one generic per element; other literals defer to Sequence."""
    if self.array_type != 'tuple':
        return super()._get_generics()
    return tuple(lazy.infer().py__class__() for lazy in self.py__iter__())
338 def py__simple_getitem__(self
, index
):
339 """Here the index is an int/str. Raises IndexError/KeyError."""
340 if isinstance(index
, slice):
341 return ValueSet([self
])
343 with
reraise_getitem_errors(TypeError, KeyError, IndexError):
344 node
= self
.get_tree_entries()[index
]
345 if node
== ':' or node
.type == 'subscript':
347 return self
._defining
_context
.infer_node(node
)
349 def py__iter__(self
, contextualized_node
=None):
351 While values returns the possible values for any array field, this
352 function returns the value for a certain index.
354 for node
in self
.get_tree_entries():
355 if node
== ':' or node
.type == 'subscript':
356 # TODO this should probably use at least part of the code
357 # of infer_subscript_list.
358 yield LazyKnownValue(Slice(self
._defining
_context
, None, None, None))
360 yield LazyTreeValue(self
._defining
_context
, node
)
361 yield from check_array_additions(self
._defining
_context
, self
)
364 # This function is not really used often. It's more of a try.
365 return len(self
.get_tree_entries())
367 def get_tree_entries(self
):
368 c
= self
.atom
.children
370 if self
.atom
.type in self
._TUPLE
_LIKE
:
374 if array_node
in (']', '}', ')'):
375 return [] # Direct closing bracket, doesn't contain items.
377 if array_node
.type == 'testlist_comp':
378 # filter out (for now) pep 448 single-star unpacking
379 return [value
for value
in array_node
.children
[::2]
380 if value
.type != "star_expr"]
381 elif array_node
.type == 'dictorsetmaker':
383 iterator
= iter(array_node
.children
)
386 # dict with pep 448 double-star unpacking
387 # for now ignoring the values imported by **
389 next(iterator
, None) # Possible comma.
391 op
= next(iterator
, None)
392 if op
is None or op
== ',':
393 if key
.type == "star_expr":
394 # pep 448 single-star unpacking
395 # for now ignoring values imported by *
398 kv
.append(key
) # A set.
400 assert op
== ':' # A dict.
401 kv
.append((key
, next(iterator
)))
402 next(iterator
, None) # Possible comma.
405 if array_node
.type == "star_expr":
406 # pep 448 single-star unpacking
407 # for now ignoring values imported by *
413 return "<%s of %s>" % (self
.__class
__.__name
__, self
.atom
)
416 class DictLiteralValue(_DictMixin
, SequenceLiteralValue
, _DictKeyMixin
):
419 def __init__(self
, inference_state
, defining_context
, atom
):
420 # Intentionally don't call the super class. This is definitely a sign
421 # that the architecture is bad and we should refactor.
422 Sequence
.__init
__(self
, inference_state
)
423 self
._defining
_context
= defining_context
def py__simple_getitem__(self, index):
    """Here the index is an int/str. Raises IndexError/KeyError."""
    wanted = compiled.create_simple_object(self.inference_state, index)
    for key_node, value_node in self.get_tree_entries():
        for inferred_key in self._defining_context.infer_node(key_node):
            # Compare through the inferred `==` operation, since the keys
            # are parse-tree nodes and not plain Python objects.
            for equality in inferred_key.execute_operation(wanted, '=='):
                if equality.get_safe_value():
                    return self._defining_context.infer_node(value_node)
    raise SimpleGetItemNotFound('No key found in dictionary %s.' % self)
436 def py__iter__(self
, contextualized_node
=None):
438 While values returns the possible values for any array field, this
439 function returns the value for a certain index.
443 for k
, _
in self
.get_tree_entries():
444 types |
= self
._defining
_context
.infer_node(k
)
445 # We don't know which dict index comes first, therefore always
446 # yield all the types.
448 yield LazyKnownValues(types
)
@publish_method('values')
def _imitate_values(self, arguments):
    """Fake ``dict.values()`` for a dict literal."""
    merged = LazyKnownValues(self._dict_values())
    return ValueSet([FakeList(self.inference_state, [merged])])
455 @publish_method('items')
456 def _imitate_items(self
, arguments
):
458 LazyKnownValue(FakeTuple(
459 self
.inference_state
,
460 (LazyTreeValue(self
._defining
_context
, key_node
),
461 LazyTreeValue(self
._defining
_context
, value_node
))
462 )) for key_node
, value_node
in self
.get_tree_entries()
465 return ValueSet([FakeList(self
.inference_state
, lazy_values
)])
467 def exact_key_items(self
):
469 Returns a generator of tuples like dict.items(), where the key is
470 resolved (as a string) and the values are still lazy values.
472 for key_node
, value
in self
.get_tree_entries():
473 for key
in self
._defining
_context
.infer_node(key_node
):
475 yield key
.get_safe_value(), LazyTreeValue(self
._defining
_context
, value
)
def _dict_values(self):
    """Union of the inferred value of every ``key: value`` entry."""
    return ValueSet.from_sets(
        self._defining_context.infer_node(value_node)
        for _key_node, value_node in self.get_tree_entries()
    )
def _dict_keys(self):
    """Union of the inferred key of every ``key: value`` entry."""
    return ValueSet.from_sets(
        self._defining_context.infer_node(key_node)
        for key_node, _value_node in self.get_tree_entries()
    )
490 class _FakeSequence(Sequence
):
def __init__(self, inference_state, lazy_value_list):
    """
    type should be one of "tuple", "list"

    lazy_value_list holds the pre-computed lazy values backing the fake
    sequence.
    """
    super().__init__(inference_state)
    self._lazy_value_list = lazy_value_list
def py__simple_getitem__(self, index):
    """Infer ``seq[i]``; a slice simply evaluates to the sequence itself."""
    if isinstance(index, slice):
        return ValueSet([self])

    with reraise_getitem_errors(IndexError, TypeError):
        selected = self._lazy_value_list[index]
    return selected.infer()
def py__iter__(self, contextualized_node=None):
    # The backing list already consists of lazy values; hand it out as-is.
    return self._lazy_value_list
509 def py__bool__(self
):
510 return bool(len(self
._lazy
_value
_list
))
513 return "<%s of %s>" % (type(self
).__name
__, self
._lazy
_value
_list
)
516 class FakeTuple(_FakeSequence
):
520 class FakeList(_FakeSequence
):
524 class FakeDict(_DictMixin
, Sequence
, _DictKeyMixin
):
527 def __init__(self
, inference_state
, dct
):
528 super().__init
__(inference_state
)
def py__iter__(self, contextualized_node=None):
    """Yield every dict key as a lazily-known compiled object."""
    for raw_key in self._dct:
        key_value = compiled.create_simple_object(self.inference_state, raw_key)
        yield LazyKnownValue(key_value)
def py__simple_getitem__(self, index):
    """Direct lookup of *index* in the backing dict."""
    # reraise_getitem_errors presumably converts KeyError/TypeError into
    # jedi's getitem errors -- see helpers.
    with reraise_getitem_errors(KeyError, TypeError):
        entry = self._dct[index]
    return entry.infer()
@publish_method('values')
def _values(self, arguments):
    """Fake ``dict.values()``: a single tuple holding every value."""
    return ValueSet([FakeTuple(
        self.inference_state,
        [LazyKnownValues(self._dict_values())]
    )])
def _dict_values(self):
    """Union of the inferred results of every stored lazy value."""
    inferred = (entry.infer() for entry in self._dct.values())
    return ValueSet.from_sets(inferred)
def _dict_keys(self):
    """Union of the inferred key objects produced by py__iter__."""
    inferred = (entry.infer() for entry in self.py__iter__())
    return ValueSet.from_sets(inferred)
def exact_key_items(self):
    # The backing dict already maps concrete keys to lazy values.
    return self._dct.items()
557 return '<%s: %s>' % (self
.__class
__.__name
__, self
._dct
)
class MergedArray(Sequence):
    """A sequence backed by several underlying arrays, iterated in order."""

    def __init__(self, inference_state, arrays):
        super().__init__(inference_state)
        # The merged value reports the array_type of the last array.
        self.array_type = arrays[-1].array_type
        self._arrays = arrays

    def py__iter__(self, contextualized_node=None):
        """Chain the iterations of every underlying array."""
        for member in self._arrays:
            yield from member.py__iter__()

    def py__simple_getitem__(self, index):
        # The concrete index is ignored: any element could be the result.
        return ValueSet.from_sets(lazy.infer() for lazy in self.py__iter__())
574 def unpack_tuple_to_dict(context
, types
, exprlist
):
576 Unpacking tuple assignments in for statements and expr_stmts.
578 if exprlist
.type == 'name':
579 return {exprlist
.value
: types
}
580 elif exprlist
.type == 'atom' and exprlist
.children
[0] in ('(', '['):
581 return unpack_tuple_to_dict(context
, types
, exprlist
.children
[1])
582 elif exprlist
.type in ('testlist', 'testlist_comp', 'exprlist',
583 'testlist_star_expr'):
585 parts
= iter(exprlist
.children
[::2])
587 for lazy_value
in types
.iterate(ContextualizedNode(context
, exprlist
)):
591 except StopIteration:
592 analysis
.add(context
, 'value-error-too-many-values', part
,
593 message
="ValueError: too many values to unpack (expected %s)" % n
)
595 dct
.update(unpack_tuple_to_dict(context
, lazy_value
.infer(), part
))
596 has_parts
= next(parts
, None)
597 if types
and has_parts
is not None:
598 analysis
.add(context
, 'value-error-too-few-values', has_parts
,
599 message
="ValueError: need more than %s values to unpack" % n
)
601 elif exprlist
.type == 'power' or exprlist
.type == 'atom_expr':
602 # Something like ``arr[x], var = ...``.
603 # This is something that is not yet supported, would also be difficult
604 # to write into a dict.
606 elif exprlist
.type == 'star_expr': # `a, *b, c = x` type unpackings
607 # Currently we're not supporting them.
609 raise NotImplementedError
612 class Slice(LazyValueWrapper
):
613 def __init__(self
, python_context
, start
, stop
, step
):
614 self
.inference_state
= python_context
.inference_state
615 self
._context
= python_context
616 # All of them are either a Precedence or None.
621 def _get_wrapped_value(self
):
622 value
= compiled
.builtin_from_name(self
._context
.inference_state
, 'slice')
623 slice_value
, = value
.execute_with_values()
626 def get_safe_value(self
, default
=sentinel
):
628 Imitate CompiledValue.obj behavior and return a ``builtin.slice()``
635 result
= self
._context
.infer_node(element
)
637 # For simplicity, we want slices to be clear defined with just
638 # one type. Otherwise we will return an empty slice object.
642 return get_int_or_none(value
)
645 return slice(get(self
._start
), get(self
._stop
), get(self
._step
))
647 return slice(None, None, None)