diff --git a/jedi/parser/__init__.py b/jedi/parser/__init__.py
index 11a5839e..dfd9fcbb 100644
--- a/jedi/parser/__init__.py
+++ b/jedi/parser/__init__.py
@@ -109,6 +109,7 @@ class Parser(object):
         'comp_for': pt.CompFor,
         'decorator': pt.Decorator,
         'lambdef': pt.Lambda,
+        'old_lambdef': pt.Lambda,
         'lambdef_nocond': pt.Lambda,
     }
 
diff --git a/jedi/parser/grammar2.7.txt b/jedi/parser/grammar2.7.txt
index 19a29a55..59e4f8c2 100644
--- a/jedi/parser/grammar2.7.txt
+++ b/jedi/parser/grammar2.7.txt
@@ -122,13 +122,13 @@ arith_expr: term (('+'|'-') term)*
 term: factor (('*'|'/'|'%'|'//') factor)*
 factor: ('+'|'-'|'~') factor | power
 power: atom trailer* ['**' factor]
-atom: ('(' [yield_expr|testlist_gexp] ')' |
-       '[' [listmaker] ']' |
-       '{' [dictsetmaker] '}' |
+atom: ('(' [yield_expr|testlist_comp] ')' |
+       '[' [testlist_comp] ']' |
+       '{' [dictorsetmaker] '}' |
        '`' testlist1 '`' |
        NAME | NUMBER | STRING+ | '.' '.' '.')
-listmaker: (test|star_expr) ( comp_for | (',' (test|star_expr))* [','] )
-testlist_gexp: (test|star_expr) ( comp_for | (',' (test|star_expr))* [','] )
+# Modification by David Halter, remove `testlist_gexp` and `listmaker`
+testlist_comp: (test|star_expr) ( comp_for | (',' (test|star_expr))* [','] )
 lambdef: 'lambda' [varargslist] ':' test
 trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' NAME
 subscriptlist: subscript (',' subscript)* [',']
@@ -136,7 +136,9 @@ subscript: test | [test] ':' [test] [sliceop]
 sliceop: ':' [test]
 exprlist: (expr|star_expr) (',' (expr|star_expr))* [',']
 testlist: test (',' test)* [',']
-dictsetmaker: ( (test ':' test (comp_for | (',' test ':' test)* [','])) |
+# Modification by David Halter, dictsetmaker -> dictorsetmaker (so that it's
+# the same as in the 3.4 grammar).
+dictorsetmaker: ( (test ':' test (comp_for | (',' test ':' test)* [','])) |
                 (test (comp_for | (',' test)* [','])) )
 
 classdef: 'class' NAME ['(' [arglist] ')'] ':' suite
diff --git a/test/test_parser/test_parser.py b/test/test_parser/test_parser.py
index d17a7f6f..ea5e76e6 100644
--- a/test/test_parser/test_parser.py
+++ b/test/test_parser/test_parser.py
@@ -152,6 +152,6 @@ def test_error_correction_with():
         try:
             f."""
     comps = jedi.Script(source).completions()
-    assert len(comps) > 40
+    assert len(comps) > 30
     # `open` completions have a closed attribute.
     assert [1 for c in comps if c.name == 'closed']
diff --git a/test/test_parser/test_tokenize.py b/test/test_parser/test_tokenize.py
index bff7c21c..5d6abe11 100644
--- a/test/test_parser/test_tokenize.py
+++ b/test/test_parser/test_tokenize.py
@@ -27,7 +27,7 @@ asdfasdf""" + "h"
 
     def test_simple_no_whitespace(self):
         # Test a simple one line string, no preceding whitespace
-        simple_docstring = '"""simple one line docstring"""'
+        simple_docstring = u'"""simple one line docstring"""'
         simple_docstring_io = StringIO(simple_docstring)
         tokens = parser.tokenize.generate_tokens(simple_docstring_io.readline)
         token_list = list(tokens)