From a91e240c8b12340223198e8e68bebbc877513e14 Mon Sep 17 00:00:00 2001
From: Dave Halter
Date: Mon, 23 Feb 2015 14:10:29 +0100
Subject: [PATCH] ALWAYS_BREAK_TOKEN -> ALWAYS_BREAK_TOKENS

---
 jedi/parser/tokenize.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/jedi/parser/tokenize.py b/jedi/parser/tokenize.py
index 53621034..707b82ba 100644
--- a/jedi/parser/tokenize.py
+++ b/jedi/parser/tokenize.py
@@ -135,8 +135,8 @@ del _compile
 
 tabsize = 8
 
-ALWAYS_BREAK_TOKEN = (';', 'import', 'from', 'class', 'def', 'try', 'except',
-                      'finally', 'while', 'return')
+ALWAYS_BREAK_TOKENS = (';', 'import', 'from', 'class', 'def', 'try', 'except',
+                       'finally', 'while', 'return')
 
 
 def source_tokens(source):
@@ -254,7 +254,7 @@ def generate_tokens(readline):
                 else:                          # ordinary string
                     yield STRING, token, spos, prefix
             elif is_identifier(initial):       # ordinary name
-                if token in ALWAYS_BREAK_TOKEN:
+                if token in ALWAYS_BREAK_TOKENS:
                     paren_level = 0
                     while True:
                         indent = indents.pop()
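
Note (for context, not part of the patch itself): as the second hunk shows, when the tokenizer reaches one of these statement-starting keywords it resets paren_level to 0 and starts popping indents, so the tuple marks points where the fault-tolerant tokenizer can recover from unclosed brackets and stale indentation. Below is a minimal sketch of that idea; the helper name and the `start` column argument are assumptions for illustration, not jedi's actual surrounding code:

    ALWAYS_BREAK_TOKENS = (';', 'import', 'from', 'class', 'def', 'try',
                           'except', 'finally', 'while', 'return')

    def recover_at_break_token(token, paren_level, indents, start):
        # Hypothetical helper, for illustration only.  If the token can only
        # begin a new statement, treat any still-open brackets as unclosed:
        # reset the depth and drop indent levels deeper than the token's
        # start column (mirroring the paren_level = 0 / indents.pop() logic
        # in the hunk above).
        if token in ALWAYS_BREAK_TOKENS:
            paren_level = 0
            while len(indents) > 1 and indents[-1] > start:
                indents.pop()
        return paren_level, indents

    # Example: a 'def' at column 0 while two indent levels and two open
    # parentheses are pending -> both are discarded.
    print(recover_at_break_token('def', 2, [0, 4, 8], 0))  # (0, [0])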