mirror of https://github.com/wxWidgets/Phoenix.git (synced 2026-01-04 19:10:09 +01:00)
ignore newline added by Tokenize module since Python 3.6 (fixes #1109)
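For context (not part of this commit): recent Python 3.6 and 3.7 releases changed the tokenize module so that it implicitly emits a NEWLINE token whenever the source it is given does not end with a newline. That is the extra token the patch below teaches getRoot() to skip. A minimal standard-library sketch of the behaviour:

import io
import tokenize

# Tokenize a partial command such as 'wx.' with the standard library.
for tok in tokenize.generate_tokens(io.StringIO('wx.').readline):
    print(tokenize.tok_name[tok[0]], repr(tok[1]))

# Older interpreters end the stream with ENDMARKER only; affected releases
# insert an empty NEWLINE token before it:
#   NAME 'wx', OP '.', NEWLINE '', ENDMARKER ''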
unittests/test_py_introspect.py  (new file, 21 lines added)
@@ -0,0 +1,21 @@
+import sys
+import unittest
+
+import wx.py.introspect as inrspct
+
+#---------------------------------------------------------------------------
+
+class py_introspect_Tests(unittest.TestCase):
+
+    def test_getAutoCompleteList(self):
+        # introspect is expecting this! usually inited by wx.py.interpreter
+        sys.ps2 = '... '
+        attributes = inrspct.getAutoCompleteList("wx.")
+        self.assertTrue(len(attributes) > 100)
+
+
+#---------------------------------------------------------------------------
+
+
+if __name__ == '__main__':
+    unittest.main()
wx/py/introspect.py
@@ -228,11 +228,12 @@ def getRoot(command, terminator=None):
     command = rtrimTerminus(command, terminator)
     if terminator == '.':
         tokens = getTokens(command)
-        if not tokens:
-            return ''
-        if tokens[-1][0] is tokenize.ENDMARKER:
+        if tokens and tokens[-1][0] is tokenize.ENDMARKER:
             # Remove the end marker.
             del tokens[-1]
+        if tokens and tokens[-1][0] is tokenize.NEWLINE:
+            # Remove newline.
+            del tokens[-1]
         if not tokens:
             return ''
         if terminator == '.' and \
@@ -258,7 +259,7 @@ def getRoot(command, terminator=None):
         tokentype = token[0]
         tokenstring = token[1]
         line = token[4]
-        if tokentype is tokenize.ENDMARKER:
+        if tokentype in (tokenize.ENDMARKER, tokenize.NEWLINE):
             continue
         if PY3 and tokentype is tokenize.ENCODING:
             line = lastline
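A sketch of what the fix restores (not from the commit; assumes an importable wxPython build): with the stray NEWLINE token ignored, getRoot() can again reduce a partial command to an eval()-able root, which getAutoCompleteList() then evaluates to collect attribute names.

import sys
import wx.py.introspect as introspect

sys.ps2 = '... '  # normally set by wx.py.interpreter, as the new test notes
print(introspect.getRoot('wx.', terminator='.'))  # expected to print 'wx'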