
Commit 42ff90d

Removed python 2 compatibility in introspect
1 parent 25a9306 commit 42ff90d

2 files changed, +8 -17 lines changed

pyPad/introspect.py

Lines changed: 7 additions & 16 deletions
@@ -8,11 +8,9 @@
 
 __author__ = "Patrick K. O'Brien <[email protected]>"
 
-import sys
+import io
 import inspect
 import tokenize
-import types
-from six import BytesIO, PY3, string_types
 
 def getAutoCompleteList(command='', locals=None, includeMagic=1,
                         includeSingle=1, includeDouble=1):
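
The dropped six names all have direct standard-library equivalents on Python 3, which is what the rest of the patch switches to. A minimal standalone sanity check (a sketch, not part of the commit):

import io

print(isinstance("abc", str))        # six.string_types collapses to just str
print(io.BytesIO(b"abc").read())     # six.BytesIO is io.BytesIO
# six.PY3 is always true on Python 3, so every "if PY3" branch can go.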
@@ -179,7 +177,7 @@ def getCallTip(command='', locals=None):
         pass
     elif inspect.isfunction(obj):
         # tip1 is a string like: "getCallTip(command='', locals=None)"
-        argspec = inspect.getargspec(obj) if not PY3 else inspect.getfullargspec(obj)
+        argspec = inspect.getfullargspec(obj)
         argspec = inspect.formatargspec(*argspec)
         if dropSelf:
             # The first parameter to a method is a reference to an
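
On Python 3, inspect.getfullargspec covers everything the removed getargspec call did, plus keyword-only arguments and annotations. A rough illustration with a hypothetical function (not from the patch):

import inspect

def sample(a, b=1, *args, c, **kwargs):
    pass

spec = inspect.getfullargspec(sample)
print(spec.args)         # ['a', 'b']
print(spec.kwonlyargs)   # ['c']
# Note: inspect.formatargspec, which the patched code still calls, is
# deprecated since Python 3.5 and was removed in Python 3.11.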
@@ -226,8 +224,6 @@ def getRoot(command, terminator=None):
     '.'. The terminator and anything after the terminator will be
     dropped."""
     command = command.split('\n')[-1]
-    #if command.startswith(sys.ps2):
-    #    command = command[len(sys.ps2):]
     command = command.lstrip()
     command = rtrimTerminus(command, terminator)
     if terminator == '.':
@@ -265,7 +261,7 @@ def getRoot(command, terminator=None):
         line = token[4]
         if tokentype in (tokenize.ENDMARKER, tokenize.NEWLINE):
             continue
-        if PY3 and tokentype is tokenize.ENCODING:
+        if tokentype is tokenize.ENCODING:
             line = lastline
             break
         if tokentype in (tokenize.NAME, tokenize.STRING, tokenize.NUMBER) \
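
Dropping the PY3 guard is safe because Python 3's tokenize.tokenize always emits an ENCODING token as its first token, so the check no longer needs to be conditional. A standalone sketch:

import io
import tokenize

toks = list(tokenize.tokenize(io.BytesIO(b"x.y").readline))
print(tokenize.tok_name[toks[0].type])   # ENCODING
print(toks[0].string)                    # utf-8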
@@ -311,27 +307,22 @@ def getTokens(command):
     """Return list of token tuples for command."""
 
     # In case the command is unicode try encoding it
-    if isinstance(command, string_types):
+    if isinstance(command, str):
         try:
             command = command.encode('utf-8')
         except UnicodeEncodeError:
             pass # otherwise leave it alone
 
-    f = BytesIO(command)
+    f = io.BytesIO(command)
     # tokens is a list of token tuples, each looking like:
     # (type, string, (srow, scol), (erow, ecol), line)
     tokens = []
     # Can't use list comprehension:
     # tokens = [token for token in tokenize.generate_tokens(f.readline)]
     # because of need to append as much as possible before TokenError.
     try:
-        if not PY3:
-            def eater(*args):
-                tokens.append(args)
-            tokenize.tokenize_loop(f.readline, eater)
-        else:
-            for t in tokenize.tokenize(f.readline):
-                tokens.append(t)
+        for t in tokenize.tokenize(f.readline):
+            tokens.append(t)
     except tokenize.TokenError:
         # This is due to a premature EOF, which we expect since we are
         # feeding in fragments of Python code.
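
With the Python 2 eater/tokenize_loop branch gone, getTokens reduces to a plain loop over tokenize.tokenize. A minimal standalone sketch of that path, assuming the command arrives as a str and may be an incomplete fragment:

import io
import tokenize

command = "func(a, b".encode('utf-8')   # deliberately incomplete fragment
tokens = []
try:
    for t in tokenize.tokenize(io.BytesIO(command).readline):
        tokens.append(t)
except tokenize.TokenError:
    pass  # premature EOF is expected; keep whatever was collected
print([tokenize.tok_name[t.type] for t in tokens])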

pyPad/pyPadClient.py

Lines changed: 1 addition & 1 deletion
@@ -9,7 +9,7 @@
 import traceback
 import textwrap
 import threading
-import introspect
+from . import introspect
 import queue
 
 stdout = sys.stdout
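
The switch to a relative import assumes pyPad is importable as a package (e.g. it has an __init__.py); running pyPadClient.py directly as a script would then no longer work. A hedged sketch of the difference:

from pyPad import pyPadClient    # works: the relative import resolves against the pyPad package
# python pyPad/pyPadClient.py    # would raise:
# ImportError: attempted relative import with no known parent package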
