comparison roundup/cgi/cgitb.py @ 5436:e70fe1d1215b

Python 3 preparation: update tokenize use in cgitb.py. Note that the same tokenize interface, which changed incompatibly between Python 2 and Python 3, is also used in tools/pygettext.py. That file also needs fixing, but this patch does *not* attempt such a fix.
author Joseph Myers <jsm@polyomino.org.uk>
date Wed, 25 Jul 2018 11:40:44 +0000
parents 1ab2c81a64df
children 1a835db41674
comparison
equal deleted inserted replaced
5435:12baa5b9b597 5436:e70fe1d1215b
9 9
10 import sys, os, keyword, linecache, tokenize, inspect, cgi 10 import sys, os, keyword, linecache, tokenize, inspect, cgi
11 import pydoc, traceback 11 import pydoc, traceback
12 12
13 from roundup.cgi import templating, TranslationService 13 from roundup.cgi import templating, TranslationService
14 from roundup.anypy.strings import s2b
14 15
15 def get_translator(i18n=None): 16 def get_translator(i18n=None):
16 """Return message translation function (gettext) 17 """Return message translation function (gettext)
17 18
18 Parameters: 19 Parameters:
154 if type == tokenize.NAME and token not in keyword.kwlist: 155 if type == tokenize.NAME and token not in keyword.kwlist:
155 if token not in names: 156 if token not in names:
156 names.append(token) 157 names.append(token)
157 if type == tokenize.NEWLINE: raise IndexError 158 if type == tokenize.NEWLINE: raise IndexError
158 def linereader(file=file, lnum=[lnum]): 159 def linereader(file=file, lnum=[lnum]):
159 line = linecache.getline(file, lnum[0]) 160 line = s2b(linecache.getline(file, lnum[0]))
160 lnum[0] = lnum[0] + 1 161 lnum[0] = lnum[0] + 1
161 return line 162 return line
162 163
164 # The interface that is tokenize.tokenize in Python 3 is
165 # called tokenize.generate_tokens in Python 2. However,
166 # Python 2 has tokenize.tokenize with a different interface,
167 # and Python 3 has an undocumented generate_tokens function,
168 # also with a different interface, so a version check is
169 # needed instead of checking for which functions exist.
170 if sys.version_info[0] > 2:
171 tokenize_fn = tokenize.tokenize
172 else:
173 tokenize_fn = tokenize.generate_tokens
163 try: 174 try:
164 tokenize.tokenize(linereader, tokeneater) 175 for t in tokenize_fn(linereader):
176 tokeneater(*t)
165 except IndexError: 177 except IndexError:
166 pass 178 pass
167 lvals = [] 179 lvals = []
168 for name in names: 180 for name in names:
169 if name in frame.f_code.co_varnames: 181 if name in frame.f_code.co_varnames:

Roundup Issue Tracker: http://roundup-tracker.org/